From 2536dbe248009814d93b062d8c6c0910883572a0 Mon Sep 17 00:00:00 2001 From: Brad Cowie Date: Wed, 1 Dec 2021 16:20:18 +1300 Subject: [PATCH 1/2] Import networkx 2.5 source. --- .appveyor.yml | 68 - .codecov.yml | 9 - .coveragerc | 4 - .gitignore | 49 - .mailmap | 4 - .travis.yml | 122 -- CONTRIBUTING.rst | 290 +++- CONTRIBUTORS.rst | 165 --- INSTALL.rst | 76 +- LICENSE.txt | 2 +- MANIFEST.in | 6 +- PKG-INFO | 120 ++ README.md | 1 - README.rst | 44 +- debian/.git-dpm | 11 - debian/README.Debian | 25 - debian/README.source | 10 - debian/changelog | 810 ++++++++++- debian/compat | 1 - debian/control | 82 +- debian/copyright | 301 +++- debian/networkx-docs.docs | 2 - .../patches/0005-use-debian-mathjax.js.patch | 21 + ...ll-from-sphinx-gallery-requires-netw.patch | 20 + debian/patches/10_doc_relocation | 25 + ...ee687d0db533555384f92ce3670d47c45aec.patch | 58 + ...ocs-remove-expected_failing_examples.patch | 0 debian/patches/series | 6 + .../patches/use_local_object.inv_files.patch | 27 + debian/python-networkx-doc.doc-base | 20 + debian/rules | 82 +- debian/source/include-binaries | 2 + debian/source/options | 1 + debian/upstream/metadata | 5 + debian/watch | 2 +- doc/Makefile | 2 +- doc/README.md | 33 - doc/_static/custom.css | 26 + doc/_templates/dev_banner.html | 6 + doc/_templates/eol_banner.html | 6 + doc/_templates/layout.html | 10 + doc/bibliography.rst | 28 +- doc/citing.rst | 8 +- doc/conf.py | 195 ++- doc/credits.rst | 190 ++- doc/developer/code_of_conduct.rst | 1 + doc/developer/core_developer.rst | 170 +++ doc/developer/deprecations.rst | 56 + doc/developer/gitwash/configure_git.rst | 8 +- .../gitwash/development_workflow.rst | 2 +- doc/developer/gitwash/index.rst | 4 +- doc/developer/index.rst | 7 + doc/developer/nxeps/_static/nxep-0000.png | Bin 0 -> 12925 bytes doc/developer/nxeps/index.rst | 17 + doc/developer/nxeps/nxep-0000.rst | 283 ++++ doc/developer/nxeps/nxep-0001.rst | 158 +++ doc/developer/nxeps/nxep-template.rst | 90 ++ RELEASE.rst => doc/developer/release.rst | 31 +- doc/developer/roadmap.rst | 4 + doc/developer/values.rst | 54 + doc/index.rst | 10 +- doc/news.rst | 235 ++-- doc/reference/algorithms/approximation.rst | 14 +- doc/reference/algorithms/assortativity.rst | 17 +- doc/reference/algorithms/asteroidal.rst | 10 + doc/reference/algorithms/bipartite.rst | 14 + doc/reference/algorithms/bridges.rst | 1 + doc/reference/algorithms/centrality.rst | 53 +- doc/reference/algorithms/chordal.rst | 1 + doc/reference/algorithms/clique.rst | 5 +- doc/reference/algorithms/clustering.rst | 2 +- doc/reference/algorithms/coloring.rst | 1 + doc/reference/algorithms/community.rst | 24 +- doc/reference/algorithms/component.rst | 5 - doc/reference/algorithms/core.rst | 2 + doc/reference/algorithms/d_separation.rst | 9 + doc/reference/algorithms/dag.rst | 2 + doc/reference/algorithms/dispersion.rst | 12 - .../algorithms/distance_measures.rst | 3 + ...efficiency.rst => efficiency_measures.rst} | 2 +- doc/reference/algorithms/euler.rst | 4 + doc/reference/algorithms/graph_hashing.rst | 9 + doc/reference/algorithms/index.rst | 18 +- doc/reference/algorithms/isolates.rst | 1 + .../algorithms/isomorphism.ismags.rst | 23 + doc/reference/algorithms/isomorphism.rst | 17 +- doc/reference/algorithms/isomorphism.vf2.rst | 4 +- doc/reference/algorithms/link_prediction.rst | 1 + .../algorithms/lowest_common_ancestors.rst | 11 + doc/reference/algorithms/matching.rst | 1 + doc/reference/algorithms/moral.rst | 9 + .../algorithms/node_classification.rst | 16 +- 
doc/reference/algorithms/non_randomness.rst | 9 + doc/reference/algorithms/operators.rst | 2 + doc/reference/algorithms/planar_drawing.rst | 9 + doc/reference/algorithms/planarity.rst | 11 + doc/reference/algorithms/regular.rst | 11 + doc/reference/algorithms/shortest_paths.rst | 11 +- doc/reference/algorithms/similarity.rst | 14 + doc/reference/algorithms/simple_paths.rst | 1 + doc/reference/algorithms/smallworld.rst | 12 + doc/reference/algorithms/smetric.rst | 9 + doc/reference/algorithms/sparsifiers.rst | 9 + doc/reference/algorithms/threshold.rst | 10 + doc/reference/algorithms/traversal.rst | 9 + doc/reference/algorithms/tree.rst | 8 + doc/reference/algorithms/triads.rst | 5 + doc/reference/classes/digraph.rst | 3 +- doc/reference/classes/graph.rst | 3 +- doc/reference/classes/index.rst | 53 +- doc/reference/classes/multidigraph.rst | 3 +- doc/reference/classes/multigraph.rst | 3 +- doc/reference/convert.rst | 2 + doc/reference/drawing.rst | 10 +- doc/reference/functions.rst | 26 +- doc/reference/generators.rst | 73 +- doc/reference/glossary.rst | 20 +- doc/reference/index.rst | 1 + doc/reference/introduction.rst | 4 +- doc/reference/linalg.rst | 32 +- doc/reference/randomness.rst | 88 ++ doc/reference/readwrite/gexf.rst | 1 + doc/reference/readwrite/graphml.rst | 2 + doc/reference/readwrite/json_graph.rst | 2 + doc/reference/readwrite/pajek.rst | 4 +- doc/reference/utils.rst | 9 +- doc/release/api_0.99.rst | 78 +- doc/release/api_1.0.rst | 36 +- doc/release/api_1.4.rst | 2 +- doc/release/api_1.5.rst | 10 +- doc/release/api_1.6.rst | 18 +- doc/release/contribs.py | 42 +- doc/release/index.rst | 15 + .../migration_guide_from_1.x_to_2.0.rst | 36 +- doc/release/release_2.0.rst | 6 +- doc/release/release_2.2.rst | 166 +++ doc/release/release_2.3.rst | 98 ++ doc/release/release_2.4.rst | 409 ++++++ doc/release/release_2.5.rst | 493 +++++++ doc/release/release_dev.rst | 4 +- .../report_functions_without_rst_generated.py | 34 + doc/requirements.txt | 2 - doc/team.rst | 270 ++++ doc/tutorial.rst | 137 +- examples/3d_drawing/mayavi2_spring.py | 27 +- examples/README.txt | 4 +- examples/advanced/plot_heavy_metal_umlaut.py | 42 +- ....py => plot_iterated_dynamical_systems.py} | 35 +- .../advanced/plot_parallel_betweenness.py | 70 +- .../{beam_search.py => plot_beam_search.py} | 60 +- examples/algorithms/plot_blockmodel.py | 54 +- examples/algorithms/plot_davis_club.py | 13 +- examples/algorithms/plot_decomposition.py | 40 + .../algorithms/plot_krackhardt_centrality.py | 17 +- examples/algorithms/{rcm.py => plot_rcm.py} | 15 +- examples/applications/README.txt | 2 + .../{circuits.py => plot_circuits.py} | 91 +- examples/basic/plot_properties.py | 31 +- examples/basic/plot_read_write.py | 21 +- examples/drawing/plot_atlas.py | 68 +- examples/drawing/plot_chess_masters.py | 189 ++- examples/drawing/plot_circular_tree.py | 21 +- examples/drawing/plot_degree_histogram.py | 9 +- examples/drawing/plot_degree_rank.py | 10 +- examples/drawing/plot_directed.py | 23 +- examples/drawing/plot_edge_colormap.py | 13 +- examples/drawing/plot_ego_graph.py | 39 +- examples/drawing/plot_four_grids.py | 16 +- examples/drawing/plot_giant_component.py | 53 +- examples/drawing/plot_house_with_colors.py | 13 +- examples/drawing/plot_knuth_miles.py | 97 +- examples/drawing/plot_labels_and_colors.py | 57 +- examples/drawing/plot_lanl_routes.py | 71 +- examples/drawing/plot_multipartite_graph.py | 43 + examples/drawing/plot_node_colormap.py | 3 - .../drawing/plot_random_geometric_graph.py | 18 +- 
examples/drawing/plot_sampson.py | 40 +- examples/drawing/plot_simple_path.py | 1 - examples/drawing/plot_spectral_grid.py | 5 +- examples/drawing/plot_unix_email.py | 65 +- examples/drawing/plot_weighted_graph.py | 30 +- examples/graph/atlas2.py | 34 - examples/graph/dot_atlas.py | 25 + examples/graph/plot_degree_sequence.py | 16 +- examples/graph/plot_erdos_renyi.py | 23 +- ...ce.py => plot_expected_degree_sequence.py} | 15 +- examples/graph/plot_football.py | 33 +- examples/graph/plot_karate_club.py | 4 +- .../graph/plot_napoleon_russian_campaign.py | 47 +- examples/graph/plot_roget.py | 78 +- examples/graph/plot_words.py | 74 + examples/graph/words.py | 89 -- examples/javascript/force.py | 31 +- .../javascript/force/{README => README.txt} | 0 examples/javascript/force/force.css | 18 +- examples/javascript/force/force.html | 8 +- examples/javascript/force/force.js | 139 +- examples/jit/plot_rgraph.py | 6 +- ...butes.py => plot_pygraphviz_attributes.py} | 17 +- examples/pygraphviz/plot_pygraphviz_draw.py | 19 + examples/pygraphviz/plot_pygraphviz_simple.py | 23 + examples/pygraphviz/plot_write_dotfile.py | 23 + examples/pygraphviz/pygraphviz_draw.py | 29 - examples/pygraphviz/pygraphviz_simple.py | 33 - examples/pygraphviz/write_dotfile.py | 48 - examples/subclass/plot_antigraph.py | 129 +- examples/subclass/plot_printgraph.py | 65 +- networkx.egg-info/dependency_links.txt | 1 + networkx.egg-info/not-zip-safe | 1 + networkx.egg-info/top_level.txt | 1 + networkx/__init__.py | 79 +- networkx/algorithms/__init__.py | 16 +- networkx/algorithms/approximation/__init__.py | 9 +- networkx/algorithms/approximation/clique.py | 19 +- .../approximation/clustering_coefficient.py | 25 +- .../algorithms/approximation/connectivity.py | 94 +- .../approximation/dominating_set.py | 19 +- .../approximation/independent_set.py | 13 +- .../algorithms/approximation/kcomponents.py | 59 +- networkx/algorithms/approximation/matching.py | 7 +- networkx/algorithms/approximation/ramsey.py | 16 +- .../algorithms/approximation/steinertree.py | 74 +- .../approximation/tests/__init__.py | 0 .../tests/test_approx_clust_coeff.py | 24 +- .../approximation/tests/test_clique.py | 44 +- .../approximation/tests/test_connectivity.py | 129 +- .../tests/test_dominating_set.py | 14 +- .../tests/test_independent_set.py | 3 +- .../approximation/tests/test_kcomponents.py | 166 ++- .../approximation/tests/test_matching.py | 3 +- .../approximation/tests/test_ramsey.py | 23 +- .../approximation/tests/test_steinertree.py | 89 +- .../approximation/tests/test_treewidth.py | 269 ++++ .../approximation/tests/test_vertex_cover.py | 16 +- .../algorithms/approximation/treewidth.py | 249 ++++ .../algorithms/approximation/vertex_cover.py | 8 - .../algorithms/assortativity/connectivity.py | 47 +- .../algorithms/assortativity/correlation.py | 144 +- networkx/algorithms/assortativity/mixing.py | 75 +- .../assortativity/neighbor_degree.py | 54 +- networkx/algorithms/assortativity/pairs.py | 50 +- .../assortativity/tests/__init__.py | 0 .../assortativity/tests/base_test.py | 72 +- .../assortativity/tests/test_connectivity.py | 127 +- .../assortativity/tests/test_correlation.py | 55 +- .../assortativity/tests/test_mixing.py | 159 +-- .../tests/test_neighbor_degree.py | 60 +- .../assortativity/tests/test_pairs.py | 126 +- networkx/algorithms/asteroidal.py | 168 +++ networkx/algorithms/bipartite/__init__.py | 17 +- networkx/algorithms/bipartite/basic.py | 89 +- networkx/algorithms/bipartite/centrality.py | 36 +- networkx/algorithms/bipartite/cluster.py | 
100 +- networkx/algorithms/bipartite/covering.py | 12 +- networkx/algorithms/bipartite/edgelist.py | 167 ++- networkx/algorithms/bipartite/generators.py | 294 ++-- networkx/algorithms/bipartite/matching.py | 181 ++- networkx/algorithms/bipartite/matrix.py | 58 +- networkx/algorithms/bipartite/projection.py | 71 +- networkx/algorithms/bipartite/redundancy.py | 26 +- networkx/algorithms/bipartite/spectral.py | 39 +- .../algorithms/bipartite/tests/__init__.py | 0 .../algorithms/bipartite/tests/test_basic.py | 123 +- .../bipartite/tests/test_centrality.py | 249 ++-- .../bipartite/tests/test_cluster.py | 60 +- .../bipartite/tests/test_covering.py | 33 +- .../bipartite/tests/test_edgelist.py | 113 +- .../bipartite/tests/test_generators.py | 400 ++++-- .../bipartite/tests/test_matching.py | 224 ++- .../algorithms/bipartite/tests/test_matrix.py | 69 +- .../bipartite/tests/test_project.py | 418 +++--- .../bipartite/tests/test_redundancy.py | 25 +- .../tests/test_spectral_bipartivity.py | 47 +- networkx/algorithms/boundary.py | 22 +- networkx/algorithms/bridges.py | 29 +- networkx/algorithms/centrality/__init__.py | 5 + networkx/algorithms/centrality/betweenness.py | 112 +- .../centrality/betweenness_subset.py | 76 +- networkx/algorithms/centrality/closeness.py | 213 ++- .../centrality/current_flow_betweenness.py | 160 +-- .../current_flow_betweenness_subset.py | 82 +- .../centrality/current_flow_closeness.py | 48 +- networkx/algorithms/centrality/degree_alg.py | 35 +- networkx/algorithms/centrality/dispersion.py | 16 +- networkx/algorithms/centrality/eigenvector.py | 71 +- networkx/algorithms/centrality/flow_matrix.py | 40 +- networkx/algorithms/centrality/group.py | 366 +++++ networkx/algorithms/centrality/harmonic.py | 16 +- networkx/algorithms/centrality/katz.py | 75 +- networkx/algorithms/centrality/load.py | 34 +- networkx/algorithms/centrality/percolation.py | 123 ++ networkx/algorithms/centrality/reaching.py | 36 +- .../algorithms/centrality/second_order.py | 138 ++ .../algorithms/centrality/subgraph_alg.py | 144 +- .../algorithms/centrality/tests/__init__.py | 0 .../tests/test_betweenness_centrality.py | 645 ++++++--- .../test_betweenness_centrality_subset.py | 176 ++- .../tests/test_closeness_centrality.py | 334 ++++- ...est_current_flow_betweenness_centrality.py | 141 +- ...rent_flow_betweenness_centrality_subset.py | 142 +- .../tests/test_current_flow_closeness.py | 32 +- .../tests/test_degree_centrality.py | 126 +- .../centrality/tests/test_dispersion.py | 47 +- .../tests/test_eigenvector_centrality.py | 165 ++- .../algorithms/centrality/tests/test_group.py | 154 +++ .../tests/test_harmonic_centrality.py | 109 +- .../centrality/tests/test_katz_centrality.py | 307 ++-- .../centrality/tests/test_load_centrality.py | 349 +++-- .../tests/test_percolation_centrality.py | 81 ++ .../centrality/tests/test_reaching.py | 73 +- .../tests/test_second_order_centrality.py | 68 + .../centrality/tests/test_subgraph.py | 98 +- .../centrality/tests/test_trophic.py | 304 ++++ .../centrality/tests/test_voterank.py | 61 + networkx/algorithms/centrality/trophic.py | 168 +++ .../algorithms/centrality/voterank_alg.py | 75 + networkx/algorithms/chains.py | 23 +- networkx/algorithms/chordal.py | 304 ++-- networkx/algorithms/clique.py | 258 +++- networkx/algorithms/cluster.py | 257 +++- networkx/algorithms/coloring/__init__.py | 4 +- .../algorithms/coloring/equitable_coloring.py | 513 +++++++ .../algorithms/coloring/greedy_coloring.py | 111 +- .../greedy_coloring_with_interchange.py | 61 +- 
.../algorithms/coloring/tests/__init__.py | 0 .../coloring/tests/test_coloring.py | 845 ++++++++---- networkx/algorithms/communicability_alg.py | 62 +- networkx/algorithms/community/__init__.py | 5 +- .../{asyn_fluidc.py => asyn_fluid.py} | 62 +- networkx/algorithms/community/centrality.py | 23 +- .../community/community_generators.py | 409 ------ .../algorithms/community/community_utils.py | 38 +- networkx/algorithms/community/kclique.py | 23 +- .../algorithms/community/kernighan_lin.py | 205 ++- .../algorithms/community/label_propagation.py | 55 +- networkx/algorithms/community/lukes.py | 227 +++ .../algorithms/community/modularity_max.py | 89 +- networkx/algorithms/community/quality.py | 143 +- .../algorithms/community/tests/__init__.py | 0 .../community/tests/test_asyn_fluid.py | 127 ++ .../community/tests/test_asyn_fluidc.py | 120 -- .../community/tests/test_centrality.py | 46 +- .../community/tests/test_generators.py | 89 -- .../community/tests/test_kclique.py | 54 +- .../community/tests/test_kernighan_lin.py | 54 +- .../community/tests/test_label_propagation.py | 132 +- .../algorithms/community/tests/test_lukes.py | 154 +++ .../community/tests/test_modularity_max.py | 32 +- .../community/tests/test_quality.py | 51 +- .../algorithms/community/tests/test_utils.py | 24 +- networkx/algorithms/components/attracting.py | 51 +- networkx/algorithms/components/biconnected.py | 70 +- networkx/algorithms/components/connected.py | 72 +- .../algorithms/components/semiconnected.py | 37 +- .../components/strongly_connected.py | 135 +- .../algorithms/components/tests/__init__.py | 0 .../components/tests/test_attracting.py | 77 +- .../components/tests/test_biconnected.py | 206 +-- .../components/tests/test_connected.py | 93 +- .../components/tests/test_semiconnected.py | 47 +- .../tests/test_strongly_connected.py | 172 ++- .../components/tests/test_subgraph_copies.py | 87 -- .../components/tests/test_weakly_connected.py | 67 +- .../algorithms/components/weakly_connected.py | 57 +- networkx/algorithms/connectivity/__init__.py | 24 +- .../algorithms/connectivity/connectivity.py | 118 +- networkx/algorithms/connectivity/cuts.py | 135 +- .../algorithms/connectivity/disjoint_paths.py | 125 +- .../connectivity/edge_augmentation.py | 262 ++-- .../connectivity/edge_kcomponents.py | 68 +- .../algorithms/connectivity/kcomponents.py | 29 +- networkx/algorithms/connectivity/kcutsets.py | 145 +- .../algorithms/connectivity/stoerwagner.py | 63 +- .../algorithms/connectivity/tests/__init__.py | 0 .../connectivity/tests/test_connectivity.py | 297 ++-- .../connectivity/tests/test_cuts.py | 155 ++- .../connectivity/tests/test_disjoint_paths.py | 228 ++- .../tests/test_edge_augmentation.py | 284 ++-- .../tests/test_edge_kcomponents.py | 226 ++- .../connectivity/tests/test_kcomponents.py | 284 ++-- .../connectivity/tests/test_kcutsets.py | 124 +- .../connectivity/tests/test_stoer_wagner.py | 80 +- networkx/algorithms/connectivity/utils.py | 20 +- networkx/algorithms/core.py | 219 ++- networkx/algorithms/covering.py | 25 +- networkx/algorithms/cuts.py | 36 +- networkx/algorithms/cycles.py | 209 +-- networkx/algorithms/d_separation.py | 136 ++ networkx/algorithms/dag.py | 415 ++++-- networkx/algorithms/distance_measures.py | 421 ++++-- networkx/algorithms/distance_regular.py | 41 +- networkx/algorithms/dominance.py | 14 +- networkx/algorithms/dominating.py | 8 +- .../{efficiency.py => efficiency_measures.py} | 31 +- networkx/algorithms/euler.py | 228 ++- networkx/algorithms/flow/boykovkolmogorov.py | 88 +- 
networkx/algorithms/flow/capacityscaling.py | 220 +-- networkx/algorithms/flow/dinitz_alg.py | 62 +- networkx/algorithms/flow/edmondskarp.py | 67 +- networkx/algorithms/flow/gomory_hu.py | 48 +- networkx/algorithms/flow/maxflow.py | 156 ++- networkx/algorithms/flow/mincost.py | 94 +- networkx/algorithms/flow/networksimplex.py | 195 +-- networkx/algorithms/flow/preflowpush.py | 129 +- .../algorithms/flow/shortestaugmentingpath.py | 111 +- networkx/algorithms/flow/tests/__init__.py | 0 .../algorithms/flow/tests/test_gomory_hu.py | 89 +- .../algorithms/flow/tests/test_maxflow.py | 579 ++++---- .../flow/tests/test_maxflow_large_graph.py | 108 +- .../algorithms/flow/tests/test_mincost.py | 637 ++++----- networkx/algorithms/flow/utils.py | 71 +- networkx/algorithms/graph_hashing.py | 151 ++ networkx/algorithms/graphical.py | 72 +- networkx/algorithms/hierarchy.py | 15 +- networkx/algorithms/hybrid.py | 22 +- networkx/algorithms/isolate.py | 22 +- networkx/algorithms/isomorphism/__init__.py | 2 + networkx/algorithms/isomorphism/ismags.py | 1153 ++++++++++++++++ networkx/algorithms/isomorphism/isomorph.py | 48 +- .../algorithms/isomorphism/isomorphvf2.py | 610 ++++---- .../algorithms/isomorphism/matchhelpers.py | 119 +- .../isomorphism/temporalisomorphvf2.py | 91 +- .../algorithms/isomorphism/tests/__init__.py | 0 .../isomorphism/tests/test_ismags.py | 327 +++++ .../isomorphism/tests/test_isomorphism.py | 47 +- .../isomorphism/tests/test_isomorphvf2.py | 302 +++- .../isomorphism/tests/test_match_helpers.py | 66 +- .../tests/test_temporalisomorphvf2.py | 53 +- .../tests/test_tree_isomorphism.py | 289 ++++ .../isomorphism/tests/test_vf2userfunc.py | 155 +-- .../isomorphism/tree_isomorphism.py | 279 ++++ .../algorithms/isomorphism/vf2userfunc.py | 27 +- networkx/algorithms/link_analysis/hits_alg.py | 75 +- .../algorithms/link_analysis/pagerank_alg.py | 115 +- .../link_analysis/tests/__init__.py | 0 .../link_analysis/tests/test_hits.py | 88 +- .../link_analysis/tests/test_pagerank.py | 178 ++- networkx/algorithms/link_prediction.py | 240 ++-- .../algorithms/lowest_common_ancestors.py | 70 +- networkx/algorithms/matching.py | 137 +- networkx/algorithms/minors.py | 189 ++- networkx/algorithms/mis.py | 40 +- networkx/algorithms/moral.py | 47 + .../node_classification/__init__.py | 7 +- .../algorithms/node_classification/hmn.py | 51 +- .../algorithms/node_classification/lgc.py | 54 +- .../node_classification/tests/__init__.py | 0 .../tests/test_harmonic_function.py | 112 +- .../test_local_and_global_consistency.py | 102 +- .../algorithms/node_classification/utils.py | 9 +- networkx/algorithms/non_randomness.py | 82 ++ networkx/algorithms/operators/all.py | 53 +- networkx/algorithms/operators/binary.py | 137 +- networkx/algorithms/operators/product.py | 68 +- .../algorithms/operators/tests/__init__.py | 0 .../algorithms/operators/tests/test_all.py | 219 +-- .../algorithms/operators/tests/test_binary.py | 368 +++-- .../operators/tests/test_product.py | 292 ++-- .../algorithms/operators/tests/test_unary.py | 70 +- networkx/algorithms/operators/unary.py | 26 +- networkx/algorithms/planar_drawing.py | 464 +++++++ networkx/algorithms/planarity.py | 1115 +++++++++++++++ networkx/algorithms/reciprocity.py | 19 +- networkx/algorithms/regular.py | 191 +++ networkx/algorithms/richclub.py | 38 +- networkx/algorithms/shortest_paths/astar.py | 81 +- networkx/algorithms/shortest_paths/dense.py | 85 +- networkx/algorithms/shortest_paths/generic.py | 328 +++-- .../shortest_paths/tests/__init__.py | 0 
.../shortest_paths/tests/test_astar.py | 219 +-- .../shortest_paths/tests/test_dense.py | 225 ++- .../shortest_paths/tests/test_dense_numpy.py | 76 +- .../shortest_paths/tests/test_generic.py | 384 +++-- .../shortest_paths/tests/test_unweighted.py | 104 +- .../shortest_paths/tests/test_weighted.py | 956 ++++++++----- .../algorithms/shortest_paths/unweighted.py | 98 +- .../algorithms/shortest_paths/weighted.py | 568 ++++---- networkx/algorithms/similarity.py | 862 +++++++++--- networkx/algorithms/simple_paths.py | 379 +++-- networkx/algorithms/smallworld.py | 378 +++++ networkx/algorithms/smetric.py | 9 +- networkx/algorithms/sparsifiers.py | 293 ++++ networkx/algorithms/structuralholes.py | 46 +- networkx/algorithms/swap.py | 60 +- networkx/algorithms/tests/__init__.py | 0 networkx/algorithms/tests/test_asteroidal.py | 24 + networkx/algorithms/tests/test_boundary.py | 141 +- networkx/algorithms/tests/test_bridges.py | 66 +- networkx/algorithms/tests/test_chains.py | 70 +- networkx/algorithms/tests/test_chordal.py | 116 +- networkx/algorithms/tests/test_clique.py | 332 +++-- networkx/algorithms/tests/test_cluster.py | 439 ++++-- .../algorithms/tests/test_communicability.py | 113 +- networkx/algorithms/tests/test_core.py | 119 +- networkx/algorithms/tests/test_covering.py | 44 +- networkx/algorithms/tests/test_cuts.py | 68 +- networkx/algorithms/tests/test_cycles.py | 225 +-- .../algorithms/tests/test_d_separation.py | 156 +++ networkx/algorithms/tests/test_dag.py | 427 ++++-- .../tests/test_distance_measures.py | 285 +++- .../algorithms/tests/test_distance_regular.py | 53 +- networkx/algorithms/tests/test_dominance.py | 262 ++-- networkx/algorithms/tests/test_dominating.py | 34 +- networkx/algorithms/tests/test_efficiency.py | 29 +- networkx/algorithms/tests/test_euler.py | 187 ++- .../algorithms/tests/test_graph_hashing.py | 42 + networkx/algorithms/tests/test_graphical.py | 124 +- networkx/algorithms/tests/test_hierarchy.py | 29 +- networkx/algorithms/tests/test_hybrid.py | 17 +- networkx/algorithms/tests/test_isolate.py | 21 +- .../algorithms/tests/test_link_prediction.py | 597 ++++---- .../tests/test_lowest_common_ancestors.py | 253 ++-- networkx/algorithms/tests/test_matching.py | 444 ++++-- .../tests/test_max_weight_clique.py | 180 +++ networkx/algorithms/tests/test_minors.py | 187 +-- networkx/algorithms/tests/test_mis.py | 87 +- networkx/algorithms/tests/test_moral.py | 16 + .../algorithms/tests/test_non_randomness.py | 14 + .../algorithms/tests/test_planar_drawing.py | 272 ++++ networkx/algorithms/tests/test_planarity.py | 439 ++++++ networkx/algorithms/tests/test_reciprocity.py | 18 +- networkx/algorithms/tests/test_regular.py | 81 ++ networkx/algorithms/tests/test_richclub.py | 77 +- networkx/algorithms/tests/test_similarity.py | 691 ++++++++- .../algorithms/tests/test_simple_paths.py | 685 ++++++--- networkx/algorithms/tests/test_smallworld.py | 59 + networkx/algorithms/tests/test_smetric.py | 11 +- networkx/algorithms/tests/test_sparsifiers.py | 137 ++ .../algorithms/tests/test_structuralholes.py | 128 +- networkx/algorithms/tests/test_swap.py | 56 +- networkx/algorithms/tests/test_threshold.py | 262 ++-- networkx/algorithms/tests/test_tournament.py | 56 +- networkx/algorithms/tests/test_triads.py | 148 +- networkx/algorithms/tests/test_vitality.py | 23 +- networkx/algorithms/tests/test_voronoi.py | 49 +- networkx/algorithms/tests/test_wiener.py | 20 +- networkx/algorithms/threshold.py | 362 ++--- networkx/algorithms/tournament.py | 72 +- 
networkx/algorithms/traversal/__init__.py | 1 + networkx/algorithms/traversal/beamsearch.py | 15 +- .../traversal/breadth_first_search.py | 256 +++- .../traversal/depth_first_search.py | 86 +- networkx/algorithms/traversal/edgebfs.py | 175 +++ networkx/algorithms/traversal/edgedfs.py | 176 ++- .../algorithms/traversal/tests/__init__.py | 0 .../traversal/tests/test_beamsearch.py | 17 +- .../algorithms/traversal/tests/test_bfs.py | 86 +- .../algorithms/traversal/tests/test_dfs.py | 147 +- .../traversal/tests/test_edgebfs.py | 151 ++ .../traversal/tests/test_edgedfs.py | 94 +- networkx/algorithms/tree/__init__.py | 1 + networkx/algorithms/tree/branchings.py | 223 +-- networkx/algorithms/tree/coding.py | 45 +- networkx/algorithms/tree/decomposition.py | 86 ++ networkx/algorithms/tree/mst.py | 159 +-- networkx/algorithms/tree/operations.py | 33 +- networkx/algorithms/tree/recognition.py | 22 +- networkx/algorithms/tree/tests/__init__.py | 0 .../algorithms/tree/tests/test_branchings.py | 261 +++- networkx/algorithms/tree/tests/test_coding.py | 58 +- .../tree/tests/test_decomposition.py | 79 ++ networkx/algorithms/tree/tests/test_mst.py | 198 ++- .../algorithms/tree/tests/test_operations.py | 18 +- .../algorithms/tree/tests/test_recognition.py | 134 +- networkx/algorithms/triads.py | 384 ++++- networkx/algorithms/vitality.py | 16 +- networkx/algorithms/voronoi.py | 14 +- networkx/algorithms/wiener.py | 19 +- networkx/classes/coreviews.py | 235 ++-- networkx/classes/digraph.py | 497 +++---- networkx/classes/filters.py | 32 +- networkx/classes/function.py | 486 ++++--- networkx/classes/graph.py | 611 ++++---- networkx/classes/graphviews.py | 395 +++--- networkx/classes/multidigraph.py | 353 ++--- networkx/classes/multigraph.py | 369 +++-- networkx/classes/ordered.py | 67 +- networkx/classes/reportviews.py | 423 +++--- networkx/classes/tests/__init__.py | 0 networkx/classes/tests/historical_tests.py | 493 ++++--- networkx/classes/tests/test_coreviews.py | 345 +++-- networkx/classes/tests/test_digraph.py | 299 ++-- .../classes/tests/test_digraph_historical.py | 126 +- networkx/classes/tests/test_filters.py | 266 ++-- networkx/classes/tests/test_function.py | 578 +++++--- networkx/classes/tests/test_graph.py | 755 +++++----- .../classes/tests/test_graph_historical.py | 12 +- networkx/classes/tests/test_graphviews.py | 259 ++-- networkx/classes/tests/test_multidigraph.py | 414 +++--- networkx/classes/tests/test_multigraph.py | 349 ++--- networkx/classes/tests/test_ordered.py | 34 +- networkx/classes/tests/test_reportviews.py | 1229 ++++++++++------- networkx/classes/tests/test_special.py | 152 +- networkx/classes/tests/test_subgraphviews.py | 328 ++--- networkx/conftest.py | 200 +++ networkx/convert.py | 240 ++-- networkx/convert_matrix.py | 684 +++++---- networkx/drawing/layout.py | 720 +++++++--- networkx/drawing/nx_agraph.py | 225 +-- networkx/drawing/nx_pydot.py | 220 ++- networkx/drawing/nx_pylab.py | 773 ++++++----- networkx/drawing/tests/__init__.py | 0 networkx/drawing/tests/test_agraph.py | 211 ++- networkx/drawing/tests/test_layout.py | 391 ++++-- networkx/drawing/tests/test_pydot.py | 66 +- networkx/drawing/tests/test_pylab.py | 257 +++- networkx/exception.py | 47 +- networkx/generators/__init__.py | 5 + networkx/generators/atlas.py | 29 +- networkx/generators/classic.py | 280 ++-- networkx/generators/cographs.py | 66 + networkx/generators/community.py | 792 +++++++++-- networkx/generators/degree_seq.py | 253 ++-- networkx/generators/directed.py | 195 ++- networkx/generators/duplication.py | 
59 +- networkx/generators/ego.py | 27 +- networkx/generators/expanders.py | 108 +- networkx/generators/geometric.py | 283 ++-- networkx/generators/harary_graph.py | 197 +++ networkx/generators/internet_as_graphs.py | 442 ++++++ networkx/generators/intersection.py | 54 +- networkx/generators/interval_graph.py | 69 + networkx/generators/joint_degree_seq.py | 454 +++++- networkx/generators/lattice.py | 163 +-- networkx/generators/line.py | 149 +- networkx/generators/mycielski.py | 13 +- networkx/generators/nonisomorphic_trees.py | 17 +- networkx/generators/random_clustered.py | 44 +- networkx/generators/random_graphs.py | 518 ++++--- networkx/generators/small.py | 337 +++-- networkx/generators/social.py | 587 +++++--- networkx/generators/spectral_graph_forge.py | 179 +++ networkx/generators/stochastic.py | 14 +- networkx/generators/sudoku.py | 128 ++ networkx/generators/tests/__init__.py | 0 networkx/generators/tests/test_atlas.py | 41 +- networkx/generators/tests/test_classic.py | 492 ++++--- networkx/generators/tests/test_cographs.py | 20 + networkx/generators/tests/test_community.py | 287 ++-- networkx/generators/tests/test_degree_seq.py | 140 +- networkx/generators/tests/test_directed.py | 75 +- networkx/generators/tests/test_duplication.py | 50 +- networkx/generators/tests/test_ego.py | 16 +- networkx/generators/tests/test_expanders.py | 77 +- networkx/generators/tests/test_geometric.py | 149 +- .../generators/tests/test_harary_graph.py | 135 ++ .../tests/test_internet_as_graphs.py | 189 +++ .../generators/tests/test_intersection.py | 24 +- .../generators/tests/test_interval_graph.py | 144 ++ .../generators/tests/test_joint_degree_seq.py | 116 +- networkx/generators/tests/test_lattice.py | 150 +- networkx/generators/tests/test_line.py | 148 +- networkx/generators/tests/test_mycielski.py | 40 +- .../tests/test_nonisomorphic_trees.py | 55 +- .../generators/tests/test_random_clustered.py | 29 +- .../generators/tests/test_random_graphs.py | 234 ++-- networkx/generators/tests/test_small.py | 326 ++--- .../tests/test_spectral_graph_forge.py | 48 + networkx/generators/tests/test_stochastic.py | 48 +- networkx/generators/tests/test_sudoku.py | 91 ++ networkx/generators/tests/test_trees.py | 67 +- networkx/generators/tests/test_triads.py | 19 +- networkx/generators/trees.py | 50 +- networkx/generators/triads.py | 56 +- networkx/linalg/__init__.py | 2 + networkx/linalg/algebraicconnectivity.py | 195 +-- networkx/linalg/attrmatrix.py | 177 +-- networkx/linalg/bethehessianmatrix.py | 78 ++ networkx/linalg/graphmatrix.py | 57 +- networkx/linalg/laplacianmatrix.py | 246 +++- networkx/linalg/modularitymatrix.py | 88 +- networkx/linalg/spectrum.py | 101 +- networkx/linalg/tests/__init__.py | 0 .../tests/test_algebraic_connectivity.py | 472 ++++--- networkx/linalg/tests/test_attrmatrix.py | 108 ++ networkx/linalg/tests/test_bethehessian.py | 42 + networkx/linalg/tests/test_graphmatrix.py | 411 ++++-- networkx/linalg/tests/test_laplacian.py | 291 ++-- networkx/linalg/tests/test_modularity.py | 105 +- networkx/linalg/tests/test_spectrum.py | 84 +- networkx/readwrite/adjlist.py | 157 +-- networkx/readwrite/edgelist.py | 273 ++-- networkx/readwrite/gexf.py | 831 +++++------ networkx/readwrite/gml.py | 537 +++---- networkx/readwrite/gpickle.py | 32 +- networkx/readwrite/graph6.py | 102 +- networkx/readwrite/graphml.py | 600 ++++---- networkx/readwrite/json_graph/adjacency.py | 54 +- networkx/readwrite/json_graph/cytoscape.py | 26 +- networkx/readwrite/json_graph/jit.py | 49 +- 
networkx/readwrite/json_graph/node_link.py | 95 +- .../readwrite/json_graph/tests/__init__.py | 0 .../json_graph/tests/test_adjacency.py | 45 +- .../json_graph/tests/test_cytoscape.py | 55 +- .../readwrite/json_graph/tests/test_jit.py | 37 +- .../json_graph/tests/test_node_link.py | 101 +- .../readwrite/json_graph/tests/test_tree.py | 23 +- networkx/readwrite/json_graph/tree.py | 43 +- networkx/readwrite/leda.py | 34 +- networkx/readwrite/multiline_adjlist.py | 222 ++- networkx/readwrite/nx_shp.py | 68 +- networkx/readwrite/nx_yaml.py | 62 +- networkx/readwrite/p2g.py | 26 +- networkx/readwrite/pajek.py | 145 +- networkx/readwrite/sparse6.py | 90 +- networkx/readwrite/tests/__init__.py | 0 networkx/readwrite/tests/test_adjlist.py | 187 +-- networkx/readwrite/tests/test_edgelist.py | 173 ++- networkx/readwrite/tests/test_gexf.py | 531 +++++-- networkx/readwrite/tests/test_gml.py | 454 ++++-- networkx/readwrite/tests/test_gpickle.py | 75 +- networkx/readwrite/tests/test_graph6.py | 67 +- networkx/readwrite/tests/test_graphml.py | 635 ++++++--- networkx/readwrite/tests/test_leda.py | 39 +- networkx/readwrite/tests/test_p2g.py | 35 +- networkx/readwrite/tests/test_pajek.py | 139 +- networkx/readwrite/tests/test_shp.py | 158 +-- networkx/readwrite/tests/test_sparse6.py | 112 +- networkx/readwrite/tests/test_yaml.py | 41 +- networkx/relabel.py | 133 +- networkx/release.py | 119 +- networkx/testing/__init__.py | 1 + networkx/testing/test.py | 34 + networkx/testing/tests/__init__.py | 0 networkx/testing/tests/test_utils.py | 65 +- networkx/testing/utils.py | 28 +- networkx/tests/README | 24 - networkx/tests/test.py | 45 - networkx/tests/test_all_random_functions.py | 233 ++++ networkx/tests/test_convert.py | 155 ++- networkx/tests/test_convert_numpy.py | 208 ++- networkx/tests/test_convert_pandas.py | 309 +++-- networkx/tests/test_convert_scipy.py | 176 +-- networkx/tests/test_exceptions.py | 30 +- networkx/tests/test_relabel.py | 316 +++-- networkx/utils/contextmanagers.py | 20 +- networkx/utils/decorators.py | 155 ++- networkx/utils/heaps.py | 41 +- networkx/utils/mapped_queue.py | 20 +- networkx/utils/misc.py | 253 +++- networkx/utils/random_sequence.py | 68 +- networkx/utils/rcm.py | 18 +- networkx/utils/tests/__init__.py | 0 networkx/utils/tests/test.txt | 2 +- networkx/utils/tests/test_contextmanager.py | 10 +- networkx/utils/tests/test_decorators.py | 259 +++- networkx/utils/tests/test_heaps.py | 124 +- networkx/utils/tests/test_mapped_queue.py | 62 +- networkx/utils/tests/test_misc.py | 209 +-- networkx/utils/tests/test_random_sequence.py | 36 +- networkx/utils/tests/test_rcm.py | 68 +- networkx/utils/tests/test_unionfind.py | 34 +- networkx/utils/union_find.py | 32 +- requirements/README.md | 27 - requirements/default.txt | 1 - requirements/doc.txt | 6 - requirements/extras.txt | 9 - requirements/test.txt | 4 - setup.cfg | 21 +- setup.py | 210 +-- tools/appveyor/install.ps1 | 101 -- tools/appveyor/run_with_env.cmd | 47 - tools/gitwash_dumper.py | 235 ---- tools/travis/before_install.sh | 22 - tools/travis/build_docs.sh | 12 - tools/travis/deploy-key.enc | 4 - tools/travis/deploy_docs.sh | 58 - tools/travis/linux_install.sh | 32 - tools/travis/osx_install.sh | 35 - tools/travis/script.sh | 37 - 770 files changed, 60757 insertions(+), 32560 deletions(-) delete mode 100644 .appveyor.yml delete mode 100644 .codecov.yml delete mode 100644 .coveragerc delete mode 100644 .gitignore delete mode 100644 .mailmap delete mode 100644 .travis.yml delete mode 100644 CONTRIBUTORS.rst create mode 100644 
PKG-INFO delete mode 100644 README.md delete mode 100644 debian/.git-dpm delete mode 100644 debian/README.Debian delete mode 100644 debian/README.source delete mode 100644 debian/compat delete mode 100644 debian/networkx-docs.docs create mode 100644 debian/patches/0005-use-debian-mathjax.js.patch create mode 100644 debian/patches/0006-skip-plot_football-from-sphinx-gallery-requires-netw.patch create mode 100644 debian/patches/10_doc_relocation create mode 100644 debian/patches/2bfdee687d0db533555384f92ce3670d47c45aec.patch create mode 100644 debian/patches/docs-remove-expected_failing_examples.patch create mode 100644 debian/patches/series create mode 100644 debian/patches/use_local_object.inv_files.patch create mode 100644 debian/python-networkx-doc.doc-base create mode 100644 debian/source/include-binaries create mode 100644 debian/source/options create mode 100644 debian/upstream/metadata delete mode 100644 doc/README.md create mode 100644 doc/_static/custom.css create mode 100644 doc/_templates/dev_banner.html create mode 100644 doc/_templates/eol_banner.html create mode 100644 doc/_templates/layout.html create mode 100644 doc/developer/code_of_conduct.rst create mode 100644 doc/developer/core_developer.rst create mode 100644 doc/developer/deprecations.rst create mode 100644 doc/developer/nxeps/_static/nxep-0000.png create mode 100644 doc/developer/nxeps/index.rst create mode 100644 doc/developer/nxeps/nxep-0000.rst create mode 100644 doc/developer/nxeps/nxep-0001.rst create mode 100644 doc/developer/nxeps/nxep-template.rst rename RELEASE.rst => doc/developer/release.rst (77%) create mode 100644 doc/developer/roadmap.rst create mode 100644 doc/developer/values.rst create mode 100644 doc/reference/algorithms/asteroidal.rst create mode 100644 doc/reference/algorithms/d_separation.rst delete mode 100644 doc/reference/algorithms/dispersion.rst rename doc/reference/algorithms/{efficiency.rst => efficiency_measures.rst} (70%) create mode 100644 doc/reference/algorithms/graph_hashing.rst create mode 100644 doc/reference/algorithms/isomorphism.ismags.rst create mode 100644 doc/reference/algorithms/lowest_common_ancestors.rst create mode 100644 doc/reference/algorithms/moral.rst create mode 100644 doc/reference/algorithms/non_randomness.rst create mode 100644 doc/reference/algorithms/planar_drawing.rst create mode 100644 doc/reference/algorithms/planarity.rst create mode 100644 doc/reference/algorithms/regular.rst create mode 100644 doc/reference/algorithms/similarity.rst create mode 100644 doc/reference/algorithms/smallworld.rst create mode 100644 doc/reference/algorithms/smetric.rst create mode 100644 doc/reference/algorithms/sparsifiers.rst create mode 100644 doc/reference/algorithms/threshold.rst create mode 100644 doc/reference/randomness.rst create mode 100644 doc/release/release_2.2.rst create mode 100644 doc/release/release_2.3.rst create mode 100644 doc/release/release_2.4.rst create mode 100644 doc/release/release_2.5.rst create mode 100644 doc/release/report_functions_without_rst_generated.py delete mode 100644 doc/requirements.txt create mode 100644 doc/team.rst rename examples/advanced/{iterated_dynamical_systems.py => plot_iterated_dynamical_systems.py} (90%) rename examples/algorithms/{beam_search.py => plot_beam_search.py} (69%) create mode 100644 examples/algorithms/plot_decomposition.py rename examples/algorithms/{rcm.py => plot_rcm.py} (61%) create mode 100644 examples/applications/README.txt rename examples/applications/{circuits.py => plot_circuits.py} (50%) create mode 
100644 examples/drawing/plot_multipartite_graph.py delete mode 100644 examples/graph/atlas2.py create mode 100644 examples/graph/dot_atlas.py rename examples/graph/{expected_degree_sequence.py => plot_expected_degree_sequence.py} (56%) create mode 100644 examples/graph/plot_words.py delete mode 100644 examples/graph/words.py rename examples/javascript/force/{README => README.txt} (100%) rename examples/pygraphviz/{pygraphviz_attributes.py => plot_pygraphviz_attributes.py} (67%) create mode 100644 examples/pygraphviz/plot_pygraphviz_draw.py create mode 100644 examples/pygraphviz/plot_pygraphviz_simple.py create mode 100644 examples/pygraphviz/plot_write_dotfile.py delete mode 100644 examples/pygraphviz/pygraphviz_draw.py delete mode 100644 examples/pygraphviz/pygraphviz_simple.py delete mode 100644 examples/pygraphviz/write_dotfile.py create mode 100644 networkx.egg-info/dependency_links.txt create mode 100644 networkx.egg-info/not-zip-safe create mode 100644 networkx.egg-info/top_level.txt create mode 100644 networkx/algorithms/approximation/tests/__init__.py create mode 100644 networkx/algorithms/approximation/tests/test_treewidth.py create mode 100644 networkx/algorithms/approximation/treewidth.py create mode 100644 networkx/algorithms/assortativity/tests/__init__.py create mode 100644 networkx/algorithms/asteroidal.py create mode 100644 networkx/algorithms/bipartite/tests/__init__.py create mode 100644 networkx/algorithms/centrality/group.py create mode 100644 networkx/algorithms/centrality/percolation.py create mode 100644 networkx/algorithms/centrality/second_order.py create mode 100644 networkx/algorithms/centrality/tests/__init__.py create mode 100644 networkx/algorithms/centrality/tests/test_group.py create mode 100644 networkx/algorithms/centrality/tests/test_percolation_centrality.py create mode 100644 networkx/algorithms/centrality/tests/test_second_order_centrality.py create mode 100644 networkx/algorithms/centrality/tests/test_trophic.py create mode 100644 networkx/algorithms/centrality/tests/test_voterank.py create mode 100644 networkx/algorithms/centrality/trophic.py create mode 100644 networkx/algorithms/centrality/voterank_alg.py create mode 100644 networkx/algorithms/coloring/equitable_coloring.py create mode 100644 networkx/algorithms/coloring/tests/__init__.py rename networkx/algorithms/community/{asyn_fluidc.py => asyn_fluid.py} (73%) delete mode 100644 networkx/algorithms/community/community_generators.py create mode 100644 networkx/algorithms/community/lukes.py create mode 100644 networkx/algorithms/community/tests/__init__.py create mode 100644 networkx/algorithms/community/tests/test_asyn_fluid.py delete mode 100644 networkx/algorithms/community/tests/test_asyn_fluidc.py delete mode 100644 networkx/algorithms/community/tests/test_generators.py create mode 100644 networkx/algorithms/community/tests/test_lukes.py create mode 100644 networkx/algorithms/components/tests/__init__.py delete mode 100644 networkx/algorithms/components/tests/test_subgraph_copies.py create mode 100644 networkx/algorithms/connectivity/tests/__init__.py create mode 100644 networkx/algorithms/d_separation.py rename networkx/algorithms/{efficiency.py => efficiency_measures.py} (85%) create mode 100644 networkx/algorithms/flow/tests/__init__.py create mode 100644 networkx/algorithms/graph_hashing.py create mode 100644 networkx/algorithms/isomorphism/ismags.py create mode 100644 networkx/algorithms/isomorphism/tests/__init__.py create mode 100644 
networkx/algorithms/isomorphism/tests/test_ismags.py create mode 100644 networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py create mode 100644 networkx/algorithms/isomorphism/tree_isomorphism.py create mode 100644 networkx/algorithms/link_analysis/tests/__init__.py create mode 100644 networkx/algorithms/moral.py create mode 100644 networkx/algorithms/node_classification/tests/__init__.py create mode 100644 networkx/algorithms/non_randomness.py create mode 100644 networkx/algorithms/operators/tests/__init__.py create mode 100644 networkx/algorithms/planar_drawing.py create mode 100644 networkx/algorithms/planarity.py create mode 100644 networkx/algorithms/regular.py create mode 100644 networkx/algorithms/shortest_paths/tests/__init__.py create mode 100644 networkx/algorithms/smallworld.py create mode 100644 networkx/algorithms/sparsifiers.py create mode 100644 networkx/algorithms/tests/__init__.py create mode 100644 networkx/algorithms/tests/test_asteroidal.py create mode 100644 networkx/algorithms/tests/test_d_separation.py create mode 100644 networkx/algorithms/tests/test_graph_hashing.py create mode 100644 networkx/algorithms/tests/test_max_weight_clique.py create mode 100644 networkx/algorithms/tests/test_moral.py create mode 100644 networkx/algorithms/tests/test_non_randomness.py create mode 100644 networkx/algorithms/tests/test_planar_drawing.py create mode 100644 networkx/algorithms/tests/test_planarity.py create mode 100644 networkx/algorithms/tests/test_regular.py create mode 100644 networkx/algorithms/tests/test_smallworld.py create mode 100644 networkx/algorithms/tests/test_sparsifiers.py create mode 100644 networkx/algorithms/traversal/edgebfs.py create mode 100644 networkx/algorithms/traversal/tests/__init__.py create mode 100644 networkx/algorithms/traversal/tests/test_edgebfs.py create mode 100644 networkx/algorithms/tree/decomposition.py create mode 100644 networkx/algorithms/tree/tests/__init__.py create mode 100644 networkx/algorithms/tree/tests/test_decomposition.py create mode 100644 networkx/classes/tests/__init__.py create mode 100644 networkx/conftest.py create mode 100644 networkx/drawing/tests/__init__.py create mode 100644 networkx/generators/cographs.py create mode 100644 networkx/generators/harary_graph.py create mode 100644 networkx/generators/internet_as_graphs.py create mode 100644 networkx/generators/interval_graph.py create mode 100644 networkx/generators/spectral_graph_forge.py create mode 100644 networkx/generators/sudoku.py create mode 100644 networkx/generators/tests/__init__.py create mode 100644 networkx/generators/tests/test_cographs.py create mode 100644 networkx/generators/tests/test_harary_graph.py create mode 100644 networkx/generators/tests/test_internet_as_graphs.py create mode 100644 networkx/generators/tests/test_interval_graph.py create mode 100644 networkx/generators/tests/test_spectral_graph_forge.py create mode 100644 networkx/generators/tests/test_sudoku.py create mode 100644 networkx/linalg/bethehessianmatrix.py create mode 100644 networkx/linalg/tests/__init__.py create mode 100644 networkx/linalg/tests/test_attrmatrix.py create mode 100644 networkx/linalg/tests/test_bethehessian.py create mode 100644 networkx/readwrite/json_graph/tests/__init__.py create mode 100644 networkx/readwrite/tests/__init__.py create mode 100644 networkx/testing/test.py create mode 100644 networkx/testing/tests/__init__.py delete mode 100644 networkx/tests/README delete mode 100644 networkx/tests/test.py create mode 100644 
networkx/tests/test_all_random_functions.py create mode 100644 networkx/utils/tests/__init__.py delete mode 100644 requirements/README.md delete mode 100644 requirements/default.txt delete mode 100644 requirements/doc.txt delete mode 100644 requirements/extras.txt delete mode 100644 requirements/test.txt delete mode 100755 tools/appveyor/install.ps1 delete mode 100755 tools/appveyor/run_with_env.cmd delete mode 100644 tools/gitwash_dumper.py delete mode 100755 tools/travis/before_install.sh delete mode 100755 tools/travis/build_docs.sh delete mode 100644 tools/travis/deploy-key.enc delete mode 100755 tools/travis/deploy_docs.sh delete mode 100755 tools/travis/linux_install.sh delete mode 100755 tools/travis/osx_install.sh delete mode 100755 tools/travis/script.sh diff --git a/.appveyor.yml b/.appveyor.yml deleted file mode 100644 index bbc5d66..0000000 --- a/.appveyor.yml +++ /dev/null @@ -1,68 +0,0 @@ -# AppVeyor.com is a Continuous Integration service to build and run tests under -# Windows -# https://ci.appveyor.com/project/networkx/networkx - -environment: - global: - # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the - # /E:ON and /V:ON options are not enabled in the batch script interpreter - # See: http://stackoverflow.com/a/13751649/163740 - CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\tools\\appveyor\\run_with_env.cmd" - - matrix: - - PYTHON: "C:\\Python27" - PYTHON_VERSION: "2.7.13" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python27-x64" - PYTHON_VERSION: "2.7.13" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python34" - PYTHON_VERSION: "3.4.4" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python34-x64" - PYTHON_VERSION: "3.4.4" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python35" - PYTHON_VERSION: "3.5.3" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python35-x64" - PYTHON_VERSION: "3.5.3" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python36" - PYTHON_VERSION: "3.6.1" - PYTHON_ARCH: "32" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6.1" - PYTHON_ARCH: "64" - -install: - # Install Python (from the official .msi of http://python.org) and pip when - # not already installed. - - "powershell .\\tools\\appveyor\\install.ps1" - - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - - # Check that we have the expected version and architecture for Python - - "python --version" - - "python -c \"import struct; print(struct.calcsize('P') * 8)\"" - - # Install the build and runtime dependencies of the project. - # - "%CMD_IN_ENV% pip install --timeout=60 -r requirements.txt" - - "pip install ." - - - "pip install nose" - -# Not a .NET project, we build networkx in the install step instead -build: false - -test_script: - # Change to a non-source folder to make sure we run the tests on the - # installed library. - - "pushd %TEMP%" - - "nosetests --verbosity=2 networkx" diff --git a/.codecov.yml b/.codecov.yml deleted file mode 100644 index e28a818..0000000 --- a/.codecov.yml +++ /dev/null @@ -1,9 +0,0 @@ -# Allow coverage to decrease by 0.05%. -coverage: - status: - project: - default: - threshold: 0.05% - -# Don't post a comment on pull requests. 
-comment: off diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 9d3e193..0000000 --- a/.coveragerc +++ /dev/null @@ -1,4 +0,0 @@ -[run] -branch = True -source = networkx -omit = */tests/*, *release.py diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 549df69..0000000 --- a/.gitignore +++ /dev/null @@ -1,49 +0,0 @@ -*.pyc -__pycache__ -*~ -.DS_Store -build/* -dist/* -networkx/version.py -examples/*/*.png -doc/networkx-documentation.zip -doc/networkx_reference.pdf -doc/networkx_tutorial.pdf -doc/build -doc/ghpages_build -.coverage -*.class - -# Generated while building documentation. -doc/auto_examples -doc/modules -doc/reference/generated -doc/reference/algorithms/generated -doc/reference/classes/generated -doc/reference/readwrite/generated -doc/path.to.file - -examples/advanced/edgelist.utf-8 -examples/basic/grid.edgelist - -# Generated when 'python setup_egg.py' -networkx.egg-info/ - -# Sublime Text project files -*.sublime-project -*.sublime-workspace - -# Backup files -*.bak - -# IPython Notebook Checkpoints -.ipynb_checkpoints/ - -# Vim's swap files -*.sw[op] - -#Spyder project file -.spyderproject - -# PyCharm project file -.idea diff --git a/.mailmap b/.mailmap deleted file mode 100644 index ff16fd2..0000000 --- a/.mailmap +++ /dev/null @@ -1,4 +0,0 @@ -Aric Hagberg aric -Aric Hagberg aric -Chris Ellison cellison -Dan Schult dschult diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 818f019..0000000 --- a/.travis.yml +++ /dev/null @@ -1,122 +0,0 @@ -# After changing this file, check it on: -# http://lint.travis-ci.org/ -# -sudo: false - -language: python - -cache: - directories: - - $HOME/.cache/pip - -matrix: - include: - - os: linux - python: 2.7 - env: - - OPTIONAL_DEPS=1 - - BUILD_DOCS=1 - - DEPLOY_DOCS=1 - addons: - apt: - packages: - - libgdal-dev - - graphviz - - texlive - - texlive-latex-extra - - latexmk - - os: linux - python: 2.7 - env: - - OPTIONAL_DEPS=1 - - MINIMUM_REQUIREMENTS=1 - - REPORT_COVERAGE=1 - addons: - apt: - packages: - - libgdal-dev - - graphviz - - os: linux - python: 2.7 - env: - - OPTIONAL_DEPS=1 - - PIP_FLAGS="--pre" - addons: - apt: - packages: - - libgdal-dev - - graphviz - - os: linux - python: 3.6 - env: OPTIONAL_DEPS=1 - addons: - apt: - packages: - - libgdal-dev - - graphviz - - os: linux - python: 3.6 - env: - - OPTIONAL_DEPS=1 - - MINIMUM_REQUIREMENTS=1 - addons: - apt: - packages: - - libgdal-dev - - graphviz - - os: osx - osx_image: xcode7.3 - language: generic - env: - - TRAVIS_PYTHON_VERSION=3.6.0 - - OPTIONAL_DEPS=1 - - OSX_PKG_ENV=miniconda - - os: osx - language: generic - env: TRAVIS_PYTHON_VERSION=3.6 - - python: 2.7 - - python: 3.4 - - python: 3.5 - - python: 3.6 - -before_install: - # prepare the system to install prerequisites or dependencies - - source tools/travis/before_install.sh - - uname -a - - printenv - - if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then - source tools/travis/osx_install.sh; - else - source tools/travis/linux_install.sh; - fi - -install: - # install required packages - - pip install --upgrade pip - - pip install --retries 3 ${PIP_FLAGS} -r requirements.txt - - if [[ "${OPTIONAL_DEPS}" == 1 ]]; then - pip install --retries 3 ${PIP_FLAGS} -r requirements/extras.txt; - fi - # install networkx - - printenv PWD - - pip install . 
-  # show what's installed
-  - python --version
-  - pip list
-
-script:
-  - if [[ "${BUILD_DOCS}" == 1 ]]; then
-      source tools/travis/build_docs.sh;
-    fi
-  - source tools/travis/script.sh
-
-after_success:
-  - if [[ "${REPORT_COVERAGE}" == 1 ]]; then
-      codecov;
-    fi
-  - if [[ "${BUILD_DOCS}" == 1 && "${DEPLOY_DOCS}" == 1 ]]; then
-      source tools/travis/deploy_docs.sh;
-    fi
-
-notifications:
-  email: false
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 129cc05..7b40d63 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,5 +1,10 @@
-Developer overview
-==================
+.. _contributor_guide:
+
+Contributor Guide
+=================
+
+Development Workflow
+--------------------
 
 1. If you are a first-time contributor:
 
@@ -11,14 +16,65 @@ Developer overview
 
       git clone git@github.com:your-username/networkx.git
 
-   * Add the upstream repository::
+   * Navigate to the folder networkx and add the upstream repository::
 
      git remote add upstream git@github.com:networkx/networkx.git
 
   * Now, you have remote repositories named:
 
-     - ``upstream``, which refers to the ``networkx`` repository
-     - ``origin``, which refers to your personal fork
+   - ``upstream``, which refers to the ``networkx`` repository
+   - ``origin``, which refers to your personal fork
+
+   * Next, you need to set up your build environment.
+     Here are instructions for two popular environment managers:
+
+     * ``venv`` (pip based)
+
+       ::
+
+         # Create a virtualenv named ``networkx-dev`` that lives in the directory of
+         # the same name
+         python -m venv networkx-dev
+         # Activate it
+         source networkx-dev/bin/activate
+         # Install main development and runtime dependencies of networkx
+         pip install -r <(cat requirements/{default,developer,doc,optional,test}.txt)
+         #
+         # (Optional) Install pygraphviz, pydot, and gdal packages
+         # These packages require that you have your system properly configured
+         # and what that involves differs on various systems.
+         # pip install -r requirements/extras.txt
+         #
+         # Build and install networkx from source
+         pip install -e .
+         # Test your installation
+         PYTHONPATH=. pytest networkx
+
+     * ``conda`` (Anaconda or Miniconda)
+
+       ::
+
+         # Create a conda environment named ``networkx-dev``
+         conda create --name networkx-dev
+         # Activate it
+         conda activate networkx-dev
+         # Install main development and runtime dependencies of networkx
+         conda install -c conda-forge `for i in requirements/{default,developer,doc,optional,test}.txt; do echo -n " --file $i "; done`
+         #
+         # (Optional) Install pygraphviz, pydot, and gdal packages
+         # These packages require that you have your system properly configured
+         # and what that involves differs on various systems.
+         # pip install -r requirements/extras.txt
+         #
+         # Install networkx from source
+         pip install -e . --no-deps
+         # Test your installation
+         PYTHONPATH=. pytest networkx
+
+   * Finally, we recommend you use a pre-commit hook, which runs black when
+     you type ``git commit``::
+
+       pre-commit install
 
 2. Develop your contribution:
 
@@ -35,7 +91,18 @@ Developer overview
 
    * Commit locally as you progress (``git add`` and ``git commit``)
 
-3. To submit your contribution:
+3. Test your contribution:
+
+   * Run the test suite locally (see `Testing`_ for details)::
+
+       PYTHONPATH=. pytest networkx
+
+   * Running the tests locally *before* submitting a pull request helps catch
+     problems early and reduces the load on the continuous integration
+     system.
+
+
+4. Submit your contribution:
 
    * Push your changes back to your fork on GitHub::
 
@@ -52,63 +119,104 @@
 For a more detailed discussion, read these :doc:`detailed documents
 ` on how to use Git with ``networkx``
 (``_).
 
-4. Review process:
+5. Review process:
 
-  * Reviewers (the other developers and interested community members) will
-    write inline and/or general comments on your Pull Request (PR) to help
-    you improve its implementation, documentation, and style.  Every single
-    developer working on the project has their code reviewed, and we've come
-    to see it as friendly conversation from which we all learn and the
-    overall code quality benefits.  Therefore, please don't let the review
-    discourage you from contributing: its only aim is to improve the quality
-    of project, not to criticize (we are, after all, very grateful for the
-    time you're donating!).
+   * Reviewers (the other developers and interested community members) will
+     write inline and/or general comments on your Pull Request (PR) to help
+     you improve its implementation, documentation, and style.  Every single
+     developer working on the project has their code reviewed, and we've come
+     to see it as friendly conversation from which we all learn and the
+     overall code quality benefits.  Therefore, please don't let the review
+     discourage you from contributing: its only aim is to improve the quality
+     of the project, not to criticize (we are, after all, very grateful for
+     the time you're donating!).
 
-  * To update your pull request, make your changes on your local repository
-    and commit. As soon as those changes are pushed up (to the same branch as
-    before) the pull request will update automatically.
+   * To update your pull request, make your changes on your local repository
+     and commit.  As soon as those changes are pushed up (to the same branch
+     as before) the pull request will update automatically.
 
-  * `Travis-CI `_, a continuous integration service,
-    is triggered after each Pull Request update to build the code and run unit
-    tests of your branch. The Travis tests must pass before your PR can be merged.
-    If Travis fails, you can find out why by clicking on the "failed" icon (red
-    cross) and inspecting the build and test log.
+   * `Travis-CI `_, a continuous integration service,
+     is triggered after each Pull Request update to build the code and run
+     unit tests of your branch.  The Travis tests must pass before your PR
+     can be merged.  If Travis fails, you can find out why by clicking on the
+     "failed" icon (red cross) and inspecting the build and test log.
 
-  * `AppVeyor `_, is another continuous integration
-    service, which we use.  You will also need to make sure that the AppVeyor
-    tests pass.
+   * `AppVeyor `_ is another continuous integration
+     service that we use.  You will also need to make sure that the AppVeyor
+     tests pass.
 
-.. note::
+   .. note::
+
+      If the PR closes an issue, make sure that GitHub knows to automatically
+      close the issue when the PR is merged.  For example, if the PR closes
+      issue number 1480, you could use the phrase "Fixes #1480" in the PR
+      description or commit message.
+
+6. Document changes
+
+   If your change introduces any API modifications, please update
+   ``doc/release/release_dev.rst``.
 
-   If closing a bug, also add "Fixes #1480" where 1480 is the issue number.
+   If your change introduces a deprecation, add a reminder to
+   ``doc/developer/deprecations.rst`` for the team to remove the
+   deprecated functionality in the future.
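An illustrative aside on step 6 above: a deprecation of the kind the patch
mentions is conventionally announced with a ``DeprecationWarning`` for at
least one release before the old name is removed.  The sketch below shows
only that conventional shape; ``old_name`` and ``new_name`` are invented
placeholders, not functions touched by this patch.

.. code-block:: python

   import warnings


   def new_name(G):
       return len(G)


   def old_name(G):
       """Deprecated alias kept so existing callers keep working."""
       warnings.warn(
           "old_name is deprecated; use new_name instead.",
           DeprecationWarning,
           stacklevel=2,  # attribute the warning to the caller, not this wrapper
       )
       return new_name(G)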
-Divergence between ``upstream master`` and your feature branch
---------------------------------------------------------------
+   .. note::
+
+      To reviewers: make sure the merge message has a brief description of the
+      change(s) and if the PR closes an issue add, for example, "Closes #123"
+      where 123 is the issue number.

-Never merge the main branch into yours. If GitHub indicates that the
-branch of your Pull Request can no longer be merged automatically, rebase
-onto master::

-    git checkout master
-    git pull upstream master
-    git checkout bugfix-for-issue-1480
-    git rebase master
+Divergence from ``upstream master``
+-----------------------------------

-If any conflicts occur, fix the according files and continue::
+If GitHub indicates that the branch of your Pull Request can no longer
+be merged automatically, merge the master branch into yours::

-    git add conflict-file1 conflict-file2
-    git rebase --continue
+    git fetch upstream master
+    git merge upstream/master

-However, you should only rebase your own branches and must generally not
-rebase any branch which you collaborate on with someone else.
+If any conflicts occur, they need to be fixed before continuing. See
+which files are in conflict using::

-Finally, you must push your rebased branch::
+    git status

-    git push --force origin bugfix-for-issue-1480
+This displays a message like::
+
+    Unmerged paths:
+    (use "git add ..." to mark resolution)
+
+    both modified: file_with_conflict.txt
+
+Inside the conflicted file, you'll find sections like these::
+
+    <<<<<<< HEAD
+    The way the text looks in your branch
+    =======
+    The way the text looks in the master branch
+    >>>>>>> master
+
+Choose one version of the text that should be kept, and delete the
+rest::
+
+    The way the text looks in your branch
+
+Now, add the fixed file::
+
+    git add file_with_conflict.txt
+
+Once you've fixed all merge conflicts, do::
+
+    git commit
+
+.. note::
+
+   Advanced Git users are encouraged to `rebase instead of merge
+   `__,
+   but we squash and merge most PRs either way.

-(If you are curious, here's a further discussion on the
-`dangers of rebasing `_.
-Also see this `LWN article `_.)

 Guidelines
 ----------

@@ -120,14 +228,6 @@ Guidelines
 * All changes are reviewed. Ask on the
   `mailing list `_ if you get no response to your pull request.
-
-Stylistic Guidelines
---------------------
-
-* Set up your editor to remove trailing whitespace.
-  Follow `PEP08 `_.
-  Check code with `pyflakes` / `flake8`.
-
 * Use the following import conventions::

     import numpy as np
@@ -136,25 +236,75 @@ Stylistic Guidelines
     import matplotlib.pyplot as plt
     import networkx as nx

-    cimport numpy as cnp # in Cython code
+* Use the decorator ``not_implemented_for`` in ``networkx/utils/decorators.py``
+  to designate that a function does not support a given kind of graph:
+  'directed', 'undirected', 'multigraph', or 'graph'. The first argument of
+  the decorated function should be the graph object to be checked.
+
+  .. code-block:: python
+
+    @nx.not_implemented_for('directed', 'multigraph')
+    def function_not_for_MultiDiGraph(G, others):
+        # function not for graphs that are directed *and* multigraph
+        pass
+
+    @nx.not_implemented_for('directed')
+    @nx.not_implemented_for('multigraph')
+    def function_only_for_Graph(G, others):
+        # function not for directed graphs *or* for multigraphs
+        pass
+
+
+Testing
+-------
+
+``networkx`` has an extensive test suite that ensures correct
+execution on your system.
The test suite has to pass before a pull +request can be merged, and tests should be added to cover any +modifications to the code base. +We make use of the `pytest `__ +testing framework, with tests located in the various +``networkx/submodule/tests`` folders. + +To run all tests:: + + $ PYTHONPATH=. pytest networkx + +Or the tests for a specific submodule:: + + $ PYTHONPATH=. pytest networkx/readwrite + +Or tests from a specific file:: + + $ PYTHONPATH=. pytest networkx/readwrite/tests/test_yaml.py + +Or a single test within that file:: + + $ PYTHONPATH=. pytest networkx/readwrite/tests/test_yaml.py::TestYaml::testUndirected + +Use ``--doctest-modules`` to run doctests. +For example, run all tests and all doctests using:: + + $ PYTHONPATH=. pytest --doctest-modules networkx + +Tests for a module should ideally cover all code in that module, +i.e., statement coverage should be at 100%. -Pull request codes ------------------- +To measure the test coverage, run:: -When you submit a pull request to GitHub, GitHub will ask you for a summary. If -your code is not ready to merge, but you want to get feedback, please consider -using ``WIP: experimental optimization`` or similar for the title of your pull -request. That way we will all know that it's not yet ready to merge and that -you may be interested in more fundamental comments about design. + $ PYTHONPATH=. pytest --cov=networkx networkx -When you think the pull request is ready to merge, change the title (using the -*Edit* button) to remove the ``WIP:``. +This will print a report with one line for each file in `networkx`, +detailing the test coverage:: -Developer Notes ---------------- + Name Stmts Miss Branch BrPart Cover + ---------------------------------------------------------------------------------- + networkx/__init__.py 33 2 2 1 91% + networkx/algorithms/__init__.py 114 0 0 0 100% + networkx/algorithms/approximation/__init__.py 12 0 0 0 100% + networkx/algorithms/approximation/clique.py 42 1 18 1 97% + ... -For additional information about contributing to NetworkX, please see -the `Developer Notes `_. Bugs ---- diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst deleted file mode 100644 index b38f0a8..0000000 --- a/CONTRIBUTORS.rst +++ /dev/null @@ -1,165 +0,0 @@ -Credits -======= - -NetworkX was originally written by Aric Hagberg, Dan Schult, and Pieter Swart, -and has been developed with the help of many others. Thanks to everyone who has -improved NetworkX by contributing code, bug reports (and fixes), documentation, -and input on design, features, and the future of NetworkX. - -Contributions -------------- - -This section aims to provide a list of people and projects that have -contributed to ``networkx``. It is intended to be an *inclusive* list, and -anyone who has contributed and wishes to make that contribution known is -welcome to add an entry into this file. Generally, no name should be added to -this list without the approval of the person associated with that name. - -Creating a comprehensive list of contributors can be difficult, and the list -within this file is almost certainly incomplete. Contributors include -testers, bug reporters, contributors who wish to remain anonymous, funding -sources, academic advisors, end users, and even build/integration systems (such -as `TravisCI `_). - -Do you want to make your contribution known? If you have commit access, edit -this file and add your name. 
If you do not have commit access, feel free to -open an `issue `_, submit a -`pull request `_, or get in -contact with one of the official team -`members `_. - -A supplementary (but still incomplete) list of contributors is given by the -list of names that have commits in ``networkx``'s -`git `_ repository. This can be obtained via:: - - git log --raw | grep "^Author: " | sort | uniq - -A historical, partial listing of contributors and their contributions to some -of the earlier versions of NetworkX can be found -`here `_. - - -Original Authors -^^^^^^^^^^^^^^^^ - -| Aric Hagberg -| Dan Schult -| Pieter Swart -| - - -Contributors -^^^^^^^^^^^^ - -Optionally, add your desired name and include a few relevant links. The order -is partially historical, and now, mostly arbitrary. - -- Aric Hagberg, GitHub: `hagberg `_ -- Dan Schult, GitHub: `dschult `_ -- Pieter Swart -- Katy Bold -- Hernan Rozenfeld -- Brendt Wohlberg -- Jim Bagrow -- Holly Johnsen -- Arnar Flatberg -- Chris Myers -- Joel Miller -- Keith Briggs -- Ignacio Rozada -- Phillipp Pagel -- Sverre Sundsdal -- Ross M. Richardson -- Eben Kenah -- Sasha Gutfriend -- Udi Weinsberg -- Matteo Dell'Amico -- Andrew Conway -- Raf Guns -- Salim Fadhley -- Fabrice Desclaux -- Arpad Horvath -- Minh Van Nguyen -- Willem Ligtenberg -- Loïc Séguin-C. -- Paul McGuire -- Jesus Cerquides -- Ben Edwards -- Jon Olav Vik -- Hugh Brown -- Ben Reilly -- Leo Lopes -- Jordi Torrents, GitHub: `jtorrents `_ -- Dheeraj M R -- Franck Kalala -- Simon Knight -- Conrad Lee -- Sérgio Nery Simões -- Robert King -- Nick Mancuso -- Brian Cloteaux -- Alejandro Weinstein -- Dustin Smith -- Mathieu Larose -- Vincent Gauthier -- chebee7i, GitHub: `chebee7i `_ -- Jeffrey Finkelstein -- Jean-Gabriel Young, Github: `jg-you `_ -- Andrey Paramonov, http://aparamon.msk.ru -- Mridul Seth, GitHub: `MridulS `_ -- Thodoris Sotiropoulos, GitHub: `theosotr `_ -- Konstantinos Karakatsanis, GitHub: `k-karakatsanis `_ -- Ryan Nelson, GitHub: `rnelsonchem `_ -- Niels van Adrichem, GitHub: `NvanAdrichem `_ -- Michael E. Rose, GitHub: `Michael-E-Rose `_ -- Jarrod Millman, GitHub: `jarrodmillman `_ -- Andre Weltsch -- Lewis Robbins -- Mads Jensen, Github: `atombrella `_ -- Edward L. Platt, `elplatt `_ - -Support -------- - -``networkx`` and those who have contributed to ``networkx`` have received -support throughout the years from a variety of sources. We list them below. -If you have provided support to ``networkx`` and a support acknowledgment does -not appear below, please help us remedy the situation, and similarly, please -let us know if you'd like something modified or corrected. - -Research Groups -^^^^^^^^^^^^^^^ - -``networkx`` acknowledges support from the following: - -- `Center for Nonlinear Studies `_, Los Alamos National - Laboratory, PI: Aric Hagberg - -- `Open Source Programs Office `_, - Google - -- `Complexity Sciences Center `_, Department of - Physics, University of California-Davis, PI: James P. Crutchfield - -- `Center for Complexity and Collective Computation `_, - Wisconsin Institute for Discovery, University of Wisconsin-Madison, - PIs: Jessica C. Flack and David C. Krakauer - -Funding -^^^^^^^ - -``networkx`` acknowledges support from the following: - -- Google Summer of Code via Python Software Foundation - -- U.S. Army Research Office grant W911NF-12-1-0288 - -- DARPA Physical Intelligence Subcontract No. 9060-000709 - -- NSF Grant No. PHY-0748828 - -- John Templeton Foundation through a grant to the Santa Fe Institute to - study complexity - -- U.S. 
Army Research Laboratory and the U.S. Army Research Office under
-  contract number W911NF-13-1-0340
diff --git a/INSTALL.rst b/INSTALL.rst
index 697e96c..ed9e8c5 100644
--- a/INSTALL.rst
+++ b/INSTALL.rst
@@ -1,22 +1,11 @@
 Install
 =======

-NetworkX requires Python 2.7, 3.4, 3.5, or 3.6. If you do not already
+NetworkX requires Python 3.6, 3.7, or 3.8. If you do not already
 have a Python environment configured on your computer, please see the
 instructions for installing the full `scientific Python stack
 `_.

-.. note::
-   If you are on Windows and want to install optional packages (e.g., `scipy`),
-   then you will need to install a Python distribution such as
-   `Anaconda `_,
-   `Enthought Canopy `_,
-   `Python(x,y) `_,
-   `WinPython `_, or
-   `Pyzo `_.
-   If you use one of these Python distribution, please refer to their online
-   documentation.
-
 Below we assume you have the default Python environment already configured on
 your computer and you intend to install ``networkx`` inside of it. If you want
 to create and work with Python virtual environments, please follow instructions
@@ -80,24 +69,26 @@ Optional packages
 -----------------

 .. note::
-   Some optional packages (e.g., `scipy`, `gdal`) may require compiling
+   Some optional packages (e.g., `gdal`) may require compiling
    C or C++ code. If you have difficulty installing these packages
    with `pip`, please review the instructions for installing the full
    `scientific Python stack `_.

-The following optional packages provide additional functionality.
+The following optional packages provide additional functionality. See the
+files in the ``requirements/`` directory for information about specific
+version requirements.

-- `NumPy `_ (>= 1.12.0) provides matrix representation of
-  graphs and is used in some graph algorithms for high-performance matrix
-  computations.
-- `SciPy `_ (>= 0.19.0) provides sparse matrix representation
+- `NumPy `_ provides array-based dense
+  matrix representations of graphs and high-performance array math and linear
+  algebra, which are used in some graph algorithms.
+- `SciPy `_ provides sparse matrix representation
   of graphs and many numerical scientific tools.
-- `pandas `_ (>= 0.20.0) provides a DataFrame, which
+- `pandas `_ provides a DataFrame, which
   is a tabular data structure with labeled axes.
-- `Matplotlib `_ (>= 2.0.2) provides flexible drawing of
+- `Matplotlib `_ provides flexible drawing of
   graphs.
 - `PyGraphviz `_ and
-  `pydot `_ (>= 1.2.3) provide graph drawing
+  `pydot `_ provide graph drawing
   and graph layout algorithms via `GraphViz `_.
 - `PyYAML `_ provides YAML format reading and writing.
 - `gdal `_ provides shapefile format reading and writing.
@@ -118,20 +109,23 @@ Testing
 -------

-NetworkX uses the Python ``nose`` testing package. If you don't already have
-that package installed, follow the directions on the `nose homepage
-`_.
+NetworkX uses the Python ``pytest`` testing package. You can learn more
+about pytest on their `homepage `_.
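+
+As a minimal illustration of the pytest style (a hypothetical example, not
+a test taken from the NetworkX suite), a test is a plain function whose
+name starts with ``test_`` and whose assertions pytest verifies::
+
+    import networkx as nx
+
+    def test_path_graph_size():
+        # A path graph on 4 nodes has exactly 3 edges.
+        G = nx.path_graph(4)
+        assert G.number_of_nodes() == 4
+        assert G.number_of_edges() == 3
+
+Placed in a file whose name starts with ``test_``, such a function is
+collected and run automatically by the commands shown below.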
Test a source distribution ^^^^^^^^^^^^^^^^^^^^^^^^^^ You can test the complete package from the unpacked source directory with:: - nosetests networkx -v + pytest networkx Test an installed package ^^^^^^^^^^^^^^^^^^^^^^^^^ +From a shell command prompt you can test the installed package with:: + + pytest --pyargs networkx + If you have a file-based (not a Python egg) installation you can test the installed package with:: @@ -142,34 +136,4 @@ or:: python -c "import networkx as nx; nx.test()" -Testing for developers -^^^^^^^^^^^^^^^^^^^^^^ - -You can test any or all of NetworkX by using the ``nosetests`` test runner. - -First make sure the NetworkX version you want to test is in your ``PYTHONPATH`` -(either installed or pointing to your unpacked source directory). - -Then you can run individual test files with:: - - nosetests path/to/file - -or all tests found in dir and an directories contained in dir:: - - nosetests path/to/dir - -By default nosetests does not test docutils style tests in -Python modules but you can turn that on with:: - - nosetests --with-doctest - -For doctests in stand-alone files NetworkX uses the extension ``txt`` so -you can add:: - - nosetests --with-doctest --doctest-extension=txt - -to also execute those tests. - -These options are on by default if you run nosetests from the root of the -NetworkX distribution since they are specified in the ``setup.cfg`` file found -there. +.. autofunction:: networkx.test diff --git a/LICENSE.txt b/LICENSE.txt index d91b650..9b895b5 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -5,7 +5,7 @@ NetworkX is distributed with the 3-clause BSD license. :: - Copyright (C) 2004-2018, NetworkX Developers + Copyright (C) 2004-2020, NetworkX Developers Aric Hagberg Dan Schult Pieter Swart diff --git a/MANIFEST.in b/MANIFEST.in index af1ce39..34d558c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,14 +1,12 @@ include MANIFEST.in include setup.py include CONTRIBUTING.rst -include CONTRIBUTORS.rst include INSTALL.rst include LICENSE.txt include README.rst -include RELEASE.rst -recursive-include examples *.py *.edgelist *.mbox *.gz *.bz2 *.zip -recursive-include doc *.py *.rst Makefile *.html *.png *.txt *.css *.inc +recursive-include examples *.txt *.py *.edgelist *.mbox *.gz *.bz2 *.zip *.html *.js *.css +recursive-include doc *.py *.rst Makefile *.html *.png *.txt *.css *.inc *.js include networkx/tests/*.py include networkx/*/tests/*.txt diff --git a/PKG-INFO b/PKG-INFO new file mode 100644 index 0000000..13587d7 --- /dev/null +++ b/PKG-INFO @@ -0,0 +1,120 @@ +Metadata-Version: 2.1 +Name: networkx +Version: 2.5 +Summary: Python package for creating and manipulating graphs and networks +Home-page: http://networkx.github.io/ +Author: Aric Hagberg +Author-email: hagberg@lanl.gov +Maintainer: NetworkX Developers +Maintainer-email: networkx-discuss@googlegroups.com +License: UNKNOWN +Project-URL: Bug Tracker, https://github.com/networkx/networkx/issues +Project-URL: Documentation, https://networkx.github.io/documentation/stable/ +Project-URL: Source Code, https://github.com/networkx/networkx +Description: NetworkX + ======== + + .. image:: https://img.shields.io/pypi/v/networkx.svg + :target: https://pypi.org/project/networkx/ + + .. image:: https://img.shields.io/pypi/pyversions/networkx.svg + :target: https://pypi.org/project/networkx/ + + .. image:: https://travis-ci.org/networkx/networkx.svg?branch=master + :target: https://travis-ci.org/networkx/networkx + + .. 
image:: https://ci.appveyor.com/api/projects/status/github/networkx/networkx?branch=master&svg=true + :target: https://ci.appveyor.com/project/dschult/networkx-pqott + + .. image:: https://codecov.io/gh/networkx/networkx/branch/master/graph/badge.svg + :target: https://codecov.io/gh/networkx/networkx + + NetworkX is a Python package for the creation, manipulation, + and study of the structure, dynamics, and functions + of complex networks. + + - **Website (including documentation):** https://networkx.github.io + - **Mailing list:** https://groups.google.com/forum/#!forum/networkx-discuss + - **Source:** https://github.com/networkx/networkx + - **Bug reports:** https://github.com/networkx/networkx/issues + + Simple example + -------------- + + Find the shortest path between two nodes in an undirected graph: + + .. code:: python + + >>> import networkx as nx + >>> G = nx.Graph() + >>> G.add_edge('A', 'B', weight=4) + >>> G.add_edge('B', 'D', weight=2) + >>> G.add_edge('A', 'C', weight=3) + >>> G.add_edge('C', 'D', weight=4) + >>> nx.shortest_path(G, 'A', 'D', weight='weight') + ['A', 'B', 'D'] + + Install + ------- + + Install the latest version of NetworkX:: + + $ pip install networkx + + Install with all optional dependencies:: + + $ pip install networkx[all] + + For additional details, please see `INSTALL.rst`. + + Bugs + ---- + + Please report any bugs that you find `here `_. + Or, even better, fork the repository on `GitHub `_ + and create a pull request (PR). We welcome all changes, big or small, and we + will help you make the PR if you are new to `git` (just ask on the issue and/or + see `CONTRIBUTING.rst`). + + License + ------- + + Released under the 3-Clause BSD license (see `LICENSE.txt`):: + + Copyright (C) 2004-2020 NetworkX Developers + Aric Hagberg + Dan Schult + Pieter Swart + +Keywords: Networks,Graph Theory,Mathematics,network,graph,discrete mathematics,math +Platform: Linux +Platform: Mac OSX +Platform: Windows +Platform: Unix +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Scientific/Engineering :: Bio-Informatics +Classifier: Topic :: Scientific/Engineering :: Information Analysis +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Physics +Requires-Python: >=3.6 +Provides-Extra: all +Provides-Extra: gdal +Provides-Extra: lxml +Provides-Extra: matplotlib +Provides-Extra: pytest +Provides-Extra: numpy +Provides-Extra: pandas +Provides-Extra: pydot +Provides-Extra: pygraphviz +Provides-Extra: pyyaml +Provides-Extra: scipy diff --git a/README.md b/README.md deleted file mode 100644 index 4aafd96..0000000 --- a/README.md +++ /dev/null @@ -1 +0,0 @@ -# python3-networkx \ No newline at end of file diff --git a/README.rst b/README.rst index 508daf9..94c765d 100644 --- a/README.rst +++ b/README.rst @@ -1,41 +1,36 @@ NetworkX ======== +.. image:: https://img.shields.io/pypi/v/networkx.svg + :target: https://pypi.org/project/networkx/ + +.. 
image:: https://img.shields.io/pypi/pyversions/networkx.svg + :target: https://pypi.org/project/networkx/ + .. image:: https://travis-ci.org/networkx/networkx.svg?branch=master :target: https://travis-ci.org/networkx/networkx -.. image:: https://ci.appveyor.com/api/projects/status/github/dschult/networkx?branch=master&svg=true +.. image:: https://ci.appveyor.com/api/projects/status/github/networkx/networkx?branch=master&svg=true :target: https://ci.appveyor.com/project/dschult/networkx-pqott .. image:: https://codecov.io/gh/networkx/networkx/branch/master/graph/badge.svg - :target: https://codecov.io/gh/networkx/networkx + :target: https://codecov.io/gh/networkx/networkx NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. -- **Website (including documentation):** http://networkx.github.io +- **Website (including documentation):** https://networkx.github.io - **Mailing list:** https://groups.google.com/forum/#!forum/networkx-discuss - **Source:** https://github.com/networkx/networkx - **Bug reports:** https://github.com/networkx/networkx/issues -Install -------- - -Install the latest version of NetworkX:: - - $ pip install networkx - -Install with all optional dependencies:: - - $ pip install networkx[all] - -For additional details, please see `INSTALL.rst`. - Simple example -------------- -Find the shortest path between two nodes in an undirected graph:: +Find the shortest path between two nodes in an undirected graph: + +.. code:: python >>> import networkx as nx >>> G = nx.Graph() @@ -46,6 +41,19 @@ Find the shortest path between two nodes in an undirected graph:: >>> nx.shortest_path(G, 'A', 'D', weight='weight') ['A', 'B', 'D'] +Install +------- + +Install the latest version of NetworkX:: + + $ pip install networkx + +Install with all optional dependencies:: + + $ pip install networkx[all] + +For additional details, please see `INSTALL.rst`. + Bugs ---- @@ -60,7 +68,7 @@ License Released under the 3-Clause BSD license (see `LICENSE.txt`):: - Copyright (C) 2004-2018 NetworkX Developers + Copyright (C) 2004-2020 NetworkX Developers Aric Hagberg Dan Schult Pieter Swart diff --git a/debian/.git-dpm b/debian/.git-dpm deleted file mode 100644 index 3ebda61..0000000 --- a/debian/.git-dpm +++ /dev/null @@ -1,11 +0,0 @@ -# see git-dpm(1) from git-dpm package -dab8178345ed3c93414f040ad1d51c9f3bc4790a -dab8178345ed3c93414f040ad1d51c9f3bc4790a -d17c01666c785cb70a3a0ad9af7b6ce0fbc6e143 -d17c01666c785cb70a3a0ad9af7b6ce0fbc6e143 -python-networkx_1.11.orig.tar.gz -ac24380b13dfe92633370ad2091c0c04b6d098a2 -1315737 -debianTag="debian/%e%v" -patchedTag="patched/%e%v" -upstreamTag="upstream/%e%u" diff --git a/debian/README.Debian b/debian/README.Debian deleted file mode 100644 index 9103012..0000000 --- a/debian/README.Debian +++ /dev/null @@ -1,25 +0,0 @@ -python-networkx for Debian --------------------------- - -As stated in the changelog, I put in Recommends: all the packages needed -to get the networkx/tests/test.py script run OK. 
- -To run the testsuite: - cd /usr/share/python-support/python-networkx/networkx/tests - python test.py - -Another way is to use a python interpreter and execute: - import networkx.tests - networkx.tests.run() - -To list the recommended packages: - LANG=C apt-cache show python-networkx | grep ^Recommends: - -Some examples won't be functional: they depend on some files that aren't -shipped by upstream because modifications are not allowed, but can be -found easily by using a web search engine: - miles.dat.gz - roget.dat.gz - words.dat.gz - - -- Cyril Brulebois Sat, 28 Jul 2007 13:23:14 +0200 diff --git a/debian/README.source b/debian/README.source deleted file mode 100644 index decde0b..0000000 --- a/debian/README.source +++ /dev/null @@ -1,10 +0,0 @@ -networkx for Debian ------------------- - - - - - - -- Jayden Hewer Wed, 21 Feb 2018 16:14:15 +1300 - diff --git a/debian/changelog b/debian/changelog index 9719a5b..b6edef1 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,5 +1,809 @@ -networkx (2.1-1) unstable; urgency=medium +networkx (2.5+ds-2) unstable; urgency=medium - * Initial release + [ Ondřej Nový ] + * d/control: Update Vcs-* fields with new Debian Python Team Salsa + layout. - -- Jayden Hewer Wed, 21 Feb 2018 16:14:15 +1300 + [ Sandro Tosi ] + * Undo changes made in -1 that were not OK + * debian/copyright + - update upstream copyright years + - extend packaging copyright years + * debian/patches/2bfdee687d0db533555384f92ce3670d47c45aec.patch + - fix a failure in botch tests; Closes: #969756 + + [ Debian Janitor ] + * Trim trailing whitespace. + * Use secure URI in Homepage field. + * Set upstream metadata fields: Bug-Database, Bug-Submit, Repository, + Repository-Browse. + + -- Sandro Tosi Mon, 28 Sep 2020 23:31:14 -0400 + +networkx (2.5+ds-1) unstable; urgency=medium + + * Team upload. + * New upstream version + * Rules-Requires-Root: no + * debhelper-compat 13 (routine-update) + * Override of dh_missing which gets confused over installation of + /usr/lib/python3.8 to /usr/lib/python3 + which I feel should be fixed. + * Remove files from archive that are changed during build + + -- Steffen Moeller Thu, 27 Aug 2020 14:06:33 +0200 + +networkx (2.4-4) unstable; urgency=medium + + * debian/control + - adjust Breaks items as suggested by Adrian Bunk; Closes: #947852 + - wrap-and-sort + - bump Standards-Version to 4.5.0 (no changes needed) + - bump compat to 12 + * debian/patches/* + - refresh patches + * debian/patches/docs-remove-expected_failing_examples.patch + - remove the sphinx_gallery expected_failing_examples directive, since + plot_parallel_betweenness.py works just fine; Closes: #963415 + + -- Sandro Tosi Sun, 26 Jul 2020 23:10:54 -0400 + +networkx (2.4-3) unstable; urgency=medium + + * debian/control + - add Breaks on current buster versions of androguard and hyperkitty, as + they require updates for the new networkx; Closes: #947852 + + -- Sandro Tosi Tue, 31 Dec 2019 19:19:41 -0500 + +networkx (2.4-2) unstable; urgency=medium + + * upload to unstable + + -- Sandro Tosi Sun, 10 Nov 2019 16:54:24 -0500 + +networkx (2.4-1) experimental; urgency=medium + + [ Sandro Tosi ] + * New upstream release + * debian/rules + - install examples + - dont operate on networkx/tests/test.py, no longer present + * debian/copyright + - update copyright file; Closes: #941474 + * Use pytest instead of nose + + [ Ondřej Nový ] + * Bump Standards-Version to 4.4.1. + * d/control: Fix wrong Vcs-*. 
+ + -- Sandro Tosi Sat, 09 Nov 2019 23:55:38 -0500 + +networkx (2.3-1) experimental; urgency=medium + + [ Ondřej Nový ] + * Use debhelper-compat instead of debian/compat. + + [ Sandro Tosi ] + * New upstream release; Closes: #940551 + * Rename source package, tracking py3k-only releases + * debian/copyright + - extend packaging copyright years + - updated copyright to new upstream code + * Only build py3k packages + * debian/rules + - it's a pure-python module, so use purelib to get the build location; this + fixes doc building + - use the built module to run tests, and set the right switch for verbosity + - dont parse debian/changelog + remove other outdated variables + - install the right doc files + remove broken pdf symlinks; Closes: #924210 + * debian/control + - have scricted versioned deps on numpy and matplotlib + - bump Standards-Version to 4.4.0 (no changes needed) + - add matplotlib, pydot, pygrpahviz to Recommends; Closes: #932682 + - replace b-d on texlive-generic-extra with texlive-plain-generic; + Addresses: #933581 + * Use objects.inv from python3/numpy -doc packages + * debian/README.Debian + - remove, outdated + + -- Sandro Tosi Sun, 22 Sep 2019 00:48:34 -0400 + +python-networkx (2.2-1) unstable; urgency=medium + + [ Ondřej Nový ] + * Convert git repository from git-dpm to gbp layout + + [ Gianfranco Costamagna ] + * debian/control + - Move python*-pkg-resources to real dependencies, needed to run the + python-cov testsuite (drawing/nx_pydot.py subpackage); Closes: #902540 + + [ Sandro Tosi ] + * New upstream release + * debian/patch + - refresh, remove patch merged upstream + * debian/control + - b-d on decorator >= 4.3.0 + - bump Standards-Version to 4.2.1 (no changes needed) + + -- Sandro Tosi Fri, 23 Nov 2018 17:21:28 -0500 + +python-networkx (2.1-1) unstable; urgency=medium + + [ Sandro Tosi ] + * New upstream release; Closes: #888458 + - references to lib2to3 have been removed upstream; Closes: #893697 + * debian/control + - revert useless changes performed without any kind of coordination, + preserving only the bump to Standard-Version: 3.9.8, which was not + metioned in the changelog (and not formally released) + - add gdal to b-d and d + - add latexmk to b-d, needed to build doc and fix a FTBFS with sphinx 1.6; + Closes: #872241 + - bump Standards-Version to 4.1.4 (no changes needed) + - add ipykernel, nb2plots, nbformat, nbconvert, sphinx-gallery, traitlets + to b-d-i, needed to build doc + * debian/copyright + - extend packaging copyright years + - update upstream copyright years + * refresh patches + * debian/patches/0003-README.txt-to-examples-needed-by-sphinx-while-buildi.patch + - add the missing README.txt to the examples directory + * debian/rules + - delete files removals line no longer needed + - update location for upstream changelog + * debian/patches/0004-add-copybutton.js.patch + - add required copybutton.js file + * debian/python.org_objects.inv,debian/scipy.org_numpy_objects.inv + - update intersphinx doc + * debian/patches/0005-use-debian-mathjax.js.patch + - use MathJax.js as provided by libjs-mathjax in Debian + * debian/python-networkx-doc.doc-base + - update doc-base to refer to index.html as doc entry point + * debian/patches/0006-skip-plot_football-from-sphinx-gallery-requires-netw.patch + - plot_football.py requires network access, so skip it + + [ Ondřej Nový ] + * d/control: Set Vcs-* to salsa.debian.org + * d/copyright: Use https protocol in Format field + * d/watch: Use https protocol + * d/changelog: Remove trailing whitespaces + + -- 
Sandro Tosi Thu, 07 Jun 2018 18:18:28 -0400 + +python-networkx (1.11-2) UNRELEASED; urgency=medium + + * Team upload + + [ Ondřej Nový ] + * Fixed VCS URL (https) + + [ Andreas Tille ] + * pngmath conflicts with some other shinx tool - drop it + Closes: #832855 + * Add missing Build-Depends + * debhelper 9 + * cme fix dpkg-control + * fix another shebang + + -- Andreas Tille Sat, 06 Aug 2016 17:50:20 +0200 + +python-networkx (1.11-1) unstable; urgency=medium + + * New upstream release + + -- Sandro Tosi Sun, 31 Jan 2016 11:16:00 +0000 + +python-networkx (1.11~rc2-1) unstable; urgency=medium + + * New upstream release candidate + * debian/copyright + - update packaging copyright years + + -- Sandro Tosi Thu, 28 Jan 2016 19:52:06 +0000 + +python-networkx (1.11~rc1-1) unstable; urgency=medium + + [ Thomas Goirand ] + * Removed the exclude and add python-pydot as build-depends-indep. + * Added python3-matplotlib, python3-numpy, python3-pygraphviz as + build-depends-indep as well. + + [ Sandro Tosi ] + * New upstream release candidate; Closes: #801880 + * debian/control + - suggest the -doc pkg in the module pkgs; Closes: #807126 + * debian/copyright + - extend packaging copyright years + - drop BSD-style, not used + - move the general copyright notice on top, helps with lintian + * debian/README.source + - removed, outdated + * debian/*.inv + - update intersphinx mapping files + * debian/rules + - dont fail if there are errors in the tests + + -- Sandro Tosi Sun, 10 Jan 2016 02:26:44 +0000 + +python-networkx (1.10-1) experimental; urgency=medium + + [ Sandro Tosi ] + * New upstream release (Closes: #800431). + + [ Thomas Goirand ] + * Move to Build-Depends: what was in Build-Depends-Indep: but which is needed + for the clean target. + * Dropped version of packages for those already provided in Jessie. + * Removed now useless python-3.4.patch patch from Chuck Short. + * Removed do-not-use-sphinx_rtd_theme.patch patch. Build-Depends on the + python-sphinx-rtd-theme package instead. + * Exclude tests which are doing internet access and failing. + * Do not rename non-existing README file. + + -- Thomas Goirand Wed, 02 Sep 2015 13:17:15 +0000 + +python-networkx (1.9.1-1) experimental; urgency=medium + + * New upstream release + * debian/watch + - use PyPI redirector + * debian/rules + - remove the get-orig-src crap + * debian/copyright + - extend packaging copyright years + - drop unused paragraph + * debian/control + - update Homepage field + - bump Standards-Version to 3.9.6 (no changes needed) + - set me as Maintainer, team as Uploaders + * debian/patches/using-local-mathjax.js.patch + - updated to new upstream code + + -- Sandro Tosi Sat, 11 Apr 2015 17:38:17 +0100 + +python-networkx (1.9+dfsg1-1) unstable; urgency=medium + + * Team upload. + * New upstream release (Closes: #750233). + * Ran wrap-and-sort. + * debian/copyright in parseable format 1.0. + * Added extend-diff-ignore = "^[^/]*[.]egg-info/" in d/source/options, and + removed the clean of the egg-info folder. + * Refreshed/rebased 10_doc_relocation, 20_example_dirs_remove and + 30_use_local_objects.inv patches. + * Removed 50_force_ordering_in_dict_to_numpy_array_functions.patch + 55_fixups_to_numpy_array_functions_patch.patch + 60_fix_tests_failure_due_to_py3.3_hash_randomization.patch applied + upstream. + * debian/control: removed python-support and added dh-python as build-dep. + * Added python-3.4.patch (comes from the Ubuntu package). 
+ * Removed 40_no_setuptools_in_requires.txt (there's no setup_egg.py anymore + in upstream source code). + * Also cleans networkx/version.py, which is generated. + * Also cleans test.*, doc/source/templates/gallery.html and fontList.cache. + * Added do-not-use-sphinx_rtd_theme.patch. + * debian/rules: do not use setup_egg.py, it's gone from upstream source. + * Move all the doc generation stuff into the override_dh_sphinxdoc. + * Removed obsolete XS-Python-Version: >= 2.6 X-Python3-Version: >= 3.2. + * Adds using-local-mathjax.js.patch to avoid privacy breach in the doc. + * Added missing --with python2. + * Added debian/rules get-orig-src target to generate the orig.tar.xz file. + * Added missing build-depends: python-decorator and python-numpydoc (needed + for building docs). + * Fixed watch file to handle the +dfsg1 mangling. + + -- Thomas Goirand Sun, 22 Jun 2014 07:11:42 +0000 + +python-networkx (1.8.1-1) unstable; urgency=low + + [ Jakub Wilk ] + * Use canonical URIs for Vcs-* fields. + + [ Sandro Tosi ] + * New upstream release + * debian/copyright + - update upstream copyright years + - extend packaging copyright years + - added decorator copyright notice + * debian/patches/* + - refreshed patches + * debian/patch/{50, 55, 60}* + - backport patches + * debian/control + - bump Standards-Version to 3.9.5 (no changes needed) + + -- Sandro Tosi Sat, 18 Jan 2014 12:26:47 +0100 + +python-networkx (1.7-2) experimental; urgency=low + + * debian/*_objects.inv + - updated intersphinx mapping files + * debian/rules + - reorganize build and test targets + * debian/{control, rules} + - provide Python 3 package; Closes: #597422 + + -- Sandro Tosi Tue, 28 Aug 2012 18:04:48 +0200 + +python-networkx (1.7-1) experimental; urgency=low + + * New upstream release + * debian/patches/changeset_* + - removed, included upstream + + -- Sandro Tosi Sat, 11 Aug 2012 12:41:30 +0200 + +python-networkx (1.7~rc1-3) unstable; urgency=low + + * debian/patches/changeset_9ebe087b8bbcdeee3051e07cacd05fa07436c16e.diff + - Preserver order in G.nodes(), fixing a FTBFS in nipype; thanks to Yaroslav + Halchenko for the report and analysis; Closes: #684107 + + -- Sandro Tosi Tue, 07 Aug 2012 22:10:21 +0200 + +python-networkx (1.7~rc1-2) unstable; urgency=low + + * debian/patches/changeset_8960521b5ae4897bdbac4ff49525d8b37bff88d2.diff + - Handle empty graph (all zero matrix) and null graph (raise exception) in + to_scipy_sparse_matrix; thanks to Yaroslav Halchenko for the report; + Closes: #678036 + + -- Sandro Tosi Tue, 19 Jun 2012 21:49:24 +0200 + +python-networkx (1.7~rc1-1) unstable; urgency=low + + * New upstream release candidate + * debian/watch + - recognize rc releases + * debian/copyright + - extend upstream and packaging copyright years + - add Nicholas Mancuso's copyrighted files + * debian/control + - add python-numpy, python-scipy to b-d-i to enable additional tests + * debian/patches/* + - refreshed patches + * debian/patches/50_nosetests_verbosity_0 + - removed, merged upstream + * debian/patches/changeset_fed4cb6e78dc7047c06522b0418ef5b0d8197290.diff + - fix an error in unittests + + -- Sandro Tosi Thu, 14 Jun 2012 23:46:38 +0200 + +python-networkx (1.6-3) unstable; urgency=low + + * debian/rules + - remove leftover from build process, so the package can be built twice in a + row; thanks to Jakub Wilk for the report; Closes: #671193 + + -- Sandro Tosi Sat, 12 May 2012 13:10:30 +0200 + +python-networkx (1.6-2) unstable; urgency=low + + * debian/rules + - set MPLCONFIGDIR to current directory when building 
the doc, fixing a + FTBFS; thanks to Lucas Nussbaum for the report; Closes: #665038 + * debian/control + - bump Standards-Version to 3.9.3 (no changes needed) + + -- Sandro Tosi Tue, 03 Apr 2012 20:00:02 +0200 + +python-networkx (1.6-1) unstable; urgency=low + + * New upstream release + * debian/patches/10_doc_relocation + - refreshed for new upstream code + * debian/patches/40_astar_unique_shortest_path.diff + - removed, merged upstream + * debian/patches/40_no_setuptools_in_requires.txt + - don't add setuptools to requires.txt; Closes: #639995 + * debian/patches/50_nosetests_verbosity_0 + - set verbosity=0 else tests can't be run + * debian/{control, rules} + - use dh_sphinxdoc + + -- Sandro Tosi Mon, 26 Dec 2011 16:58:36 +0100 + +python-networkx (1.5-2) unstable; urgency=low + + * debian/patches/40_astar_unique_shortest_path.diff + - make A* shortest paths unique; Closes: #634083 + + -- Sandro Tosi Sun, 17 Jul 2011 13:53:32 +0200 + +python-networkx (1.5-1) unstable; urgency=low + + * New upstream release + * debian/control + - added dvipng to b-d-i, needed for math images + + -- Sandro Tosi Thu, 09 Jun 2011 18:13:44 +0200 + +python-networkx (1.4-2) unstable; urgency=low + + * debian/patches/10_doc_relocation + - write a description for this patch + * debian/control + - bump Standards-Version to 3.9.2 (no changes needed) + - don't use 'we' in long description + * debian/rules + - install upstream changelog + + -- Sandro Tosi Sun, 24 Apr 2011 17:48:53 +0200 + +python-networkx (1.4-1) experimental; urgency=low + + * New upstream release; thanks to Yaroslav Halchenko for the report; + Closes: #617677 + * debian/rules + - don't compress objects.inv; thanks to Michael Fladischer for the report; + Closes: #608780 + * debian/watch + - updated to point to PyPi + * debian/control + - bump python-sphinx versioned b-d-i to 1.0.1 minimum + - added python-pygraphviz to b-d-i, needed for doc building + * debian/copyright + - bump upstream and packaging copyright years + * debian/patches/{40_add_networkxcss, 50_boundary-test-fix.patch + 60_remove_svn_refs.diff 70_set_matplotlib_ps_backend.patch} + - removed since merged upstream + * debian/patches/{10_doc_relocation, 20_example_dirs_remove, + 30_use_local_objects.inv} + - refreshed/adapted to new upstream code + + -- Sandro Tosi Sat, 19 Mar 2011 12:19:16 +0100 + +python-networkx (1.3-1) experimental; urgency=low + + * New upstream release + * debian/patches/changeset_r1745.diff + - dropped, available in upstream release + * debian/patches/10_doc_relocation + - refreshed patch for new upstream code + * debian/control + - upstream code is now compatible with 2.6 or later only + - bump Standards-Version to 3.9.1 (no changes needed) + * debian/{control, rules} + - run unittests at build time, b-d on python-nose added + * debian/copyright + - removed reference to /usr/share/common-licenses/BSD + * Create a -doc package ; thanks to Yaroslav Halchenko for the report; + Closes: #567369 + - (d/control) define a new binary package, and add depends on sphinx (>= 1) + - (d/rules) build documentation, install it into the new -doc package + - (d/patches/30_use_local_objects.inv) use local copy of remote objects.inv + * debian/{control, rules} + - moved to dh7 and "reduced" rules file + * debian/rules + - refer to built code when building doc + * debian/python-networkx-doc.doc-base + - added doc-base information + * debian/patches/40_add_networkxcss + - added as patch, since networkx.css is missing from the tarball, but needed + to display properly HTML documentation 
+ * debian/patches/50_boundary-test-fix.patch + - upstream patch to restrict node boundary test cases to valid range + * debian/patches/60_remove_svn_refs.diff + - upstream patch to remove references to old SVN repository (now Mercurial) + * debian/patches/70_set_matplotlib_ps_backend.patch + - set matplotlib backend to 'PS', so a DISPLAY it's not required and the + tests can be run in a "reduced" environment + + -- Sandro Tosi Fri, 10 Dec 2010 23:50:27 +0100 + +python-networkx (1.1-2) unstable; urgency=low + + * debian/{control, source/format, rules} + - switch to '3.0 (quilt)' source format + * debian/rules + - use '-f' option of rm instead of the make-way of ignoring error + * debian/rules + - remove 2 doc files not needed (they are actually broken symlinks); thanks + to Ian Zimmerman for the report; Closes: #580839 + * debian/patches/changeset_r1745.diff + - replace string exceptions, removed from python >= 2.6; Closes: #585307 + * debian/{control, pyversions} + - removed pyversions, now using XS-P-V + + -- Sandro Tosi Wed, 23 Jun 2010 18:12:28 +0200 + +python-networkx (1.1-1) unstable; urgency=low + + * New upstream release + * debian/copyright + - added Drew Conway to the list of copyright holders + * debian/patches/20_example_dirs_remove + - refreshed for new upstream code + * debian/control + - bump versioned python-support b-d to at least version 1 + * debian/rules + - install ReST source for documentation, since it's not currently buildable + + -- Sandro Tosi Mon, 26 Apr 2010 20:52:16 +0200 + +python-networkx (1.0.1-2) unstable; urgency=low + + * debian/rules + - use '--install-layout=deb' when installing and '*-packages' when referring + to installation path, to make the package be Python2.6-ready; thanks to + Jakub Wilk for the report; Closes: #571508 + + -- Sandro Tosi Fri, 26 Feb 2010 23:53:35 +0100 + +python-networkx (1.0.1-1) unstable; urgency=low + + * New upstream release; thanks to Yaroslav Halchenko for the report; + Closes: #565319 + * debian/control + - take maintainership back under DPMT umbrella; thanks to Cyril Brulebois + for his work + - adjust Vcs-{Svn, Browser} to point to DPMT location + - bump Standards-Version to 3.8.4 + + added debian/README.source + - replace b-d-i on python-all-dev with python only + - use HTTP (and not HTTPS) for Homepage field + - rephrased short description; thanks to Rogério Brito for the report; + Closes: #557895 + * debian/pyversions + - minimum version set to 2.5 + * debian/copyright + - updated upstream copyright authors and license information + - update copyright notice for packaging + * debian/watch + - updated to report numerical (with dots) releases + * debian/patches/20_fix_broken_svn_keyboards + - removed, fixed upstream + * debian/patches/20_example_dirs_remove + - don't created empty dirs for examples no more present + + -- Sandro Tosi Fri, 26 Feb 2010 00:20:57 +0100 + +python-networkx (1.0~rc1~svn1492-2) experimental; urgency=low + + * Work around both svn's and python's broken behaviours: exporting from + the former is locale-dependant, and the latter can't cope with Unicode + out of the box (Closes: #557743): + + debian/patches/20_fix_broken_svn_keyboards. + * Self-reminder: don't touch svn ever again. + + -- Cyril Brulebois Tue, 24 Nov 2009 12:26:36 +0100 + +python-networkx (1.0~rc1~svn1492-1) experimental; urgency=low + + * New upstream snapshot, past 1.0~rc1, as requested by Yaroslav + Halchenko (Closes: #549996). + * Refresh patch accordingly: + + debian/patches/10_doc_relocation. 
+ * Get rid of extra LICENSE.txt file in /usr/share/doc. + * Use dh_compress -Xexamples/ to avoid compressing examples, thanks to + Sandro Tosi (Closes: #539942). + * Bump Standards-Version from 3.8.0 to 3.8.3 (no changes needed). + + -- Cyril Brulebois Mon, 23 Nov 2009 15:44:34 +0100 + +python-networkx (0.99-2) unstable; urgency=low + + * Upload to unstable now that lenny is released (yay). + * Fix FTBFS with python-support 0.90.3: no longer rely on its internal + behaviour, and xnow set tests/test.py executable right after “setup.py + install” (Closes: #517065). + * Drop executable bits from bz2 files. + * Update Vcs-* fields: move from DPMT's svn to collab-maint's git. + * Remote DPMT from Uploaders, following Piotr Ożarowski's request. + + -- Cyril Brulebois Sat, 28 Feb 2009 13:36:24 +0100 + +python-networkx (0.99-1) experimental; urgency=low + + [ Sandro Tosi ] + * debian/control: + + Switch Vcs-Browser field to viewsvn. + + [ Cyril Brulebois ] + * New upstream release. + * Refresh patch accordingly: + + debian/patches/10_doc_relocation. + * debian/rules: + + Update uneeded file list, only INSTALL.txt left. + + Update README renaming, using a new $(DEBIANDOC_DIR) variable. + + Stop removing a directory, no longer created. + * debian/docs: + + Deleted, README taken care of through debian/rules. + * debian/control: + + Bump Standards-Version from 3.7.3 to 3.8.0 (no changes needed). + + Update my mail address. + * debian/copyright: + + Ditto. + + -- Cyril Brulebois Fri, 28 Nov 2008 07:42:43 +0100 + +python-networkx (0.36-2) unstable; urgency=low + + * debian/control: + + Replace python-setuptools with python-pkg-resources in the + “Recommends:” since pkg_resources is now available in this + separate package, thanks Matthias Klose (Closes: #468721). + * debian/copyright: + + Use “© $years $name” instead of invalid “$name, $years” and + “(C) $years, $name”, thanks to lintian. + + -- Cyril Brulebois Sun, 02 Mar 2008 01:06:32 +0100 + +python-networkx (0.36-1) unstable; urgency=low + + [ Sandro Tosi ] + * debian/control: + + Uniformize Vcs-Browser field across the DPMT packages. + + [ Cyril Brulebois ] + * New upstream release. + * debian/copyright: + + Update copyright years. + + Update the list of authors. + * Don't repack anymore, upstream no longer ships the examples that + weren't DFSG-compliant. + * debian/README.Debian: + + Update accordingly. + * debian/watch: + + Remove version mangling, no longer needed. + * debian/rules: + + Delete the networkx.egg-info/SOURCES.txt pristine hack. + + Only remove the whole networkx.egg-info directory in the + “clean” target since it gets generated during the build, now. + + -- Cyril Brulebois Mon, 14 Jan 2008 20:33:16 +0100 + +python-networkx (0.35.1.dfsg-2) unstable; urgency=low + + [ Piotr Ożarowski ] + * Add Homepage field, delete it from the long description. + * Rename XS-Vcs-* fields to Vcs-*. + + [ Cyril Brulebois ] + * Add version mangling to debian/watch, stripping the “.dfsg” part of + the Debian version. + * debian/rules cleanup: Delete trailing whitespaces, delete unused + DEB_UPSTREAM_VERSION (which became wrong anyway, due to the “.dfsg” + part of the version). + * Bump Standards-Version from 3.7.2 to 3.7.3, no change needed. + * Move the build instructions from “binary-arch” to “binary-indep”, + per lintian. + * Move the section from “graphics” to “python”, per lintian. + * Create a pristine copy of the networkx.egg-info/SOURCES.txt file during + the “install” target, which gets restored during the “clean” one. 
+ + -- Cyril Brulebois Thu, 20 Dec 2007 02:39:51 +0100 + +python-networkx (0.35.1.dfsg-1) unstable; urgency=low + + * New upstream release. + * Repack since some examples are not DFSG-free: freely distributable but no + modifications are allowed. + * debian/rules: + + Removed two new unneeded files: Download.txt and Install.txt. + + Removed the find/rm for the .cvsignore files, now deleted upstream. + * debian/control: + + Moved python-all-dev from B-D to B-D-I. + + Added XS-Vcs-Svn and XS-Vcs-Browser fields. + * debian/copyright: + + Updated copyright years. + + Added Eben Kennah as contributor. + * debian/watch: + + Updated, with the addition networkx/ directory. + + -- Cyril Brulebois Sat, 28 Jul 2007 13:23:14 +0200 + +python-networkx (0.34-2) unstable; urgency=low + + * Added a versioned B-D on python-suppport (>= 0.6.4) since #422436 is + fixed in this version, and on python-all-dev (>= 2.3.5-11), according + to the Python Policy. + * Dropped the manual egg renaming, which isn't needed anymore, and even + fails with the new python-support (Closes: #423981). + * Turned setup.py usage into setup_egg.py usage. 20_egg_support patch + dropped accordingly. + * Added a TODO file so as not to forget TODO items for next uploads. + * Added a find/rm to remove some .cvsignore files, now installed after + the switch to setup_egg.py. + * Adjusted README.Debian and Recommends:, since python-yaml is now packaged, + and added instructions to run the testsuite from a python interpreter. + + -- Cyril Brulebois Tue, 15 May 2007 10:59:02 +0200 + +python-networkx (0.34-1) unstable; urgency=low + + * New upstream release. + * Many thanks to Piotr Ożarowski for his careful review and his numerous + suggestions for this upload. + * Examples might not work out of the box due to the compression of the data + files, but adding support for compressed data files has been requested + upstream (#104), so that the data files don't have to be shipped + uncompressed. Should be working in the next version. + * Added the Debian Python Modules Team in the Uploaders field. + * Updated rules file to delete an empty directory (data/, under the doc/ + directory). + * Adjusted the folding in the long description for the homepage. + * Added missing B-D on setuptools, needed for the egg installation. It is + versioned so that one is ensured that egg support is present (e.g. when + backporting). + * Added missing Recommends on python-matplotlib. + * Removed unneeded items in the rules file (e.g. CFLAGS, dh_strip, etc.) as + well as ${python:Provides} and ${shlibs:Depends} in the control file. + * Removed the lintian override and added a chmod call to +x the test script + after the dh_pysupport call. + * Dropped unneeded build-stamp, configure, configure-stamp targets in the + rules file. + * Added *.pyc cleanup in the clean target. During a normal build it + shouldn't be needed, but might be useful when playing around with the + package. + + -- Cyril Brulebois Mon, 23 Apr 2007 13:30:39 +0200 + +python-networkx (0.33-1) unstable; urgency=low + + * New upstream release. 
+ * New features: + - draw edges with specified colormap + - more efficient version of Floyd's algorithm for all pairs shortest path + - include tests in source package (networkx/tests) + - include documentation in source package (doc) + * Bug fixes: + - refactored large modules into smaller code files + - degree(nbunch) now returns degrees in same order as nbunch + - degree() now works for multiedges=True + - update node_boundary and edge_boundary for efficiency + - edited documentation for graph classes, now mostly in info.py + * Removed python-numeric from Recommends: (obsolete, no longer supported). + * Added python-setuptools to Recommends: so that one can easily use the + following to run the testsuite. + import networkx + networkx.test() + * Changed watch file from sourceforge to upstream's homepage download + section. + * Changed the install rule so that there's no .pyc compilation at all, since + they are created during the postinst (and thus were deleted after the + ``setup.py install'' call). + * Switched from dpatch to quilt to ease patch management. + + + Thanks to Oleksandr Moskalenko and Piotr Ożarowski for the time they spent + on advising me about the following items. + * Added a B-D on python-all-dev. + * Added egg support: + - added 20_egg_support to import setup from setuptools in setup.py; + - added the --single-version-externally-managed option to the + "setup.py install" call; + - tweaked debian/rules to strip out the version info from the egg + info dir. + * Cleaned *.txt documentation files: license-related files are no longer + installed, superseded by debian/copyright, and Readme.txt is installed as + README. + + -- Cyril Brulebois Fri, 06 Apr 2007 19:10:42 +0200 + +python-networkx (0.32-2) unstable; urgency=low + + * Added a Recommends: on python-pygraphviz | python-pydot, thanks to Seo + Sanghyeon for the notice about pydot (Closes: #401657). + * Fixed a typo in the debian/watch file. + + -- Cyril Brulebois Wed, 6 Dec 2006 14:07:37 +0100 + +python-networkx (0.32-1) unstable; urgency=low + + * Initial release (Closes: #323398) + * Added an override for the tests/test.py file which is not executable due + to python-support (apparently). Interested users should be able to run + this script by prepending its name with the interpreter. + * Added ``2.4-'' into debian/pyversions since the installation fails when + byte-compiling for python2.3. Upstream will be reported that problem since + that release is said to be for python >= 2.3. + * Added Recommends: on all packages needed for the tests/test.py to run + almost OK. There's currently no package providing `yaml' so that having + ``import yaml'' in a python script is OK. + * Added a note about the previous point in the README.Debian file. + * Added Suggests: on all optional packages suggested by upstream. 
+ + -- Cyril Brulebois Wed, 25 Oct 2006 17:52:26 +0200 diff --git a/debian/compat b/debian/compat deleted file mode 100644 index ec63514..0000000 --- a/debian/compat +++ /dev/null @@ -1 +0,0 @@ -9 diff --git a/debian/control b/debian/control index 124578c..dc395f8 100644 --- a/debian/control +++ b/debian/control @@ -1,29 +1,63 @@ Source: networkx +Maintainer: Sandro Tosi +Uploaders: Debian Python Modules Team , Section: python Priority: optional -Maintainer: Jayden Hewer -Build-Depends: debhelper (>=9), +Build-Depends: debhelper-compat (= 13), dh-python, - python-all (>= 2.6.6-3~), - python-setuptools, + latexmk, python3-all, + python3-doc, python3-setuptools, - python-nose (>=0.10.1), - python3-nose (>=0.10.1), - python-decorator (>=4.1.0), - python3-decorator (>=4.1.0) -Standards-Version: 3.9.6 + python3-sphinx, +Build-Depends-Indep: dvipng, + python-numpy-doc, + python3-decorator (>= 4.3.0), + python3-gdal, + python3-ipykernel, + python3-matplotlib (>= 3.0.2), + python3-nb2plots, + python3-nbconvert, + python3-nbformat, + python3-numpy, + python3-numpydoc, + python3-pydot, + python3-pygraphviz, + python3-pytest, + python3-scipy, + python3-sphinx-gallery, + python3-sphinx-rtd-theme, + python3-traitlets, + texlive-binaries, + texlive-fonts-recommended, + texlive-latex-base, + texlive-latex-extra, + texlive-latex-recommended, + texlive-plain-generic, + zip, +Standards-Version: 4.5.0 +Vcs-Browser: https://salsa.debian.org/python-team/packages/networkx +Vcs-Git: https://salsa.debian.org/python-team/packages/networkx.git Homepage: https://networkx.github.io/ -X-Python-Version: >= 2.7 -X-Python3-Version: >= 3.4 -#Vcs-Git: git://anonscm.debian.org/collab-maint/networkx.git -#Vcs-Browser: https://anonscm.debian.org/cgit/collab-maint/networkx.git +Rules-Requires-Root: no -Package: python-networkx +Package: python3-networkx Architecture: all -Depends: python3-decorator (>=4.1.0), ${python:Depends}, ${misc:Depends} -Suggests: python-networkx-doc -Description: tool to create, manipulate and study complex networks (Python2) +Depends: python3-pkg-resources, + ${misc:Depends}, + ${python3:Depends}, +Recommends: python3-gdal, + python3-matplotlib, + python3-numpy, + python3-pydot, + python3-pygraphviz, + python3-scipy, + python3-yaml, +Suggests: python-networkx-doc, +Breaks: androguard (<< 3.3.5-2~), + python3-django-hyperkitty (<< 1.3.0-1.1~), + python3-skimage (<< 0.16.2-1~), +Description: tool to create, manipulate and study complex networks (Python3) NetworkX is a Python-based package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. . @@ -36,13 +70,15 @@ Description: tool to create, manipulate and study complex networks (Python2) The potential audience for NetworkX includes: mathematicians, physicists, biologists, computer scientists, social scientists. . - This package contains the Python 2 version of NetworkX. + This package contains the Python 3 version of NetworkX. -Package: python3-networkx +Package: python-networkx-doc Architecture: all -Depends: python3-decorator (>=4.1.0), ${python3:Depends}, ${misc:Depends} -Suggests: python-networkx-doc -Description: tool to create, manipulate and study complex networks (Python3) +Section: doc +Depends: libjs-mathjax, + ${misc:Depends}, + ${sphinxdoc:Depends}, +Description: tool to create, manipulate and study complex networks - documentation NetworkX is a Python-based package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. . 
@@ -55,4 +91,4 @@ Description: tool to create, manipulate and study complex networks (Python3) The potential audience for NetworkX includes: mathematicians, physicists, biologists, computer scientists, social scientists. . - This package contains the Python 3 version of NetworkX. + This package contains documentation for NetworkX. diff --git a/debian/copyright b/debian/copyright index 1d0bf27..1c03767 100644 --- a/debian/copyright +++ b/debian/copyright @@ -1,39 +1,266 @@ Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: networkx -Source: https://networkx.github.io/ - -NetworkX is distributed with the 3-clause BSD license. - - Copyright (C) 2004-2018, NetworkX Developers - Aric Hagberg - Dan Schult - Pieter Swart - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of the NetworkX Developers nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Upstream-Contact: Aric Hagberg +Source: git://github.com/networkx/networkx.git + +Files: * +Copyright: (c) 2004-2020, NetworkX Developers + (c) Aric Hagberg + (c) Dan Schult + (c) Pieter Swart + With additional Authors having left their copyrights to the Upstream Authors + and Copyright Holders listed just above: + A. L. Barabási and R. Albert + A. Steger and N. Wormald + Brendt Wohlberg + C. Gkantsidis, M. Mihail, and E. Zegura + Coen Bron and Joep Kerbosch + Fan Chung and L. Lu + Jean-Loup Guillaume and Matthieu Latapy + Jeong Han Kim and Van H. Vu + Katy Bold + Lun Li, David Alderson, Reiko Tanaka, John C. Doyle, and Walter Willinger + M. E. J. Newman + P. Holme and B. J. Kim + P. L. Krapivsky and S. 
Redner
+           Robert Sedgewick
+           Russel Merris
+           Eben Kenah
+           Sasha Gutfraind
+           Drew Conway
+License: BSD-3-clause
+
+Files: debian/*
+Copyright: (c) 2006-2009, Cyril Brulebois
+           (c) 2010-2020, Sandro Tosi
+           (c) 2014, Thomas Goirand
+License: GPL-3
+
+Files: networkx/algorithms/approximation/clustering_coefficient.py
+Copyright: Copyright (C) 2013 by Fred Morstatter, Jordi Torrents
+License: BSD-3-clause
+
+Files: networkx/algorithms/approximation/clique.py
+       networkx/algorithms/matching.py
+       networkx/algorithms/approximation/dominating_set.py
+       networkx/algorithms/approximation/independent_set.py
+       networkx/algorithms/approximation/matching.py
+       networkx/algorithms/approximation/ramsey.py
+       networkx/algorithms/approximation/vertex_cover.py
+Copyright: (c) 2011-2019 Nicholas Mancuso
+License: BSD-3-clause
+
+Files: networkx/algorithms/approximation/tests/test_treewidth.py
+Copyright: Copyright (C) 2018 by Rudolf-Andreas Floren, Dominik Meier
+License: BSD-3-clause
+
+Files: networkx/algorithms/approximation/connectivity.py
+       networkx/algorithms/approximation/kcomponents.py
+Copyright: Copyright (C) 2015 by Jordi Torrents
+License: BSD-3-clause
+
+Files: networkx/algorithms/assortativity/connectivity.py
+       networkx/algorithms/assortativity/neighbor_degree.py
+       networkx/algorithms/bipartite/centrality.py
+       networkx/algorithms/bipartite/cluster.py
+       networkx/algorithms/bipartite/redundancy.py
+Copyright: Copyright (C) 2011 by Jordi Torrents, Aric Hagberg
+License: BSD-3-clause
+
+Files: networkx/algorithms/bipartite/tests/test_redundancy.py
+Copyright: Copyright 2015 Jeffrey Finkelstein
+License: BSD-3-clause
+
+Files: networkx/algorithms/bipartite/matching.py
+       networkx/algorithms/bipartite/tests/test_matching.py
+Copyright: Copyright 2015 Jeffrey Finkelstein,
+           Copyright 2019 Søren Fuglede Jørgensen
+License: BSD-3-clause
+
+Files: networkx/algorithms/bipartite/covering.py
+       networkx/algorithms/bipartite/tests/test_covering.py
+       networkx/algorithms/covering.py
+       networkx/algorithms/tests/test_covering.py
+Copyright: Copyright 2016-2019 NetworkX developers.
+           Copyright (C) 2016 by Nishant Nikhil
+License: BSD-3-clause
+
+Files: networkx/algorithms/centrality/harmonic.py
+Copyright: Copyright (C) 2015 by Alessandro Luongo
+License: BSD-3-clause
+
+Files: networkx/algorithms/centrality/percolation.py
+Copyright: Copyright (C) 2018 by Pranay Kanwar
+License: BSD-3-clause
+
+Files: networkx/algorithms/centrality/second_order.py
+Copyright: Copyright (c) 2015 – Thomson Licensing, SAS
+License: BSD-3-clause
+
+Files: networkx/algorithms/centrality/voterank_alg.py
+Copyright: Copyright (C) 2017 by Fredrik Erlandsson
+License: BSD-3-clause
+
+Files: networkx/algorithms/chordal.py
+Copyright: Copyright (C) 2010 by Jesus Cerquides
+License: BSD-3-clause
+
+Files: networkx/algorithms/coloring/equitable_coloring.py
+Copyright: Copyright (C) 2018 by Utkarsh Upadhyay
+License: BSD-3-clause
+
+Files: networkx/algorithms/community/kernighan_lin.py
+       networkx/algorithms/community/tests/test_kernighan_lin.py
+       networkx/algorithms/community/community_utils.py
+       networkx/algorithms/cuts.py
+Copyright: Copyright 2011 Ben Edwards.
+           Copyright 2011 Aric Hagberg
+           Copyright 2015 NetworkX developers.
+License: BSD-3-clause
+
+Files: networkx/algorithms/community/label_propagation.py
+Copyright: Copyright (C) 2015-2019 Aitor Almeida
+License: BSD-3-clause
+
+Files: networkx/algorithms/community/modularity_max.py
+       networkx/utils/tests/test_mapped_queue.py
+Copyright: Copyright 2018 Edward L.
Platt
+License: BSD-3-clause
+
+Files: networkx/algorithms/connectivity/stoerwagner.py
+       networkx/algorithms/flow/capacityscaling.py
+       networkx/algorithms/flow/edmondskarp.py
+       networkx/algorithms/flow/preflowpush.py
+       networkx/algorithms/flow/shortestaugmentingpath.py
+       networkx/algorithms/flow/utils.py
+       networkx/linalg/algebraicconnectivity.py
+Copyright: Copyright (C) 2014 ysitu
+License: BSD-3-clause
+
+Files: networkx/algorithms/flow/mincost.py
+       networkx/algorithms/flow/networksimplex.py
+Copyright: Copyright (C) 2010 Loïc Séguin-C.
+License: BSD-3-clause
+
+Files: networkx/algorithms/minors.py
+       networkx/algorithms/tests/test_minors.py
+Copyright: Copyright 2015 Jeffrey Finkelstein.
+           Copyright 2010 Drew Conway
+           Copyright 2010 Aric Hagberg
+License: BSD-3-clause
+
+Files: networkx/algorithms/sparsifiers.py
+       networkx/algorithms/tests/test_sparsifiers.py
+Copyright: Copyright (C) 2018 Robert Gmyr
+License: BSD-3-clause
+
+Files: networkx/algorithms/triads.py
+       networkx/generators/triads.py
+Copyright: Copyright 2015 NetworkX developers.
+           Copyright 2011 Reya Group
+           Copyright 2011 Alex Levenson
+           Copyright 2011 Diederik van Liere
+License: BSD-3-clause
+
+Files: networkx/algorithms/lowest_common_ancestors.py
+Copyright: Copyright (C) 2013 by Alex Roper
+           Copyright (C) 2017 by Aric Hagberg, Dan Schult, Pieter Swart
+License: BSD-3-clause
+
+Files: networkx/algorithms/matching.py
+Copyright: Copyright 2016 NetworkX developers.
+           Copyright (C) 2004-2019 by Aric Hagberg, Dan Schult, Pieter Swart
+           Copyright (C) 2008 by Joris van Rantwijk.
+           Copyright (C) 2011 by Nicholas Mancuso
+License: BSD-3-clause
+
+Files: networkx/algorithms/moral.py
+Copyright: Copyright (C) 2011-2019 by Julien Klaus
+           Copyright 2016-2019 NetworkX developers.
+License: BSD-3-clause
+
+Files: networkx/algorithms/simple_paths.py
+Copyright: Copyright (C) 2012 by Sergio Nery Simoes
+License: BSD-3-clause
+
+Files: networkx/algorithms/smallworld.py
+Copyright: Copyright (C) 2017 by Romain Fontugne
+License: BSD-3-clause
+
+Files: networkx/generators/tests/test_expanders.py
+       networkx/generators/expanders.py
+Copyright: Copyright 2014 "cheebee7i".
+           Copyright 2014 "alexbrc".
+           Copyright 2014 Jeffrey Finkelstein.
+License: BSD-3-clause
+
+Files: networkx/generators/directed.py
+Copyright: Copyright (C) 2006-2019 by Aric Hagberg, Dan Schult, Pieter Swart
+           Copyright (C) 2009 by Willem Ligtenberg
+License: BSD-3-clause
+
+Files: networkx/generators/spectral_graph_forge.py
+       networkx/generators/internet_as_graphs.py
+Copyright: Copyright (C) 2017-2019 by Luca Baldesi
+License: BSD-3-clause
+
+Files: networkx/generators/trees.py
+Copyright: Copyright (C) 2015-2019 by Jeffrey Finkelstein, NetworkX developers
+License: BSD-3-clause
+
+Files: examples/subclass/plot_antigraph.py
+Copyright: Copyright (C) 2015-2019 by Jordi Torrents
+License: BSD-3-clause
+
+Files: examples/applications/plot_circuits.py
+Copyright: Copyright 2016 Jeffrey Finkelstein.
+License: BSD-3-clause
+
+Files: networkx/generators/harary_graph.py
+Copyright: Copyright (C) 2018-2019 by Weisheng Si
+License: BSD-3-clause
+
+Files: networkx/generators/joint_degree_seq.py
+Copyright: Copyright (C) 2016-2019 by Minas Gjoka, Balint Tillman
+License: BSD-3-clause
+
+License: BSD-3-clause
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ .
+ * Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+ .
+ * Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+ .
+ * Neither the name of the NetworkX Developers nor the names of its
+   contributors may be used to endorse or promote products derived from this
+   software without specific prior written permission.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+License: GPL-3
+ This program is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free Software
+ Foundation, version 3 of the License.
+ .
+ This program is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+ .
+ You should have received a copy of the GNU General Public License along with
+ this program; if not, write to the Free Software Foundation, Inc., 51 Franklin
+ St, Fifth Floor, Boston, MA 02110-1301 USA
+ .
+ On Debian systems, the complete text of the GNU General Public License v3
+ (GPL) can be found in /usr/share/common-licenses/GPL-3.
diff --git a/debian/networkx-docs.docs b/debian/networkx-docs.docs deleted file mode 100644 index 7319041..0000000 --- a/debian/networkx-docs.docs +++ /dev/null @@ -1,2 +0,0 @@ -README.source -README.Debian diff --git a/debian/patches/0005-use-debian-mathjax.js.patch b/debian/patches/0005-use-debian-mathjax.js.patch new file mode 100644 index 0000000..5bf0454 --- /dev/null +++ b/debian/patches/0005-use-debian-mathjax.js.patch @@ -0,0 +1,21 @@ +From: Sandro Tosi +Date: Thu, 26 Apr 2018 22:10:42 -0400 +Subject: use debian mathjax.js + +--- + doc/conf.py | 3 +++ + 1 file changed, 3 insertions(+) + +Index: networkx/doc/conf.py +=================================================================== +--- networkx.orig/doc/conf.py ++++ networkx/doc/conf.py +@@ -202,6 +202,8 @@ default_role = "obj" + + numpydoc_show_class_members = False + ++# Debian change, use local MathJax ++mathjax_path = 'MathJax.js' + + def setup(app): + app.add_css_file("custom.css") diff --git a/debian/patches/0006-skip-plot_football-from-sphinx-gallery-requires-netw.patch b/debian/patches/0006-skip-plot_football-from-sphinx-gallery-requires-netw.patch new file mode 100644 index 0000000..6b3c91d --- /dev/null +++ b/debian/patches/0006-skip-plot_football-from-sphinx-gallery-requires-netw.patch @@ -0,0 +1,20 @@ +From: Sandro Tosi +Date: Thu, 7 Jun 2018 17:55:55 -0400 +Subject: skip plot_football from sphinx-gallery, requires network access + +--- + doc/conf.py | 1 + + 1 file changed, 1 insertion(+) + +Index: networkx/doc/conf.py +=================================================================== +--- networkx.orig/doc/conf.py ++++ networkx/doc/conf.py +@@ -49,6 +49,7 @@ sphinx_gallery_conf = { + # path where to save gallery generated examples + "gallery_dirs": "auto_examples", + "backreferences_dir": "modules/generated", ++ 'filename_pattern': '/plot_(?!football)', + } + + # generate autosummary pages diff --git a/debian/patches/10_doc_relocation b/debian/patches/10_doc_relocation new file mode 100644 index 0000000..71d65a7 --- /dev/null +++ b/debian/patches/10_doc_relocation @@ -0,0 +1,25 @@ +From: Thomas Goirand +Date: Thu, 8 Oct 2015 12:34:57 -0700 +Subject: install doc in the right place for Debian + +Forwarded: no +Last-Update: 2014-06-22 + +Patch-Name: 10_doc_relocation +--- + setup.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +Index: networkx/setup.py +=================================================================== +--- networkx.orig/setup.py ++++ networkx/setup.py +@@ -64,7 +64,7 @@ packages = [ + "networkx.utils", + ] + +-docdirbase = "share/doc/networkx-%s" % version ++docdirbase = "share/doc/python-networkx" + # add basic documentation + data = [(docdirbase, glob("*.txt"))] + # add examples diff --git a/debian/patches/2bfdee687d0db533555384f92ce3670d47c45aec.patch b/debian/patches/2bfdee687d0db533555384f92ce3670d47c45aec.patch new file mode 100644 index 0000000..d140c21 --- /dev/null +++ b/debian/patches/2bfdee687d0db533555384f92ce3670d47c45aec.patch @@ -0,0 +1,58 @@ +From 2bfdee687d0db533555384f92ce3670d47c45aec Mon Sep 17 00:00:00 2001 +From: josch +Date: Sat, 5 Sep 2020 19:32:40 +0200 +Subject: [PATCH] graphml: re-add graph attribute type 'long' after 857aa81 + removed it (#4189) + +closes: #4188 +--- + networkx/readwrite/graphml.py | 1 + + networkx/readwrite/tests/test_graphml.py | 23 +++++++++++++++++++++++ + 2 files changed, 24 insertions(+) + +diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py +index f24307bbc6..a44c7c5d66 100644 +--- a/networkx/readwrite/graphml.py 
++ b/networkx/readwrite/graphml.py
+@@ -358,6 +358,7 @@ class GraphML:
+         (str, "yfiles"),
+         (str, "string"),
+         (int, "int"),
++        (int, "long"),
+         (float, "float"),
+         (float, "double"),
+         (bool, "boolean"),
+diff --git a/networkx/readwrite/tests/test_graphml.py b/networkx/readwrite/tests/test_graphml.py
+index 7b90791f00..fd85d1320a 100644
+--- a/networkx/readwrite/tests/test_graphml.py
++++ b/networkx/readwrite/tests/test_graphml.py
+@@ -992,6 +992,29 @@ def test_read_attributes_with_groups(self):
+         for node_data in data:
+             assert node_data["CustomProperty"] != ""
+
++    def test_long_attribute_type(self):
++        # test that graphs with attr.type="long" (as produced by botch and
++        # dose3) can be parsed
++        s = """<?xml version='1.0' encoding='utf-8'?>
++<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">
++  <key attr.name="cudfversion" attr.type="long" for="node" id="d1" />
++  <graph edgedefault="directed">
++    <node id="n1">
++      <data key="d1">4284</data>
++    </node>
++  </graph>
++</graphml>"""
++        fh = io.BytesIO(s.encode("UTF-8"))
++        G = nx.read_graphml(fh)
++        expected = [("n1", {"cudfversion": 4284})]
++        assert sorted(G.nodes(data=True)) == expected
++        fh.seek(0)
++        H = nx.parse_graphml(s)
++        assert sorted(H.nodes(data=True)) == expected
++
+
+ class TestWriteGraphML(BaseGraphML):
+     writer = staticmethod(nx.write_graphml_lxml)
diff --git a/debian/patches/docs-remove-expected_failing_examples.patch b/debian/patches/docs-remove-expected_failing_examples.patch
new file mode 100644
index 0000000..e69de29
diff --git a/debian/patches/series b/debian/patches/series
new file mode 100644
index 0000000..111897b
--- /dev/null
+++ b/debian/patches/series
@@ -0,0 +1,6 @@
+10_doc_relocation
+use_local_object.inv_files.patch
+0005-use-debian-mathjax.js.patch
+0006-skip-plot_football-from-sphinx-gallery-requires-netw.patch
+docs-remove-expected_failing_examples.patch
+2bfdee687d0db533555384f92ce3670d47c45aec.patch
diff --git a/debian/patches/use_local_object.inv_files.patch b/debian/patches/use_local_object.inv_files.patch
new file mode 100644
index 0000000..6a4014a
--- /dev/null
+++ b/debian/patches/use_local_object.inv_files.patch
@@ -0,0 +1,27 @@
+From: SVN-Git Migration
+Date: Thu, 8 Oct 2015 12:34:59 -0700
+Subject: Use a local copy of object.inv from doc.python.org and
+ docs.scipy.org, instead of downloading them each time from the internet
+
+Last-Update: 2014-06-22
+
+Patch-Name: 30_use_local_objects.inv
+---
+ doc/conf.py | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+Index: networkx/doc/conf.py
+===================================================================
+--- networkx.orig/doc/conf.py
++++ networkx/doc/conf.py
+@@ -192,8 +192,8 @@ latex_appendices = ["tutorial"]
+
+ # Intersphinx mapping
+ intersphinx_mapping = {
+-    "https://docs.python.org/3/": None,
+-    "https://numpy.org/doc/stable/": None,
++    "https://docs.python.org/3/": '/usr/share/doc/python3/html/objects.inv',
++    "https://numpy.org/doc/stable/": '/usr/share/doc/python-numpy-doc/html/objects.inv',
+ }
+
+ # The reST default role (used for this markup: `text`) to use for all
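For illustration, a minimal sketch of the behaviour that the GraphML patch above (2bfdee687) restores; the snippet and the key id ``d0`` are hypothetical, and this assumes a networkx with the fix applied. Attributes declared as ``attr.type="long"`` then parse into plain Python ints instead of tripping a ``KeyError`` on the unknown type (upstream issue #4188)::

    import networkx as nx

    # GraphML in the style produced by botch/dose3; "d0" is an illustrative key id.
    s = """<?xml version='1.0' encoding='utf-8'?>
    <graphml xmlns="http://graphml.graphdrawing.org/xmlns">
      <key attr.name="cudfversion" attr.type="long" for="node" id="d0"/>
      <graph edgedefault="directed">
        <node id="n1">
          <data key="d0">4284</data>
        </node>
      </graph>
    </graphml>"""

    G = nx.parse_graphml(s)
    assert G.nodes["n1"]["cudfversion"] == 4284  # parsed as int, not rejected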
diff --git a/debian/python-networkx-doc.doc-base b/debian/python-networkx-doc.doc-base
new file mode 100644
index 0000000..ade41b5
--- /dev/null
+++ b/debian/python-networkx-doc.doc-base
@@ -0,0 +1,20 @@
+Document: python-networkx-doc
+Title: NetworkX documentation
+Author: NetworkX Developers
+Abstract: NetworkX is a Python-based package for the creation,
+ manipulation, and study of the structure, dynamics, and function of
+ complex networks.
+ .
+ The structure of a graph or network is encoded in the edges
+ (connections, links, ties, arcs, bonds) between nodes (vertices,
+ sites, actors). If unqualified, by graph we mean an undirected graph,
+ i.e. no multiple edges are allowed. By a network we usually mean a
+ graph with weights (fields, properties) on nodes and/or edges.
+Section: Programming/Python
+
+Format: HTML
+Index: /usr/share/doc/python-networkx-doc/html/index.html
+Files: /usr/share/doc/python-networkx-doc/html/*
+
+Format: PDF
+Files: /usr/share/doc/python-networkx-doc/*.pdf
diff --git a/debian/rules b/debian/rules
index fda5e71..d71bc50 100755
--- a/debian/rules
+++ b/debian/rules
@@ -1,10 +1,82 @@
 #!/usr/bin/make -f
-# See debhelper(7) (uncomment to enable)
-# output every command that modifies files on the build system.
-#export DH_VERBOSE = 1
-export PYBUILD_NAME=networkx
+LIB3 := $(shell python3 -c "from distutils.command.build import build ; from distutils.core import Distribution ; b = build(Distribution()) ; b.finalize_options() ; print (b.build_purelib)")
+PYTHON3 := $(shell py3versions -r)

 %:
-	dh $@ --with python2,python3 --buildsystem=pybuild
+	dh $@ --with sphinxdoc,python3 --buildsystem=pybuild
+override_dh_auto_clean:
+	dh_auto_clean
+
+	# Add here commands to clean up after the build process.
+	rm -fr build networkx/version.py test.* doc/source/templates/gallery.html fontList.cache
+	# Remove built doc too
+	$(MAKE) -C doc clean
+	rm -rf doc/source/*.pdf doc/source/*.zip examples/*/*.png doc/fontList.cache
+
+	# Make sure that there's no .pyc left
+	find . -name '*.pyc' -exec rm {} ';'
+
+	rm -f doc/file.dot doc/fontlist-v330.json doc/path.png doc/path.to.file
+	rm -f examples/advanced/edgelist.utf-8 examples/basic/grid.edgelist examples/pygraphviz/grid.dot examples/pygraphviz/k5.dot examples/pygraphviz/k5.ps examples/pygraphviz/k5_attributes.dot
+	rm -rf .pytest_cache
+
+override_dh_auto_test:
+ifeq ($(filter nocheck,$(DEB_BUILD_OPTIONS)),)
+	#set -e ; \
+	# running tests Python 3
+	-for py in $(PYTHON3) ; do\
+		cd build ; \
+		PYTHONPATH=../$(LIB3) $$py -m pytest --verbosity=2; \
+		cd .. ; \
+	done
+endif
+
+override_dh_install:
+	# Install w/o compiling *.pyc files
+	# Install egg-info directories (--single-... option)
+	python3 setup.py install --no-compile --root=$(CURDIR)/debian/python3-networkx \
+		--single-version-externally-managed --install-layout=deb
+
+	# fix test shebang
+	sed 's|/usr/bin/env python|/usr/bin/python3|' -i debian/python3-networkx/usr/lib/*/*-packages/networkx/generators/tests/test_random_graphs.py
+
+	# remove duplicate examples
+	rm -rf debian/python3-networkx/usr/share/doc/python-networkx
+
+	# don't ship python 2 code for _decorator because it can't be byte-compiled
+	rm -f debian/python3-networkx/usr/lib/python3/dist-packages/networkx/external/decorator/_decorator.py
+
+	dh_install
+
+override_dh_sphinxdoc:
+ifeq (,$(findstring nodocs, $(DEB_BUILD_OPTIONS)))
+	(export MPLCONFIGDIR=.
; make -C doc dist PYTHONPATH=../$(LIB3)) + + # install doc from dir 'dist', but rename it to 'html', better name + cp -auxf doc/build/dist debian/python-networkx-doc/usr/share/doc/python-networkx-doc/html + + cp -auxf doc/build/latex/networkx_reference.pdf debian/python-networkx-doc/usr/share/doc/python-networkx-doc/ + + rm -rf debian/python-networkx-doc/usr/share/doc/python-networkx-doc/html/_static/jquery.js + dh_link -ppython-networkx-doc /usr/share/javascript/jquery/jquery.js /usr/share/doc/python-networkx-doc/html/_static/jquery.js + + dh_link -ppython-networkx-doc /usr/share/javascript/mathjax/MathJax.js /usr/share/doc/python-networkx-doc/html/_static/MathJax.js + + dh_sphinxdoc +endif + +override_dh_installexamples: + dh_installexamples -ppython-networkx-doc examples/* + +override_dh_compress: + dh_compress -Xexamples/ -X.js -X.pdf -Xobjects.inv + +override_dh_installchangelogs: + dh_installchangelogs doc/news.rst + +override_dh_missing: + echo "I: Skipping dh_missing" diff --git a/debian/source/include-binaries b/debian/source/include-binaries new file mode 100644 index 0000000..8f086bc --- /dev/null +++ b/debian/source/include-binaries @@ -0,0 +1,2 @@ +debian/scipy.org_numpy_objects.inv +debian/python.org_objects.inv diff --git a/debian/source/options b/debian/source/options new file mode 100644 index 0000000..cb61fa5 --- /dev/null +++ b/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore = "^[^/]*[.]egg-info/" diff --git a/debian/upstream/metadata b/debian/upstream/metadata new file mode 100644 index 0000000..84b488c --- /dev/null +++ b/debian/upstream/metadata @@ -0,0 +1,5 @@ +--- +Bug-Database: https://github.com/networkx/networkx/issues +Bug-Submit: https://github.com/networkx/networkx/issues/new +Repository: https://github.com/networkx/networkx.git +Repository-Browse: https://github.com/networkx/networkx diff --git a/debian/watch b/debian/watch index f9f09e9..2d02f66 100644 --- a/debian/watch +++ b/debian/watch @@ -1,3 +1,3 @@ version=3 opts=dversionmangle=s/.dfsg\d//,uversionmangle=s/(rc|a|b|c)/~$1/ \ -http://pypi.debian.net/networkx/networkx-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) +https://pypi.debian.net/networkx/networkx-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) diff --git a/doc/Makefile b/doc/Makefile index 833d139..73a8b21 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -113,7 +113,7 @@ latexpdf: latex docs: clean html latexpdf cp build/latex/networkx_reference.pdf build/html/_downloads/. - + gitwash-update: python ../tools/gitwash_dumper.py developer networkx \ --project-url=http://networkx.github.io \ diff --git a/doc/README.md b/doc/README.md deleted file mode 100644 index 0205e03..0000000 --- a/doc/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# Building docs - -We currently use Sphinx for generating the API and reference -documentation for NetworkX. - -If you only want to get the documentation, note that pre-built -versions can be found at - - http://networkx.github.io/ - -for both the stable and the latest (i.e., development) releases. - -## Instructions - -In addition to installing NetworkX and its dependencies, install the Python -packages need to build the documentation by entering:: - - pip install -r requirements.txt - -in the ``doc/`` directory. - -To build the HTML documentation, enter:: - - make html - -in the ``doc/`` directory. If all goes well, this will generate a -``build/html`` subdirectory containing the built documentation. 
- -To build the PDF documentation, enter:: - - make latexpdf - -You will need to have Latex installed for this. diff --git a/doc/_static/custom.css b/doc/_static/custom.css new file mode 100644 index 0000000..3679012 --- /dev/null +++ b/doc/_static/custom.css @@ -0,0 +1,26 @@ +.team-member { + display: inline-block; + padding: 1rem; + margin: 0.25rem; + width: 7rem; + text-align: center; + vertical-align: top; +} + +.team-member-photo { + display: block; +} + +.team-member-photo img { + width: 60px; + border-radius: 50%; + margin-bottom: 0.5rem; +} + +.team-member-name { + font-weight: bold; +} + +.team-member-handle { + display: none; +} diff --git a/doc/_templates/dev_banner.html b/doc/_templates/dev_banner.html new file mode 100644 index 0000000..e444754 --- /dev/null +++ b/doc/_templates/dev_banner.html @@ -0,0 +1,6 @@ +{# Creates a banner at the top of the page for development versions. #} +
+<div class="admonition note">
+  <p class="admonition-title">Note</p>
+  <p>This documents the development version of NetworkX. Documentation for the current release can be found <a href="https://networkx.github.io/documentation/stable/">here</a>.</p>
+</div>
+ diff --git a/doc/_templates/eol_banner.html b/doc/_templates/eol_banner.html new file mode 100644 index 0000000..3944a21 --- /dev/null +++ b/doc/_templates/eol_banner.html @@ -0,0 +1,6 @@ +{# Creates a banner at the top of the page for EOL versions. #} +
+<div class="admonition danger">
+  <p class="admonition-title">Note</p>
+  <p>This documents an unmaintained version of NetworkX. Please upgrade to a maintained version and see the <a href="https://networkx.github.io/documentation/stable/">current NetworkX documentation</a>.</p>
+</div>
+ diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html new file mode 100644 index 0000000..f590c4b --- /dev/null +++ b/doc/_templates/layout.html @@ -0,0 +1,10 @@ +{% extends "!layout.html" %} +{% block sidebartitle %} + Project Homepage | + Source Code + {{ super() }} +{% endblock %} +#{% block document %} +# {% include "dev_banner.html" %} +# {{ super() }} +#{% endblock %} diff --git a/doc/bibliography.rst b/doc/bibliography.rst index 95908df..f619091 100644 --- a/doc/bibliography.rst +++ b/doc/bibliography.rst @@ -1,27 +1,25 @@ -.. -*- coding: utf-8 -*- - Bibliography ============ .. [BA02] R. Albert and A.-L. Barabási, "Statistical mechanics of complex - networks", Reviews of Modern Physics, 74, pp. 47-97, 2002. + networks", Reviews of Modern Physics, 74, pp. 47-97, 2002. https://arxiv.org/abs/cond-mat/0106096 .. [Bollobas01] B. Bollobás, "Random Graphs", Second Edition, Cambridge University Press, 2001. .. [BE05] U. Brandes and T. Erlebach, "Network Analysis: - Methodological Foundations", Lecture Notes in Computer Science, + Methodological Foundations", Lecture Notes in Computer Science, Volume 3418, Springer-Verlag, 2005. -.. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs", +.. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs", Chapman and Hall/CRC, 1996. -.. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on - graph sequences." Bulletin of the Australian Mathematical Society, 33, +.. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on + graph sequences." Bulletin of the Australian Mathematical Society, 33, pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872 -.. [Diestel97] R. Diestel, "Graph Theory", Springer-Verlag, 1997. +.. [Diestel97] R. Diestel, "Graph Theory", Springer-Verlag, 1997. http://diestel-graph-theory.com/index.html .. [DM03] S.N. Dorogovtsev and J.F.F. Mendes, "Evolution of Networks", @@ -30,27 +28,27 @@ Bibliography .. [EppsteinPads] David Eppstein. PADS, A library of Python Algorithms and Data Structures. http://www.ics.uci.edu/~eppstein/PADS - + .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960. -.. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as +.. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962. -.. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs" +.. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs" Casopis Pest. Mat. 80, 477-480, 1955. - + .. [Langtangen04] H.P. Langtangen, "Python Scripting for Computational Science.", Springer Verlag Series in Computational Science and - Engineering, 2004. + Engineering, 2004. .. [Martelli03] A. Martelli, "Python in a Nutshell", O'Reilly Media Inc, 2003. .. [Newman03] M.E.J. Newman, "The Structure and Function of Complex - Networks", SIAM Review, 45, pp. 167-256, 2003. + Networks", SIAM Review, 45, pp. 167-256, 2003. http://epubs.siam.org/doi/abs/10.1137/S003614450342480 -.. [Sedgewick02] R. Sedgewick, "Algorithms in C: Parts 1-4: +.. [Sedgewick02] R. Sedgewick, "Algorithms in C: Parts 1-4: Fundamentals, Data Structure, Sorting, Searching", Addison Wesley Professional, 3rd ed., 2002. diff --git a/doc/citing.rst b/doc/citing.rst index 2561a2c..ce818cc 100644 --- a/doc/citing.rst +++ b/doc/citing.rst @@ -1,11 +1,9 @@ -.. -*- coding: utf-8 -*- - Citing ====== To cite NetworkX please use the following publication: -Aric A. Hagberg, Daniel A. Schult and Pieter J. Swart, +Aric A. 
Hagberg, Daniel A. Schult and Pieter J. Swart, `"Exploring network structure, dynamics, and function using NetworkX" `_, in @@ -16,5 +14,5 @@ USA), pp. 11--15, Aug 2008 .. only:: html - `PDF `_ - `BibTeX `_ + `PDF `_ + `BibTeX `_ diff --git a/doc/conf.py b/doc/conf.py index 504259c..725e3cd 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,38 +1,11 @@ -# -*- coding: utf-8 -*- -# -# Sphinx documentation build configuration file, created by -# sphinx-quickstart.py on Sat Mar 8 21:47:50 2008. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# The contents of this file are pickled, so don't put values in the namespace -# that aren't pickleable (module imports are okay, they're removed automatically). -# -# All configuration values have a default value; values that are commented out -# serve to show the default value. -from __future__ import print_function - -import sys -import os from datetime import date - from sphinx_gallery.sorting import ExplicitOrder +import sphinx_rtd_theme +from warnings import filterwarnings -# Check Sphinx version -import sphinx -if sphinx.__version__ < "1.3": - raise RuntimeError("Sphinx 1.3 or newer required") - -# Environment variable to know if the docs are being built on rtd. -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' -#print -#print("Building on ReadTheDocs: {}".format(on_rtd)) -#print -#print("Current working directory: {}".format(os.path.abspath(os.curdir))) -#print("Python: {}".format(sys.executable)) - -# If your extensions are in another directory, add it here. -# These locations are relative to conf.py +filterwarnings( + "ignore", message="Matplotlib is currently using agg", category=UserWarning +) # General configuration # --------------------- @@ -40,79 +13,89 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
extensions = [ - 'sphinx.ext.autosummary', - 'sphinx.ext.autodoc', - 'sphinx.ext.coverage', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.mathjax', - 'sphinx.ext.napoleon', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', - 'sphinx_gallery.gen_gallery', - 'nb2plots', - 'texext', + "sphinx.ext.autosummary", + "sphinx.ext.autodoc", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "sphinx_gallery.gen_gallery", + "nb2plots", + "texext", ] # https://github.com/sphinx-gallery/sphinx-gallery sphinx_gallery_conf = { # path to your examples scripts - 'examples_dirs': '../examples', - 'subsection_order': ExplicitOrder(['../examples/basic', - '../examples/drawing', - '../examples/graph', - '../examples/algorithms', - '../examples/advanced', - '../examples/3d_drawing', - '../examples/pygraphviz', - '../examples/javascript', - '../examples/jit', - '../examples/subclass']), + "examples_dirs": "../examples", + "subsection_order": ExplicitOrder( + [ + "../examples/basic", + "../examples/drawing", + "../examples/graph", + "../examples/algorithms", + "../examples/advanced", + "../examples/3d_drawing", + "../examples/pygraphviz", + "../examples/javascript", + "../examples/jit", + "../examples/applications", + "../examples/subclass", + ] + ), # path where to save gallery generated examples - 'gallery_dirs': 'auto_examples', - 'backreferences_dir': 'modules/generated', - 'expected_failing_examples': ['../examples/advanced/plot_parallel_betweenness.py'] + "gallery_dirs": "auto_examples", + "backreferences_dir": "modules/generated", } # generate autosummary pages autosummary_generate = True # Add any paths that contain templates here, relative to this directory. -#templates_path = [''] +templates_path = ["_templates"] + +suppress_warnings = ["ref.citation", "ref.footnote"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -source_encoding = 'utf-8' +source_encoding = "utf-8" # The master toctree document. -master_doc = 'index' +master_doc = "index" + +# Do not include release announcement template +exclude_patterns = ["release/release_template.rst"] # General substitutions. -project = 'NetworkX' -copyright = '2004-{}, NetworkX Developers'.format(date.today().year) +project = "NetworkX" +copyright = f"2004-{date.today().year}, NetworkX Developers" # The default replacements for |version| and |release|, also used in various # other places throughout the built documents. # # The short X.Y version. import networkx + version = networkx.__version__ # The full version, including dev info -release = networkx.__version__.replace('_', '') +release = networkx.__version__.replace("_", "") # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. # unused_docs = [''] # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). @@ -121,11 +104,11 @@ # show_authors = True # The name of the Pygments (syntax highlighting) style to use. 
-#pygments_style = 'friendly' -pygments_style = 'sphinx' +# pygments_style = 'friendly' +pygments_style = "sphinx" # A list of prefixs that are ignored when creating the module index. (new in Sphinx 0.6) -modindex_common_prefix = ['networkx.'] +modindex_common_prefix = ["networkx."] doctest_global_setup = "import networkx as nx" @@ -135,81 +118,91 @@ # Options for HTML output # ----------------------- -if not on_rtd: - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] -# html_theme_options = { -# "rightsidebar": "true", -# "relbarbgcolor: "black" -#} +html_theme = "sphinx_rtd_theme" +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +html_theme_options = { + "canonical_url": "https://networkx.github.io/documentation/stable/", + "navigation_depth": 3, + "logo_only": True, +} + +html_logo = "_static/networkx_logo.svg" # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. -#html_style = '' +# html_style = '' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -html_last_updated_fmt = '%b %d, %Y' +html_last_updated_fmt = "%b %d, %Y" # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Content template for the index page. -#html_index = 'index.html' +# html_index = 'index.html' # Custom sidebar templates, maps page names to templates. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # templates. -#html_additional_pages = {'': ''} +# html_additional_pages = {'': ''} # If true, the reST sources are included in the HTML build as _sources/. html_copy_source = False -html_use_opensearch = 'http://networkx.github.io' +html_use_opensearch = "http://networkx.github.io" # Output file base name for HTML help builder. -htmlhelp_basename = 'NetworkX' +htmlhelp_basename = "NetworkX" # Options for LaTeX output # ------------------------ # The paper size ('letter' or 'a4'). -latex_paper_size = 'letter' +latex_paper_size = "letter" # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). -latex_documents = [('reference/index', 'networkx_reference.tex', - 'NetworkX Reference', - 'Aric Hagberg, Dan Schult, Pieter Swart', 'manual', 1)] +latex_documents = [ + ( + "reference/index", + "networkx_reference.tex", + "NetworkX Reference", + "Aric Hagberg, Dan Schult, Pieter Swart", + "manual", + 1, + ) +] -latex_appendices = ['tutorial'] +latex_appendices = ["tutorial"] # Intersphinx mapping -intersphinx_mapping = {'https://docs.python.org/2/': None, - 'https://docs.scipy.org/doc/numpy/': None, - } +intersphinx_mapping = { + "https://docs.python.org/3/": None, + "https://numpy.org/doc/stable/": None, +} # The reST default role (used for this markup: `text`) to use for all # documents. 
-default_role = 'obj' +default_role = "obj" numpydoc_show_class_members = False -# Add the 'copybutton' javascript, to hide/show the prompt in code -# examples + def setup(app): - app.add_javascript('copybutton.js') + app.add_css_file("custom.css") + app.add_js_file("copybutton.js") diff --git a/doc/credits.rst b/doc/credits.rst index 9020480..c048885 100644 --- a/doc/credits.rst +++ b/doc/credits.rst @@ -1 +1,189 @@ -.. include:: ../CONTRIBUTORS.rst +About Us +======== + +NetworkX was originally written by Aric Hagberg, Dan Schult, and Pieter Swart, +and has been developed with the help of many others. Thanks to everyone who has +improved NetworkX by contributing code, bug reports (and fixes), documentation, +and input on design, features, and the future of NetworkX. + +.. include:: team.rst + +Contributors +------------ + +If you are a NetworkX contributor, please feel free to +open an `issue `_ or +submit a `pull request `_ +to add your name to the bottom of the list. + +- Aric Hagberg, GitHub: `hagberg `_ +- Dan Schult, GitHub: `dschult `_ +- Pieter Swart +- Katy Bold +- Hernan Rozenfeld +- Brendt Wohlberg +- Jim Bagrow +- Holly Johnsen +- Arnar Flatberg +- Chris Myers +- Joel Miller +- Keith Briggs +- Ignacio Rozada +- Phillipp Pagel +- Sverre Sundsdal +- Ross M. Richardson +- Eben Kenah +- Sasha Gutfriend +- Udi Weinsberg +- Matteo Dell'Amico +- Andrew Conway +- Raf Guns +- Salim Fadhley +- Fabrice Desclaux +- Arpad Horvath +- Minh Van Nguyen +- Willem Ligtenberg +- Loïc Séguin-C. +- Paul McGuire +- Jesus Cerquides +- Ben Edwards +- Jon Olav Vik +- Hugh Brown +- Ben Reilly +- Leo Lopes +- Jordi Torrents, GitHub: `jtorrents `_ +- Dheeraj M R +- Franck Kalala +- Simon Knight +- Conrad Lee +- Sérgio Nery Simões +- Robert King +- Nick Mancuso +- Brian Cloteaux +- Alejandro Weinstein +- Dustin Smith +- Mathieu Larose +- Romain Fontugne +- Vincent Gauthier +- chebee7i, GitHub: `chebee7i `_ +- Jeffrey Finkelstein +- Jean-Gabriel Young, Github: `jg-you `_ +- Andrey Paramonov, http://aparamon.msk.ru +- Mridul Seth, GitHub: `MridulS `_ +- Thodoris Sotiropoulos, GitHub: `theosotr `_ +- Konstantinos Karakatsanis, GitHub: `k-karakatsanis `_ +- Ryan Nelson, GitHub: `rnelsonchem `_ +- Niels van Adrichem, GitHub: `NvanAdrichem `_ +- Michael E. Rose, GitHub: `Michael-E-Rose `_ +- Jarrod Millman, GitHub: `jarrodmillman `_ +- Andre Weltsch +- Lewis Robbins +- Mads Jensen, Github: `atombrella `_ +- Edward L. Platt, `elplatt `_ +- James Owen, Github: `leamingrad `_ +- Robert Gmyr, Github: `gmyr `_ +- Mike Trenfield +- Jon Crall, Github: `Erotemic `_ +- Issa Moradnejad, Github ``_, LinkedIn ``_ +- Brian Kiefer, Github: `bkief `_ +- Julien Klaus +- Peter C. Kroon, Github: `pckroon `_ +- Weisheng Si, Github: `ws4u `_ +- Haakon H. Rød, Gitlab: `haakonhr `_, ``_ +- Efraim Rodrigues, GitHub ``_, LinkedIn ``_ +- Erwan Le Merrer +- Søren Fuglede Jørgensen, GitHub: `fuglede `_ +- Salim BELHADDAD, LinkedIn ``_ +- Jangwon Yie, GitHub ``_, LinkedIn ``_ +- ysitu +- Tomas Gavenciak +- Luca Baldesi +- Yuto Yamaguchi +- James Clough +- Minas Gjoka +- Drew Conway +- Alex Levenson +- Haochen Wu +- Erwan Le Merrer +- Alex Roper +- P C Kroon +- Christopher Ellison +- D. 
Eppstein +- Federico Rosato +- Aitor Almeida +- Ferran Parés +- Christian Olsson +- Fredrik Erlandsson +- Nanda H Krishna +- Nicholas Mancuso +- Fred Morstatter +- Ollie Glass +- Rodrigo Dorantes-Gilardi +- Pranay Kanwar +- Balint Tillman +- Diederik van Liere +- Ferdinando Papale +- Miguel Sozinho Ramalho +- Brandon Liu +- Nima Mohammadi +- Jason Grout +- Jan Aagaard Meier +- Henrik Haugbølle +- Piotr Brodka +- Sasha Gutfraind +- Alessandro Luongo +- Huston Hedinger +- Oleguer Sagarra +- Kazimierz Wojciechowski, GitHub ``_, LinkedIn ``_ +- Gaetano Pietro Paolo Carpinato, Github ``_, LinkedIn ``_ +- Arun Nampally, GitHub ``_, LinkedIn ``_ +- Ryan Duve +- Shashi Prakash Tripathi, Github ``_,LinkedIn ``_ +- Danny Niquette +- James Trimble, Github: `jamestrimble `_ +- Matthias Bruhns, Github ``_ + +A supplementary (but still incomplete) list of contributors is given by the +list of names that have commits in ``networkx``'s +`git `_ repository. This can be obtained via:: + + git log --raw | grep "^Author: " | sort | uniq + +A historical, partial listing of contributors and their contributions to some +of the earlier versions of NetworkX can be found +`here `_. + + +Support +------- + +NetworkX acknowledges support from the following research groups: + +- `Center for Nonlinear Studies `_, Los Alamos National + Laboratory, PI: Aric Hagberg + +- `Open Source Programs Office `_, + Google + +- `Complexity Sciences Center `_, Department of + Physics, University of California-Davis, PI: James P. Crutchfield + +- `Center for Complexity and Collective Computation `_, + Wisconsin Institute for Discovery, University of Wisconsin-Madison, + PIs: Jessica C. Flack and David C. Krakauer + +NetworkX acknowledges the following financial support: + +- Google Summer of Code via Python Software Foundation + +- U.S. Army Research Office grant W911NF-12-1-0288 + +- DARPA Physical Intelligence Subcontract No. 9060-000709 + +- NSF Grant No. PHY-0748828 + +- John Templeton Foundation through a grant to the Santa Fe Institute to + study complexity + +- U.S. Army Research Laboratory and the U.S. Army Research Office under + contract number W911NF-13-1-0340 diff --git a/doc/developer/code_of_conduct.rst b/doc/developer/code_of_conduct.rst new file mode 100644 index 0000000..2d70708 --- /dev/null +++ b/doc/developer/code_of_conduct.rst @@ -0,0 +1 @@ +.. include:: ../../CODE_OF_CONDUCT.rst diff --git a/doc/developer/core_developer.rst b/doc/developer/core_developer.rst new file mode 100644 index 0000000..390fc01 --- /dev/null +++ b/doc/developer/core_developer.rst @@ -0,0 +1,170 @@ +.. _core_dev: + +Core Developer Guide +==================== + +As a core developer, you should continue making pull requests +in accordance with the :ref:`contributor_guide`. +You are responsible for shepherding other contributors through the review process. +You should be familiar with our :ref:`mission_and_values`. +You also have the ability to merge or approve other contributors' pull requests. +Much like nuclear launch keys, it is a shared power: you must merge *only after* +another core developer has approved the pull request, *and* after you yourself have carefully +reviewed it. (See `Reviewing`_ and especially `Merge Only Changes You +Understand`_ below.) To ensure a clean git history, use GitHub's +`Squash and Merge `__ +feature to merge, unless you have a good reason not to do so. + +Reviewing +--------- + +How to Conduct A Good Review +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +*Always* be kind to contributors. 
Nearly all of NetworkX is
+volunteer work, for which we are tremendously grateful. Provide
+constructive criticism on ideas and implementations, and remind
+yourself of how it felt when your own work was being evaluated as a
+novice.
+
+NetworkX strongly values mentorship in code review. New users
+often need more handholding, having little to no git
+experience. Repeat yourself liberally, and, if you don’t recognize a
+contributor, point them to our development guide, or other GitHub
+workflow tutorials around the web. Do not assume that they know how
+GitHub works (e.g., many don't realize that adding a commit
+automatically updates a pull request). Gentle, polite, kind
+encouragement can make the difference between a new core developer and
+an abandoned pull request.
+
+When reviewing, focus on the following:
+
+1. **API:** The API is what users see when they first use
+   NetworkX. APIs are difficult to change once released, so
+   they should be simple, `functional
+   `__ (i.e. not
+   carry state), consistent with other parts of the library, and
+   should avoid modifying input variables. Please familiarize
+   yourself with the project's :ref:`deprecation_policy`.
+
+2. **Documentation:** Any new feature should have a gallery
+   example that not only illustrates but explains it.
+
+3. **The algorithm:** You should understand the code being modified or
+   added before approving it. (See `Merge Only Changes You
+   Understand`_ below.) Implementations should do what they claim,
+   and be simple, readable, and efficient.
+
+4. **Tests:** All contributions to the library *must* be tested, and
+   each added line of code should be covered by at least one test. Good
+   tests not only execute the code, but also explore corner cases. It is
+   tempting not to review tests, but please do so.
+
+Other changes may be *nitpicky*: spelling mistakes, formatting,
+etc. Do not ask contributors to make these changes, and instead
+make the changes by `pushing to their branch
+`__,
+or using GitHub’s `suggestion
+`__
+`feature
+`__.
+(The latter is preferred because it gives the contributor a choice in
+whether to accept the changes.)
+
+Our default merge policy is to squash all PR commits into a single
+commit. Users who wish to bring the latest changes from ``master``
+into their branch should be advised to merge, not to rebase. Even
+when merge conflicts arise, don’t ask for a rebase unless you know
+that a contributor is experienced with git. Instead, rebase the branch
+yourself, force-push to their branch, and advise the contributor on
+how to force-pull. If the contributor is no longer active, you may
+take over their branch by submitting a new pull request and closing
+the original. In doing so, ensure you communicate that you are not
+throwing the contributor's work away! You should use GitHub's
+``Co-authored-by:`` keyword for commit messages to credit the
+original contributor.
+
+Please add a note to a pull request after you push new changes; GitHub
+may not send out notifications for these.
+
+Merge Only Changes You Understand
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+*Long-term maintainability* is an important concern. Code doesn't
+merely have to *work*, but should be *understood* by multiple core
+developers. Changes will have to be made in the future, and the
+original contributor may have moved on.
+
+Therefore, *do not merge a code change unless you understand it*.
Ask +for help freely: we have a long history of consulting community +members, or even external developers, for added insight where needed, +and see this as a great learning opportunity. + +While we collectively "own" any patches (and bugs!) that become part +of the code base, you are vouching for changes you merge. Please take +that responsibility seriously. + +Closing issues and pull requests +-------------------------------- + +Sometimes, an issue must be closed that was not fully resolved. This can be +for a number of reasons: + +- the person behind the original post has not responded to calls for + clarification, and none of the core developers have been able to reproduce + their issue; +- fixing the issue is difficult, and it is deemed too niche a use case to + devote sustained effort or prioritize over other issues; or +- the use case or feature request is something that core developers feel + does not belong in NetworkX, + +among others. Similarly, pull requests sometimes need to be closed without +merging, because: + +- the pull request implements a niche feature that we consider not worth the + added maintenance burden; +- the pull request implements a useful feature, but requires significant + effort to bring up to NetworkX's standards, and the original + contributor has moved on, and no other developer can be found to make the + necessary changes; or +- the pull request makes changes that do not align with our values, such as + increasing the code complexity of a function significantly to implement a + marginal speedup, + +among others. + +All these may be valid reasons for closing, but we must be wary not to alienate +contributors by closing an issue or pull request without an explanation. When +closing, your message should: + +- explain clearly how the decision was made to close. This is particularly + important when the decision was made in a community meeting, which does not + have as visible a record as the comments thread on the issue itself; +- thank the contributor(s) for their work; and +- provide a clear path for the contributor or anyone else to appeal the + decision. + +These points help ensure that all contributors feel welcome and empowered to +keep contributing, regardless of the outcome of past contributions. + +Further resources +----------------- + +As a core member, you should be familiar with community and developer +resources such as: + +- Our :ref:`contributor_guide` +- Our :ref:`code_of_conduct` +- `PEP8 `__ for Python style +- `PEP257 `__ and the `NumPy + documentation + guide `__ + for docstrings. (NumPy docstrings are a superset of PEP257. You + should read both.) +- The NetworkX `tag on + StackOverflow `__ +- Our `mailing + list `__ + +You are not required to monitor all of the social resources. diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst new file mode 100644 index 0000000..aedcb33 --- /dev/null +++ b/doc/developer/deprecations.rst @@ -0,0 +1,56 @@ +Deprecations +============ + +.. _deprecation_policy: + +Policy +------ + +If the behavior of the library has to be changed, a deprecation cycle must be +followed to warn users. + +A deprecation cycle is *not* necessary when: + +* adding a new function, or +* adding a new keyword argument to the *end* of a function signature, or +* fixing buggy behavior + +A deprecation cycle is necessary for *any breaking API change*, meaning a +change where the function, invoked with the same arguments, would return a +different result after the change. 
This includes:
+
+* changing the order of arguments or keyword arguments, or
+* adding arguments or keyword arguments to a function, or
+* changing the name of a function, class, method, etc., or
+* moving a function, class, etc. to a different module, or
+* changing the default value of a function's arguments.
+
+Usually, our policy is to put in place a deprecation cycle over two minor
+releases (e.g., if a deprecation warning appears in 2.3, then the
+functionality should be removed in 2.5). For major releases we usually
+require that all deprecations have at least a 1-release deprecation cycle
+(e.g., if 3.0 occurs after 2.5, then all functionality removed in 3.0
+should be deprecated in 2.5).
+
+Note that these 1- and 2-release deprecation cycles for major and minor
+releases are not a strict rule; in some cases, the developers can agree on
+a different procedure upon justification (for example, when we can't detect
+the change, or when it involves moving or deleting an entire function).
+
+Todo
+----
+
+Make sure to review ``networkx/conftest.py`` after removing deprecated code.
+
+Version 3.0
+~~~~~~~~~~~
+
+* In ``readwrite/gml.py`` remove ``literal_stringizer`` and related tests.
+* In ``readwrite/gml.py`` remove ``literal_destringizer`` and related tests.
+* In ``utils/misc.py`` remove ``is_string_like`` and related tests.
+* In ``utils/misc.py`` remove ``make_str`` and related tests.
+* Remove ``utils/contextmanagers.py`` and related tests.
+* In ``drawing/nx_agraph.py`` remove ``display_pygraphviz`` and related tests.
+* In ``algorithms/chordal.py`` replace ``chordal_graph_cliques`` with ``_chordal_graph_cliques``.
+* In ``algorithms/centrality/betweenness_centrality_subset.py`` remove ``betweenness_centrality_source``.
+* In ``algorithms/centrality/betweenness.py`` remove ``edge_betweeness``.
+* In ``algorithms/community/modularity_max.py`` remove old name ``_naive_greedy_modularity_communities``.
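To make the deprecation policy above concrete, here is a minimal sketch of the usual pattern; the names ``old_name`` and ``new_name`` are hypothetical and not taken from this patch. The old callable stays in place for one deprecation cycle and forwards to the new one while warning the caller::

    import warnings

    def new_name(G):
        """Renamed implementation."""
        return len(G)

    def old_name(G):
        """Deprecated alias of `new_name`, kept for one deprecation cycle."""
        warnings.warn(
            "old_name is deprecated and will be removed in version 3.0; "
            "use new_name instead.",
            DeprecationWarning,
            stacklevel=2,  # attribute the warning to the caller, not this shim
        )
        return new_name(G)

With ``stacklevel=2`` the warning points at the calling line rather than the alias itself, which is what makes it actionable while the old name is still supported.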
diff --git a/doc/developer/gitwash/configure_git.rst b/doc/developer/gitwash/configure_git.rst
index 3a172d5..7059bde 100644
--- a/doc/developer/gitwash/configure_git.rst
+++ b/doc/developer/gitwash/configure_git.rst
@@ -152,19 +152,19 @@
 and it gives graph / text output something like this (but with color!):

    * 6d8e1ee - (HEAD, origin/my-fancy-feature, my-fancy-feature) NF - a fancy file (45 minutes ago) [Matthew Brett]
    * d304a73 - (origin/placeholder, placeholder) Merge pull request #48 from hhuuggoo/master (2 weeks ago) [Jonathan Terhorst]
-   |\
+   |\
    | * 4aff2a8 - fixed bug 35, and added a test in test_bugfixes (2 weeks ago) [Hugo]
-   |/
+   |/
    * a7ff2e5 - Added notes on discussion/proposal made during Data Array Summit. (2 weeks ago) [Corran Webster]
    * 68f6752 - Initial implimentation of AxisIndexer - uses 'index_by' which needs to be changed to a call on an Axes object - this is all very sketchy right now. (2 weeks ago) [Corr
    * 376adbd - Merge pull request #46 from terhorst/master (2 weeks ago) [Jonathan Terhorst]
-   |\
+   |\
    | * b605216 - updated joshu example to current api (3 weeks ago) [Jonathan Terhorst]
    | * 2e991e8 - add testing for outer ufunc (3 weeks ago) [Jonathan Terhorst]
    | * 7beda5a - prevent axis from throwing an exception if testing equality with non-axis object (3 weeks ago) [Jonathan Terhorst]
    | * 65af65e - convert unit testing code to assertions (3 weeks ago) [Jonathan Terhorst]
    | * 956fbab - Merge remote-tracking branch 'upstream/master' (3 weeks ago) [Jonathan Terhorst]
-   | |\
+   | |\
    | |/

 Thanks to Yury V. Zaytsev for posting it.
diff --git a/doc/developer/gitwash/development_workflow.rst b/doc/developer/gitwash/development_workflow.rst
index bedc49f..644edc2 100644
--- a/doc/developer/gitwash/development_workflow.rst
+++ b/doc/developer/gitwash/development_workflow.rst
@@ -24,7 +24,7 @@ In what follows we'll refer to the upstream networkx ``master`` branch, as
 * Name your branch for the purpose of the changes - e.g.
   ``bugfix-for-issue-14`` or ``refactor-database-code``.
 * If you can possibly avoid it, avoid merging trunk or any other branches into
-  your feature branch while you are working.
+  your feature branch while you are working.
 * If you do find yourself merging from trunk, consider :ref:`rebase-on-trunk`
 * Ask on the `networkx mailing list`_ if you get stuck.
 * Ask for code review!
diff --git a/doc/developer/gitwash/index.rst b/doc/developer/gitwash/index.rst
index 01629d1..4e64ea7 100644
--- a/doc/developer/gitwash/index.rst
+++ b/doc/developer/gitwash/index.rst
@@ -1,7 +1,7 @@
 .. _using-git:

-Working with *networkx* source code
-================================================
+Advanced Git
+============

 Contents:
diff --git a/doc/developer/index.rst b/doc/developer/index.rst
index 040b5a1..62b9c27 100644
--- a/doc/developer/index.rst
+++ b/doc/developer/index.rst
@@ -6,5 +6,12 @@ Developer Guide
 .. toctree::
    :maxdepth: 2

+   code_of_conduct
+   values
    contribute
+   core_developer
    gitwash/index
+   release
+   deprecations
+   roadmap
+   nxeps/index
diff --git a/doc/developer/nxeps/_static/nxep-0000.png b/doc/developer/nxeps/_static/nxep-0000.png
new file mode 100644
index 0000000000000000000000000000000000000000..0fc8176d242edf8163a1436adcd4afcef5d97802
GIT binary patch
literal 12925
[binary PNG data omitted]
z7vDfQaw_4Td>Y+0n5Un|=_DX&>JlmhaQZr?`y~F#No5tqo-4NVyspu19*D$P#)t%r zyB)W3l4-HPEp56$_BH#;jJg-@kkK}kN_fTZiif}&+8SoPN#GX=S?<_BgY<$F8*cL2 z=lqFZD#^J1_{h7qNH2z60H%>tt$)DPBW9_U%obGVm`;n)(*o``986HDrhO7D z7@FeHGgd2Aqu^wIE<={JL=J7&lbq1{T{78Pqc&SRzsuV5n`c#59_wLr922WGtGOUk zjh0|RWDnkc<+9{JCD$?z@jfy0gZnTr85x7xb*|H1_3V2q)~Q@wrJ5&<9#3({m>co8 ze~peSnd)ThQCA1g&NLlI-7-~Xis{c!CXe+5gbTDk2}&oRk8W>tk{q_AXoKu{gjB6{ zA{bP3rq80rj(T2uS(O^n<2N~k_A&$ubOw}S5`ZMX?SSTHYN|HW;4_woH;(imoI!vl)AS=_ZyPS?45X#)4+TtPH` z;5!Vm6M!FB4%^LqhtZ3e7j!y=$@hfE!xgnoI_ZZOE=5KR^h&_rRZ8dCcLnbMdbg83 zI#d|0SZUtz0V$WSy$SDhh6#mYRj%8I&2g24aD4^$ya>|YEGvXix>MQw-U|iJhXzvi ze;;qWQYdtwkcf0)gU${ayr(x^4392734{IlTh$*|zNb#bPV4xVw&yR`Q~%rqE%ns&oW~kFqkPzyRGk=Qw_a?L6 zn4dYAP!1b%Digcr$ihdGRfpY6l@JnVl#8$q-hExP9@{Wfpe$){@-)yOH*KG`G~$9I z%P=4DyE=I|dGXwYmtS3LUKv&2Oq-aC+>cYFDUnAF=a42{KQ<)rOkY#+#tbH_LkQQZTX|1;#<_bv_;GSGGt(w2`it$TKIXMe1hh)r2r zJYSYqJTPn`?eSS)IN5WC`riCgzw&xTT4|6RcHmM)YPUTc*N+eMzAHLECZYd{TR2rk zej|nB#)iq;ykMAv;fGP6gPq5YWS+iHh_K{HR{y!ApUw^K+uz2%4u8!P%Vt$bVUD-H z^FPB>gZQ&N^fvKSUOfXkhQe(q#PDFSs^q9|KyDHsyhoi>R zcu3$z8DJy&I2iEwwb#54@$EY`GK-3|!+=7cPl~6fJ{BxxEYJ{U53Xk3Y0YjT93J6L z3J4$tr`_2DPrq!ef|UU{6X8MLgW7k4Jc#aSgG+vHoVAPzY7Mern&w$+KJorFIB~lR7qwGcs$#lE zqw|YC2O#r55tNY$N&$3ww9vd;6#8Q;(bNOR%E~5T5=45znwjY*x|z$UA4sUUGv}R3 zaH|6~mVzcjV{daai*r@mav%+G)EFa1Lab)WFw50Qp&qP1^(QZcYF2PK}_@655 z*)24n%BtJzD-`ZL85T}3;#AOTQ_0MTG#<)MYbFvA=bzk4WRGKyqdW~W2d>QVvkqlX z683?iPZzvjM3j~78hw8PxF8RL63<8Sy{GA#0R_ZkrQc6sYHkD%DSl6Ts2ki7M)T*j zrZu9ajRnFfOW9ZI#)06?BsW!`0}YVuP|)cF`!pA!ZTSR7`x=Hly=Nz`CQ6M^M~`y=Csxuf2|Je}~%KdS?$ ze5LNEZtM9@{0AeN>1im{kGXz|bTe(p zVClX+XqwjD;J$e1%)w{msUd9kkPi6|kff8{yGmd~)n9|XUbs@+v2G(R>r0j8aOPc> zJ1pb!qCG=j-b;TJzJOk;i^9&XXn;fwE)Eip(X zk7Rk=^6;1C-{PS%NiM6_y*3aIdAPNQ~^f zsk(Sv?N%)IOL!yzG;J@=uHoJ7x&F#wRFhl~IDrXkp*@aX7K7|vPp+(^ z&+ET1vO;eO_&#`pY>`AZ&Mv$%6q=($3;)GE{ea8zWT29#jD9IqCz_>k*oW zmha|iZJr~P>>O#}0JNLUz`>?Tv%+1e3fC7S_vL}?OjqAJva{)D&uj0AOo%K{o1Mgj zgx9?D^x5|Az6I24IZ$lc!vC!bjo&g(+p@k@?csT%oISI)xjjy+Y3_G&SUnAsYt>gE zn@xzk_3xG)vIx5G1)Ux2P-{TU9 zt89NPb9jVV1J&h3-mvG#{_zjqN8LqFu2pvAcr-ZxdryThbAAP<3Y`id3PFiykZcCE z)V6~0xw<#a2kNn5IbW@2!-qoyZk=7N1^E{gg?z10<-rx+^0`M1L4LxM%xOj~tDx8# z{Ywq`jSgWE@Lo#83FcK05P{q33G>*EgH z(rO-ZT^nGeJ^2yWiq%wN459*8klEk*z}b|QD&k-1^&FO*`NbTudWAKT;uvwdLk>{)g?VjK;*vL&EVS^cZ zrML$N-y(WWZ%Ood2Np(LNkxm)Z)1Z!+uli~-oy8EQnzQeR+?3Rf2>-CiA?s)O;AKFFD zb|#MVO+6EBAZLw(gT{b#J&Y)B$ych)Iyd6W3hO`l08K{rYd_!iv|f zkl_QmBp(I^35r#oT>NXNd9b(N*BUs_Am|-vWF^>HL$74;EM{vS6u84K(A|c(2n@h) z4eG*7H_0bYWRUlc0}7ltWsL`PLH49fk#G15I?8&|PoPvdcWO(`Z7}vU^Vs%*vV<~~ z7~fLy$E4o-1F-i;LucvcC34vYj}y;(N3$+E2Q65BlgK3bk(Ve4*C8xy!aK*e5T~4x z^7B{gZBrB}vx#8u;N;D^*~l9qy5p9m{znxt7Ui5R;Re{G_(NMZV6-8cA% z90|wjsv66&1|^yOdx7@C47P}@k2HdL!YmD%vd+zgoZgefgzSV)61bW!l9#*xa@%P2 zKPhqpP4UlnBo7uj0ed+pPW@^X*g3SJTzWO*nfwWluR4T~5uPF-|j0RfzKy*CBXJGFF;9NRf-54L3#ZAbfNJpSwx& z42Ks>m@m>LR%NCAjo&O5V?tlTB(*Uns-r_5x%Y{hr?3prROvOgt)y^RR*B{xt$DIK zN14(T-`C?w(1)SonQxY~@8ao7cQl{8Gtgo5Y9+eJJ-!o>CQJn0jQc|7nn5=qN#V)I zRn@F*aAzkU;fePm^4@6{)E(h1GlXQnD@r8-EXu4G0U|S?)iLu3 zAE$AuyU5wi#z!0^7qI_?Ta_ZWSu$Ib$FaEHmY(+`tf!1~2K6dr!?YjZgSXi8Y~`u{ z!~4A)oe!V|rWTdMTtnf_{6y$;6hRyrxeD&gyzFHFZf?B}~Dxze)F|I1|SiFc8w5Gi(+l8=-X{a8PF`5g?)>Yd5l7B7}6oyn=z= z0YswNZ&3d3sLv2c;D_KD-CI^flrFXzQHQjzjvK5z>Kk5078J*M$Li`2VTI= zTuibC=DCz|N2sx^F!0JgQ$a)a`rQk~LF+=@!dE{j-SieTSR0Lef96x6hD0FJ!5OEx1rmH3-VSGuN_#diJ`kGU1UuVNznN*# zFVE10QWh@>>k67gP{Yi0daKEd5WxfApg>@`xdX`s-h>dYi)bs@0D;BaHY!jp|2o%E&eO;BpdHN(|6>vl_>*G2 z=WATx3kK(>Y1ko%9&74$-Y|QsiNWmiD#>~2PRXj!LDlJe%}sh+mVu7aIW$6+27q|A z`1wOb>+sx6UFQ`FJMcPhDAPONY=Vkov3KyWm!hc#B{afWh1RHc%N9HTU^c;7)?E-~ 
zdO`7Cz`wiKs0Y1XD=GbAOfx|db^hxKDhGo)$J{mHXsYgz7wD*s_j(r*Mu7N9DFkiv z1{AEC?>mVmQf^8!XW)b^fWL_K0mnUwB~erTVX`cr;;1KgS13>umn_*u*EF;r%|hH$ zv?1Sj1-q>b(M#VQkPKwvT1CxUf}@W_G6Y6n4XT#n&|PA`xYlC~jE3dMs%R$-%sy zJ#e(2uL^KlcteqlvX<~v<8nHcj+1YV(t9Q>HM>g;6hc?w1sYG@jV{XG5#1Jf^lci~=& zXt)o_QsAnkR?JnZh-MPJF=>eVBhX9D3zi+sTKm_u9L8D_)OM$t1T4+$4mwuKuDkyB zN{VbwH1~Av*=%NwW=>E2;=qz?43p=oMl8V-7+7Ls^1 z`A*iEd@nk7{^j{vu2JI0-A}}~UzW;wu@?nAHYPPeY6$0kZBInIonBd4yu6y-Udh+! z_nDd4F{c(-RneWho>Zgn_o%`6 zc-VtK8jco##&_@BvKD)w5PvF>;^uOUpA2xLwYm=|2)CjwG?R$(+{J;&U{n- z7Ev>&bcvPc^bnom`X!B`4uQ5>?)ZC{1=0#xIg_3+vXGUh&YYrjm~jnjFrM_24dX?ts zOp!T*4`%IT&1u$0!T+MRx=it5E8dd>ld;oK{=*xxMYB=gMne)()%$czAV?XZ8=8+w zq`vcJ@x2(<+|F^BD9qWjm4A05T@$0j_X0OXShJE{vpr+@F#swC3B@~EYyAEhhs)zM zn~xUou91lk0*-Kk+uFZ!GgE%DNhTm!#z!PaJVM?YW5%Mu?<`3{rB*lYV8%>YCUlnE zBM~>>6q0#tW5b^*XRlg<(Xj^dUuAhKr;~dMyrm&Dm!$!r*r~O$o=;DQb@+{x{&iPr z)yfsr4ZF$25ASa0zc4gwy&*dv_WFSw!U9g=CQa!X*>)>D6lfp#7J_(s?=Hx`IfNst ziDUbrG^WaXR@bMh{ja-EQ7(w{%6+!Il_@_PwKfOfdZ z`#wtTU?;(L9hrBvPJ%x+e3saeWa!`dwh3D1Y)yHC^)TzP%BnuW7Ne z9{xI`weo>-Q0||TT*fF}+dA%Vf_l4M4?JY1+!m}+S~d(JSMuie91V&>>{?^J!^+b? zbVN#R?TxuPbOd#IRtt)?j$uA3j4H=Z0073F|9Sy(=QY*SLIqai)|4c{w~OsDtWR_% zI+b#$eq&v35D{f|ALU8|rNEmiZMh~c{i6myZQGiue$+_`@cfw#tf(9=mzP21Lv zU=7_)f#({Ruf}?0!uc1!!XWRy^R!1wH}|JD-~N_&?us8M2Y;ZgsE>)JMfQx3eEDcz z{BbjnjW5PQsN0~F@iVUI$_chFCM{~HOIB8H_XJkGw5NifmCPE$mkc2?;k}1(;g5T6!dNMW%{&LSQzfPlK6aZHi+#){|^d` z8p&i)O_NW0CQbWA+MJF$etv08x;K&tX^9Y#OZc;;sF`*bjEty)m#hwv3y1Y*@bydI ziOu@BnJV!;@BI)=$|7Uq^E(qr=D5S{cH2@LP-)vgQiq{rk6W$}^1;HYW-1E?>91Wa z%mO5-USHKjPs$`Y+Ep6Rl$>RzvZ31IWLc$<^(wK9|Ka%x^=~DY#@R!OhXyUTA&BqN zqRqB=8eX``V?&tS->&Zg-4ztXwIKQywD1&nqe3K=s&Td~i9}BnCmN%;3tH{S&bP5n zy4^tUw>#GT{lzg+MkbpN+CB+wc1awpz|?G@>Pq2()Xt_L56g zY#&e%S>XRjz{D@+h&5sPKUWQibN{~^MFNQ|AW>y)>WX9Gr=!(@jtxp~(3T^?j@U!u z5QdqE{lj$s*7P_J%l%tS+cT5E_2~LYp7n80>fFfV$@qQ2g|Vd;8UiOl|A%UvtxyV> zp(G=iex2kG=M7I%I&c4|XDKSAj~98xB0dpXtS<8j8WB5X+f}xBaW~;+lfKWSXfm4k z^b>6hDdYIcE$|@%1j5I~W~I@TP!i`j4i8!A1EoFzUa*%E2$m<$x%WN=CLf0pQ9{Ojf+Pi$ zue0$pj)aRO955GHO;19sO#PKOnNmL)kRd$XLR7omUjQu725w`bu)Cr?(;@itT;>SA z=3N^_uhy0}+w`P?b&+{kxrdrrgH`_8=F;?uxZz|H#8GJvA7Uk1eg$1({`WLtrzp9# zCI|Bd^TBeq;d67J<+T+_hU@-{gA+`hdpoPx>^gC)vXENceq18*UUH8hzeew zF`UMO)HN#?M4@SO-8+q^rD4@jpem#b#>5OoG&KTg8w&&u#yC-!xPH(I%8&lkvEo+5 z-QlZ+1I%b^^EJS_@ivkj3Z;0?{4u!yNTjd>tvWRRYCAf*E6=J$5oMFF3wu97W~BuZ zHo~A+)qP!83jR>y{V%QiT!}O*`}}iF+YWbkh(jt=3qN*5aoFw-3R|i>Cxn&{?_?M5 zhR}NHt{f0699MdEnI8wVZZ(S;3v9s~~g3Qv;UIP%^b zRUSgk^MUYeqx*7aQ_=mV)HCO=F>#pLR>i29H6@;)?pHrZ!cw9ia~lK0Pv)co2L%PU zMSzpV@gFe_Gp8p24_y!n;H9wVTFPU{pQLzTXb!lG;R~7MUYp)cv9&K`0FFSm6{KfE z!ue|W*j4^jBf{Gvh{}||HpQ-gRLDGpHi@r<^VG~LhZDI++Lruu);n$X{Co74;#mpj zu~2q5jp-$S;ggF+)Phx1GzXG;FAGDyrS#1S<{8ufwYHN_W#p77iwj8K*9-RnH^#%of5_9}p42p{WTc(* zVQ+KefzBxt7{`zLWDkWAThdOcOIHn2lqaOA%I^W0zI>&q*0|{&NbItE1Qp%8HAI7U zDGjT6=L*#T?%#x2QAW#@B3zuUKHrd z0zWsF4`T1tM+!EwI0Gk}4%qhRn;3O9@)P?#mvPKjH8k5Kwc{+N3b*HT9XJwXXq0CL z5-Y|0_={{;?v}?y1Gk4E*t%z%=1t92GXAtxs%dsAN}eAXs%t}ZHn58qAF$TH8CK!| z4yW5Ibj&kltWNpj&T3RGe^$SSJ*Am0%-M?Qbw|v1}$aSe`qr9Wdmo*lPESH&ax6#X{xe8>7 z+)+q+3WcPMPUwhsb#gD$kA@8P^4pOAoyUj;P1t{W60sC&@}Mt_D1_0g6t_wwt-$VLvb*+02aaw;?7u);Wr>*Tg=~E=n1`|0`v#GC^}n-WPS) zv70@5yQNTgnNt9mBdZc04(|K$0~M%DELj(cLDJfulplB|)q{8+l^>X~5}msL$4ORJ zOB`@T<~H7cz!z0N=1!(+cy=`~euv(sgqWW>{QFafG7pt(Bm18<@-`}!f2O^Y(;gr< z@Ki4S7sWnxXWrC=-plL9>CFpX3zY(cAwnan$L<~!v|SZqvBl`Ht)jonp9BVLx&Atz zt~I)T=Dt6<1Z$vQk+FFnVi`kzb%|SOPFpHFjE#=`E=?oIR}U|L_afqJdUrpbCOMEV zeR1ZY9+T1G8;k~Uc9{2v`t0y;wStn;q+Nh$$dL;4s8*_(5yU)oY?Z@|xh|G4Vh1+x zZ+EZ3S5kk{1aWM)^F)iJXkWe3CTe`|Fa=G`^)TkYev|EkGfoNA5^5yiriUosg4v}u 
zA_Tel?OrA2A(Dd;EP`+mn(bAZ=IQ?GpWoX&S z8OaG`L3M#mh(QwQK|wCKxZqhj%9k~ouX&zhdAHGZwgH#rF3x0g?RAL)>;S@2(UR<- zYv2ITBN5%kC)j+I0cby!mj%>H5ijuQ~8K!2ObE#!BbquQ&AB5=jCNrFj?5_L-~y@R|ki zFwv-6H#K(&uFeaGiGG`J+|QdmKPIRyu;NQB2xL{WVk`sDk(hK_JSpNn=k{l*v+K)| z)?EymzX35#>$Uw2tFHKdxiyT|S;pk7aXGZ_J890y14Ee9%Lgymu$v^xY{Y6(kb^6xy*Z%GCSf@Q8CkHKLLJ0?`Eyw64Oqh9#jm&p zhtl#h?l}2GM%;dT5#Nc*&gG8gtx1ojNy)Q0>7-tnF}3;Se6zw&I$#ptrcHdCd~0VvlX@$hX_b4&mdbg zH$AhL(JIhwTa>}mt<2@MlBcZ8L2Bi0AvQM_X}&or@4xwXnG12Q<;bd`>8eoe!XEFc zH0&alzeYqef4hRcba@W04yDi5q!W=hqeXx=d8=oStHOt6807V5@zJ3-!TMmV5iCja z-d1ztTG$s+SLzGismENExo_+%u7&o)!ZLinj{9rL%z3XBydWioF#znpCCa8gQ1~gG zv_H!Cdwr9b-sIxyo34k z;@BX3xYc?o-xF@M@$7LT8fx*F~wk}k#A=|xiJHcznU^AN+rh&cENx3v6b2=O8#i^s1foS!S%AV3T z!xZ1D(vPqHoIpEdGJ_5tdTq~E6eW$mVu5}b{&UHRukNZtwUV#*edhf7@EZ2}rzZ|F z%+d{8AvqN*(ibnJuB3batPu=O6+=$WJa3^uN?daPQ*M?a8Q@n)|L<_s3T8LX>T&pN zX(0G*8BjMGivF(4+6#kGnvudbY&8M@+-nwdJ{A8a3hej(?e6NQ0)A1I!QbBOb?Zw0jy`T!xlJ{2iSv=IpwQB3y~w`SvsFkBNcYP+=&mPh_gL3A07xftCI` z;b!dlFg;o}TUa&O1K&nahxG@FoPAy^Jc6Z}RLs}IZf}h{TFy%+b%Gz|%)c%QlQEw` zQ|@bhj{NrKU?}LXP&R5eVFE?Ul|36}V7)6Cciv6VS3dK(WxF)F?gJS5=L6dllXo=K zWXsvc`a42v#vafTgTE&hl3!2$4^)YwXTBB|*CY8mg@hL*!9(ZaFQ`M`95R2QPIDci zKcnvS=jJQnSj%Y*NJ9p`rI^vK;M6@WQVCF^YC0czx@);oNd511h&si;2$lH;TsHT3 z3Nf?seHgNr{B`!u6`SgNSN{9C0@a=}>*e6@2jMc{GC4Nf4N(>atsa^R8eTa5*wYmDs2BBKKTF6 zqeP1Nf1r{*PW+1r4{UkQZkGs}OSp1K;8Z9BSruLuQB3odL5oix6~B);-jIIjBvWQA x=gy&D3#?RLPFl%~aewagAM5=8ZeRaiM}_3Kbo=sG5dSIy(APE6se;=?{xA9@T)zMS literal 0 HcmV?d00001 diff --git a/doc/developer/nxeps/index.rst b/doc/developer/nxeps/index.rst new file mode 100644 index 0000000..1a1e9d2 --- /dev/null +++ b/doc/developer/nxeps/index.rst @@ -0,0 +1,17 @@ +.. _nxep_list: + +NXEPs +***** + +NetworkX Enhancement Proposals (NXEPs) document major changes or proposals. + +.. toctree:: + :maxdepth: 1 + + nxep-0000 + nxep-0001 + +.. toctree:: + :hidden: + + nxep-template diff --git a/doc/developer/nxeps/nxep-0000.rst b/doc/developer/nxeps/nxep-0000.rst new file mode 100644 index 0000000..3ebda92 --- /dev/null +++ b/doc/developer/nxeps/nxep-0000.rst @@ -0,0 +1,283 @@ +.. _NXEP0: + +============================ +NXEP 0 — Purpose and Process +============================ + +:Author: Jarrod Millman +:Status: Draft +:Type: Process +:Created: 2020-06-25 + + +What is a NXEP? +--------------- + + +NXEP stands for NetworkX Enhancement Proposal. NXEPs are the primary +mechanisms for proposing major new features, for collecting community input on +an issue, and for documenting the design decisions that have gone into +NetworkX. A NXEP should provide a concise technical specification of the +feature and a rationale for the feature. The NXEP author is responsible for +building consensus within the community and documenting dissenting opinions. + +Because the NXEPs are maintained as text files in a versioned +repository, their revision history is the historical record of the +feature proposal [1]_. + + +Types +^^^^^ + +There are three kinds of NXEPs: + +1. A **Standards Track** NXEP describes a new feature or implementation + for NetworkX. + +2. An **Informational** NXEP describes a NetworkX design issue, or provides + general guidelines or information to the Python community, but does not + propose a new feature. Informational NXEPs do not necessarily represent a + NetworkX community consensus or recommendation, so users and implementers are + free to ignore Informational NXEPs or follow their advice. + +3. A **Process** NXEP describes a process surrounding NetworkX, or + proposes a change to (or an event in) a process. 
Process NXEPs are + like Standards Track NXEPs but apply to areas other than the NetworkX + library itself. They may propose an implementation, but not to + NetworkX's codebase; they require community consensus. Examples include + procedures, guidelines, changes to the decision-making process, and + changes to the tools or environment used in NetworkX development. + Any meta-NXEP is also considered a Process NXEP. + + +NXEP Workflow +------------- + +The NXEP process begins with a new idea for NetworkX. It is highly +recommended that a single NXEP contain a single key proposal or new +idea. Small enhancements or patches often don't need +a NXEP and can be injected into the NetworkX development workflow with a +pull request to the NetworkX `repo`_. The more focused the +NXEP, the more successful it tends to be. +If in doubt, split your NXEP into several well-focused ones. + +Each NXEP must have a champion---someone who writes the NXEP using the style +and format described below, shepherds the discussions in the appropriate +forums, and attempts to build community consensus around the idea. The NXEP +champion (a.k.a. Author) should first attempt to ascertain whether the idea is +suitable for a NXEP. Posting to the networkx-discussion `mailing list`_ is the best +way to go about doing this. + +The proposal should be submitted as a draft NXEP via a `GitHub pull +request`_ to the ``doc/nxeps`` directory with the name ``nxep-<n>.rst`` +where ``<n>`` is an appropriately assigned four-digit number (e.g., +``nxep-0000.rst``). The draft must use the :doc:`nxep-template` file. + +Once the PR for the NXEP is in place, a post should be made to the +mailing list containing the sections up to "Backward compatibility", +with the purpose of limiting discussion there to usage and impact. +Discussion on the pull request will have a broader scope, also including +details of implementation. + +At the earliest convenience, the PR should be merged (regardless of +whether it is accepted during discussion). Additional PRs may be made +by the Author to update or expand the NXEP, or by maintainers to set +its status, discussion URL, etc. + +Standards Track NXEPs consist of two parts, a design document and a +reference implementation. It is generally recommended that at least a +prototype implementation be co-developed with the NXEP, as ideas that sound +good in principle sometimes turn out to be impractical when subjected to the +test of implementation. Often it makes sense for the prototype implementation +to be made available as a PR to the NetworkX repo (making sure to appropriately +mark the PR as a WIP). + + +Review and Resolution +^^^^^^^^^^^^^^^^^^^^^ + +NXEPs are discussed on the mailing list. The possible paths of the +status of NXEPs are as follows: + +.. image:: _static/nxep-0000.png + +All NXEPs should be created with the ``Draft`` status. + +Eventually, after discussion, there may be a consensus that the NXEP +should be accepted – see the next section for details. At this point +the status becomes ``Accepted``. + +Once a NXEP has been ``Accepted``, the reference implementation must be +completed. When the reference implementation is complete and incorporated +into the main source code repository, the status will be changed to ``Final``. + +To allow gathering of additional design and interface feedback before +committing to long term stability for a feature or +API, a NXEP may also be marked as "Provisional".
This is short for +"Provisionally Accepted", and indicates that the proposal has been accepted for +inclusion in the reference implementation, but additional user feedback is +needed before the full design can be considered "Final". Unlike regular +accepted NXEPs, provisionally accepted NXEPs may still be Rejected or Withdrawn +even after the related changes have been included in a NetworkX release. + +Wherever possible, it is considered preferable to reduce the scope of a +proposal to avoid the need to rely on the "Provisional" status (e.g. by +deferring some features to later NXEPs), as this status can lead to version +compatibility challenges in the wider NetworkX ecosystem. + +A NXEP can also be assigned status ``Deferred``. The NXEP author or a +core developer can assign the NXEP this status when no progress is being made +on the NXEP. + +A NXEP can also be ``Rejected``. Perhaps after all is said and done it +was not a good idea. It is still important to have a record of this +fact. The ``Withdrawn`` status is similar---it means that the NXEP author +themselves has decided that the NXEP is actually a bad idea, or has +accepted that a competing proposal is a better alternative. + +When a NXEP is ``Accepted``, ``Rejected``, or ``Withdrawn``, the NXEP should be +updated accordingly. In addition to updating the status field, at the very +least the ``Resolution`` header should be added with a link to the relevant +thread in the mailing list archives. + +NXEPs can also be ``Superseded`` by a different NXEP, rendering the +original obsolete. The ``Replaced-By`` and ``Replaces`` headers +should be added to the original and new NXEPs respectively. + +Process NXEPs may also have a status of ``Active`` if they are never +meant to be completed, e.g. NXEP 0 (this NXEP). + + +How a NXEP becomes Accepted +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A NXEP is ``Accepted`` by consensus of all interested contributors. We +need a concrete way to tell whether consensus has been reached. When +you think a NXEP is ready to accept, send an email to the +networkx-discussion mailing list with a subject like: + + Proposal to accept NXEP #<number>: <title> + +In the body of your email, you should: + +* link to the latest version of the NXEP, + +* briefly describe any major points of contention and how they were + resolved, + +* include a sentence like: "If there are no substantive objections + within 7 days from this email, then the NXEP will be accepted; see + NXEP 0 for more details." + +For an example, see: https://mail.python.org/pipermail/networkx-discussion/2018-June/078345.html + +After you send the email, you should make sure to link to the email +thread from the ``Discussion`` section of the NXEP, so that people can +find it later. + +Generally the NXEP author will be the one to send this email, but +anyone can do it – the important thing is to make sure that everyone +knows when a NXEP is on the verge of acceptance, and give them a final +chance to respond. If there's some special reason to extend this final +comment period beyond 7 days, then that's fine, just say so in the +email. You shouldn't do less than 7 days, because sometimes people are +travelling or similar and need some time to respond. + +In general, the goal is to make sure that the community has consensus, +not provide a rigid policy for people to try to game. When in doubt, +err on the side of asking for more feedback and looking for +opportunities to compromise.
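To make this procedure concrete, a hypothetical acceptance email following the checklist above might read as follows; the NXEP number and the angle-bracketed details are placeholders, not a real proposal::

    Subject: Proposal to accept NXEP #0042: <title>

    The latest version of NXEP 42: <link to the rendered NXEP>

    The main point of contention was <summary of the disagreement>;
    it was resolved by <summary of the resolution>.

    If there are no substantive objections within 7 days from this
    email, then the NXEP will be accepted; see NXEP 0 for more details.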
+ +If the final comment period passes without any substantive objections, +then the NXEP can officially be marked ``Accepted``. You should send a +followup email notifying the list (celebratory emoji optional but +encouraged 🎉✨), and then update the NXEP by setting its ``:Status:`` +to ``Accepted``, and its ``:Resolution:`` header to a link to your +followup email. + +If there *are* substantive objections, then the NXEP remains in +``Draft`` state, discussion continues as normal, and it can be +proposed for acceptance again later once the objections are resolved. + +In unusual cases, disagreements about the direction or approach may +require escalation to the NetworkX :ref:`steering_council` who +then decide whether a controversial NXEP is ``Accepted``. + + +Maintenance +^^^^^^^^^^^ + +In general, Standards track NXEPs are no longer modified after they have +reached the Final state as the code and project documentation are considered +the ultimate reference for the implemented feature. +However, finalized Standards track NXEPs may be updated as needed. + +Process NXEPs may be updated over time to reflect changes +to development practices and other details. The precise process followed in +these cases will depend on the nature and purpose of the NXEP being updated. + + +Format and Template +------------------- + +NXEPs are UTF-8 encoded text files using the reStructuredText_ format. Please +see the :doc:`nxep-template` file and the reStructuredTextPrimer_ for more +information. We use Sphinx_ to convert NXEPs to HTML for viewing on the web +[2]_. + + +Header Preamble +^^^^^^^^^^^^^^^ + +Each NXEP must begin with a header preamble. The headers +must appear in the following order. Headers marked with ``*`` are +optional. All other headers are required. :: + + :Author: <list of authors' real names and optionally, email addresses> + :Status: <Draft | Active | Accepted | Deferred | Rejected | + Withdrawn | Final | Superseded> + :Type: <Standards Track | Process> + :Created: <date created on, in dd-mmm-yyyy format> + * :Requires: <nxep numbers> + * :NetworkX-Version: <version number> + * :Replaces: <nxep number> + * :Replaced-By: <nxep number> + * :Resolution: <url> + +The Author header lists the names, and optionally the email addresses +of all the authors of the NXEP. The format of the Author header +value must be + + Random J. User <address@dom.ain> + +if the email address is included, and just + + Random J. User + +if the address is not given. If there are multiple authors, each should be on +a separate line. + + +References and Footnotes +------------------------ + +.. [1] This historical record is available by the normal git commands + for retrieving older revisions, and can also be browsed on + `GitHub <https://github.com/networkx/networkx/tree/master/doc/nxeps>`_. + +.. [2] The URL for viewing NXEPs on the web is + https://networkx.github.io/nxeps/. + +.. _repo: https://github.com/networkx/networkx + +.. _mailing list: http://groups.google.com/group/networkx-discuss/ + +.. _issue tracker: https://github.com/networkx/networkx/issues + +.. _`GitHub pull request`: https://github.com/networkx/networkx/pulls + +.. _reStructuredText: http://docutils.sourceforge.net/rst.html + +.. _reStructuredTextPrimer: http://www.sphinx-doc.org/en/stable/rest.html + +.. 
_Sphinx: http://www.sphinx-doc.org/en/stable/ diff --git a/doc/developer/nxeps/nxep-0001.rst b/doc/developer/nxeps/nxep-0001.rst new file mode 100644 index 0000000..516c249 --- /dev/null +++ b/doc/developer/nxeps/nxep-0001.rst @@ -0,0 +1,158 @@ +.. _governance: + +======================================= +NXEP 1 — Governance and Decision Making +======================================= + +:Author: Jarrod Millman <millman@berkeley.edu> +:Author: Dan Schult <dschult@colgate.edu> +:Status: Draft +:Type: Process +:Created: 2020-06-25 + +Abstract +======== + +NetworkX is a consensus-based community project. Anyone with an interest in the +project can join the community, contribute to the project design, and +participate in the decision making process. This document describes how that +participation takes place, how to find consensus, and how deadlocks are +resolved. + +Roles And Responsibilities +========================== + +The Community +------------- +The NetworkX community consists of anyone using or working with the project +in any way. + +Contributors +------------ +Any community member can become a contributor by interacting directly with the +project in concrete ways, such as: + +- proposing a change to the code or documentation via a GitHub pull request; +- reporting issues on our + `GitHub issues page <https://github.com/networkx/networkx/issues>`_; +- discussing the design of the library, website, or tutorials on the + `mailing list <http://groups.google.com/group/networkx-discuss/>`_, + or in existing issues and pull requests; or +- reviewing + `open pull requests <https://github.com/networkx/networkx/pulls>`_, + +among other possibilities. By contributing to the project, community members +can directly help to shape its future. + +Contributors should read the :ref:`contributor_guide` and our :ref:`code_of_conduct`. + +Core Developers +--------------- +Core developers are community members that have demonstrated continued +commitment to the project through ongoing contributions. They +have shown they can be trusted to maintain NetworkX with care. Becoming a +core developer allows contributors to merge approved pull requests, cast votes +for and against merging a pull request, and be involved in deciding major +changes to the API, and thereby more easily carry on with their project related +activities. Core developers appear as team members on the `NetworkX Core Team page +<https://github.com/orgs/networkx/teams/core-developers/members>`_ and can +be messaged ``@networkx/core-developers``. Core +developers are expected to review code contributions while adhering to the +:ref:`core_dev`. + +New core developers can be nominated by any existing core developer. +Discussion about new core developer nominations is one of the few activities +that takes place on the project's private management list. The decision to +invite a new core developer must be made by “lazy consensus”, meaning unanimous +agreement by all responding existing core developers. Invitation must take +place at least one week after initial nomination, to allow existing members +time to voice any objections. + +.. _steering_council: + +Steering Council +---------------- +The Steering Council (SC) members are core developers who have additional +responsibilities to ensure the smooth running of the project. SC members are +expected to participate in strategic planning, approve changes to the +governance model, and make decisions about funding granted to the project +itself. 
(Funding to community members is theirs to pursue and manage.) The +purpose of the SC is to ensure smooth progress from the big-picture +perspective. Changes that impact the full project require analysis informed by +long experience with both the project and the larger ecosystem. When the core +developer community (including the SC members) fails to reach such a consensus +in a reasonable timeframe, the SC is the entity that resolves the issue. + +Steering Council members appear as team members on the `NetworkX Steering +Council Team page +<https://github.com/orgs/networkx/teams/steering-council/members>`_ and +can be messaged ``@networkx/steering-council``. + +Decision Making Process +======================= + +Decisions about the future of the project are made through discussion with all +members of the community. All non-sensitive project management discussion takes +place on the project +`mailing list <http://groups.google.com/group/networkx-discuss/>`_ +and the `issue tracker <https://github.com/networkx/networkx/issues>`_. +Occasionally, sensitive discussion may occur on a private list. + +Decisions should be made in accordance with our :ref:`mission_and_values`. + +NetworkX uses a *consensus seeking* process for making decisions. The group +tries to find a resolution that has no open objections among core developers. +Core developers are expected to distinguish between fundamental objections to a +proposal and minor perceived flaws that they can live with, and not hold up the +decision making process for the latter. If no option can be found without +an objection, the decision is escalated to the SC, which will itself use +consensus seeking to come to a resolution. In the unlikely event that there is +still a deadlock, the proposal will move forward if it has the support of a +simple majority of the SC. Any proposal must be described by a NetworkX :ref:`nxep`. + +Decisions (in addition to adding core developers and SC membership as above) +are made according to the following rules: + +- **Minor documentation changes**, such as typo fixes, or addition / correction of a + sentence (but no change of the NetworkX landing page or the “about” + page), require approval by a core developer *and* no disagreement or requested + changes by a core developer on the issue or pull request page (lazy + consensus). Core developers are expected to give “reasonable time” to others + to give their opinion on the pull request if they’re not confident others + would agree. + +- **Code changes and major documentation changes** require agreement by *two* + core developers *and* no disagreement or requested changes by a core developer + on the issue or pull-request page (lazy consensus). + +- **Changes to the API principles** require a :ref:`nxep` and follow the + decision-making process outlined above. + +- **Changes to this governance model or our mission and values** + require a :ref:`nxep` and follow the decision-making process outlined above, + *unless* there is unanimous agreement from core developers on the change. + +If an objection is raised on a lazy consensus, the proposer can appeal to the +community and core developers and the change can be approved or rejected by +escalating to the SC, and if necessary, a NXEP (see below). + +.. _nxep: + +Enhancement Proposals (NXEPs) +============================= + +Any proposals for enhancements of NetworkX should be written as a formal NXEP +following the template :doc:`nxep-template`.
The NXEP must be made public and +discussed before any vote is taken. The discussion must be summarized by a +key advocate of the proposal in the appropriate section of the NXEP. +Once this summary is made public and after sufficient time to allow the +core team to understand it, they vote. +The workflow of a NXEP is detailed in :ref:`nxep0`. + +A list of all existing NXEPs is available :ref:`here <nxep_list>`. + +Acknowledgments +=============== + +This document is based on the `scikit-image governance document +<https://scikit-image.org/docs/stable/skips/1-governance.html>`_. diff --git a/doc/developer/nxeps/nxep-template.rst b/doc/developer/nxeps/nxep-template.rst new file mode 100644 index 0000000..1054afd --- /dev/null +++ b/doc/developer/nxeps/nxep-template.rst @@ -0,0 +1,90 @@ +================================== +NXEP X — Template and Instructions +================================== + +:Author: <list of authors' real names and optionally, email addresses> +:Status: <Draft | Active | Accepted | Deferred | Rejected | Withdrawn | Final | Superseded> +:Type: <Standards Track | Process> +:Created: <date created on, in yyyy-mm-dd format> +:Resolution: <url> (required for Accepted | Rejected | Withdrawn) + + +Abstract +-------- + +The abstract should be a short description of what the NXEP will achieve. + +Note that the — in the title is an elongated dash, not -. + +Motivation and Scope +-------------------- + +This section describes the need for the proposed change. It should describe +the existing problem, who it affects, what it is trying to solve, and why. +This section should explicitly address the scope of and key requirements for +the proposed change. + +Usage and Impact +---------------- + +This section describes how users of NetworkX will use features described in this +NXEP. It should consist mainly of code examples that wouldn't be possible +without acceptance and implementation of this NXEP, as well as the impact the +proposed changes would have on the ecosystem. This section should be written +from the perspective of the users of NetworkX, and the benefits it will provide +them; and as such, it should include implementation details only if +necessary to explain the functionality. + +Backward compatibility +---------------------- + +This section describes the ways in which the NXEP breaks backward compatibility. + +The mailing list post will contain the NXEP up to and including this section. +Its purpose is to provide a high-level summary to users who are not interested +in detailed technical discussion, but may have opinions around, e.g., usage and +impact. + +Detailed description +-------------------- + +This section should provide a detailed description of the proposed change. +It should include examples of how the new functionality would be used, +intended use-cases and pseudo-code illustrating its use. + + +Related Work +------------ + +This section should list relevant and/or similar technologies, possibly in other +libraries. It does not need to be comprehensive, just list the major examples of +prior and relevant art. + + +Implementation +-------------- + +This section lists the major steps required to implement the NXEP. Where +possible, it should be noted where one step is dependent on another, and which +steps may be optionally omitted. Where it makes sense, each step should +include a link to related pull requests as the implementation progresses. + +Any pull requests or development branches containing work on this NXEP should +be linked to from here.
(A NXEP does not need to be implemented in a single +pull request if it makes sense to implement it in discrete phases). + + +Alternatives +------------ + +If there were any alternative solutions to solving the same problem, they should +be discussed here, along with a justification for the chosen approach. + + +Discussion +---------- + +This section may just be a bullet list including links to any discussions +regarding the NXEP: + +- This includes links to mailing list threads or relevant GitHub issues. diff --git a/RELEASE.rst b/doc/developer/release.rst similarity index 77% rename from RELEASE.rst rename to doc/developer/release.rst index edfe928..1825862 100644 --- a/RELEASE.rst +++ b/doc/developer/release.rst @@ -1,5 +1,5 @@ -How to make a new release of ``networkx`` -========================================= +Release Process +=============== - Update the release notes: @@ -22,13 +22,18 @@ How to make a new release of ``networkx`` 7. Update ``doc/news.rst``. +- Comment out ``dev_banner.html`` in ``doc/_templates/layout.html``. + - Toggle ``dev = True`` to ``dev = False`` in ``networkx/release.py``. -- Commit changes. +- Commit changes:: + + git add networkx/release.py + git commit -m "Designate X.X release" - Add the version number as a tag in git:: - git tag -s [-u <key-id>] networkx-<major>.<minor> + git tag -s [-u <key-id>] networkx-<major>.<minor> -m 'signed <major>.<minor> tag' (If you do not have a gpg key, use -m instead; it is important for Debian packaging that the tags are annotated) @@ -42,13 +47,13 @@ How to make a new release of ``networkx`` - Review the github release page:: - https://github.com/networkx/networkx/releases + https://github.com/networkx/networkx/releases - Publish on PyPi:: git clean -fxd - python setup.py sdist --formats=zip - twine upload -s dist/networkx*.zip + python setup.py sdist bdist_wheel + twine upload -s dist/* - Update documentation on the web: The documentation is kept in a separate repo: networkx/documentation @@ -58,9 +63,10 @@ How to make a new release of ``networkx`` - Copy the documentation built by Travis. Assuming you are at the top-level of the ``documentation`` repo:: - cp -a latest networkx-<major>.<minor> - git add networkx-<major>.<minor> + # FIXME - use eol_banner.html + cp -a latest networkx-<major>.<minor> ln -sfn networkx-<major>.<minor> stable + git add networkx-<major>.<minor> stable git commit -m "Add <major>.<minor> docs" # maybe squash all the Deploy GitHub Pages commits # git rebase -i HEAD~XX where XX is the number of commits back @@ -71,9 +77,16 @@ How to make a new release of ``networkx`` - Increase the version number + - Uncomment ``dev_banner.html`` in ``doc/_templates/layout.html``. - Toggle ``dev = False`` to ``dev = True`` in ``networkx/release.py``. - Update ``major`` and ``minor`` in ``networkx/release.py``. + - Commit and push changes:: + + git add networkx/release.py + git commit -m "Bump release version" + git push upstream master + - Update the web frontpage: The webpage is kept in a separate repo: networkx/website diff --git a/doc/developer/roadmap.rst b/doc/developer/roadmap.rst new file mode 100644 index 0000000..295b8a0 --- /dev/null +++ b/doc/developer/roadmap.rst @@ -0,0 +1,4 @@ +======= +Roadmap +======= + diff --git a/doc/developer/values.rst b/doc/developer/values.rst new file mode 100644 index 0000000..8d5c2f4 --- /dev/null +++ b/doc/developer/values.rst @@ -0,0 +1,54 @@ +.. 
_mission_and_values: + +================== +Mission and Values +================== + +Our mission +----------- + +NetworkX aims to be the reference library for network science algorithms in +Python. We accomplish this by: + +- **being easy to use and install**. We are careful in taking on new + dependencies, and sometimes cull existing ones, or make them optional. All + functions in our API have thorough docstrings clarifying expected inputs and + outputs. +- **providing a consistent API**. Conceptually identical arguments have the + same name and position in a function signature. +- **ensuring correctness**. Test coverage is close to 100% and code is reviewed by + at least two core developers before being included in the library. +- **caring for users’ data**. We have a functional API and don't modify + input data unless explicitly directed to do so. +- **promoting education in network science**, with extensive pedagogical + documentation. + +Our values +---------- + +- We are inclusive (:ref:`code_of_conduct`). We welcome and mentor newcomers who are + making their first contribution. +- We are open source and community-driven (:ref:`governance`). +- We focus on graph data structures and algorithms for network science applications. +- We prefer pure Python implementations using native data structures + (especially dicts) due to their consistent, intuitive interface and amazing + performance capabilities. We include interfaces to other data structures, + especially NumPy arrays and SciPy sparse matrices for algorithms that more + naturally use arrays and matrices or where time or space requirements are + significantly lower. Sometimes we provide two algorithms for the same result, + one using each data structure, when pedagogy or space/time trade-offs justify + such multiplicity. +- We value simple, readable implementations over getting every last ounce of + performance. Readable code that is easy to understand, for newcomers and + maintainers alike, makes it easier to contribute new code as well as prevent + bugs. This means that we will prefer a 20% slowdown if it reduces lines of + code two-fold, for example. +- We value education and documentation. All functions should have `NumPy-style + docstrings <https://numpy.org/doc/stable/docs/howto_document.html>`, + preferably with examples, as well as gallery examples that showcase how that + function is used in a scientific application. + +Acknowledgments +--------------- + +This document is modified from the `scikit-image` mission and values document. diff --git a/doc/index.rst b/doc/index.rst index f086212..832e021 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -1,14 +1,12 @@ -.. -*- coding: utf-8 -*- - .. _contents: -Overview -======== +Software for Complex Networks +============================= NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. -NetworkX provides +NetworkX provides: - tools for the study of the structure and dynamics of social, biological, and infrastructure networks; @@ -78,6 +76,7 @@ Documentation install tutorial + auto_examples/index reference/index developer/index news @@ -85,7 +84,6 @@ Documentation credits citing bibliography - auto_examples/index Indices and tables ------------------ diff --git a/doc/news.rst b/doc/news.rst index a0fe28a..0abbc8a 100644 --- a/doc/news.rst +++ b/doc/news.rst @@ -1,17 +1,57 @@ -.. -*- coding: utf-8 -*- .. 
currentmodule:: networkx Release Log =========== +NetworkX 2.5 +------------ +Release date: 22 August 2020 + +Supports Python 3.6, 3.7, and 3.8. + +Release notes +~~~~~~~~~~~~~ + +See :doc:`release/release_2.5`. + + +NetworkX 2.4 +------------ +Release date: 16 October 2019 + +Supports Python 3.5, 3.6, 3.7, and 3.8. +This is the last release to support Python 3.5. + +Release notes +~~~~~~~~~~~~~ + +See :doc:`release/release_2.4`. + + +NetworkX 2.3 +------------ +Release date: 11 April 2019 + +Supports Python 3.5, 3.6 and 3.7. +This is our first Python 3 only release. + +Release notes +~~~~~~~~~~~~~ + +See :doc:`release/release_2.3`. + + NetworkX 2.2 ------------ -Release date: TBD +Release date: 19 September 2018 + +Supports Python 2.7, 3.5, 3.6 and 3.7. +This is the last release to support Python 2. Release notes ~~~~~~~~~~~~~ -See :doc:`release/release_dev`. +See :doc:`release/release_2.2`. NetworkX 2.1 @@ -20,7 +60,6 @@ Release date: 22 January 2018 Supports Python 2.7, 3.4, 3.5, and 3.6. - Release notes ~~~~~~~~~~~~~ @@ -34,7 +73,7 @@ Release date: 20 September 2017 Support for Python 3.6 added, drop support for Python 3.3. See :doc:`release/migration_guide_from_1.x_to_2.0`. - + Release notes ~~~~~~~~~~~~~ @@ -77,7 +116,7 @@ Highlights - pyparsing dependence removed from GML reader/parser - improve flow algorithms - new generators related to expander graphs. -- new generators for multipartite graphs, nonisomorphic trees, +- new generators for multipartite graphs, nonisomorphic trees, circulant graphs - allow graph subclasses to use dict-like objects in place of dicts - added ordered graph subclasses @@ -170,7 +209,7 @@ Highlights ~~~~~~~~~~ - New functions for k-clique community finding, flow hierarchy, - union, disjoint union, compose, and intersection operators that work on + union, disjoint union, compose, and intersection operators that work on lists of graphs, and creating the biadjacency matrix of a bipartite graph. 
- New approximation algorithms for dominating set, edge dominating set, @@ -225,7 +264,7 @@ Highlights New features ~~~~~~~~~~~~ - - Algorithms for :mod:`generating <networkx.generators.bipartite>` + - Algorithms for :mod:`generating <networkx.generators.bipartite>` and :mod:`analyzing <networkx.algorithms.bipartite>` bipartite graphs - :mod:`Maximal independent set <networkx.algorithms.mis>` algorithm - :mod:`Erdős-Gallai graphical degree sequence test <networkx.generators.degree_seq>` @@ -233,10 +272,10 @@ New features - More memory efficient :mod:`Dijkstra path length <networkx.algorithms.shortest_paths.weighted>` with cutoff parameter - :mod:`Weighted clustering coefficient <networkx.algorithms.cluster>` - Read and write version 1.2 of :mod:`GEXF reader <networkx.readwrite.gexf>` format - - :mod:`Neighbor degree correlation <networkx.algorithms.neighbor_degree>` + - :mod:`Neighbor degree correlation <networkx.algorithms.neighbor_degree>` that handle subsets of nodes - - :mod:`In-place node relabeling <networkx.relabel>` - - Many 'weighted' graph algorithms now take optional parameter to use + - :mod:`In-place node relabeling <networkx.relabel>` + - Many 'weighted' graph algorithms now take optional parameter to use specified edge attribute (default='weight') (ticket https://networkx.lanl.gov/trac/ticket/509) @@ -308,10 +347,10 @@ New features API changes ~~~~~~~~~~~ - - :mod:`gnp_random_graph() <networkx.generators.random_graphs>` now takes a - directed=True|False keyword instead of create_using - - :mod:`gnm_random_graph() <networkx.generators.random_graphs>` now takes a - directed=True|False keyword instead of create_using + - :mod:`gnp_random_graph() <networkx.generators.random_graphs>` now takes a + directed=True|False keyword instead of create_using + - :mod:`gnm_random_graph() <networkx.generators.random_graphs>` now takes a + directed=True|False keyword instead of create_using Bug fixes ~~~~~~~~~ @@ -331,14 +370,14 @@ New features - Works with Python versions 2.6, 2.7, 3.1, and 3.2 (but not 2.4 and 2.5). - :mod:`Minimum cost flow algorithms <networkx.algorithms.flow>` - :mod:`Bellman-Ford shortest paths <networkx.algorithms.shortest_paths.weighted>` - - :mod:`GraphML reader and writer <networkx.readwrite.graphml>` - - :mod:`More exception/error types <networkx.exception>` + - :mod:`GraphML reader and writer <networkx.readwrite.graphml>` + - :mod:`More exception/error types <networkx.exception>` - Updated many tests to unittest style. 
Run with: "import networkx; networkx.test()" (requires nose testing package) - and more, see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.3 API changes ~~~~~~~~~~~ - - :mod:`minimum_spanning_tree() now returns a NetworkX Graph (a tree or forest) <networkx.algorithms.mst>` + - :mod:`minimum_spanning_tree() now returns a NetworkX Graph (a tree or forest) <networkx.algorithms.mst>` Bug fixes ~~~~~~~~~ @@ -354,11 +393,11 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - - :mod:`Ford-Fulkerson max flow and min cut <networkx.algorithms.flow>` - - :mod:`Closeness vitality <networkx.algorithms.vitality>` - - :mod:`Eulerian circuits <networkx.algorithms.euler>` - - :mod:`Functions for isolates <networkx.algorithms.isolates>` - - :mod:`Simpler s_max generator <networkx.generators.degree_seq>` + - :mod:`Ford-Fulkerson max flow and min cut <networkx.algorithms.flow>` + - :mod:`Closeness vitality <networkx.algorithms.vitality>` + - :mod:`Eulerian circuits <networkx.algorithms.euler>` + - :mod:`Functions for isolates <networkx.algorithms.isolates>` + - :mod:`Simpler s_max generator <networkx.generators.degree_seq>` - Compatible with IronPython-2.6 - Improved testing functionality: import networkx; networkx.test() tests entire package and skips tests with missing optional packages @@ -375,21 +414,21 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - - :mod:`Algorithm for finding a basis for graph cycles <networkx.algorithms.cycles>` - - :mod:`Blockmodeling <networkx.algorithms.block>` - - :mod:`Assortativity and mixing matrices <networkx.algorithms.mixing>` - - :mod:`in-degree and out-degree centrality <networkx.algorithms.centrality.degree>` - - :mod:`Attracting components <networkx.algorithms.components.attracting>` + - :mod:`Algorithm for finding a basis for graph cycles <networkx.algorithms.cycles>` + - :mod:`Blockmodeling <networkx.algorithms.block>` + - :mod:`Assortativity and mixing matrices <networkx.algorithms.mixing>` + - :mod:`in-degree and out-degree centrality <networkx.algorithms.centrality.degree>` + - :mod:`Attracting components <networkx.algorithms.components.attracting>` and :mod:`condensation <networkx.algorithms.components.strongly_connected>`. - :mod:`Weakly connected components <networkx.algorithms.components.weakly_connected>` - - :mod:`Simpler interface to shortest path algorithms <networkx.algorithms.shortest_paths.generic>` - - :mod:`Edgelist format to read and write data with attributes <networkx.readwrite.edgelist>` - - :mod:`Attribute matrices <networkx.linalg.spectrum>` - - :mod:`GML reader for nested attributes <networkx.readwrite.gml>` - - Current-flow (random walk) - :mod:`betweenness <networkx.algorithms.centrality.current_flow_betweenness>` - and - :mod:`closeness <networkx.algorithms.centrality.current_flow_closeness>`. + - :mod:`Simpler interface to shortest path algorithms <networkx.algorithms.shortest_paths.generic>` + - :mod:`Edgelist format to read and write data with attributes <networkx.readwrite.edgelist>` + - :mod:`Attribute matrices <networkx.linalg.spectrum>` + - :mod:`GML reader for nested attributes <networkx.readwrite.gml>` + - Current-flow (random walk) + :mod:`betweenness <networkx.algorithms.centrality.current_flow_betweenness>` + and + :mod:`closeness <networkx.algorithms.centrality.current_flow_closeness>`. - :mod:`Directed configuration model <networkx.generators.degree_seq>`, and :mod:`directed random graph model <networkx.generators.random_graphs>`. 
- Improved documentation of drawing, shortest paths, and other algorithms @@ -403,35 +442,35 @@ Returning dictionaries Several of the algorithms and the degree() method now return dictionaries keyed by node instead of lists. In some cases there was a with_labels keyword which is no longer necessary. For example, - + >>> G=nx.Graph() >>> G.add_edge('a','b') >>> G.degree() # doctest: +SKIP {'a': 1, 'b': 1} - + Asking for the degree of a single node still returns a single number - + >>> G.degree('a') 1 The following now return dictionaries by default (instead of lists) and the with_labels keyword has been removed: - - - :meth:`Graph.degree`, + + - :meth:`Graph.degree`, :meth:`MultiGraph.degree`, - :meth:`DiGraph.degree`, - :meth:`DiGraph.in_degree`, + :meth:`DiGraph.degree`, + :meth:`DiGraph.in_degree`, :meth:`DiGraph.out_degree`, - :meth:`MultiDiGraph.degree`, - :meth:`MultiDiGraph.in_degree`, + :meth:`MultiDiGraph.degree`, + :meth:`MultiDiGraph.in_degree`, :meth:`MultiDiGraph.out_degree`. - - :func:`clustering`, + - :func:`clustering`, :func:`triangles` - - :func:`node_clique_number`, - :func:`number_of_cliques`, + - :func:`node_clique_number`, + :func:`number_of_cliques`, :func:`cliques_containing_node` - :func:`eccentricity` - + The following now return dictionaries by default (instead of lists) @@ -456,11 +495,11 @@ Examples Bug fixes ~~~~~~~~~ - Support graph attributes with union, intersection, and other graph operations - - Improve subgraph speed (and related algorithms such as + - Improve subgraph speed (and related algorithms such as connected_components_subgraphs()) - - Handle multigraphs in more operators (e.g. union) + - Handle multigraphs in more operators (e.g. union) - Handle double-quoted labels with pydot - - Normalize betweenness_centrality for undirected graphs correctly + - Normalize betweenness_centrality for undirected graphs correctly - Normalize eigenvector_centrality by l2 norm - :func:`read_gml` now returns multigraphs @@ -488,7 +527,7 @@ to allow graph, node, and edge attributes. See http://networkx.lanl.gov/reference/api_changes.html - Update Graph, DiGraph, and MultiGraph classes to allow attributes. 
- - Default edge data is now an empty dictionary (was the integer 1) + - Default edge data is now an empty dictionary (was the integer 1) - Difference and intersection operators - Average shortest path - A* (A-Star) algorithm @@ -531,7 +570,7 @@ See http://networkx.lanl.gov/reference/api_changes.html Bug fixes ~~~~~~~~~ - - handle root= option to draw_graphviz correctly + - handle root= option to draw_graphviz correctly Examples ~~~~~~~~ @@ -562,20 +601,20 @@ New features Bug fixes ~~~~~~~~~ - - Better edge data handling with GML writer + - Better edge data handling with GML writer - Edge betweenness fix for XGraph with default data of None - Handle Matplotlib version strings (allow "pre") - Interface to PyGraphviz (to_agraph()) now handles parallel edges - Fix bug in copy from XGraph to XGraph with multiedges - - Use SciPy sparse lil matrix format instead of coo format + - Use SciPy sparse lil matrix format instead of coo format - Clear up ambiguous cases for Barabasi-Albert model - Better care of color maps with Matplotlib when drawing colored nodes - and edges + and edges - Fix error handling in layout.py Examples ~~~~~~~~ - - Ubigraph examples showing 3D drawing + - Ubigraph examples showing 3D drawing NetworkX 0.36 @@ -588,15 +627,15 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - - GML format graph reader, tests, and example (football.py) + - GML format graph reader, tests, and example (football.py) - edge_betweenness() and load_betweenness() Bug fixes ~~~~~~~~~ - - remove obsolete parts of pygraphviz interface + - remove obsolete parts of pygraphviz interface - improve handling of Matplotlib version strings - write_dot() now writes parallel edges and self loops - - is_bipartite() and bipartite_color() fixes + - is_bipartite() and bipartite_color() fixes - configuration model speedup using random.shuffle() - convert with specified nodelist now works correctly - vf2 isomorphism checker updates @@ -622,7 +661,7 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - algorithms for strongly connected components. - - Brandes betweenness centrality algorithm (weighted and unweighted versions) + - Brandes betweenness centrality algorithm (weighted and unweighted versions) - closeness centrality for weighted graphs - dfs_preorder, dfs_postorder, dfs_tree, dfs_successor, dfs_predecessor - readers for GraphML, LEDA, sparse6, and graph6 formats. 
@@ -646,7 +685,7 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - - benchmarks for graph classes + - benchmarks for graph classes - Brandes betweenness centrality algorithm - Dijkstra predecessor and distance algorithm - xslt to convert DIA graphs to NetworkX @@ -657,9 +696,9 @@ New features Bug fixes ~~~~~~~~~ - - speedups of neighbors() + - speedups of neighbors() - simplified Dijkstra's algorithm code - - better exception handling for shortest paths + - better exception handling for shortest paths - get_edge(u,v) returns None (instead of exception) if no edge u-v - floyd_warshall_array fixes for negative weights - bad G467, docs, and unittest fixes for graph atlas @@ -668,8 +707,8 @@ Bug fixes - remove extra kwds arguments in many places - no multi counting edges in conversion to dict of lists for multigraphs - allow passing tuple to get_edge() - - bad parameter order in node/edge betweenness - - edge betweenness doesn't fail with XGraph + - bad parameter order in node/edge betweenness + - edge betweenness doesn't fail with XGraph - don't throw exceptions for nodes not in graph (silently ignore instead) in edges_* and degree_* @@ -689,13 +728,13 @@ New features - include documentation in source package (doc) - tests can now be run with >>> import networkx - >>> networkx.test() + >>> networkx.test() Bug fixes ~~~~~~~~~ - read_gpickle now works correctly with Windows - refactored large modules into smaller code files - - degree(nbunch) now returns degrees in same order as nbunch + - degree(nbunch) now returns degrees in same order as nbunch - degree() now works for multiedges=True - update node_boundary and edge_boundary for efficiency - edited documentation for graph classes, now mostly in info.py @@ -720,12 +759,12 @@ New features - Generators and functions for bipartite graphs - Experimental classes for trees and forests - Support for new pygraphviz update (in nx_agraph.py) , see - http://networkx.lanl.gov/pygraphviz/ for pygraphviz details + http://networkx.lanl.gov/pygraphviz/ for pygraphviz details Bug fixes ~~~~~~~~~ - Handle special cases correctly in triangles function - - Typos in documentation + - Typos in documentation - Handle special cases in shortest_path and shortest_path_length, allow cutoff parameter for maximum depth to search - Update examples: erdos_renyi.py, miles.py, roget,py, eigenvalues.py @@ -753,7 +792,7 @@ New features Bug fixes ~~~~~~~~~ - Allow drawing graphs with no edges using pylab - - Use faster heapq in dijkstra + - Use faster heapq in dijkstra - Don't complain if X windows is not available Examples @@ -771,10 +810,10 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - - update to work with Python 2.5 - - bidirectional version of shortest_path and Dijkstra + - update to work with Python 2.5 + - bidirectional version of shortest_path and Dijkstra - single_source_shortest_path and all_pairs_shortest_path - - s-metric and experimental code to generate maximal s-metric graph + - s-metric and experimental code to generate maximal s-metric graph - double_edge_swap and connected_double_edge_swap - Floyd's algorithm for all pairs shortest path - read and write unicode graph data to text files @@ -790,13 +829,13 @@ Bug fixes - function name changes in shortest_path routines - saner internal handling of nbunch (node bunches), raise an exception if an nbunch isn't a node or iterable - - better keyword handling in io.py allows reading multiple graphs + - better keyword handling in io.py allows reading multiple 
graphs - don't mix Numeric and numpy arrays in graph layouts and drawing - avoid automatically rescaling matplotlib axes when redrawing graph layout Examples ~~~~~~~~ - - unicode node labels + - unicode node labels NetworkX 0.29 @@ -809,14 +848,14 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - Algorithms for betweenness, eigenvalues, eigenvectors, and - spectral projection for threshold graphs + spectral projection for threshold graphs - Use numpy when available - dense_gnm_random_graph generator - Generators for some directed graphs: GN, GNR, and GNC by Krapivsky - and Redner + and Redner - Grid graph generators now label by index tuples. Helper functions for manipulating labels. - - relabel_nodes_with_function + - relabel_nodes_with_function Bug fixes @@ -840,9 +879,9 @@ See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - Option to construct Laplacian with rows and columns in specified order - - Option in convert_node_labels_to_integers to use sorted order + - Option in convert_node_labels_to_integers to use sorted order - predecessor(G,n) function that returns dictionary of - nodes with predecessors from breadth-first search of G + nodes with predecessors from breadth-first search of G starting at node n. https://networkx.lanl.gov/trac/ticket/26 @@ -851,24 +890,24 @@ Examples - Formation of giant component in binomial_graph: - Chess masters matches: - Gallery https://networkx.github.io/documentation/latest/auto_examples/index.html - + Bug fixes ~~~~~~~~~ - Adjusted names for random graphs. - + erdos_renyi_graph=binomial_graph=gnp_graph: n nodes with + + erdos_renyi_graph=binomial_graph=gnp_graph: n nodes with edge probability p + gnm_graph: n nodes and m edges - + fast_gnp_random_graph: gnp for sparse graphs (small p) + + fast_gnp_random_graph: gnp for sparse graphs (small p) - Documentation contains correct spelling of Barabási, Bollobás, Erdős, and Rényi in UTF-8 encoding - Increased speed of connected_components and related functions by using faster BFS algorithm in networkx.paths - https://networkx.lanl.gov/trac/ticket/27 + https://networkx.lanl.gov/trac/ticket/27 - XGraph and XDiGraph with multiedges=True produced error on delete_edge - Cleaned up docstring errors - Normalize names of some graphs to produce strings that represent calling sequence - + NetworkX 0.27 ------------- @@ -890,7 +929,7 @@ New features See https://networkx.lanl.gov/trac/ticket/24 - Addition of out_edges, in_edges and corresponding out_neighbors and in_neighbors for digraphs. For digraphs edges=out_edges. - + Examples ~~~~~~~~ - Minard's data for Napoleon's Russian campaign @@ -898,7 +937,7 @@ Examples Bug fixes ~~~~~~~~~ - XGraph(multiedges=True) returns a copy of the list of edges - for get_edge() + for get_edge() NetworkX 0.26 ------------- @@ -914,7 +953,7 @@ New features rows/columns in matrix - optional pygraphviz and pydot interface to graphviz is now callable as "graphviz" with pygraphviz preferred. Use draw_graphviz(G). 
- + Examples ~~~~~~~~ - Several new examples showing how draw to graphs with various @@ -954,9 +993,9 @@ Bug fixes ~~~~~~~~~ - use create_using= instead of result= keywords for graph types in all cases - - missing weights for degree 0 and 1 nodes in clustering + - missing weights for degree 0 and 1 nodes in clustering - configuration model now uses XGraph, returns graph with identical - degree sequence as input sequence + degree sequence as input sequence - fixed Dijkstra priority queue - fixed non-recursive toposort and is_directed_acyclic graph @@ -972,7 +1011,7 @@ Bug fixes - Changed to list comprehension in DiGraph.reverse() for python2.3 compatibility - Barabasi-Albert graph generator fixed - - Attempt to add self loop should add node even if parallel edges not + - Attempt to add self loop should add node even if parallel edges not allowed NetworkX 0.23 @@ -1015,7 +1054,7 @@ Documentation Bug fixes ~~~~~~~~~ - - Fixed logic in io.py for reading DiGraphs. + - Fixed logic in io.py for reading DiGraphs. - Path based centrality measures (betweenness, closeness) modified so they work on graphs that are not connected and produce the same result as if each connected component were @@ -1041,20 +1080,20 @@ Examples - Kevin Bacon movie actor graph: Examples/kevin_bacon.py - Compute eigenvalues of graph Laplacian: Examples/eigenvalues.py - Atlas of small graphs: Examples/atlas.py - + Documentation ~~~~~~~~~~~~~ - Rewrite of setup scripts to install documentation and - tests in documentation directory specified + tests in documentation directory specified Bug fixes ~~~~~~~~~ - Handle calls to edges() with non-node, non-iterable items. - truncated_tetrahedral_graph was just plain wrong - Speedup of betweenness_centrality code - - bfs_path_length now returns correct lengths + - bfs_path_length now returns correct lengths - Catch error if target of search not in connected component of source - Code cleanup to label internal functions with _name - Changed import statement lines to always use "import NX" to - protect name-spaces + protect name-spaces - Other minor bug-fixes and testing added diff --git a/doc/reference/algorithms/approximation.rst b/doc/reference/algorithms/approximation.rst index fe5ab1b..5b207d4 100644 --- a/doc/reference/algorithms/approximation.rst +++ b/doc/reference/algorithms/approximation.rst @@ -1,6 +1,6 @@ -**************************** +***************************** Approximations and Heuristics -**************************** +***************************** .. automodule:: networkx.algorithms.approximation @@ -90,6 +90,16 @@ Steiner Tree steiner_tree +Treewidth +--------- +.. automodule:: networkx.algorithms.approximation.treewidth +.. autosummary:: + :toctree: generated/ + + treewidth_min_degree + treewidth_min_fill_in + + Vertex Cover ------------ .. automodule:: networkx.algorithms.approximation.vertex_cover diff --git a/doc/reference/algorithms/assortativity.rst b/doc/reference/algorithms/assortativity.rst index 753dc1e..02f3b8d 100644 --- a/doc/reference/algorithms/assortativity.rst +++ b/doc/reference/algorithms/assortativity.rst @@ -7,7 +7,7 @@ Assortativity :toctree: generated/ Assortativity -------------- +------------- .. autosummary:: :toctree: generated/ @@ -17,7 +17,7 @@ Assortativity degree_pearson_correlation_coefficient Average neighbor degree ------------------------ +----------------------- .. 
autosummary:: :toctree: generated/ @@ -40,5 +40,16 @@ Mixing attribute_mixing_matrix degree_mixing_matrix - degree_mixing_dict + numeric_mixing_matrix attribute_mixing_dict + degree_mixing_dict + mixing_dict + +Pairs +----- +.. autosummary:: + :toctree: generated/ + + node_attribute_xy + node_degree_xy + diff --git a/doc/reference/algorithms/asteroidal.rst b/doc/reference/algorithms/asteroidal.rst new file mode 100644 index 0000000..13db0c4 --- /dev/null +++ b/doc/reference/algorithms/asteroidal.rst @@ -0,0 +1,10 @@ +********** +Asteroidal +********** + +.. automodule:: networkx.algorithms.asteroidal +.. autosummary:: + :toctree: generated/ + + is_at_free + find_asteroidal_triple diff --git a/doc/reference/algorithms/bipartite.rst b/doc/reference/algorithms/bipartite.rst index f94bd72..b2b3c7c 100644 --- a/doc/reference/algorithms/bipartite.rst +++ b/doc/reference/algorithms/bipartite.rst @@ -18,6 +18,18 @@ Basic functions density degrees +Edgelist +-------- +.. automodule:: networkx.algorithms.bipartite.edgelist +.. autosummary:: + :toctree: generated/ + + generate_edgelist + write_edgelist + parse_edgelist + read_edgelist + + Matching -------- .. automodule:: networkx.algorithms.bipartite.matching @@ -27,6 +39,8 @@ Matching eppstein_matching hopcroft_karp_matching to_vertex_cover + maximum_matching + minimum_weight_full_matching Matrix diff --git a/doc/reference/algorithms/bridges.rst b/doc/reference/algorithms/bridges.rst index 14df053..b6f44ac 100644 --- a/doc/reference/algorithms/bridges.rst +++ b/doc/reference/algorithms/bridges.rst @@ -7,3 +7,4 @@ Bridges bridges has_bridges + local_bridges diff --git a/doc/reference/algorithms/centrality.rst b/doc/reference/algorithms/centrality.rst index b48b96a..a079a5e 100644 --- a/doc/reference/algorithms/centrality.rst +++ b/doc/reference/algorithms/centrality.rst @@ -29,6 +29,7 @@ Closeness :toctree: generated/ closeness_centrality + incremental_closeness_centrality Current Flow Closeness ---------------------- @@ -36,6 +37,7 @@ Current Flow Closeness :toctree: generated/ current_flow_closeness_centrality + information_centrality (Shortest Path) Betweenness --------------------------- @@ -43,8 +45,9 @@ Current Flow Closeness :toctree: generated/ betweenness_centrality - edge_betweenness_centrality + betweenness_centrality_source betweenness_centrality_subset + edge_betweenness_centrality edge_betweenness_centrality_subset @@ -66,6 +69,17 @@ Communicability Betweenness communicability_betweenness_centrality +Group Centrality +---------------- +.. autosummary:: + :toctree: generated/ + + group_betweenness_centrality + group_closeness_centrality + group_degree_centrality + group_in_degree_centrality + group_out_degree_centrality + Load ---- .. autosummary:: @@ -90,6 +104,13 @@ Harmonic Centrality harmonic_centrality +Dispersion +---------- +.. autosummary:: + :toctree: generated/ + + dispersion + Reaching -------- .. autosummary:: @@ -97,3 +118,33 @@ Reaching local_reaching_centrality global_reaching_centrality + +Percolation +----------- +.. autosummary:: + :toctree: generated/ + + percolation_centrality + +Second Order Centrality +----------------------- +.. autosummary:: + :toctree: generated/ + + second_order_centrality + +Trophic +------- +.. autosummary:: + :toctree: generated/ + + trophic_levels + trophic_differences + trophic_incoherence_parameter + +VoteRank +-------- +.. 
autosummary:: + :toctree: generated/ + + voterank diff --git a/doc/reference/algorithms/chordal.rst b/doc/reference/algorithms/chordal.rst index dae8c0c..a623204 100644 --- a/doc/reference/algorithms/chordal.rst +++ b/doc/reference/algorithms/chordal.rst @@ -10,4 +10,5 @@ Chordal is_chordal chordal_graph_cliques chordal_graph_treewidth + complete_to_chordal_graph find_induced_nodes diff --git a/doc/reference/algorithms/clique.rst b/doc/reference/algorithms/clique.rst index 751752e..4324c3c 100644 --- a/doc/reference/algorithms/clique.rst +++ b/doc/reference/algorithms/clique.rst @@ -9,9 +9,10 @@ Clique enumerate_all_cliques find_cliques make_max_clique_graph - make_clique_bipartite + make_clique_bipartite graph_clique_number - graph_number_of_cliques + graph_number_of_cliques node_clique_number number_of_cliques cliques_containing_node + max_weight_clique diff --git a/doc/reference/algorithms/clustering.rst b/doc/reference/algorithms/clustering.rst index 9847512..afedff1 100644 --- a/doc/reference/algorithms/clustering.rst +++ b/doc/reference/algorithms/clustering.rst @@ -9,6 +9,6 @@ Clustering triangles transitivity clustering - average_clustering + average_clustering square_clustering generalized_degree diff --git a/doc/reference/algorithms/coloring.rst b/doc/reference/algorithms/coloring.rst index 996055e..a0e68ce 100644 --- a/doc/reference/algorithms/coloring.rst +++ b/doc/reference/algorithms/coloring.rst @@ -7,6 +7,7 @@ Coloring :toctree: generated/ greedy_color + equitable_color Some node ordering strategies are provided for use with :func:`greedy_color`. diff --git a/doc/reference/algorithms/community.rst b/doc/reference/algorithms/community.rst index 59fc9b1..266f42b 100644 --- a/doc/reference/algorithms/community.rst +++ b/doc/reference/algorithms/community.rst @@ -14,16 +14,6 @@ Bipartitions kernighan_lin_bisection - -Generators ----------- -.. automodule:: networkx.algorithms.community.community_generators -.. autosummary:: - :toctree: generated/ - - LFR_benchmark_graph - - K-Clique -------- .. automodule:: networkx.algorithms.community.kclique @@ -37,8 +27,17 @@ Modularity-based communities .. automodule:: networkx.algorithms.community.modularity_max .. autosummary:: :toctree: generated/ - + greedy_modularity_communities + _naive_greedy_modularity_communities + +Tree partitioning +----------------- +.. automodule:: networkx.algorithms.community.lukes +.. autosummary:: + :toctree: generated/ + + lukes_partitioning Label propagation ----------------- @@ -51,7 +50,7 @@ Label propagation Fluid Communities ----------------- -.. automodule:: networkx.algorithms.community.asyn_fluidc +.. automodule:: networkx.algorithms.community.asyn_fluid .. 
autosummary:: :toctree: generated/ @@ -64,6 +63,7 @@ Measuring partitions :toctree: generated/ coverage + modularity performance Partitions via centrality measures diff --git a/doc/reference/algorithms/component.rst b/doc/reference/algorithms/component.rst index 1caf03e..23aa4c5 100644 --- a/doc/reference/algorithms/component.rst +++ b/doc/reference/algorithms/component.rst @@ -11,7 +11,6 @@ Connectivity is_connected number_connected_components connected_components - connected_component_subgraphs node_connected_component Strong connectivity @@ -22,7 +21,6 @@ Strong connectivity is_strongly_connected number_strongly_connected_components strongly_connected_components - strongly_connected_component_subgraphs strongly_connected_components_recursive kosaraju_strongly_connected_components condensation @@ -35,7 +33,6 @@ Weak connectivity is_weakly_connected number_weakly_connected_components weakly_connected_components - weakly_connected_component_subgraphs Attracting components --------------------- @@ -45,7 +42,6 @@ Attracting components is_attracting_component number_attracting_components attracting_components - attracting_component_subgraphs Biconnected components ---------------------- @@ -55,7 +51,6 @@ Biconnected components is_biconnected biconnected_components biconnected_component_edges - biconnected_component_subgraphs articulation_points Semiconnectedness diff --git a/doc/reference/algorithms/core.rst b/doc/reference/algorithms/core.rst index 2c37bf3..19917b5 100644 --- a/doc/reference/algorithms/core.rst +++ b/doc/reference/algorithms/core.rst @@ -11,3 +11,5 @@ Cores k_shell k_crust k_corona + k_truss + onion_layers diff --git a/doc/reference/algorithms/d_separation.rst b/doc/reference/algorithms/d_separation.rst new file mode 100644 index 0000000..a57fdcb --- /dev/null +++ b/doc/reference/algorithms/d_separation.rst @@ -0,0 +1,9 @@ +============ +D-Separation +============ + +.. automodule:: networkx.algorithms.d_separation +.. autosummary:: + :toctree: generated/ + + d_separated diff --git a/doc/reference/algorithms/dag.rst b/doc/reference/algorithms/dag.rst index cce7390..19edbcf 100644 --- a/doc/reference/algorithms/dag.rst +++ b/doc/reference/algorithms/dag.rst @@ -9,10 +9,12 @@ Directed Acyclic Graphs ancestors descendants topological_sort + all_topological_sorts lexicographical_topological_sort is_directed_acyclic_graph is_aperiodic transitive_closure + transitive_closure_dag transitive_reduction antichains dag_longest_path diff --git a/doc/reference/algorithms/dispersion.rst b/doc/reference/algorithms/dispersion.rst deleted file mode 100644 index ea0cb9d..0000000 --- a/doc/reference/algorithms/dispersion.rst +++ /dev/null @@ -1,12 +0,0 @@ -********** -Dispersion -********** - -.. automodule:: networkx.algorithms.centrality - -Dispersion ----------- -.. autosummary:: - :toctree: generated/ - - dispersion diff --git a/doc/reference/algorithms/distance_measures.rst b/doc/reference/algorithms/distance_measures.rst index 78c35ee..1a6a3c7 100644 --- a/doc/reference/algorithms/distance_measures.rst +++ b/doc/reference/algorithms/distance_measures.rst @@ -6,10 +6,13 @@ Distance Measures .. 
autosummary:: :toctree: generated/ + barycenter center diameter eccentricity + extrema_bounding periphery radius + resistance_distance diff --git a/doc/reference/algorithms/efficiency.rst b/doc/reference/algorithms/efficiency_measures.rst similarity index 70% rename from doc/reference/algorithms/efficiency.rst rename to doc/reference/algorithms/efficiency_measures.rst index 844dddb..51a5e91 100644 --- a/doc/reference/algorithms/efficiency.rst +++ b/doc/reference/algorithms/efficiency_measures.rst @@ -2,7 +2,7 @@ Efficiency ********** -.. automodule:: networkx.algorithms.efficiency +.. automodule:: networkx.algorithms.efficiency_measures .. autosummary:: :toctree: generated/ diff --git a/doc/reference/algorithms/euler.rst b/doc/reference/algorithms/euler.rst index 7c7991e..9dae667 100644 --- a/doc/reference/algorithms/euler.rst +++ b/doc/reference/algorithms/euler.rst @@ -8,3 +8,7 @@ Eulerian is_eulerian eulerian_circuit + eulerize + is_semieulerian + has_eulerian_path + eulerian_path diff --git a/doc/reference/algorithms/graph_hashing.rst b/doc/reference/algorithms/graph_hashing.rst new file mode 100644 index 0000000..0fd908b --- /dev/null +++ b/doc/reference/algorithms/graph_hashing.rst @@ -0,0 +1,9 @@ +************* +Graph Hashing +************* + +.. automodule:: networkx.algorithms.graph_hashing +.. autosummary:: + :toctree: generated/ + + weisfeiler_lehman_graph_hash diff --git a/doc/reference/algorithms/index.rst b/doc/reference/algorithms/index.rst index 8aaacfb..19a682e 100644 --- a/doc/reference/algorithms/index.rst +++ b/doc/reference/algorithms/index.rst @@ -11,6 +11,7 @@ Algorithms approximation assortativity + asteroidal bipartite boundary bridges @@ -28,15 +29,16 @@ Algorithms covering cycles cuts + d_separation dag - dispersion distance_measures distance_regular dominance dominating - efficiency + efficiency_measures euler flow + graph_hashing graphical hierarchy hybrid @@ -44,16 +46,28 @@ Algorithms isomorphism link_analysis link_prediction + lowest_common_ancestors matching minors mis + non_randomness + moral + node_classification operators + planarity + planar_drawing reciprocity + regular rich_club shortest_paths + similarity simple_paths + smallworld + smetric + sparsifiers structuralholes swap + threshold tournament traversal tree diff --git a/doc/reference/algorithms/isolates.rst b/doc/reference/algorithms/isolates.rst index 2411337..854382c 100644 --- a/doc/reference/algorithms/isolates.rst +++ b/doc/reference/algorithms/isolates.rst @@ -8,3 +8,4 @@ Isolates is_isolate isolates + number_of_isolates diff --git a/doc/reference/algorithms/isomorphism.ismags.rst b/doc/reference/algorithms/isomorphism.ismags.rst new file mode 100644 index 0000000..8ca55fa --- /dev/null +++ b/doc/reference/algorithms/isomorphism.ismags.rst @@ -0,0 +1,23 @@ +.. _ismags: + +**************** +ISMAGS Algorithm +**************** + +.. automodule:: networkx.algorithms.isomorphism.ismags + +ISMAGS object +------------- +.. currentmodule:: networkx.algorithms.isomorphism + +.. 
autosummary:: + :toctree: generated/ + + ISMAGS + ISMAGS.analyze_symmetry + ISMAGS.is_isomorphic + ISMAGS.subgraph_is_isomorphic + ISMAGS.isomorphisms_iter + ISMAGS.subgraph_isomorphisms_iter + ISMAGS.largest_common_subgraph + diff --git a/doc/reference/algorithms/isomorphism.rst b/doc/reference/algorithms/isomorphism.rst index 132c93f..5a29f1e 100644 --- a/doc/reference/algorithms/isomorphism.rst +++ b/doc/reference/algorithms/isomorphism.rst @@ -16,9 +16,22 @@ Isomorphism fast_could_be_isomorphic faster_could_be_isomorphic -Advanced Interface to VF2 Algorithm ------------------------------------ + +Tree Isomorphism +----------------- +.. automodule:: networkx.algorithms.isomorphism.tree_isomorphism +.. autosummary:: + :toctree: generated/ + + rooted_tree_isomorphism + tree_isomorphism + + +Advanced Interfaces +------------------- .. toctree:: :maxdepth: 2 isomorphism.vf2 + isomorphism.ismags + diff --git a/doc/reference/algorithms/isomorphism.vf2.rst b/doc/reference/algorithms/isomorphism.vf2.rst index 6a4b5a8..a5c2ad4 100644 --- a/doc/reference/algorithms/isomorphism.vf2.rst +++ b/doc/reference/algorithms/isomorphism.vf2.rst @@ -1,7 +1,7 @@ .. _vf2: ************* -VF2 Algorithm +VF2 Algorithm ************* .. automodule:: networkx.algorithms.isomorphism.isomorphvf2 @@ -12,7 +12,7 @@ Graph Matcher .. autosummary:: :toctree: generated/ - + GraphMatcher.__init__ GraphMatcher.initialize GraphMatcher.is_isomorphic diff --git a/doc/reference/algorithms/link_prediction.rst b/doc/reference/algorithms/link_prediction.rst index a95caee..5207b62 100644 --- a/doc/reference/algorithms/link_prediction.rst +++ b/doc/reference/algorithms/link_prediction.rst @@ -13,3 +13,4 @@ Link Prediction cn_soundarajan_hopcroft ra_index_soundarajan_hopcroft within_inter_cluster + common_neighbor_centrality diff --git a/doc/reference/algorithms/lowest_common_ancestors.rst b/doc/reference/algorithms/lowest_common_ancestors.rst new file mode 100644 index 0000000..a82b357 --- /dev/null +++ b/doc/reference/algorithms/lowest_common_ancestors.rst @@ -0,0 +1,11 @@ +********************** +Lowest Common Ancestor +********************** + +.. automodule:: networkx.algorithms.lowest_common_ancestors +.. autosummary:: + :toctree: generated/ + + all_pairs_lowest_common_ancestor + tree_all_pairs_lowest_common_ancestor + lowest_common_ancestor diff --git a/doc/reference/algorithms/matching.rst b/doc/reference/algorithms/matching.rst index a7c2fe7..331e2b5 100644 --- a/doc/reference/algorithms/matching.rst +++ b/doc/reference/algorithms/matching.rst @@ -8,5 +8,6 @@ Matching is_matching is_maximal_matching + is_perfect_matching maximal_matching max_weight_matching diff --git a/doc/reference/algorithms/moral.rst b/doc/reference/algorithms/moral.rst new file mode 100644 index 0000000..8d2bdfc --- /dev/null +++ b/doc/reference/algorithms/moral.rst @@ -0,0 +1,9 @@ +***** +Moral +***** + +.. automodule:: networkx.algorithms.moral +.. autosummary:: + :toctree: generated/ + + moral_graph diff --git a/doc/reference/algorithms/node_classification.rst b/doc/reference/algorithms/node_classification.rst index f8afa4d..2229818 100644 --- a/doc/reference/algorithms/node_classification.rst +++ b/doc/reference/algorithms/node_classification.rst @@ -1,22 +1,20 @@ -*********** Node Classification -*********** - +=================== .. automodule:: networkx.algorithms.node_classification .. currentmodule:: networkx -Harmonic Function (HMN) ------------- -.. automodule:: networkx.algorithms.community.hmn +Harmonic Function +----------------- +.. 
automodule:: networkx.algorithms.node_classification.hmn .. autosummary:: :toctree: generated/ harmonic_function -Local and Global Consistency (LGC) ------------- -.. automodule:: networkx.algorithms.community.lgc +Local and Global Consistency +---------------------------- +.. automodule:: networkx.algorithms.node_classification.lgc .. autosummary:: :toctree: generated/ diff --git a/doc/reference/algorithms/non_randomness.rst b/doc/reference/algorithms/non_randomness.rst new file mode 100644 index 0000000..090707c --- /dev/null +++ b/doc/reference/algorithms/non_randomness.rst @@ -0,0 +1,9 @@ +************** +non-randomness +************** + +.. automodule:: networkx.algorithms.non_randomness +.. autosummary:: + :toctree: generated/ + + non_randomness diff --git a/doc/reference/algorithms/operators.rst b/doc/reference/algorithms/operators.rst index c4c861d..7babf2c 100644 --- a/doc/reference/algorithms/operators.rst +++ b/doc/reference/algorithms/operators.rst @@ -20,6 +20,7 @@ Operators intersection difference symmetric_difference + full_join .. automodule:: networkx.algorithms.operators.all @@ -38,6 +39,7 @@ Operators cartesian_product lexicographic_product + rooted_product strong_product tensor_product power diff --git a/doc/reference/algorithms/planar_drawing.rst b/doc/reference/algorithms/planar_drawing.rst new file mode 100644 index 0000000..c1a4b1d --- /dev/null +++ b/doc/reference/algorithms/planar_drawing.rst @@ -0,0 +1,9 @@ +************** +Planar Drawing +************** + +.. automodule:: networkx.algorithms.planar_drawing +.. autosummary:: + :toctree: generated/ + + combinatorial_embedding_to_pos diff --git a/doc/reference/algorithms/planarity.rst b/doc/reference/algorithms/planarity.rst new file mode 100644 index 0000000..cad00dc --- /dev/null +++ b/doc/reference/algorithms/planarity.rst @@ -0,0 +1,11 @@ +********* +Planarity +********* + +.. automodule:: networkx.algorithms.planarity +.. autosummary:: + :toctree: generated/ + + check_planarity +.. autoclass:: PlanarEmbedding + :members: \ No newline at end of file diff --git a/doc/reference/algorithms/regular.rst b/doc/reference/algorithms/regular.rst new file mode 100644 index 0000000..b1a9bf1 --- /dev/null +++ b/doc/reference/algorithms/regular.rst @@ -0,0 +1,11 @@ +******* +Regular +******* + +.. automodule:: networkx.algorithms.regular +.. 
autosummary:: + :toctree: generated/ + + is_regular + is_k_regular + k_factor diff --git a/doc/reference/algorithms/shortest_paths.rst b/doc/reference/algorithms/shortest_paths.rst index d7177e2..daf1c2d 100644 --- a/doc/reference/algorithms/shortest_paths.rst +++ b/doc/reference/algorithms/shortest_paths.rst @@ -20,6 +20,9 @@ Advanced Interface single_source_shortest_path single_source_shortest_path_length + single_target_shortest_path + single_target_shortest_path_length + bidirectional_shortest_path all_pairs_shortest_path all_pairs_shortest_path_length predecessor @@ -34,22 +37,25 @@ Advanced Interface single_source_dijkstra single_source_dijkstra_path single_source_dijkstra_path_length + multi_source_dijkstra multi_source_dijkstra_path multi_source_dijkstra_path_length + all_pairs_dijkstra all_pairs_dijkstra_path all_pairs_dijkstra_path_length bidirectional_dijkstra bellman_ford_path bellman_ford_path_length + single_source_bellman_ford single_source_bellman_ford_path single_source_bellman_ford_path_length all_pairs_bellman_ford_path all_pairs_bellman_ford_path_length - single_source_bellman_ford bellman_ford_predecessor_and_distance negative_edge_cycle + goldberg_radzik johnson @@ -63,6 +69,7 @@ Dense Graphs floyd_warshall floyd_warshall_predecessor_and_distance floyd_warshall_numpy + reconstruct_path A* Algorithm @@ -73,5 +80,5 @@ A* Algorithm :toctree: generated/ astar_path - astar_path_length + astar_path_length diff --git a/doc/reference/algorithms/similarity.rst b/doc/reference/algorithms/similarity.rst new file mode 100644 index 0000000..53f6446 --- /dev/null +++ b/doc/reference/algorithms/similarity.rst @@ -0,0 +1,14 @@ +******************* +Similarity Measures +******************* + +.. automodule:: networkx.algorithms.similarity +.. autosummary:: + :toctree: generated/ + + graph_edit_distance + optimal_edit_paths + optimize_graph_edit_distance + optimize_edit_paths + simrank_similarity + simrank_similarity_numpy diff --git a/doc/reference/algorithms/simple_paths.rst b/doc/reference/algorithms/simple_paths.rst index ee2cce1..89b3a64 100644 --- a/doc/reference/algorithms/simple_paths.rst +++ b/doc/reference/algorithms/simple_paths.rst @@ -7,5 +7,6 @@ Simple Paths :toctree: generated/ all_simple_paths + all_simple_edge_paths is_simple_path shortest_simple_paths diff --git a/doc/reference/algorithms/smallworld.rst b/doc/reference/algorithms/smallworld.rst new file mode 100644 index 0000000..5ffc0cc --- /dev/null +++ b/doc/reference/algorithms/smallworld.rst @@ -0,0 +1,12 @@ +*********** +Small-world +*********** + +.. automodule:: networkx.algorithms.smallworld +.. autosummary:: + :toctree: generated/ + + random_reference + lattice_reference + sigma + omega diff --git a/doc/reference/algorithms/smetric.rst b/doc/reference/algorithms/smetric.rst new file mode 100644 index 0000000..5f893e0 --- /dev/null +++ b/doc/reference/algorithms/smetric.rst @@ -0,0 +1,9 @@ +******** +s metric +******** + +.. automodule:: networkx.algorithms.smetric +.. autosummary:: + :toctree: generated/ + + s_metric diff --git a/doc/reference/algorithms/sparsifiers.rst b/doc/reference/algorithms/sparsifiers.rst new file mode 100644 index 0000000..82676ea --- /dev/null +++ b/doc/reference/algorithms/sparsifiers.rst @@ -0,0 +1,9 @@ +*********** +Sparsifiers +*********** + +.. automodule:: networkx.algorithms.sparsifiers +.. 
autosummary:: + :toctree: generated/ + + spanner diff --git a/doc/reference/algorithms/threshold.rst b/doc/reference/algorithms/threshold.rst new file mode 100644 index 0000000..c9b1c80 --- /dev/null +++ b/doc/reference/algorithms/threshold.rst @@ -0,0 +1,10 @@ +**************** +Threshold Graphs +**************** + +.. automodule:: networkx.algorithms.threshold +.. autosummary:: + :toctree: generated/ + + find_threshold_graph + is_threshold_graph diff --git a/doc/reference/algorithms/traversal.rst b/doc/reference/algorithms/traversal.rst index 58b69a2..2f830fc 100644 --- a/doc/reference/algorithms/traversal.rst +++ b/doc/reference/algorithms/traversal.rst @@ -32,6 +32,7 @@ Breadth First Search bfs_tree bfs_predecessors bfs_successors + descendants_at_distance Beam search ----------- @@ -49,3 +50,11 @@ Depth First Search on Edges :toctree: generated/ edge_dfs + +Breadth First Search on Edges +----------------------------- +.. automodule:: networkx.algorithms.traversal.edgebfs +.. autosummary:: + :toctree: generated/ + + edge_bfs diff --git a/doc/reference/algorithms/tree.rst b/doc/reference/algorithms/tree.rst index e2bbc7b..70af361 100644 --- a/doc/reference/algorithms/tree.rst +++ b/doc/reference/algorithms/tree.rst @@ -61,6 +61,14 @@ Spanning Trees minimum_spanning_edges maximum_spanning_edges +Decomposition +------------- +.. automodule:: networkx.algorithms.tree.decomposition +.. autosummary:: + :toctree: generated/ + + junction_tree + Exceptions ---------- .. automodule:: networkx.algorithms.tree.coding diff --git a/doc/reference/algorithms/triads.rst b/doc/reference/algorithms/triads.rst index 97caa28..90885a7 100644 --- a/doc/reference/algorithms/triads.rst +++ b/doc/reference/algorithms/triads.rst @@ -7,3 +7,8 @@ Triads :toctree: generated/ triadic_census + random_triad + triads_by_type + triad_type + all_triads + all_triplets diff --git a/doc/reference/classes/digraph.rst b/doc/reference/classes/digraph.rst index d8e0e82..3644815 100644 --- a/doc/reference/classes/digraph.rst +++ b/doc/reference/classes/digraph.rst @@ -28,7 +28,9 @@ Adding and removing nodes and edges DiGraph.add_weighted_edges_from DiGraph.remove_edge DiGraph.remove_edges_from + DiGraph.update DiGraph.clear + DiGraph.clear_edges @@ -83,4 +85,3 @@ Making copies and subgraphs DiGraph.subgraph DiGraph.edge_subgraph DiGraph.reverse - DiGraph.fresh_copy diff --git a/doc/reference/classes/graph.rst b/doc/reference/classes/graph.rst index 1dea2f0..f60671e 100644 --- a/doc/reference/classes/graph.rst +++ b/doc/reference/classes/graph.rst @@ -28,7 +28,9 @@ Adding and removing nodes and edges Graph.add_weighted_edges_from Graph.remove_edge Graph.remove_edges_from + Graph.update Graph.clear + Graph.clear_edges @@ -75,4 +77,3 @@ Making copies and subgraphs Graph.to_directed Graph.subgraph Graph.edge_subgraph - Graph.fresh_copy diff --git a/doc/reference/classes/index.rst b/doc/reference/classes/index.rst index d7bcd02..0747795 100644 --- a/doc/reference/classes/index.rst +++ b/doc/reference/classes/index.rst @@ -15,14 +15,17 @@ graph you want to represent. Which graph class should I use? 
=============================== -=================== ======================== -Graph Type NetworkX Class -=================== ======================== -Undirected Simple Graph -Directed Simple DiGraph -With Self-loops Graph, DiGraph -With Parallel edges MultiGraph, MultiDiGraph -=================== ======================== ++----------------+------------+--------------------+------------------------+ +| Networkx Class | Type | Self-loops allowed | Parallel edges allowed | ++================+============+====================+========================+ +| Graph | undirected | Yes | No | ++----------------+------------+--------------------+------------------------+ +| DiGraph | directed | Yes | No | ++----------------+------------+--------------------+------------------------+ +| MultiGraph | undirected | Yes | Yes | ++----------------+------------+--------------------+------------------------+ +| MultiDiGraph | directed | Yes | Yes | ++----------------+------------+--------------------+------------------------+ Basic graph types ================= @@ -44,3 +47,37 @@ Basic graph types :class:`~networkx.OrderedDiGraph`, :class:`~networkx.OrderedMultiGraph`, or :class:`~networkx.OrderedMultiDiGraph`, which behave like the base graph classes but give a consistent order for reporting of nodes and edges. + +Graph Views +=========== + +.. automodule:: networkx.classes.graphviews +.. autosummary:: + :toctree: generated/ + + generic_graph_view + subgraph_view + reverse_view + +Filters +======= + +.. note:: Filters can be used with views to restrict the view (or expand it). + They can filter nodes or filter edges. These examples are intended to help + you build new ones. They may instead contain all the filters you ever need. + +.. automodule:: networkx.classes.filters +.. 
autosummary:: + :toctree: generated/ + + no_filter + hide_nodes + hide_edges + hide_diedges + hide_multidiedges + hide_multiedges + show_nodes + show_edges + show_diedges + show_multidiedges + show_multiedges diff --git a/doc/reference/classes/multidigraph.rst b/doc/reference/classes/multidigraph.rst index 85a4c7d..a1d56e5 100644 --- a/doc/reference/classes/multidigraph.rst +++ b/doc/reference/classes/multidigraph.rst @@ -30,7 +30,9 @@ Adding and Removing Nodes and Edges MultiDiGraph.new_edge_key MultiDiGraph.remove_edge MultiDiGraph.remove_edges_from + MultiDiGraph.update MultiDiGraph.clear + MultiDiGraph.clear_edges @@ -84,4 +86,3 @@ Making copies and subgraphs MultiDiGraph.subgraph MultiDiGraph.edge_subgraph MultiDiGraph.reverse - MultiDiGraph.fresh_copy diff --git a/doc/reference/classes/multigraph.rst b/doc/reference/classes/multigraph.rst index 577cfc0..ae032ff 100644 --- a/doc/reference/classes/multigraph.rst +++ b/doc/reference/classes/multigraph.rst @@ -29,7 +29,9 @@ Adding and removing nodes and edges MultiGraph.new_edge_key MultiGraph.remove_edge MultiGraph.remove_edges_from + MultiGraph.update MultiGraph.clear + MultiGraph.clear_edges @@ -76,4 +78,3 @@ Making copies and subgraphs MultiGraph.to_directed MultiGraph.subgraph MultiGraph.edge_subgraph - MultiGraph.fresh_copy diff --git a/doc/reference/convert.rst b/doc/reference/convert.rst index 78645b6..bc21a8c 100644 --- a/doc/reference/convert.rst +++ b/doc/reference/convert.rst @@ -38,8 +38,10 @@ Numpy :toctree: generated/ to_numpy_matrix + to_numpy_array to_numpy_recarray from_numpy_matrix + from_numpy_array Scipy ----- diff --git a/doc/reference/drawing.rst b/doc/reference/drawing.rst index 6093e15..f482b39 100644 --- a/doc/reference/drawing.rst +++ b/doc/reference/drawing.rst @@ -19,7 +19,7 @@ dedicated and fully-featured graph visualization tools are `PGF/TikZ <https://sourceforge.net/projects/pgf/>`_. To use these and other such tools, you should export your NetworkX graph into a format that can be read by those tools. For example, Cytoscape can read the -GraphML format, and so, ``networkx.write_graphml(G)`` might be an appropriate +GraphML format, and so, ``networkx.write_graphml(G, path)`` might be an appropriate choice. Matplotlib @@ -36,6 +36,7 @@ Matplotlib draw_networkx_edge_labels draw_circular draw_kamada_kawai + draw_planar draw_random draw_spectral draw_spring @@ -77,11 +78,16 @@ Graph Layout .. autosummary:: :toctree: generated/ + bipartite_layout circular_layout kamada_kawai_layout + planar_layout random_layout rescale_layout + rescale_layout_dict shell_layout spring_layout spectral_layout - + spiral_layout + multipartite_layout + diff --git a/doc/reference/functions.rst b/doc/reference/functions.rst index 4916194..04ad1e5 100644 --- a/doc/reference/functions.rst +++ b/doc/reference/functions.rst @@ -15,9 +15,18 @@ Graph info create_empty_copy is_directed + to_directed + to_undirected + is_empty add_star add_path add_cycle + subgraph + subgraph_view + induced_subgraph + restricted_view + reverse_view + edge_subgraph Nodes @@ -27,6 +36,7 @@ Nodes nodes number_of_nodes + neighbors all_neighbors non_neighbors common_neighbors @@ -37,9 +47,9 @@ Edges .. autosummary:: :toctree: generated/ - edges number_of_edges + density non_edges Self loops @@ -56,11 +66,20 @@ Attributes .. autosummary:: :toctree: generated/ + is_weighted + is_negatively_weighted set_node_attributes get_node_attributes set_edge_attributes get_edge_attributes +Paths +---------- +.. 
autosummary:: + :toctree: generated/ + + is_path + path_weight Freezing graph structure ------------------------ @@ -69,8 +88,3 @@ Freezing graph structure freeze is_frozen - - - - - diff --git a/doc/reference/generators.rst b/doc/reference/generators.rst index f8744c4..e05e89f 100644 --- a/doc/reference/generators.rst +++ b/doc/reference/generators.rst @@ -25,12 +25,15 @@ Classic balanced_tree barbell_graph + binomial_tree complete_graph complete_multipartite_graph circular_ladder_graph + circulant_graph cycle_graph dorogovtsev_goltsev_mendes_graph empty_graph + full_rary_tree ladder_graph lollipop_graph null_graph @@ -49,7 +52,7 @@ Expanders margulis_gabber_galil_graph chordal_cycle_graph - + paley_graph Lattice ------- @@ -80,6 +83,7 @@ Small dodecahedral_graph frucht_graph heawood_graph + hoffman_singleton_graph house_graph house_x_graph icosahedral_graph @@ -112,12 +116,15 @@ Random Graphs connected_watts_strogatz_graph random_regular_graph barabasi_albert_graph + dual_barabasi_albert_graph + extended_barabasi_albert_graph powerlaw_cluster_graph random_kernel_graph random_lobster random_shell_graph random_powerlaw_tree random_powerlaw_tree_sequence + random_kernel_graph Duplication Divergence @@ -210,6 +217,15 @@ Stochastic stochastic_graph +AS graph +-------- +.. automodule:: networkx.generators.internet_as_graphs +.. autosummary:: + :toctree: generated/ + + random_internet_as_graph + + Intersection ------------ .. automodule:: networkx.generators.intersection @@ -230,6 +246,7 @@ Social Networks karate_club_graph davis_southern_women_graph florentine_families_graph + les_miserables_graph Community @@ -240,14 +257,25 @@ Community caveman_graph connected_caveman_graph - relaxed_caveman_graph - random_partition_graph - planted_partition_graph gaussian_random_partition_graph + LFR_benchmark_graph + planted_partition_graph + random_partition_graph + relaxed_caveman_graph ring_of_cliques + stochastic_block_model windmill_graph +Spectral +-------- +.. automodule:: networkx.generators.spectral_graph_forge +.. autosummary:: + :toctree: generated/ + + spectral_graph_forge + + Trees ----- .. automodule:: networkx.generators.trees @@ -285,6 +313,9 @@ Joint Degree Sequence is_valid_joint_degree joint_degree_graph + is_valid_directed_joint_degree + directed_joint_degree_graph + Mycielski --------- @@ -294,3 +325,37 @@ Mycielski mycielskian mycielski_graph + + +Harary Graph +------------ +.. automodule:: networkx.generators.harary_graph +.. autosummary:: + :toctree: generated/ + + hnm_harary_graph + hkn_harary_graph + +Cographs +------------ +.. automodule:: networkx.generators.cographs +.. autosummary:: + :toctree: generated/ + + random_cograph + +Interval Graph +--------------- +.. automodule:: networkx.generators.interval_graph +.. autosummary:: + :toctree: generated/ + + interval_graph + +Sudoku +------ +.. automodule:: networkx.generators.sudoku +.. autosummary:: + :toctree: generated/ + + sudoku_graph \ No newline at end of file diff --git a/doc/reference/glossary.rst b/doc/reference/glossary.rst index 798da32..d2cdda9 100644 --- a/doc/reference/glossary.rst +++ b/doc/reference/glossary.rst @@ -24,26 +24,8 @@ Glossary assigning to the `G.edges[u][v]` attribute dictionary for the specified edge *u*-*v*. - hashable - An object is hashable if it has a hash value which never changes - during its lifetime (it needs a :meth:`__hash__` method), and can be - compared to other objects (it needs an :meth:`__eq__` or :meth:`__cmp__` - method). 
Hashable objects which compare equal must have the same - hash value. - - Hashability makes an object usable as a dictionary key and a set - member, because these data structures use the hash value internally. - - All of Python's immutable built-in objects are hashable, while no - mutable containers (such as lists or dictionaries) are. Objects - which are instances of user-defined classes are hashable by - default; they all compare unequal, and their hash value is their - :func:`id`. - - Definition from https://docs.python.org/2/glossary.html - nbunch - An nbunch is a single node, container of nodes or None (representing + An nbunch is a single node, container of nodes or `None` (representing all nodes). It can be a list, set, graph, etc.. To filter an nbunch so that only nodes actually in `G` appear, use `G.nbunch_iter(nbunch)`. diff --git a/doc/reference/index.rst b/doc/reference/index.rst index 4d31d0e..8e0f24d 100644 --- a/doc/reference/index.rst +++ b/doc/reference/index.rst @@ -19,6 +19,7 @@ Reference relabel readwrite/index drawing + randomness exceptions utils glossary diff --git a/doc/reference/introduction.rst b/doc/reference/introduction.rst index 6dd8cb2..c174b22 100644 --- a/doc/reference/introduction.rst +++ b/doc/reference/introduction.rst @@ -281,7 +281,7 @@ computed with a layout function. The edges are lines between those dots. >>> nx.draw(G) # default spring_layout >>> plt.subplot(122) <matplotlib.axes._subplots.AxesSubplot object at ...> - >>> nx.draw(G, pos=nx.circular_layout(G), nodecolor='r', edge_color='b') + >>> nx.draw(G, pos=nx.circular_layout(G), node_color='r', edge_color='b') See the :doc:`examples </auto_examples/index>` for more ideas. @@ -317,7 +317,7 @@ edges $(A, B)$ and $(B, C)$. >>> G.add_edge('A', 'B') >>> G.add_edge('B', 'C') >>> print(G.adj) - {'A': {'B': {}}, 'C': {'B': {}}, 'B': {'A': {}, 'C': {}}} + {'A': {'B': {}}, 'B': {'A': {}, 'C': {}}, 'C': {'B': {}}} The data structure gets morphed slightly for each base graph class. For DiGraph two dict-of-dicts-of-dicts structures are provided, one diff --git a/doc/reference/linalg.rst b/doc/reference/linalg.rst index d6d92d0..ecdfc57 100644 --- a/doc/reference/linalg.rst +++ b/doc/reference/linalg.rst @@ -24,15 +24,15 @@ Laplacian Matrix laplacian_matrix normalized_laplacian_matrix directed_laplacian_matrix + directed_combinatorial_laplacian_matrix -Spectrum ---------- -.. automodule:: networkx.linalg.spectrum +Bethe Hessian Matrix +-------------------- +.. automodule:: networkx.linalg.bethehessianmatrix .. autosummary:: :toctree: generated/ - laplacian_spectrum - adjacency_spectrum + bethe_hessian_matrix Algebraic Connectivity ---------------------- @@ -53,3 +53,25 @@ Attribute Matrices attr_matrix attr_sparse_matrix + +Modularity Matrices +------------------- + +.. automodule:: networkx.linalg.modularitymatrix +.. autosummary:: + :toctree: generated/ + + modularity_matrix + directed_modularity_matrix + +Spectrum +--------- +.. automodule:: networkx.linalg.spectrum +.. autosummary:: + :toctree: generated/ + + adjacency_spectrum + laplacian_spectrum + bethe_hessian_spectrum + normalized_laplacian_spectrum + modularity_spectrum diff --git a/doc/reference/randomness.rst b/doc/reference/randomness.rst new file mode 100644 index 0000000..4ca7280 --- /dev/null +++ b/doc/reference/randomness.rst @@ -0,0 +1,88 @@ +.. _randomness: + +Randomness +========== +.. currentmodule:: networkx + +Random Number Generators (RNGs) are often used when generating, drawing +and computing properties or manipulating networks. 
NetworkX provides +functions which use one of two standard RNGs: NumPy's package `numpy.random` +or Python's built-in package `random`. They each provide the same +algorithm for generating numbers (Mersenne Twister). Their interfaces +are similar (dangerously similar) and yet distinct. +They each provide a global default instance of their generator that +is shared by all programs in a single session. +For the most part you can use the RNGs as NetworkX has them set up and +you'll get reasonable pseudorandom results (results that are statistically +random, but created in a deterministic manner). + +Sometimes you want more control over how the numbers are generated. +In particular, you need to set the `seed` of the generator to make +your results reproducible -- either for scientific publication or +for debugging. Both RNG packages have easy functions to set the seed +to any integer, thus determining the subsequent generated values. +Since this package (and many others) use both RNGs you may need to +set the `seed` of both RNGs. Even if we strictly only used one of the +RNGs, you may find yourself using another package that uses the other. +Setting the state of the two global RNGs is as simple setting the +seed of each RNG to an arbitrary integer: + +.. nbplot:: + + >>> import random + >>> random.seed(246) # or any integer + >>> import numpy + >>> numpy.random.seed(4812) + +Many users will be satisfied with this level of control. + +For people who want even more control, we include an optional argument +to functions that use an RNG. This argument is called `seed`, but +determines more than the seed of the RNG. It tells the function which +RNG package to use, and whether to use a global or local RNG. + +.. nbplot:: + + >>> from networkx import path_graph, random_layout + >>> G = path_graph(9) + >>> pos = random_layout(G, seed=None) # use (either) global default RNG + >>> pos = random_layout(G, seed=42) # local RNG just for this call + >>> pos = random_layout(G, seed=numpy.random) # use numpy global RNG + >>> random_state = numpy.random.RandomState(42) + >>> pos = random_layout(G, seed=random_state) # use/reuse your own RNG + +Each NetworkX function that uses an RNG was written with one RNG package +in mind. It either uses `random` or `numpy.random` by default. +But some users want to only use a single RNG for all their code. +This `seed` argument provides a mechanism so that any function +can use a `numpy.random` RNG even if the function is written for `random`. +It works as follows. + +The default behavior (when `seed=None`) is to use the global RNG +for the function's preferred package. +If seed is set to an integer value, +a local RNG is created with the indicated seed value and +is used for the duration of that function (including any +calls to other functions) and then discarded. +Alternatively, you can specify `seed=numpy.random` to ensure that +the global numpy RNG is used whether the function expects it or not. +Finally, you can provide a numpy RNG to be used by the function. +The RNG is then available to use in other functions or even other +package like sklearn. +In this way you can use a single RNG for all random numbers +in your project. + +While it is possible to assign `seed` a `random`-style RNG for +NetworkX functions written for the `random` package API, +the numpy RNG interface has too +many nice features for us to ensure a `random`-style RNG will work in +all functions. In practice, you can do most things using only `random` +RNGs (useful if numpy is not available). 
But your experience will be +richer if numpy is available. + +To summarize, you can easily ignore the `seed` argument and use the global +RNGs. You can specify to use only the numpy global RNG with +`seed=numpy.random`. You can use a local RNG by providing an integer +seed value. And you can provide your own numpy RNG, reusing it for all +functions. It is easier to use numpy RNGs if you want a single RNG for +your computations. diff --git a/doc/reference/readwrite/gexf.rst b/doc/reference/readwrite/gexf.rst index 04708d2..3d2ffed 100644 --- a/doc/reference/readwrite/gexf.rst +++ b/doc/reference/readwrite/gexf.rst @@ -6,5 +6,6 @@ GEXF read_gexf write_gexf + generate_gexf relabel_gexf_graph diff --git a/doc/reference/readwrite/graphml.rst b/doc/reference/readwrite/graphml.rst index a07abd4..1206cae 100644 --- a/doc/reference/readwrite/graphml.rst +++ b/doc/reference/readwrite/graphml.rst @@ -6,5 +6,7 @@ GraphML read_graphml write_graphml + generate_graphml + parse_graphml diff --git a/doc/reference/readwrite/json_graph.rst b/doc/reference/readwrite/json_graph.rst index 62d5e7f..6ae934f 100644 --- a/doc/reference/readwrite/json_graph.rst +++ b/doc/reference/readwrite/json_graph.rst @@ -8,6 +8,8 @@ JSON node_link_graph adjacency_data adjacency_graph + cytoscape_data + cytoscape_graph tree_data tree_graph jit_data diff --git a/doc/reference/readwrite/pajek.rst b/doc/reference/readwrite/pajek.rst index 33570b1..b50caf8 100644 --- a/doc/reference/readwrite/pajek.rst +++ b/doc/reference/readwrite/pajek.rst @@ -7,6 +7,4 @@ Pajek read_pajek write_pajek parse_pajek - - - + generate_pajek diff --git a/doc/reference/utils.rst b/doc/reference/utils.rst index 9c8e24f..4d79ae7 100644 --- a/doc/reference/utils.rst +++ b/doc/reference/utils.rst @@ -15,6 +15,7 @@ Helper Functions flatten iterable is_list_of_ints + make_list_of_ints make_str generate_unique_node default_opener @@ -64,11 +65,3 @@ Cuthill-Mckee Ordering cuthill_mckee_ordering reverse_cuthill_mckee_ordering - -Context Managers ----------------- -.. automodule:: networkx.utils.contextmanagers -.. autosummary:: - :toctree: generated/ - - reversed diff --git a/doc/release/api_0.99.rst b/doc/release/api_0.99.rst index 0d6a29f..e2d4f07 100644 --- a/doc/release/api_0.99.rst +++ b/doc/release/api_0.99.rst @@ -5,10 +5,10 @@ Version 0.99 API changes The version networkx-0.99 is the penultimate release before networkx-1.0. We have bumped the version from 0.37 to 0.99 to indicate (in our unusual version number scheme) that this is a major -change to NetworkX. +change to NetworkX. We have made some significant changes, detailed below, to NetworkX -to improve performance, functionality, and clarity. +to improve performance, functionality, and clarity. Version 0.99 requires Python 2.4 or greater. @@ -18,8 +18,8 @@ http://groups.google.com/group/networkx-discuss Changes in base classes ======================= -The most significant changes are in the graph classes. -We have redesigned the Graph() and DiGraph() classes +The most significant changes are in the graph classes. +We have redesigned the Graph() and DiGraph() classes to optionally allow edge data. This change allows Graph and DiGraph to naturally represent weighted graphs and to hold arbitrary information on edges. @@ -31,7 +31,7 @@ weighted graphs and to hold arbitrary information on edges. - The Graph and DiGraph classes now allow self loops. - - The XGraph and XDiGraph classes are removed and replaced with + - The XGraph and XDiGraph classes are removed and replaced with MultiGraph and MultiDiGraph. 
MultiGraph and MultiDiGraph optionally allow parallel (multiple) edges between two nodes. @@ -56,7 +56,7 @@ edges() delete_node() ^^^^^^^^^^^^^ - The preferred name is now remove_node(). + The preferred name is now remove_node(). delete_nodes_from() @@ -79,37 +79,37 @@ delete_edges_from() add_edge() ^^^^^^^^^^ The add_edge() method no longer accepts an edge tuple (u,v) - directly. The tuple must be unpacked into individual nodes. + directly. The tuple must be unpacked into individual nodes. >>> import networkx as nx >>> u='a' >>> v='b' >>> e=(u,v) >>> G=nx.Graph() - + Old - >>> # G.add_edge((u,v)) # or G.add_edge(e) + >>> # G.add_edge((u,v)) # or G.add_edge(e) - New + New - >>> G.add_edge(*e) # or G.add_edge(*(u,v)) + >>> G.add_edge(*e) # or G.add_edge(*(u,v)) The * operator unpacks the edge tuple in the argument list. Add edge now has a data keyword parameter for setting the default (data=1) edge data. - + >>> # G.add_edge('a','b','foo') # add edge with string "foo" as data >>> # G.add_edge(1,2,5.0) # add edge with float 5 as data - + add_edges_from() ^^^^^^^^^^^^^^^^ Now can take list or iterator of either 2-tuples (u,v), - 3-tuples (u,v,data) or a mix of both. + 3-tuples (u,v,data) or a mix of both. Now has data keyword parameter (default 1) for setting the edge data for any edge in the edge list that is a 2-tuple. @@ -118,17 +118,17 @@ add_edges_from() has_edge() ^^^^^^^^^^ The has_edge() method no longer accepts an edge tuple (u,v) - directly. The tuple must be unpacked into individual nodes. + directly. The tuple must be unpacked into individual nodes. - Old: + Old: >>> # G.has_edge((u,v)) # or has_edge(e) - New: + New: - >>> G.has_edge(*e) # or has_edge(*(u,v)) + >>> G.has_edge(*e) # or has_edge(*(u,v)) True - + The * operator unpacks the edge tuple in the argument list. get_edge() @@ -136,7 +136,7 @@ get_edge() Now has the keyword argument "default" to specify what value to return if no edge is found. If not specified an exception is raised if no edge is found. - + The fastest way to get edge data for edge (u,v) is to use G[u][v] instead of G.get_edge(u,v) @@ -144,12 +144,12 @@ get_edge() degree_iter() ^^^^^^^^^^^^^ The degree_iter method now returns an iterator over pairs of (node, - degree). This was the previous behavior of degree_iter(with_labels=true) + degree). This was the previous behavior of degree_iter(with_labels=true) Also there is a new keyword weighted=False|True for weighted degree. subgraph() ^^^^^^^^^^ - The argument inplace=False|True has been replaced with copy=True|False. + The argument inplace=False|True has been replaced with copy=True|False. Subgraph no longer takes create_using keyword. To change the graph type either make a copy of @@ -173,11 +173,11 @@ __getitem__() >>> G.neighbors(0) # doctest: +SKIP [1] - + This change allows algorithms to use the underlying dict-of-dict - representation through G[v] for substantial performance gains. + representation through G[v] for substantial performance gains. Warning: The returned dictionary should not be modified as it may - corrupt the graph data structure. Make a copy G[v].copy() if you + corrupt the graph data structure. Make a copy G[v].copy() if you wish to modify the dict. 
@@ -200,13 +200,13 @@ node_boundary() ^^^^^^^^^^^^^^^ now a function -edge_boundary() -^^^^^^^^^^^^^^^ +edge_boundary() +^^^^^^^^^^^^^^^ now a function -is_directed() +is_directed() ^^^^^^^^^^^^^ - use the directed attribute + use the directed attribute >>> G=nx.DiGraph() >>> # G.directed @@ -216,8 +216,8 @@ G.out_edges() ^^^^^^^^^^^^^ use G.edges() -G.in_edges() -^^^^^^^^^^^^ +G.in_edges() +^^^^^^^^^^^^ use >>> G = nx.DiGraph() @@ -251,12 +251,12 @@ Imports ------- Some of the code modules were moved into subdirectories. -Import statements such as:: +Import statements such as:: import networkx.centrality from networkx.centrality import * -may no longer work (including that example). +may no longer work (including that example). Use either @@ -269,7 +269,7 @@ or Self-loops ---------- For Graph and DiGraph self loops are now allowed. -This might affect code or algorithms that add self loops +This might affect code or algorithms that add self loops which were intended to be ignored. Use the methods @@ -285,7 +285,7 @@ Copy Copies of NetworkX graphs including using the copy() method now return complete copies of the graph. This means that all connection information is copied--subsequent changes in the -copy do not change the old graph. But node keys and edge +copy do not change the old graph. But node keys and edge data in the original and copy graphs are pointers to the same data. prepare_nbunch @@ -296,20 +296,20 @@ Used internally - now called nbunch_iter and returns an iterator. Converting your old code to Version 0.99 ======================================== -Mostly you can just run the code and python will raise an exception +Mostly you can just run the code and python will raise an exception for features that changed. Common places for changes are - Converting XGraph() to either Graph or MultiGraph - Converting XGraph.edges() to Graph.edges(data=True) - Switching some rarely used methods to attributes (e.g. directed) or to functions (e.g. node_boundary) - - If you relied on the old default edge data being None, you will + - If you relied on the old default edge data being None, you will have to account for it now being 1. -You may also want to look through your code for places which could +You may also want to look through your code for places which could improve speed or readability. The iterators are helpful with large graphs and getting edge data via G[u][v] is quite fast. You may also -want to change G.neighbors(n) to G[n] which returns the dict keyed by +want to change G.neighbors(n) to G[n] which returns the dict keyed by neighbor nodes to the edge data. It is faster for many purposes but does not work well when you are changing the graph. diff --git a/doc/release/api_1.0.rst b/doc/release/api_1.0.rst index d9ea824..0da8789 100644 --- a/doc/release/api_1.0.rst +++ b/doc/release/api_1.0.rst @@ -39,13 +39,13 @@ Graph attributes ---------------- Each graph keeps a dictionary of key=value attributes in the member G.graph. These attributes can be accessed -directly using G.graph or added at instantiation using +directly using G.graph or added at instantiation using keyword arguments. >>> G=nx.Graph(region='Africa') >>> G.graph['color']='green' >>> G.graph -{'color': 'green', 'region': 'Africa'} +{'region': 'Africa', 'color': 'green'} Node attributes --------------- @@ -65,7 +65,7 @@ Add node attributes using add_node(), add_nodes_from() or G.node Edge attributes --------------- Each edge has a corresponding dictionary of attributes. 
-The default edge data is now an empty dictionary of attributes +The default edge data is now an empty dictionary of attributes and adding attributes to edges is optional. A common use case is to add a weight attribute to an edge: @@ -98,11 +98,11 @@ add_node() add_nodes_from() -^^^^^^^^^^^^^^^^ - Now takes optional keyword=value attributes or a dictionary of +^^^^^^^^^^^^^^^^ + Now takes optional keyword=value attributes or a dictionary of attributes applied to all affected nodes. - >>> G.add_nodes_from([1,2],time='2pm') # all nodes have same attribute + >>> G.add_nodes_from([1,2],time='2pm') # all nodes have same attribute add_edge() ^^^^^^^^^^ @@ -111,8 +111,8 @@ add_edge() >>> G.add_edge(1, 2, weight=4.7 ) add_edges_from() -^^^^^^^^^^^^^^^^ - Now takes optional keyword=value attributes or a dictionary of +^^^^^^^^^^^^^^^^ + Now takes optional keyword=value attributes or a dictionary of attributes applied to all affected edges. >>> G.add_edges_from([(3,4),(4,5)], color='red') @@ -151,7 +151,7 @@ to_directed(), to_undirected() subgraph() ^^^^^^^^^^ - With copy=True now returns a deep copy of the graph + With copy=True now returns a deep copy of the graph (copies all underlying data and attributes for nodes and edges). >>> G = nx.Graph() @@ -160,7 +160,7 @@ subgraph() add_cycle(), add_path(), add_star() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Now take optional keyword=value attributes or a dictionary of + Now take optional keyword=value attributes or a dictionary of attributes which are applied to all edges affected by the method. >>> G = nx.Graph() @@ -171,7 +171,7 @@ Methods removed delete_node() ^^^^^^^^^^^^^ - The preferred name is now remove_node(). + The preferred name is now remove_node(). delete_nodes_from() ^^^^^^^^^^^^^^^^^^^ @@ -189,7 +189,7 @@ delete_edges_from() has_neighbor(): - Use has_edge() + Use has_edge() get_edge() ^^^^^^^^^^ @@ -213,13 +213,13 @@ Methods added ------------- add_weighted edges_from() -^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^ Convenience method to add weighted edges to graph using a list of 3-tuples (u,v,weight). get_edge_data() ^^^^^^^^^^^^^^^ - Renamed from get_edge(). + Renamed from get_edge(). The fastest way to get edge data for edge (u,v) is to use G[u][v] instead of G.get_edge_data(u,v) @@ -249,8 +249,8 @@ UbiGraph Additional functions/generators =============================== -ego_graph, stochastic_graph, PageRank algorithm, HITS algorithm, -GraphML writer, freeze, is_frozen, A* algorithm, +ego_graph, stochastic_graph, PageRank algorithm, HITS algorithm, +GraphML writer, freeze, is_frozen, A* algorithm, directed scale-free generator, random clustered graph. @@ -261,7 +261,7 @@ Weighted edges -------------- Edge information is now stored in an attribution dictionary -so all edge data must be given a key to identify it. +so all edge data must be given a key to identify it. There is currently only one standard/reserved key, 'weight', which is used by algorithms and functions that use weighted edges. The @@ -272,7 +272,7 @@ users to assign as needed. >>> G.add_edge(1,2,weight=3.1415) # add the edge 1-2 with a weight >>> G[1][2]['weight']=2.3 # set the weight to 2.3 -Similarly, for direct access the edge data, use +Similarly, for direct access the edge data, use the key of the edge data to retrieve it. 
>>> w = G[1][2]['weight']
diff --git a/doc/release/api_1.4.rst b/doc/release/api_1.4.rst
index 64aef6b..0f80986 100644
--- a/doc/release/api_1.4.rst
+++ b/doc/release/api_1.4.rst
@@ -4,7 +4,7 @@ Version 1.4 notes and API changes
 We have made some API changes, detailed below, to add clarity.
 This page reflects changes from networkx-1.3 to networkx-1.4.
-For changes from earlier versions to networkx-1.0 see 
+For changes from earlier versions to networkx-1.0 see
 :doc:`Version 1.0 API changes <api_1.0>`.
 
 Please send comments and questions to the networkx-discuss mailing list:
diff --git a/doc/release/api_1.5.rst b/doc/release/api_1.5.rst
index f15e3ea..bda3d36 100644
--- a/doc/release/api_1.5.rst
+++ b/doc/release/api_1.5.rst
@@ -10,19 +10,19 @@ http://groups.google.com/group/networkx-discuss .
 Weighted graph algorithms
 -------------------------
 
-Many 'weighted' graph algorithms now take optional parameter to
+Many 'weighted' graph algorithms now take an optional parameter to
 specify which edge attribute should be used for the weight
 (default='weight') (ticket https://networkx.lanl.gov/trac/ticket/509)
 
 In some cases the parameter name was changed from weighted_edges,
-or weighted, to weight.  Here is how to specify which edge attribute
+or weighted, to weight.  Here is how to specify which edge attribute
 will be used in the algorithms:
 
 - Use weight=None to consider all weights equally (unweighted case)
 
 - Use weight=True or weight='weight' to use the 'weight' edge attribute
 
-- Use weight='other' to use the 'other' edge attribute
+- Use weight='other' to use the 'other' edge attribute
 
 Algorithms affected are:
 
@@ -41,7 +41,7 @@ single_source_dijkstra_path_basic, astar_path, astar_path_length
 Random geometric graph
 ----------------------
 
-The random geometric graph generator has been simplified. 
-It no longer supports the create_using, repel, or verbose parameters. 
+The random geometric graph generator has been simplified.
+It no longer supports the create_using, repel, or verbose parameters.
 An optional pos keyword was added to allow specification of node positions.
diff --git a/doc/release/api_1.6.rst b/doc/release/api_1.6.rst
index 9e3be99..9d45d62 100644
--- a/doc/release/api_1.6.rst
+++ b/doc/release/api_1.6.rst
@@ -12,14 +12,14 @@ Graph Classes
 
 The degree* methods in the graph classes (Graph, DiGraph, MultiGraph,
 MultiDiGraph) now take an optional weight= keyword that allows computing
-weighted degree with arbitrary (numerical) edge attributes.  Setting
+weighted degree with arbitrary (numerical) edge attributes.  Setting
 weight=None is equivalent to the previous weighted=False.
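A quick sketch of the new keyword (the 'cost' attribute name here is
illustrative, not part of the API):

>>> G = nx.Graph()
>>> G.add_edge(1, 2, cost=7)
>>> G.degree(1, weight='cost')
7
>>> G.degree(1, weight=None)
1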
 Weighted graph algorithms
 -------------------------
 
-Many 'weighted' graph algorithms now take optional parameter to
+Many 'weighted' graph algorithms now take an optional parameter to
 specify which edge attribute should be used for the weight
 (default='weight') (ticket https://networkx.lanl.gov/trac/ticket/573)
@@ -30,11 +30,11 @@ how to specify which edge attribute will be used in the algorithms:
 
 - Use weight='weight' to use the 'weight' edge attribute
 
-- Use weight='other' to use the 'other' edge attribute
+- Use weight='other' to use the 'other' edge attribute
 
 Algorithms affected are:
 
-to_scipy_sparse_matrix, 
+to_scipy_sparse_matrix,
 clustering,
 average_clustering,
 bipartite.degree,
@@ -95,13 +95,13 @@ Other
 
 * condensation
 
-  The condensation algorithm now takes a second argument (scc) and returns a 
+  The condensation algorithm now takes a second argument (scc) and returns a
   graph with nodes labeled as integers instead of node tuples.
 
 * degree connectivity
 
-  average_in_degree_connectivity and average_out_degree_connectivity have
-  have been replaced with
+  average_in_degree_connectivity and average_out_degree_connectivity
+  have been replaced with
 
   average_degree_connectivity(G, source='in', target='in')
 
@@ -111,8 +111,8 @@ Other
 
 * neighbor degree
 
-  average_neighbor_in_degree and average_neighbor_out_degreey have
-  have been replaced with
+  average_neighbor_in_degree and average_neighbor_out_degree
+  have been replaced with
 
   average_neighbor_degree(G, source='in', target='in')
 
diff --git a/doc/release/contribs.py b/doc/release/contribs.py
index 3646f63..2696367 100644
--- a/doc/release/contribs.py
+++ b/doc/release/contribs.py
@@ -1,6 +1,5 @@
-#!/usr/bin/env python
 # https://github.com/scikit-image/scikit-image/blob/master/doc/release/contribs.py
-import subprocess
+from subprocess import check_output
 import sys
 import string
 import shlex
@@ -11,41 +10,50 @@ tag = sys.argv[1]
 
+
 def call(cmd):
-    return subprocess.check_output(shlex.split(cmd), universal_newlines=True).split('\n')
+    return check_output(shlex.split(cmd), universal_newlines=True).split("\n")
+
 
-tag_date = call("git log -n1 --format='%%ci' %s" % tag)[0]
-print("Release %s was on %s\n" % (tag, tag_date))
+tag_date = call(f"git log -n1 --format='%ci' {tag}")[0]
+print(f"Release {tag} was on {tag_date}\n")
 
-merges = call("git log --since='%s' --merges --format='>>>%%B' --reverse" % tag_date)
+merges = call(f"git log --since='{tag_date}' --merges --format='>>>%B' --reverse")
 merges = [m for m in merges if m.strip()]
-merges = '\n'.join(merges).split('>>>')
-merges = [m.split('\n')[:2] for m in merges]
+merges = "\n".join(merges).split(">>>")
+merges = [m.split("\n")[:2] for m in merges]
 merges = [m for m in merges if len(m) == 2 and m[1].strip()]
 
-num_commits = call("git rev-list %s..HEAD --count" % tag)[0]
-print("A total of %s changes have been committed.\n" % num_commits)
+num_commits = call(f"git rev-list {tag}..HEAD --count")[0]
+print(f"A total of {num_commits} changes have been committed.\n")
+
+# Use filter to remove empty strings
+commits = filter(None, call(f"git log --since='{tag_date}' --pretty=%s --reverse"))
+for c in commits:
+    print("- " + c)
 
-print("It contained the following %d merges:\n" % len(merges))
+print(f"\nIt contained the following {len(merges)} merges:\n")
 for (merge, message) in merges:
-    if merge.startswith('Merge pull request #'):
-        PR = ' (%s)' % merge.split()[3]
+    if merge.startswith("Merge pull request #"):
+        PR = f" ({merge.split()[3]})"
     else:
-        PR = ''
+        PR = ""
 
-    print('- ' + message +
PR) + print("- " + message + PR) print("\nMade by the following committers [alphabetical by last name]:\n") -authors = call("git log --since='%s' --format=%%aN" % tag_date) +authors = call(f"git log --since='{tag_date}' --format=%aN") authors = [a.strip() for a in authors if a.strip()] + def key(author): author = [v for v in author.split() if v[0] in string.ascii_letters] if len(author) > 0: return author[-1] + authors = sorted(set(authors), key=key) for a in authors: - print('- ' + a) + print("- " + a) diff --git a/doc/release/index.rst b/doc/release/index.rst index 5a7c226..4744dfc 100644 --- a/doc/release/index.rst +++ b/doc/release/index.rst @@ -4,10 +4,25 @@ API changes *********** +We don't use semantic versioning. The first number indicates that we have +made a major API break (e.g., 1.x to 2.x), which has happened once and probably +won't happen again for some time. The point releases are new versions and may +contain minor API breakage. Usually, this happens after a one cycle deprecation +period. + +.. warning:: + Since we don't normally make bug-fix only releases, it may not make sense + for you to use ``~=`` as a pip version specifier. + .. toctree:: :maxdepth: 2 release_dev + release_2.4 + release_2.3 + release_2.2 + release_2.1 + release_2.0 api_1.11 api_1.10 api_1.9 diff --git a/doc/release/migration_guide_from_1.x_to_2.0.rst b/doc/release/migration_guide_from_1.x_to_2.0.rst index c0fb53f..f2d1d8b 100644 --- a/doc/release/migration_guide_from_1.x_to_2.0.rst +++ b/doc/release/migration_guide_from_1.x_to_2.0.rst @@ -81,9 +81,10 @@ views. >>> H = nx.Graph() >>> H.add_nodes_from([1, 'networkx', '2.0']) >>> G.nodes & H.nodes # finding common nodes in 2 graphs - set([1]) - >>> G.nodes | H.nodes # union of nodes in 2 graphs - set([0, 1, 2, 3, 4, 'networkx', '2.0']) + {1} + >>> # union of nodes in 2 graphs + >>> G.nodes | H.nodes # doctest: +SKIP + {0, 1, 2, 3, 4, 'networkx', '2.0'} Similarly, ``G.edges`` now returns an EdgeView instead of a list of edges and it also supports set operations. @@ -94,7 +95,7 @@ also supports set operations. [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] ``G.degree`` now returns a DegreeView. This is less dict-like than the other views -in the sense that it iterates over (node, degree) pairs, does not provide +in the sense that it iterates over (node, degree) pairs, does not provide keys/values/items/get methods. It does provide lookup ``G.degree[n]`` and ``(node, degree)`` iteration. A dict keyed by nodes to degree values can be easily created if needed as ``dict(G.degree)``. @@ -117,7 +118,7 @@ easily created if needed as ``dict(G.degree)``. The degree of an individual node can be calculated by ``G.degree[node]``. Similar changes have been made to ``in_degree`` and ``out_degree`` for directed graphs. If you want just the degree values, here are some options. -They are shown for ``in_degree`` of a ``DiGraph``, but similar ideas work +They are shown for ``in_degree`` of a ``DiGraph``, but similar ideas work for ``out_degree`` and ``degree`` >>> DG = nx.DiGraph() @@ -130,8 +131,13 @@ for ``out_degree`` and ``degree`` >>> [deg[n] for n in [1, 3]] # using lookup for only some nodes [1, 0] - >>> dict(DG.in_degree([1, 3])).values() # works for nx-1 and nx-2 - [1, 0] + >>> for node, in_deg in dict(DG.in_degree).items(): # works for nx1 and nx2 + ... 
print(node, in_deg) + 1 1 + 2 1 + 3 0 + >>> dict(DG.in_degree([1, 3])).values() # works for nx1 and nx2 + dict_values([1, 0]) >>> # DG.in_degree(nlist) creates a restricted view for only nodes in nlist. >>> # but see the fourth option above for using lookup instead. >>> list(d for n, d in DG.in_degree([1, 3])) @@ -148,7 +154,7 @@ If ``n`` is a node in ``G``, then ``G.neighbors(n)`` returns an iterator. >>> n = 1 >>> G.neighbors(n) - <dictionary-keyiterator object at ...> + <dict_keyiterator object at ...> >>> list(G.neighbors(n)) [0, 2, 3, 4] @@ -169,9 +175,9 @@ DiGraphViews behave similar to GraphViews, but have a few more methods. >>> D.out_degree[2] 2 >>> D.in_edges - InEdgeView([(1, 2), (1, 3), (2, 3), (2, 4)]) + InEdgeView([(1, 2), (2, 3), (1, 3), (2, 4)]) >>> list(D.in_edges()) - [(1, 2), (1, 3), (2, 3), (2, 4)] + [(1, 2), (2, 3), (1, 3), (2, 4)] >>> D.out_edges(2) OutEdgeDataView([(2, 3), (2, 4)]) >>> list(D.out_edges(2)) @@ -181,11 +187,11 @@ DiGraphViews behave similar to GraphViews, but have a few more methods. >>> list(D.in_degree) [(1, 0), (2, 1), (3, 2), (4, 1)] >>> D.successors(2) - <dictionary-keyiterator object at ...> + <dict_keyiterator object at ...> >>> list(D.successors(2)) [3, 4] >>> D.predecessors(2) - <dictionary-keyiterator object at ...> + <dict_keyiterator object at ...> >>> list(D.predecessors(2)) [1] @@ -241,11 +247,11 @@ because it may be a different graph type (directed/undirected) than the view. If ``nbunch`` was a single node source, then the same effect can now be achieved using the ``subgraph`` operator: - >>> nx.topological_sort(G.subgraph(nx.descendants(G, nbunch))) - + nx.topological_sort(G.subgraph(nx.descendants(G, nbunch))) + To achieve a reverse topological sort, the output should be converted to a list: - >>>> reversed(list(nx.topological_sort(G))) + reversed(list(nx.topological_sort(G))) ------- diff --git a/doc/release/release_2.0.rst b/doc/release/release_2.0.rst index 08fbce3..4e95d88 100644 --- a/doc/release/release_2.0.rst +++ b/doc/release/release_2.0.rst @@ -30,7 +30,7 @@ API Changes With the release of NetworkX 2.0 we are moving towards a view/iterator reporting API. We used to have two methods for the same property of the graph, one that returns a list and one that returns an iterator. With 2.0 we have replaced them with a view. - A view is a read-only object that is quick to create, automatically updated, and + A view is a read-only object that is quick to create, automatically updated, and provides basic access like iteration, membership and set operations where appropriate. For example, ``G.nodes()`` used to return a list and ``G.nodes_iter()`` an iterator. Now ``G.nodes()`` returns a view and ``G.nodes_iter()`` is removed. ``G.degree()`` @@ -59,7 +59,7 @@ API Changes >>> G.nodes[3] {'color': 'blue'} >>> G.nodes & {3, 4, 5} - set([3, 4]) + {3, 4} The following methods have changed: @@ -198,7 +198,7 @@ API Changes * [`#2620 <https://github.com/networkx/networkx/pull/2620>`_] Removed ``draw_nx``, please use ``draw`` or ``draw_networkx``. - + * [`#1662 <https://github.com/networkx/networkx/pull/1662>`_] Rewrote ``topolgical_sort`` as a generator. It no longer accepts ``reverse`` or ``nbunch`` arguments and is slightly faster. diff --git a/doc/release/release_2.2.rst b/doc/release/release_2.2.rst new file mode 100644 index 0000000..b093f27 --- /dev/null +++ b/doc/release/release_2.2.rst @@ -0,0 +1,166 @@ +Announcement: NetworkX 2.2 +========================== + +We're happy to announce the release of NetworkX 2.2! 
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+For more information, please visit our `website <http://networkx.github.io/>`_
+and our `gallery of examples
+<https://networkx.github.io/documentation/latest/auto_examples/index.html>`_.
+Please send comments and questions to the `networkx-discuss mailing list
+<http://groups.google.com/group/networkx-discuss>`_.
+
+Highlights
+----------
+
+This release is the result of 8 months of work with over 149 commits by
+58 contributors. Highlights include:
+
+- Add support for Python 3.7. This is the last release to support Python 2.
+- Uniform random number generator (RNG) handling which defaults to global
+  RNGs but allows specification of a single RNG for all random numbers in NX.
+- Improved GraphViews to ease subclassing and remove cyclic references
+  which caused trouble with deepcopy and pickle.
+- New Graph method `G.update(H)`.
+
+Improvements
+------------
+
+Each function that uses random numbers now takes a `seed` argument to control
+the random number generation (RNG). By default the global default RNG is
+used: more precisely, the `random` package's default RNG or the numpy.random
+default RNG. You can also create your own RNG and pass it into the `seed`
+argument. Finally, you can use an integer to indicate the state to set for
+the RNG. In this case a local RNG is created, leaving the global RNG untouched.
+Some functions use `random` and some use `numpy.random`, but we have written
+a translator so that all functions CAN take a `numpy.random.RandomState`
+object. So a single RNG can be used for the entire package.
+
+Cyclic references between graph classes and views have been removed to ease
+subclassing without memory leaks. Graphs no longer hold references to views.
+
+Cyclic references between a graph and itself have been removed by eliminating
+G.root_graph. It turns out this was an avoidable construct anyway.
+
+GraphViews have been reformulated as functions, removing much of the subclass
+trouble with the copy/to_directed/subgraph methods. This also simplifies the
+graph view code base and API. There are now three functions that create
+graph views: generic_graph_view(graph, create_using), reverse_view(digraph)
+and subgraph_view(graph, node_filter, edge_filter).
+
+GraphML can now be written with attributes using numpy numeric types.
+In particular, np.float64 and np.int64 no longer need to be converted to
+Python float and int to be written. They are still written as generic floats,
+so reading them back in will not restore the numpy types.
+
+A generator following the Stochastic Block Model is now available.
+
+New function `all_topological_sorts` to generate all possible topological sorts.
+
+New functions for tree width and tree decompositions.
+
+Functions for Clauset-Newman-Moore modularity-max community detection.
+
+Functions for small world analysis, directed clustering and perfect matchings,
+eulerizing a graph, depth-limited BFS, percolation centrality,
+planarity checking.
+
+The shortest_path generic and convenience functions now have a `method`
+parameter to choose between dijkstra and bellman-ford in the weighted case.
+Default is dijkstra (which was the only option before).
+
+API Changes
+-----------
+empty_graph has taken over the functionality from
+nx.convert._prep_create_using which was removed.
+
+The `create_using` argument (used in many functions) should now be a
+Graph Constructor like nx.Graph or nx.DiGraph.
+It can still be a graph instance, which will be cleared before use, but the
+preferred use is a constructor.
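For instance, a minimal sketch of the two conventions above (the graph sizes,
edge probability, and seed value are arbitrary illustrations, not taken from
the release notes)::

    import networkx as nx
    import numpy as np

    # One RNG object reused across calls; the global RNGs stay untouched.
    rng = np.random.RandomState(42)
    G = nx.fast_gnp_random_graph(10, 0.3, seed=rng)
    H = nx.random_regular_graph(3, 10, seed=rng)

    # `create_using` now prefers a graph constructor over an instance.
    D = nx.path_graph(5, create_using=nx.DiGraph)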
+
+New Base Class Method: update
+H.update(G) adds the nodes, edges and graph attributes of G to H.
+H.update(edges=e, nodes=n) adds the edges and nodes from containers e and n.
+H.update(e) and H.update(nodes=n) are also allowed.
+The first argument is treated as a graph if it has `edges` and `nodes`
+attributes; otherwise it is treated as a list of edges.
+
+The bellman_ford predecessor dicts had the sentinel value `[None]` for
+source nodes. That has been changed so source nodes have pred value `[]`.
+
+
+Deprecations
+------------
+
+The Graph class method `fresh_copy` is deprecated - simply use `__class__`.
+The GraphView classes are deprecated in preference to the function
+interface. Specifically, `ReverseView` and `ReverseMultiView` are
+replaced by `reverse_view`. `SubGraph`, `SubDiGraph`, `SubMultiGraph`
+and `SubMultiDiGraph` are replaced by `subgraph_view`.
+And `GraphView`, `DiGraphView`, `MultiGraphView`, `MultiDiGraphView`
+are deprecated in favor of `generic_graph_view(graph, create_using)`.
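A minimal sketch of the new `update` method and of the view functions that
replace the deprecated classes (the node labels and the node filter are
arbitrary)::

    import networkx as nx

    G = nx.path_graph(3)
    H = nx.Graph()
    H.update(G)                          # add G's nodes, edges, graph attrs
    H.update(edges=[(7, 8)], nodes=[9])  # or update from containers

    D = nx.DiGraph([(1, 2), (2, 3)])
    R = nx.reverse_view(D)                      # replaces ReverseView
    S = nx.subgraph_view(D, lambda n: n != 3)   # node-filtered, read-only view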
+
+
+Contributors to this release
+----------------------------
+
+- Luca Baldesi
+- William Bernoudy
+- Alexander Condello
+- Saurav Das
+- Dormir30
+- Graham Fetterman
+- Robert Gmyr
+- Thomas Grainger
+- Benjamin M. Gyori
+- Ramiro Gómez
+- Darío Hereñú
+- Mads Jensen
+- Michael Johnson
+- Pranay Kanwar
+- Aabir Abubaker Kar
+- Jacek Karwowski
+- Mohammed Kashif
+- David Kraeutmann
+- Winni Kretzschmar
+- Ivan Laković
+- Daniel Leicht
+- Katrin Leinweber
+- Alexander Lenail
+- Lonnen
+- Ji Ma
+- Erwan Le Merrer
+- Jarrod Millman
+- Baurzhan Muftakhidinov
+- Neil
+- Jens P
+- Edward L Platt
+- Guillaume Plique
+- Miguel Sozinho Ramalho
+- Lewis Robbins
+- Romain
+- Federico Rosato
+- Tom Russell
+- Dan Schult
+- Gabe Schwartz
+- Aaron Smith
+- Leo Torres
+- Martin Váňa
+- Ruaridh Williamson
+- Huon Wilson
+- Haochen Wu
+- Yuto Yamaguchi
+- Felix Yan
+- Jean-Gabriel Young
+- aparamon
+- armando1793
+- aweltsch
+- chebee7i
+- hongshaoyang
+- komo-fr
+- leamingrad
+- luzpaz
+- mtrenfield
+- regstrtn
diff --git a/doc/release/release_2.3.rst b/doc/release/release_2.3.rst
new file mode 100644
index 0000000..87b1d54
--- /dev/null
+++ b/doc/release/release_2.3.rst
@@ -0,0 +1,98 @@
+Announcement: NetworkX 2.3
+==========================
+
+We're happy to announce the release of NetworkX 2.3!
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+For more information, please visit our `website <http://networkx.github.io/>`_
+and our `gallery of examples
+<https://networkx.github.io/documentation/latest/auto_examples/index.html>`_.
+Please send comments and questions to the `networkx-discuss mailing list
+<http://groups.google.com/group/networkx-discuss>`_.
+
+Highlights
+----------
+
+This release is the result of 6 months of work with over 92 pull requests by
+30 contributors. Highlights include:
+
+- Dropped support for Python 2. We are no longer supporting Python 2.7 and we
+  will start changing code to take advantage of Python 3 features we couldn't
+  before.
+- Added some Moral Graph analysis functions.
+- Enable matplotlib drawing using curved arrows via connectionstyle parameter.
+- Remove ticks and axes labels from matplotlib plots.
+- Two new generators of Harary Graphs.
+- Added Dual Barabasi-Albert model
+- Added VoteRank algorithm
+- Added Equitable coloring algorithms
+- Added planar layout algorithms
+- Les Miserables network example
+- Javascript example update
+
+Improvements
+------------
+
+- Change default colors to be color-blind friendly
+- Many bug fixes and documentation improvements
+- Speed up of simple_cycles
+- Improvements for reading various formats like GML, GEXF, GraphML
+- Allow subclassing to access node_attr_dict_factory
+
+
+API Changes
+-----------
+- The G.fresh_copy() mechanism for creating an empty graph of the same
+  type (introduced in v2.0) did not play nicely with pickle and other tools.
+  So, we have removed the code that caused a need for it. Instead you
+  should use the more natural G.__class__() syntax to get an empty graph
+  of the same type as G.
+
+Deprecations
+------------
+- The Graph.fresh_copy() method is deprecated; use Graph.__class__() instead.
+- ReverseView class removed in favor of reverse_view() function.
+
+Contributors to this release
+----------------------------
+
+- Mike Babst
+- Jonathan Barnoud
+- Scott Chow
+- Jon Crall
+- Clayton A Davis
+- Michaël Defferrard
+- Fredrik Erlandsson
+- Eyal
+- Tanay Gahlot
+- Matthew Gilbert
+- Øyvind Heddeland Instefjord
+- Hongwei Jin
+- Kieran
+- Dongkwan Kim
+- Julien Klaus
+- Warren W. Kretzschmar
+- Elias Kuthe
+- Eric Ma
+- Christoph Martin
+- Jarrod Millman
+- Issa Moradnejad
+- Moradnejad
+- Niema Moshiri
+- Ramil Nugmanov
+- Jens P
+- Benjamin Peterson
+- Edward L Platt
+- Matteo Pozza
+- Antoine Prouvost
+- Mickaël Schoentgen
+- Dan Schult
+- Johannes Schulte
+- Mridul Seth
+- Weisheng Si
+- Utkarsh Upadhyay
+- damianos
+- guidoeco
+- jeanfrancois8512
+- komo-fr
+- last2sword
diff --git a/doc/release/release_2.4.rst b/doc/release/release_2.4.rst
new file mode 100644
index 0000000..face96e
--- /dev/null
+++ b/doc/release/release_2.4.rst
@@ -0,0 +1,409 @@
+Announcement: NetworkX 2.4
+==========================
+
+We're happy to announce the release of NetworkX 2.4!
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+For more information, please visit our `website <http://networkx.github.io/>`_
+and our `gallery of examples
+<https://networkx.github.io/documentation/latest/auto_examples/index.html>`_.
+Please send comments and questions to the `networkx-discuss mailing list
+<http://groups.google.com/group/networkx-discuss>`_.
+
+Highlights
+----------
+
+This release is the result of 6 months of work with over 200 commits by
+67 contributors.
Highlights include: + +- Remove deprecated code from 1.x +- Support for Python 3.8 +- Switched to pytest for testing +- Last release to support Python 3.5 + +New Functions: + +- barycenter functions +- Bethe Hessian matrix function +- Eulerian Path methods +- group centrality measures +- subgraph monomorphisms +- k-truss algorithms +- onion decomposition +- resistance distance +- asteroidal triples +- non-randomness measures +- linear prufing +- minimum weight bipartite matching +- Incremental closeness centrality +- ISMAGS subgraph isomorphism algorithm +- create chordal graph of a graph + +New generators + +- Binomial tree generator +- Directed joint degree generator +- Random internet AS graph generator + +New for Layouts + +- spiral node layout routine +- support for 3d layouts + + +Improvements +------------ +- allow average shortest path to use Floyd-Warshall method +- improve read/write of GML, GEXF, GraphML +- allow string or json object as input to jit_graph +- attempt to allow numpy.array input in place of lists in more places +- faster strongly connected components +- faster Floyd-Warshall Optimization +- faster global efficiency +- faster transitive closure +- fix unionfind; betweenness_subset; lexico-topo-sort; A*; + inverse_line_graph; async label propagation; edgelist reading; + Gomory-Hu flow method; label_propagation; partial_duplication; + shell_layout with 1 node in shell; from_pandas_edgelist +- Documentation improvement and fixes + + +API Changes +----------- + +A utility function is_list_of_ints became is_bunch_of_ints +and now tests int(item)==item instead of isinstance(_, int) +This allows e.g. floats whose values are integer. + +Added utility make_list_of_ints to convert containers of +integer values to lists of integers + + +Deprecations +------------ + +Removed functions (marked as deprecated in NetworkX 2.1): + +- attracting_component_subgraphs +- connected_component_subgraphs +- weakly_connected_component_subgraphs +- strongly_connected_component_subgraphs +- biconnected_component_subgraphs +- See docs for component functions for how to get subgraphs. + +Graph Object methods removed (marked as deprecated 2.1) + +- G.add_path +- G.add_cycle +- G.add_star +- G.nodes_with_selfloops +- G.number_of_selfloops +- G.selfloop_edges +- These are now NetworkX functions, e.g. nx.add_star(G, 5) +- G.node --> use G.nodes +- G.fresh_copy --> use G.__class__ + +Remove old names for graphview functions. + +- ReverseView +- SubGraph +- SubMultiGraph +- SubMultiDiGraph +- SubDiGraph +- GraphView +- DiGraphView +- MultiGraphView +- MultiDiGraphView +- MultiReverseView +- Use reverse_view, subgraph_view and generic_graph_view. + +Pull requests and commits merged in this release +------------------------------------------------ + +A total of 205 changes have been committed. + +- Bump release version +- algorithms/traversal/edgebfs name fix (#3397) +- Add see also links (#3403) +- Add the reference for the Harary graph generators (#3407) +- typo: swap source and target (#3413) +- Fix spring_layout bug with fixed nodes (#3415) +- Move LFR_benchmark to generators (#3411) +- Add barycenter algorithm (#2939) +- Add bethe hessian matrix (#3401) +- Binomial trees generator (#3409) +- Fix edge_color inconsistency with node_color and description. 
(#3395) +- Adding module for group centrality measures (#3421) +- Improve edgelist See Also (#3423) +- Typo fix (#3424) +- Add doc warning about self-loops for adamic_adar_index (#3427) +- Fix UnionFind set extraction (#3224) +- add required argument to `write_graphml` example (#3429) +- Fix centrality betweeness subset (#3425) +- Add two versions of Simrank similarity (#3222) +- Fixed typo +- Merge pull request #3436 from nandahkrishna/fix-typo-betweenness-centrality-subset-test +- Reorder and complete doc (#3438) +- added topo_order parameter to functions that rely on topological_sort (#3447) +- Implemented subgraph monomorphism (#3435) +- Set seed in random_degree_sequence_graph docstring test (#3451) +- Replace cb.iterable with np.iterable (#3458) +- don't remove ticks of other pyplot axes (#3476) +- Fix typo in "G>raph Modelling Language" (#3468) +- Naive k-truss algorithm implementation. (#3462) +- Adding onion decomposition (#3461) +- New Feature - Resistance Distance (#3385) +- No multigraphs for betweenness (#3454) +- Wheels are python 3 only +- Fix deprecation warning with Python 3.7 (#3487) +- Fix dfs_preorder_nodes docstring saying "edges" instead of "nodes" (#3484) +- Added group closeness and group degree centralities (#3437) +- Fixed incorrect docs (#3495) +- Fixes Issue #3493 - Bug in lexicographical_topological_sort() (#3494) +- AT-free graph recognition (#3377) +- Update introduction.rst (#3504) +- Full join operation and cograph generator (#3503) +- Optimize the strongly connected components algorithm. (#3516) +- Adding non-randomness measures for graphs (#3515) +- Added safeguards (input graph G) for non-randomness measures (#3526) +- Optimize the strongly connected components algorithm - Take 2 (#3519) +- Small fix for bug found @ issue #3524 (#3529) +- Restore checking PyPy3 (#3514) +- Linear prufer coding (#3535) +- Fix inverse_line_graph. (#3507) +- Fix A* returning wrong solution (#3508) +- Implement minimum weight full matching of bipartite graphs (#3527) +- Get chordal graph for #1054 (#3353) +- Faster transitive closure computation for DAGs (#3445) +- Write mixed-type attributes correctly in write_graphml_lxml (#3536) +- Fixes some edge cases for inverse_line_graph(). (#3538) +- explicitly stated i.j convention in to_numpy_array +- Incremental Closeness Centrality (undirected, unweighted graphs) (#3444) +- Implement ISMAGS subgraph isomorphism algorithm (#3312) +- Fixes bug in networkx.algorithms.community.label_propagation.asyn_lpa_communities (#3545) +- When exporting to GML, write non 32-bit numbers as strings. 
(#3540) +- Try to bug Fix #3552 (#3554) +- add Directed Joint Degree Graph generator (#3551) +- typo (#3557) +- Fix a few documentation issues for the bipartite algorithm reference (#3555) +- i,j convention in adj mat i/o in relevant funcs +- Merge pull request #3542 from malch2/doc/update +- Add 3.8-dev to travis +- Fix dict iteration for Py3.8 +- Ignore other failures for now +- Fix a typo in docstring for get_edge_data (#3564) +- Fix wrong title (#3566) +- Fix typo in doctring (#3568) +- Fix and Improve docstrings in graph.py (#3569) +- Improved graph class selection table (#3570) +- Add spiral layout for graph drawing (#3534) +- #3575 return coordinates of 3d layouts (#3576) +- Handle k==n within the Watts-Strogatz graph generator (#3579) +- Floyd-Warshall Optimization (#3400) +- Use Sphinx 2.2 +- Add missing link to asteroidal docs +- Fix Sphinx warnings +- Fix Sphinx latexpdf build +- Updated Contributor list (#3592) +- Prim from list to set (#3512) +- Fix issue 3491 (#3588) +- Make Travis fail on Python 3.8 failures +- Fix test_gexf to handle default serialisation order of the XML attributes +- Remove future imports needed by Py2 +- add internet_as_graph generator (#3574) +- remove cyclical references from OutEdgeDataView (#3598) +- Add minimum source and target margin to draw_networkx_edges. (#3390) +- fix to_directed function (#3599) +- Fixes #3573:GEXF output problem (#3606) +- Global efficiency attempt to speed up (#3604) +- Bugfix: Added flexibility in reading values for label and id (#3603) +- Add method floyd-warshall to average_shortest_path_length (#3267) +- Replaced is with == and minor pycodestyle fixes (#3608) +- Fix many documentation based Issues (#3609) +- Resolve many documentation issues (#3611) +- Fixes #3187 transitive_closure now returns self-loops when cycles present (#3613) +- Add support for initializing pagerank_scipy (#3183) +- Add last 7 lines of Gomory-hu algorithm Fixes #3293 (#3614) +- Implemented Euler Path functions (#3399) +- Fix the direction of edges in label_propagation.py (#3619) +- Removed unused import of random module (#3620) +- Fix operation order in partial_duplication_graph (#3626) +- Keep shells with 1 node away from origin in shell_layout (#3629) +- Allow jit_graph to read json string or json object (#3628) +- Fix typo within incode documentation (#3621) +- pycodestyle and update docs for greedy_coloring.py+tests (#3631) +- Add version badges +- Load long description from README +- Add missing code block (#3630) +- Change is_list_of_ints to make_list_of_ints (#3617) +- Handle edgeattr in from_pandas_edgelist when no columns match request (#3634) +- Make draft of release notes for v2.4 +- Shift notes from dev to v2.4 filename. +- Use recent pypy +- Test Py 3.8 on macos +- add check of attr type before converting inf/nan in GEXF (#3636) +- Fix sphinx errors And add links to single_source_dijkstra in docs for dijkstra_path/length (#3638) +- Document subgraph_view (#3627) +- First round of pytest fixes +- Use class methods for class setup/teardown +- Have CIs use pytest +- Use class methods for class setup/teardown, cont. 
+- Do less testing (until we get it working) +- replace idiom from networkx import * in test files +- Fix assert funcs override +- Fix static methods in link_prediction +- Partially fix v2userfunc tests +- Fix graph/digraph tests +- Fix multigraph checks +- Fix multidigraph checks +- Fix test_function checks +- Fix distance_measures tests +- Fix decorators tests +- Fix some raises in test_mst +- Fix clique tests +- Fix yaml tests +- Fix tests in reportviews +- Fix vf2 tests +- Fix mst tests +- Fix gdal tests +- Convert nose.tools.assert_* functions into asserts +- Remove unused imports +- Fix some warnings +- Update testing instructions +- Reenable all test platforms +- Fix some __init__ warnings +- replace nose yield tests in test_coloring.py +- Add testing, coverage, and dev environment info +- Try pytestimportorskip +- Another pair of variations on pytest.importorskip +- fix typo and try again +- Remove deprecated weakly_connected_component_subgraphs +- replace assert_almost_equal and raises in algorithms/tests +- set places=0 on tests that use old almost_equal +- Update nx.test() +- Have pytest run doctests / not sphinx +- Revert "Remove deprecated weakly_connected_component_subgraphs" +- remove warnings for using deprecated function +- Remove deprecated functions and methods. add to release notes. +- Fix subgraph_view testing +- remove tests of deprecated views and fix use of deprecated G.node +- tracking down use of deprecated functions +- Fix deprecated use of add_path/star/cycle +- reduce warnings for deprecated functions +- skirt issues wih raises in test_harmonic +- reduce the number of warnings by removing deprecated functions +- convert_matrix demo of one way to get doctests to work +- Remove deprecated from examples +- Changes to convert_matrix and others that depend on np.matrix +- clean up doctest deprecated code +- More doctest corrections +- Fix examples +- Remove nose from generators +- Remove nose from utils +- Remove nose from classes +- Replace nose.assert_raises with pytest.raises +- Replace nose.raises with pytest.raises context manager +- Replace `eq_`, `ok_` with assert +- Use pytest for doctest +- Highlight switch to pytest in release notes +- Remove `from nose.tools import *` +- Remove nose.tools.SkipTest +- Finalize transition to pytest +- Merge pull request #3639 from stefanv/pytest-port +- Test Python 3.8 with AppVeyor +- Merge pull request #3648 from jarrodmillman/windows-py3.8 +- Remove deprecated weakly_connected_component_subgraphs +- Update release notes +- Update README +- Announce Python 3.8 support +- Designate 2.4rc1 release +- Bump release version +- Remove remaining SkipTests +- fix documentation notes (#3644) (#3645) +- Test Py 3.8.0 on AppVeyor +- Speed up AppVeyor +- Cleanup travis config +- Improve CI caching +- Update Py 3.8 on travis +- Merge pull request #3652 from jarrodmillman/speedup-appveyor +- Finalize release notes + +It contained the following 5 merges: + +- Fixed typo in betweenness centrality subset test (#3436) +- explicitly stated i.j convention in to_numpy_array (#3542) +- pytest port (#3639) +- Test Python 3.8 with AppVeyor (#3648) +- Cleanup and speedup CI (#3652) + +Contributors to this release +---------------------------- + +- Rajendra Adhikari +- Antoine Allard +- Antoine +- Salim BELHADDAD +- Luca Baldesi +- Tamás Bitai +- Tobias Blass +- Malayaja Chutani +- Peter Cock +- Almog Cohen +- Diogo Cruz +- Martin Darmüntzel +- Elan Ernest +- Jacob Jona Fahlenkamp +- Michael Fedell +- Andy Garfield +- Ramiro Gómez +- Haakon +- 
Alex Henrie +- Steffen Hirschmann +- Martin James McHugh III +- Jacob +- Søren Fuglede Jørgensen +- Omer Katz +- Julien Klaus +- Matej Klemen +- Nanda H Krishna +- Peter C Kroon +- Anthony Labarre +- Anton Lodder +- MCer4294967296 +- Eric Ma +- Fil Menczer +- Erwan Le Merrer +- Alexander Metz +- Jarrod Millman +- Subhendu Ranajn Mishra +- Jamie Morton +- James Myatt +- Kevin Newman +- Aaron Opfer +- Aditya Pal +- Pascal-Ortiz +- Peter +- Jose Pinilla +- Alexios Polyzos +- Michael Recachinas +- Efraim Rodrigues +- Adam Rosenthal +- Dan Schult +- William Schwartz +- Weisheng Si +- Kanishk Tantia +- Ivan Tham +- George Valkanas +- Stefan van der Walt +- Hsi-Hsuan Wu +- Haochen Wu +- Xiangyu Xu +- Jean-Gabriel Young +- bkief +- daniel-karl +- michelb7398 +- mikedeltalima +- nandahkrishna +- skhiuk +- tbalint diff --git a/doc/release/release_2.5.rst b/doc/release/release_2.5.rst new file mode 100644 index 0000000..e89ab49 --- /dev/null +++ b/doc/release/release_2.5.rst @@ -0,0 +1,493 @@ +Announcement: NetworkX 2.5 +========================== + +We're happy to announce the release of NetworkX 2.5! +NetworkX is a Python package for the creation, manipulation, and study of the +structure, dynamics, and functions of complex networks. + +For more information, please visit our `website <http://networkx.github.io/>`_ +and our `gallery of examples +<https://networkx.github.io/documentation/latest/auto_examples/index.html>`_. +Please send comments and questions to the `networkx-discuss mailing list +<http://groups.google.com/group/networkx-discuss>`_. + +Highlights +---------- + +This release is the result of 10 months of work with over 200 commits by +92 contributors. Highlights include: + +- Dropped support for Python 3.5. +- add Pathlib support to work with files. +- improve performance. +- Updated docs and tests. +- Removed code designed to work with Python 2. + +New Functions: + +- lukes_partitioning +- triadic analysis functions +- functions for trophic levels analysis +- d_separated +- is_regular and other regular graph measures +- graph_hash using Weisfeiler Lehman methods +- common_neighbor_centrality (CCPA link prediction) +- max_weight_clique +- path_weight and is_path +- rescale_layout_dict +- junction_tree + +New generators: + +- paley_graph +- interval_graph + +New layouts: + +- multipartite_layout + + +Improvements +------------ + +- Add governance documents, developer guide and community structures +- Implement explicit deprecation policy. +- Initiate an NX Enhancement Proposal (NXEP) system +- optimize single_source_shortest_path +- improved consistent "weight" specification in shortest_path routines +- Reduce numpy.matrix usage which is discouraged by numpy. 
+- improved line color
+- better search engine treatment of docs
+- lattice and grid_graph and grid_2d_graph can use dim=tuple
+- fix initializer of kamada_kawai_layout algorithm
+- moral and threshold functions now included in namespace and docs
+- scale arrows better when drawing
+- more uniform creation of random lobster graphs
+- allow editing graph during iteration over connected_components
+- better column handling in conversion of pandas DataFrame
+- allow simrank_similarity with directed graph input
+- ensure VoteRank ability is nonnegative
+- speedup kernighan_lin_bisection
+- speedup negative weight cycle detection
+- tree_isomorphism
+- rooted_tree_isomorphism
+- GEXF edge attribute "label" is available
+
+
+API Changes
+-----------
+
+- enabled "copy" flag parameter in `contracted_nodes`
+- allow partially periodic lattices
+- return value for minimum_st_node_cut now always a set
+- removed unused "has_numpy" argument from create_py_random_state
+- fixed return values when drawing empty nodes and edges
+- allow sets and frozensets of edges as input to nx.Graph()
+- "weight" can be a function for astar, bidirectional_dijkstra, all_shortest_paths
+- allow named key ids for GraphML edge writing
+- all keywords are now checked for validity in nx.draw and friends
+- EdgeDataView "in" operator checks if nodes are "in nbunch"
+- remove completeness condition from minimum weight full matching
+- option to sort neighbors in bfs traversal
+- draw_networkx accepts numpy array for edgelist
+- relabel_nodes with 2 nodes mapped to same node can now create multiedge
+- steiner_tree works with MultiGraph
+- Add `show` kwarg to view_pygraphviz (#4155)
+- Prepare for turning chordal_graph_cliques into a generator (#4162)
+- GraphML reader keyword force_multigraph creates MultiGraph even w/o multiedges
+
+
+Deprecations
+------------
+
+- [`#3680 <https://github.com/networkx/networkx/pull/3680>`_]
+  Deprecate `make_str(x)` for `str(x)`.
+  Deprecate `is_string_like(obj)` for `isinstance(obj, str)`.
+
+- [`#3725 <https://github.com/networkx/networkx/pull/3725>`_]
+  Deprecate `literal_stringizer` and `literal_destringizer`.
+
+- [`#3983 <https://github.com/networkx/networkx/pull/3983>`_]
+  Deprecate `reversed` context manager.
+
+- [`#4155 <https://github.com/networkx/networkx/pull/4155>`_]
+  Deprecate `display_pygraphviz`.
+
+- [`#4162 <https://github.com/networkx/networkx/pull/4162>`_]
+  Deprecate `chordal_graph_cliques` returning a set.
+
+- [`#4161 <https://github.com/networkx/networkx/pull/4161>`_]
+  Deprecate `betweenness_centrality_source`.
+
+- [`#4161 <https://github.com/networkx/networkx/pull/4161>`_]
+  Deprecate `edge_betweeness`.
+
+- [`#4161 <https://github.com/networkx/networkx/pull/4161>`_]
+  Rename `_naive_greedy_modularity_communities` as `naive_greedy_modularity_communities`.
+
+Pull requests merged in this release
+------------------------------------
+
+A total of 256 changes have been committed.
+ +- Bump release version +- Update release process +- Drop support for Python 3.5 +- fix typo docs +- Remove old Python 2 code +- Enable more doctests +- Fix pydot tests +- Unclear how to test the test helper function +- Pathlib introduced in Py 3.4 +- Remove code using sys.version_info to detect Python 2 +- Use yield from +- PEP8 fixes to tests +- Remove unused imports +- Use pytest.importorskip +- PEP8 fixes +- Remove unused imports +- Add pep8_speaks conf +- Use itertools accumulate +- Fixes issue 3610: Bug in version attribute of gexf.py +- Ignore W503 +- Run doctest without optional dependencies +- Skip doctests when missing dependencies +- Remove sed imports +- Enable tests (#3678) +- `contracted_nodes` copy flag added (#3646) +- Deprecate make_str +- Deprecate is_string_like +- Fix PEP8 issues +- Enable ThinGraph tests (#3681) +- Optimize _single_shortest_path_length (#3647) +- Fix issue 3431: Return error in case of bad input to make_small_graph (#3676) +- avoid duplicate tests due to imports (#3684) +- Fix typo: Laplacion -> Laplacian (#3689) +- Add tests +- Lukes algorithm implementation (#3666) +- Remove shim that worked around using starmap +- Add back to gallery +- Add colormap and color limits to LineCollection (#3698) +- Fix matplotlib deprecation (#3697) +- Adapt SciPy CoC +- Update docs to be more accurate about speed of G.neighbors (#3699) +- Use canonical url to help search engines +- Remove duplicate license parameter (#3710) +- Fix documentation issues for exceptions in a few places +- Fix more documentation issues with exceptions +- Remove old Python 2 code +- Remove boiler plate from top of modules +- Remove superfluous encoding information +- Update examples +- Simplify package docstring +- Remove shebang from non-executables +- Add contributors +- K-truss is defined for edges being in (k-2) triangles and not for k triangles (#3713) +- Enable optional tests on Python 3.8 +- Fix test_numpy_type to pass under Python 3.8 +- Add links to data files +- Deprecate Python 2/3 compatibility code +- Update style +- Update style +- Separate easy and hard to install optional requirements +- Install optional dependencies by default +- Refactor tests +- Sample code for subgraph copy: add parenthesis to is_multigraph (#3734) +- Fixed typo (#3735) +- fix citation links (#3741) +- remove f strings from setup.py for clear error message < py3.6 (#3738) +- 3511 gml list support (#3649) +- added linestyle as argument (#3747) +- Link to files needed for example (#3752) +- fixed a typo +- Merge pull request #3759 from yohm/patch-1 +- remove unused variable so grid_graph supports dim=tuple (#3760) +- Sudoku generator issue 3756 (#3757) +- Fix scaling of single node shells in shall_layout (#3764) +- Adding triadic analysis functions (#3742) +- Improve test coverage +- Update contribs script +- Convert %-format to fstring +- Upgrade to Py36 syntax +- Upgrade to Py36 syntax +- Update string format +- Fix scipy deprecation warnings +- Update year +- Silence known warnings (#3770) +- Fix docstring for asyn_fluidc (#3779) +- Fix #3703 (#3784) +- fix initializer for kamada_kawai_layout (networkx #3658) (#3782) +- Minor comments issue (#3787) +- Adding moral and threshold packages to main namespace (#3788) +- Add weight functions to bidirectional_dijkstra and astar (#3799) +- Shrink the source side of an arrow properly when drawing a directed edge. 
#3805 (#3806) +- option for partially-periodic lattices (networkx #3586) (#3807) +- Prevent KeyError on subgraph_is_monomorphic (#3798) +- Trophic Levels #3736 (#3804) +- UnionFind's union doesn't accurately track set sizes (#3810) +- Remove whitespace (#3816) +- reconsider the lobster generator (#3822) +- Fix typo (#3838) +- fix typo slightly confusing the meaning (#3840) +- Added fix for issue #3846 (#3848) +- Remove unused variable has_numpy from create_py_random_state (#3852) +- Fix return values when drawing empty nodes and edges #3833 (#3854) +- Make connected_components safe to component set mutation (#3859) +- Fix example in docstring (#3866) +- Update README.rst website link to https (#3888) +- typo (#3894) +- Made CONTRIBUTING.rst more clearer (#3895) +- Fixing docs for nx.info(), along with necessary tests (#3893) +- added default arg for json dumps for jit_data func (#3891) +- Fixed nx.Digraph to nx.DiGraph (#3909) +- Use Sphinx 3.0.1 +- Fix Sphinx deprecation +- Add logo to docs +- allow set of edge nodes (#3907) +- Add extra information when casting 'id' to int() fails. (Resolves #3910) (#3916) +- add paley graph (#3900) +- add paley graph to doc (#3927) +- Update astar.py (#3947) +- use keywords for positional arguments (#3952) +- fix documentation (#3959) +- Add option for named key ids to GraphML writing. (#3960) +- fix documentation (#3958) +- Correct handling of zero-weight edges in all_shortest_paths (#3783) +- Fix documentation typo (#3965) +- Fix: documentation of simrank_similarity_numpy (#3954) +- Fix for #3930 (source & target columns not overwritten when converting to pd.DataFrame) (#3935) +- Add weight function for shortest simple paths for #3948 (#3949) +- Fix defination of communicability (#3973) +- Fix simrank_similarity with directed graph input (#3961) +- Fixed weakening of voting ability (#3970) +- implemented faster sweep algorithm for kernighan_lin_bisection (#3858) +- Fix issue #3926 (#3928) +- Update CONTRIBUTORS.rst (#3982) +- Deprecate context_manager reversed in favor of reversed_view (#3983) +- Update CONTRIBUTORS.rst (#3987) +- Enhancement for voterank (#3972) +- add d-separation algorithm (#3974) +- DOC: added see also section to find_cycle (#3999) +- improve docs for subgraph_view filter_egde (#4010) +- Fix exception causes in dag.py (#4000) +- use raise from for exceptions in to_networkx_graph (#4009) +- Fix exception causes and messages in 12 modules (#4012) +- Fix typo: `np.int` -> `np.int_` (#4013) +- fix a typo (#4017) +- change documentation (#3981) +- algorithms for regular graphs (#3925) +- Typo Hand should be Hans (#4025) +- DOC: Add testing bullet to CONTRIBUTING. (#4035) +- Update Sphinx +- Update optional/test deps +- Add governance/values/nexp/roadmap +- Improve formatting of None in tutorial (#3986) +- Fixes DiGraph spelling in docstring (#3892) +- Update links to Py3 docs (#4042) +- Add method to clear edges only (#3477) +- Fix exception causes and messages all over the codebase (#4015) +- Handle kwds explicitly in draw_networkx (#4033) +- return empty generator instead of empty list (#3967) +- Correctly infer numpy float types (#3919) +- MAINT: Update from_graph6_bytes arg/docs. (#4034) +- Add URLs/banner/titlebar to documentation (#4044) +- Add negative cycle detection heuristic (#3879) +- Remove unused imports (#3855) +- Fixed Bug in generate_gml(G, stringizer=None) (#3841) +- Raise NetworkXError when k < 2 (#3761) +- MAINT: rm np.matrix from alg. conn. module +- MAINT: rm np.matrix from attribute_ac. 
+- MAINT,TST: Parametrize methods in TestAlgebraicConnectivity. +- MAINT,TST: parametrize buckminsterfullerene test. +- MAINT,TST: Remove unused _methods class attr +- MAINT,TST: Parametrize TestSpectralOrdering. +- excluded self/recursive edges (#4037) +- WIP: Change EdgeDataView __contains__ feature (2nd attempt) (#3845) +- Index edges for multi graph simple paths (#3358) +- ENH: Add new graph_hashing feature +- Fix pandas deprecation +- Organize removal of deprecated code +- Update sphinx +- ENH: Add roots and timeout to GED (#4026) +- Make gallery more prominent +- Add an implementation for interval_graph and its unit tests (#3705) +- Fixed typo in kamada_kawai_layout docstring (#4059) +- Remove completeness condition from minimum weight full matching (#4057) +- Implemented multipartite_layout (#3815) +- added new Link Prediction algorithm (CCPA) (#4028) +- add the option of sorting node's neighbors during bfs traversal (#4029) +- TST: remove int64 specification from test. (#4055) +- Ran pyupgrade --py36plus +- Remove trailing spaces +- Tell psf/black to ignore specific np.arrays +- Format w/ black +- Add pre-commit hook to for psf/black +- Merge pull request #4060 from jarrodmillman/black +- Fix a few typos in matching docstrings (#4063) +- fix bug for to_scipy_sparse_matrix function (#3985) +- Update documentation of minimum weight full matching (#4062) +- Add maximum weight clique algorithm (#4016) +- Clear pygraphviz object after creating networkx object (#4070) +- Use newer osx on travis (#4075) +- Install Python after updating brew (#4079) +- Add link to black (#4078) +- Improves docs regarding aliases of erdos-reyni graph generators (#4074) +- MAINT: Remove dependency version info from INSTALL (#4081) +- Simplify top-level directory (#4087) +- DOC: Fix return types in laplacianmatrix. (#4090) +- add modularity to the docs (#4096) +- Allow G.remove_edges_from(nx.selfloops_edges(G)) (#4080) +- MAINT: rm private fn in favor of numpy builtin. (#4094) +- Allow custom keys for multiedges in from_pandas_edgelist (#4076) +- Fix planar_layout docstring (#4097) +- DOC: Rewording re: numpy.matrix +- MAINT: rm to/from_numpy_matrix internally +- Merge pull request #4093 from rossbar/rm_npmatrix +- Remove copyright boilerplate (#4105) +- Update contributor guide (#4088) +- Add function to calculate path cost for a specified path (#4069) +- Update docstring for from_pandas_edgelist (#4108) +- Add max_weight_clique to doc (#4110) +- Update deprecation policyt (#4112) +- Improve modularity calculation (#4103) +- Add team gallery (#4117) +- CI: Setup circle CI for documentation builds (#4119) +- Build pdf (#4123) +- DOC: Suggestions and improvments from tutorial readthrough (#4121) +- Enable 3.9-dev on travis (#4124) +- Fix parse_edgelist behavior with multiple attributes (#4125) +- CI: temporary fix for CI latex installation issues (#4131) +- Updated draw_networkx to accept numpy array for edgelist (#4132) +- Add tree isomorphism (#4067) +- MAINT: Switch to abc-based isinstance checks in to_networkx_graph (#4136) +- Use dict instead of OrderedDict since dict is ordered by default from Python 3.6. (#4145) +- MAINT: fixups to parse_edgelist. 
(#4128) +- Update apt-get on circleci image (#4147) +- add rescale_layout_dict to change scale of the layout_dicts (#4154) +- Update dependencies +- Remove gdal from requirements +- relabel_nodes now preserves edges in multigraphs (#4066) +- MAINT,TST: Improve coverage of nx_agraph module (#4156) +- Get steiner_tree to work with MultiGraphs by postprocessing (#4160) +- junction_tree for #1012 (#4004) +- API: Add `show` kwarg to view_pygraphviz. (#4155) +- Prepare for turning chordal_graph_cliques into a generator (#4162) +- Docs update (#4161) +- Remove unnecessary nx imports from doctests (#4163) +- MultiGraph from graphml with explicit edge ids #3470 (#3763) +- Update sphinx dep (#4164) +- Add edge label in GEXF writer as an optional attribute (#3347) +- First Draft of Release Notes for v2.5 (#4159) +- Designate 2.5rc1 release +- Bump release version +- Update deprecations in release notes (#4166) +- DOC: Update docstrings for public functions in threshold module (#4167) +- Format python in docstrings (#4168) +- DOC,BLD: Fix doc build warning from markup error. (#4174) + +It contained the following 3 merges: + +- fixed a typo (#3759) +- Use psf/black (#4060) +- MAINT: Replace internal usage of to_numpy_matrix and from_numpy_matrix (#4093) + + +Contributors to this release +---------------------------- +- Adnan Abdulmuttaleb +- Abhi +- Antoine-H +- Salim BELHADDAD +- Ross Barnowski +- Lukas Bernwald +- Isaac Boates +- Kelly Boothby +- Matthias Bruhns +- Mahmut Bulut +- Rüdiger Busche +- Gaetano Carpinato +- Nikos Chan +- Harold Chan +- Camden Cheek +- Daniel +- Daniel-Davies +- Bastian David +- Christoph Deil +- Tanguy Fardet +- 赵丰 (Zhao Feng) +- Andy Garfield +- Oded Green +- Drew H +- Alex Henrie +- Kang Hong Jin +- Manas Joshi +- Søren Fuglede Jørgensen +- Aabir Abubaker Kar +- Folgert Karsdorp +- Suny Kim +- Don Kirkby +- Katherine Klise +- Steve Kowalik +- Ilia Kurenkov +- Whi Kwon +- Paolo Lammens +- Zachary Lawrence +- Sanghack Lee +- Anton Lodder +- Lukas Lösche +- Eric Ma +- Mackyboy12 +- Christoph Martin +- Alex Marvin +- Mattwmaster58 +- James McDermott +- Jarrod Millman +- Ibraheem Moosa +- Yohsuke Murase +- Neil +- Harri Nieminen +- Danny Niquette +- Carlos G. Oliver +- Juan Orduz +- Austin Orr +- Pedro Ortale +- Aditya Pal +- PalAditya +- Jose Pinilla +- PranayAnchuri +- Jorge Martín Pérez +- Pradeep Reddy Raamana +- Ram Rachum +- David Radcliffe +- Federico Rosato +- Tom Russell +- Craig Schmidt +- Jonathan Schneider +- Dan Schult +- Mridul Seth +- Karthikeyan Singaravelan +- Songyu-Wang +- Kanishk Tantia +- Jeremias Traub +- James Trimble +- Shashi Tripathi +- Stefan van der Walt +- Jonatan Westholm +- Kazimierz Wojciechowski +- Jangwon Yie +- adnanmuttaleb +- anentropic +- arunwise +- beckedorf +- ernstklrb +- farhanbhoraniya +- fj128 +- gseva +- haochenucr +- johnthagen +- kiryph +- muratgu +- ryan-duve +- sauxpa +- tombeek111 +- willpeppo diff --git a/doc/release/release_dev.rst b/doc/release/release_dev.rst index 9beab40..2dae5e5 100644 --- a/doc/release/release_dev.rst +++ b/doc/release/release_dev.rst @@ -1,7 +1,7 @@ -Announcement: NetworkX 2.2 +Announcement: NetworkX 2.6 ========================== -We're happy to announce the release of NetworkX 2.2! +We're happy to announce the release of NetworkX 2.6! NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. 
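As a concrete taste of two of the NetworkX 2.5 additions listed earlier
(`d_separated` and `multipartite_layout`), here is a minimal sketch; the toy
graphs are arbitrary examples, not taken from the release notes::

    import networkx as nx

    # d-separation on a tiny DAG: 1 -> 2 -> 3
    G = nx.DiGraph([(1, 2), (2, 3)])
    assert nx.d_separated(G, {1}, {3}, {2})  # 1 and 3 independent given 2

    # layout with one column per node subset
    M = nx.complete_multipartite_graph(2, 3)
    pos = nx.multipartite_layout(M)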
diff --git a/doc/release/report_functions_without_rst_generated.py b/doc/release/report_functions_without_rst_generated.py new file mode 100644 index 0000000..f73da43 --- /dev/null +++ b/doc/release/report_functions_without_rst_generated.py @@ -0,0 +1,34 @@ +import os +import inspect +import networkx as nx + +print("Run this script from the doc/ directory of the repository") +funcs = inspect.getmembers(nx, inspect.isfunction) + +for n, f in funcs: + # print(n + ": "+str(f)) + cmd = r"find . -name *\." + n + ".rst -print" + # print(cmd) + result = os.popen(cmd).read() + # print(result) + + old_names = ( + "find_cores", + "test", + "edge_betweenness", + "betweenness_centrality_source", + "write_graphml_lxml", + "write_graphml_xml", + "adj_matrix", + "project", + "fruchterman_reingold_layout", + "node_degree_xy", + "node_attribute_xy", + "find_cliques_recursive", + "recursive_simple_cycles", + ) + + if len(result) == 0 and n not in old_names: + print("Missing file from docs: ", n) + +print("Done finding functions that are missing from the docs") diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index d727adc..0000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ --r ../requirements/doc.txt - diff --git a/doc/team.rst b/doc/team.rst new file mode 100644 index 0000000..1b97464 --- /dev/null +++ b/doc/team.rst @@ -0,0 +1,270 @@ + +Core Developers +--------------- + +NetworkX development is guided by the following core team: + + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/boothby" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars2.githubusercontent.com/u/569654?u=c29b79275293c22fa3c56a06ed04e004465ef331&v=4&s=40" + loading="lazy" + alt="Avatar picture of @boothby" + /> + </div> + Kelly Boothby + </a> + <div class="team-member-handle">@boothby</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/camillescott" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars3.githubusercontent.com/u/2896301?u=bd57c546510c131f4f7f41e3999fb8e6e33a2298&v=4&s=40" + loading="lazy" + alt="Avatar picture of @camillescott" + /> + </div> + Camille Scott + </a> + <div class="team-member-handle">@camillescott</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/dschult" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars3.githubusercontent.com/u/915037?u=6a27f396c666c5c2172a1cfc7b0d4bbcd0069eed&v=4&s=40" + loading="lazy" + alt="Avatar picture of @dschult" + /> + </div> + Dan Schult + </a> + <div class="team-member-handle">@dschult</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/ericmjl" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars0.githubusercontent.com/u/2631566?u=c5d73d769c251a862d7d4bbf1119297d8085c34c&v=4&s=40" + loading="lazy" + alt="Avatar picture of @ericmjl" + /> + </div> + Eric Ma + </a> + <div class="team-member-handle">@ericmjl</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/hagberg" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars3.githubusercontent.com/u/187875?v=4&s=40" + loading="lazy" + alt="Avatar picture of @hagberg" + /> + </div> + Aric Hagberg + </a> + <div class="team-member-handle">@hagberg</div> + </div> + + +.. 
raw:: html + + <div class="team-member"> + <a href="https://github.com/jarrodmillman" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars1.githubusercontent.com/u/123428?v=4&s=40" + loading="lazy" + alt="Avatar picture of @jarrodmillman" + /> + </div> + Jarrod Millman + </a> + <div class="team-member-handle">@jarrodmillman</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/MridulS" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars1.githubusercontent.com/u/5363860?u=ce5c6e9388d2fd153ebf8b0bb521c928b0813608&v=4&s=40" + loading="lazy" + alt="Avatar picture of @MridulS" + /> + </div> + Mridul Seth + </a> + <div class="team-member-handle">@MridulS</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/rossbar" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars2.githubusercontent.com/u/1268991?u=974707b96081a9705f3a239c0773320f353ee02f&v=4&s=40" + loading="lazy" + alt="Avatar picture of @rossbar" + /> + </div> + Ross Barnowski + </a> + <div class="team-member-handle">@rossbar</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/stefanv" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars3.githubusercontent.com/u/45071?u=c779b5e06448fbc638bc987cdfe305c7f9a7175e&v=4&s=40" + loading="lazy" + alt="Avatar picture of @stefanv" + /> + </div> + Stefan van der Walt + </a> + <div class="team-member-handle">@stefanv</div> + </div> + + + +Emeritus Developers +------------------- + +We thank these previously-active core developers for their contributions to NetworkX. + + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/bjedwards" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars0.githubusercontent.com/u/726274?u=e493f38cb65425f6de7a9568ee3802a183deaa8e&v=4&s=40" + loading="lazy" + alt="Avatar picture of @bjedwards" + /> + </div> + Benjamin Edwards + </a> + <div class="team-member-handle">@bjedwards</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/chebee7i" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars2.githubusercontent.com/u/326005?u=a5a33cadf55b2fbdd8b033517f97f763563aa72a&v=4&s=40" + loading="lazy" + alt="Avatar picture of @chebee7i" + /> + </div> + @chebee7i + </a> + <div class="team-member-handle">@chebee7i</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/jfinkels" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars0.githubusercontent.com/u/121755?v=4&s=40" + loading="lazy" + alt="Avatar picture of @jfinkels" + /> + </div> + @jfinkels + </a> + <div class="team-member-handle">@jfinkels</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/jtorrents" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars2.githubusercontent.com/u/1184374?v=4&s=40" + loading="lazy" + alt="Avatar picture of @jtorrents" + /> + </div> + Jordi Torrents + </a> + <div class="team-member-handle">@jtorrents</div> + </div> + + +.. 
raw:: html + + <div class="team-member"> + <a href="https://github.com/loicseguin" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars3.githubusercontent.com/u/812562?v=4&s=40" + loading="lazy" + alt="Avatar picture of @loicseguin" + /> + </div> + Loïc Séguin-Charbonneau + </a> + <div class="team-member-handle">@loicseguin</div> + </div> + + +.. raw:: html + + <div class="team-member"> + <a href="https://github.com/ysitu" class="team-member-name"> + <div class="team-member-photo"> + <img + src="https://avatars2.githubusercontent.com/u/7018196?v=4&s=40" + loading="lazy" + alt="Avatar picture of @ysitu" + /> + </div> + @ysitu + </a> + <div class="team-member-handle">@ysitu</div> + </div> + diff --git a/doc/tutorial.rst b/doc/tutorial.rst index 01624a4..1de851a 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -1,5 +1,3 @@ -.. -*- coding: utf-8 -*- - Tutorial ======== @@ -19,8 +17,8 @@ Create an empty graph with no nodes and no edges. By definition, a :class:`Graph` is a collection of nodes (vertices) along with identified pairs of nodes (called edges, links, etc). In NetworkX, nodes can -be any hashable object e.g., a text string, an image, an XML object, another -Graph, a customized node object, etc. +be any :py:term:`hashable` object e.g., a text string, an image, an XML object, +another Graph, a customized node object, etc. .. note:: Python's ``None`` object should not be used as a node as it determines whether optional function arguments have been assigned in many functions. @@ -37,22 +35,31 @@ at a time, >>> G.add_node(1) -add a list of nodes, +or add nodes from any :py:term:`iterable` container, such as a list .. nbplot:: >>> G.add_nodes_from([2, 3]) -or add any iterable container of nodes. You can also add nodes along with node -attributes if your container yields 2-tuples (node, node_attribute_dict). -Node attributes are discussed further below. +You can also add nodes along with node +attributes if your container yields 2-tuples of the form +``(node, node_attribute_dict)``:: + + >>> G.add_nodes_from([ + ... (4, {"color": "red"}), + ... (5, {"color": "green"}), + ... ]) + +Node attributes are discussed further :ref:`below <attributes>`. + +Nodes from one graph can be incorporated into another: .. nbplot:: >>> H = nx.path_graph(10) >>> G.add_nodes_from(H) -Note that ``G`` now contains the nodes of ``H`` as nodes of ``G``. +``G`` now contains the nodes of ``H`` as nodes of ``G``. In contrast, you could use the graph ``H`` as a node in ``G``. .. nbplot:: @@ -89,7 +96,8 @@ by adding a list of edges, or by adding any :term:`ebunch` of edges. An *ebunch* is any iterable container of edge-tuples. An edge-tuple can be a 2-tuple of nodes or a 3-tuple with 2 nodes followed by an edge attribute dictionary, e.g., -``(2, 3, {'weight': 3.1415})``. Edge attributes are discussed further below +``(2, 3, {'weight': 3.1415})``. Edge attributes are discussed further +:ref:`below <attributes>`. .. nbplot:: @@ -123,6 +131,9 @@ At this stage the graph ``G`` consists of 8 nodes and 3 edges, as can be seen by >>> G.number_of_edges() 3 +Examining elements of a graph +----------------------------- + We can examine the nodes and edges. Four basic graph properties facilitate reporting: ``G.nodes``, ``G.edges``, ``G.adj`` and ``G.degree``. These are set-like views of the nodes, edges, neighbors (adjacencies), and degrees @@ -137,7 +148,7 @@ better in other contexts. .. 
nbplot:: >>> list(G.nodes) - ['a', 1, 2, 3, 'spam', 'm', 'p', 's'] + [1, 2, 3, 'spam', 's', 'p', 'a', 'm'] >>> list(G.edges) [(1, 2), (1, 3), (3, 'm')] >>> list(G.adj[1]) # or list(G.neighbors(1)) @@ -146,8 +157,9 @@ better in other contexts. 2 One can specify to report the edges and degree from a subset of all nodes -using an *nbunch*. An *nbunch* is any of: None (meaning all nodes), a node, -or an iterable container of nodes that is not itself a node in the graph. +using an :term:`nbunch`. An *nbunch* is any of: ``None`` (meaning all nodes), +a node, or an iterable container of nodes that is not itself a node in the +graph. .. nbplot:: @@ -156,6 +168,9 @@ or an iterable container of nodes that is not itself a node in the graph. >>> G.degree([2, 3]) DegreeView({2: 1, 3: 2}) +Removing elements from a graph +------------------------------ + One can remove nodes and edges from the graph in a similar fashion to adding. Use methods :meth:`Graph.remove_node`, @@ -172,6 +187,12 @@ and [1, 3, 'spam'] >>> G.remove_edge(1, 3) +Using the graph constructors +---------------------------- + +Graph objects do not have to be built up incrementally - data specifying +graph structure can be passed directly to the constructors of the various +graph classes. When creating a graph structure by instantiating one of the graph classes you can specify data in several formats. @@ -198,24 +219,25 @@ Data Bank, and ``x`` could refer to an XML record of publications detailing experimental observations of their interaction. We have found this power quite useful, but its abuse -can lead to unexpected surprises unless one is familiar with Python. +can lead to surprising behavior unless one is familiar with Python. If in doubt, consider using :func:`~relabel.convert_node_labels_to_integers` to obtain a more traditional graph with integer labels. Accessing edges and neighbors ----------------------------- -In addition to the views :meth:`Graph.edges`, and :meth:`Graph.adj`, +In addition to the views :attr:`Graph.edges`, and :attr:`Graph.adj`, access to edges and neighbors is possible using subscript notation. .. nbplot:: + >>> G = nx.Graph([(1, 2, {"color": "yellow"})]) >>> G[1] # same as G.adj[1] - AtlasView({2: {}}) + AtlasView({2: {'color': 'yellow'}}) >>> G[1][2] - {} + {'color': 'yellow'} >>> G.edges[1, 2] - {} + {'color': 'yellow'} You can get/set the attributes of an edge using subscript notation if the edge already exists. @@ -225,6 +247,8 @@ if the edge already exists. >>> G.add_edge(1, 3) >>> G[1][3]['color'] = "blue" >>> G.edges[1, 2]['color'] = "red" + >>> G.edges[1, 2] + {'color': 'red'} Fast examination of all (node, adjacency) pairs is achieved using ``G.adjacency()``, or ``G.adj.items()``. @@ -237,7 +261,7 @@ Note that for undirected graphs, adjacency iteration sees each edge twice. >>> for n, nbrs in FG.adj.items(): ... for nbr, eattr in nbrs.items(): ... wt = eattr['weight'] - ... if wt < 0.5: print('(%d, %d, %.3f)' % (n, nbr, wt)) + ... if wt < 0.5: print(f"({n}, {nbr}, {wt:.3})") (1, 2, 0.125) (2, 1, 0.125) (3, 4, 0.375) @@ -248,10 +272,13 @@ Convenient access to all edges is achieved with the edges property. .. nbplot:: >>> for (u, v, wt) in FG.edges.data('weight'): - ... if wt < 0.5: print('(%d, %d, %.3f)' % (u, v, wt)) + ... if wt < 0.5: + ... print(f"({u}, {v}, {wt:.3})") (1, 2, 0.125) (3, 4, 0.375) +.. 
_attributes:
+
 Adding attributes to graphs, nodes, and edges
 ---------------------------------------------
@@ -296,7 +323,7 @@ Add node attributes using ``add_node()``, ``add_nodes_from()``, or ``G.nodes``
    {'time': '5pm'}
    >>> G.nodes[1]['room'] = 714
    >>> G.nodes.data()
-   NodeDataView({1: {'room': 714, 'time': '5pm'}, 3: {'time': '2pm'}})
+   NodeDataView({1: {'time': '5pm', 'room': 714}, 3: {'time': '2pm'}})
 
 Note that adding a node to ``G.nodes`` does not add it to the graph, use
 ``G.add_node()`` to add new nodes. Similarly for edges.
@@ -321,9 +348,9 @@ algorithms requiring weighted edges.
 Directed graphs
 ---------------
 
-The :class:`DiGraph` class provides additional properties specific to
-directed edges, e.g.,
-:meth:`DiGraph.out_edges`, :meth:`DiGraph.in_degree`,
+The :class:`DiGraph` class provides additional methods and properties specific
+to directed edges, e.g.,
+:attr:`DiGraph.out_edges`, :attr:`DiGraph.in_degree`,
 :meth:`DiGraph.predecessors`, :meth:`DiGraph.successors` etc.
 To allow algorithms to work with both classes easily, the directed versions of
 ``neighbors()`` is equivalent to ``successors()`` while ``degree`` reports
@@ -351,7 +378,7 @@ convert it using :meth:`Graph.to_undirected` or with
 
 .. nbplot::
 
-    >>> H = nx.Graph(G)  # convert G to undirected graph
+    >>> H = nx.Graph(G)  # create an undirected graph H from a directed graph G
 
 Multigraphs
 -----------
@@ -388,29 +415,40 @@ Graph generators and graph operations
 In addition to constructing graphs node-by-node or edge-by-edge, they
 can also be generated by
 
-1. Applying classic graph operations, such as::
+1. Applying classic graph operations, such as:
+
+.. autosummary::
 
-   subgraph(G, nbunch) - induced subgraph view of G on nodes in nbunch
-   union(G1,G2) - graph union
-   disjoint_union(G1,G2) - graph union assuming all nodes are different
-   cartesian_product(G1,G2) - return Cartesian product graph
-   compose(G1,G2) - combine graphs identifying nodes common to both
-   complement(G) - graph complement
-   create_empty_copy(G) - return an empty copy of the same graph class
-   convert_to_undirected(G) - return an undirected representation of G
-   convert_to_directed(G) - return a directed representation of G
+   subgraph - induced subgraph view of G on nodes in nbunch
+   union - graph union
+   disjoint_union - graph union assuming all nodes are different
+   cartesian_product - return Cartesian product graph
+   compose - combine graphs identifying nodes common to both
+   complement - graph complement
+   create_empty_copy - return an empty copy of the same graph class
+   to_undirected - return an undirected representation of G
+   to_directed - return a directed representation of G
 
 2. Using a call to one of the classic small graphs, e.g.,
 
-.. nbplot::
+.. autosummary::
 
-   >>> petersen = nx.petersen_graph()
-   >>> tutte = nx.tutte_graph()
-   >>> maze = nx.sedgewick_maze_graph()
-   >>> tet = nx.tetrahedral_graph()
+   petersen_graph
+   tutte_graph
+   sedgewick_maze_graph
+   tetrahedral_graph
 
 3. Using a (constructive) generator for a classic graph, e.g.,
 
+.. autosummary::
+
+   complete_graph
+   complete_bipartite_graph
+   barbell_graph
+   lollipop_graph
+
+like so:
+
 .. nbplot::
 
    >>> K_5 = nx.complete_graph(5)
    >>> K_3_5 = nx.complete_bipartite_graph(3, 5)
    >>> barbell = nx.barbell_graph(10, 10)
    >>> lollipop = nx.lollipop_graph(10, 20)
 
-4. Using a stochastic graph generator, e.g.,
+4. Using a stochastic graph generator, e.g.,
+
+.. autosummary::
+
+   erdos_renyi_graph
+   watts_strogatz_graph
+   barabasi_albert_graph
+   random_lobster
+
+like so:
+
..
nbplot:: @@ -450,7 +497,7 @@ functions such as: >>> G.add_edges_from([(1, 2), (1, 3)]) >>> G.add_node("spam") # adds node "spam" >>> list(nx.connected_components(G)) - [set([1, 2, 3]), set(['spam'])] + [{1, 2, 3}, {'spam'}] >>> sorted(d for n, d in G.degree()) [0, 1, 1, 2] >>> nx.clustering(G) @@ -463,7 +510,7 @@ These are easily stored in a `dict` structure if you desire. >>> sp = dict(nx.all_pairs_shortest_path(G)) >>> sp[3] - {1: [3, 1], 2: [3, 1, 2], 3: [3]} + {3: [3], 1: [3, 1], 2: [3, 1, 2]} See :doc:`/reference/algorithms/index` for details on graph algorithms supported. @@ -482,10 +529,6 @@ First import Matplotlib's plot interface (pylab works too) >>> import matplotlib.pyplot as plt -You may find it useful to interactively test code using ``ipython -pylab``, -which combines the power of ipython and matplotlib and provides a convenient -interactive mode. - To test if the import of ``networkx.drawing`` was successful draw ``G`` using one of .. nbplot:: diff --git a/examples/3d_drawing/mayavi2_spring.py b/examples/3d_drawing/mayavi2_spring.py index ea61b07..7aa0b30 100644 --- a/examples/3d_drawing/mayavi2_spring.py +++ b/examples/3d_drawing/mayavi2_spring.py @@ -3,15 +3,11 @@ Mayavi2 ======= -This is """ -# needs mayavi2 -# run with ipython -wthread import networkx as nx import numpy as np from mayavi import mlab -mlab.options.offscreen = True # some graphs to try # H=nx.krackhardt_kite_graph() @@ -28,19 +24,18 @@ # scalar colors scalars = np.array(list(G.nodes())) + 5 -mlab.figure(1, bgcolor=(0, 0, 0)) -mlab.clf() - -pts = mlab.points3d(xyz[:, 0], xyz[:, 1], xyz[:, 2], - scalars, - scale_factor=0.1, - scale_mode='none', - colormap='Blues', - resolution=20) +pts = mlab.points3d( + xyz[:, 0], + xyz[:, 1], + xyz[:, 2], + scalars, + scale_factor=0.1, + scale_mode="none", + colormap="Blues", + resolution=20, +) pts.mlab_source.dataset.lines = np.array(list(G.edges())) tube = mlab.pipeline.tube(pts, tube_radius=0.01) mlab.pipeline.surface(tube, color=(0.8, 0.8, 0.8)) - -mlab.savefig('mayavi2_spring.png') -# mlab.show() # interactive window +mlab.show() diff --git a/examples/README.txt b/examples/README.txt index c7bbdbe..d0049bd 100644 --- a/examples/README.txt +++ b/examples/README.txt @@ -1,7 +1,7 @@ .. _examples_gallery: -Examples -======== +Gallery +======= General-purpose and introductory examples for NetworkX. The `tutorial <../tutorial.html>`_ introduces conventions and basic graph diff --git a/examples/advanced/plot_heavy_metal_umlaut.py b/examples/advanced/plot_heavy_metal_umlaut.py index 70c66b6..ef6cf1f 100644 --- a/examples/advanced/plot_heavy_metal_umlaut.py +++ b/examples/advanced/plot_heavy_metal_umlaut.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- """ ================== Heavy Metal Umlaut @@ -10,34 +8,17 @@ Also shows creative use of the Heavy Metal Umlaut: https://en.wikipedia.org/wiki/Heavy_metal_umlaut """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2006-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
import matplotlib.pyplot as plt import networkx as nx -try: - hd = 'H' + unichr(252) + 'sker D' + unichr(252) - mh = 'Mot' + unichr(246) + 'rhead' - mc = 'M' + unichr(246) + 'tley Cr' + unichr(252) + 'e' - st = 'Sp' + unichr(305) + 'n' + unichr(776) + 'al Tap' - q = 'Queensr' + unichr(255) + 'che' - boc = 'Blue ' + unichr(214) + 'yster Cult' - dt = 'Deatht' + unichr(246) + 'ngue' -except NameError: - hd = 'H' + chr(252) + 'sker D' + chr(252) - mh = 'Mot' + chr(246) + 'rhead' - mc = 'M' + chr(246) + 'tley Cr' + chr(252) + 'e' - st = 'Sp' + chr(305) + 'n' + chr(776) + 'al Tap' - q = 'Queensr' + chr(255) + 'che' - boc = 'Blue ' + chr(214) + 'yster Cult' - dt = 'Deatht' + chr(246) + 'ngue' +hd = "H" + chr(252) + "sker D" + chr(252) +mh = "Mot" + chr(246) + "rhead" +mc = "M" + chr(246) + "tley Cr" + chr(252) + "e" +st = "Sp" + chr(305) + "n" + chr(776) + "al Tap" +q = "Queensr" + chr(255) + "che" +boc = "Blue " + chr(214) + "yster Cult" +dt = "Deatht" + chr(246) + "ngue" G = nx.Graph() G.add_edge(hd, mh) @@ -50,13 +31,12 @@ G.add_edge(st, mh) # write in UTF-8 encoding -fh = open('edgelist.utf-8', 'wb') -fh.write('# -*- coding: utf-8 -*-\n'.encode('utf-8')) # encoding hint for emacs -nx.write_multiline_adjlist(G, fh, delimiter='\t', encoding='utf-8') +fh = open("edgelist.utf-8", "wb") +nx.write_multiline_adjlist(G, fh, delimiter="\t", encoding="utf-8") # read and store in UTF-8 -fh = open('edgelist.utf-8', 'rb') -H = nx.read_multiline_adjlist(fh, delimiter='\t', encoding='utf-8') +fh = open("edgelist.utf-8", "rb") +H = nx.read_multiline_adjlist(fh, delimiter="\t", encoding="utf-8") for n in G.nodes(): if n not in H: diff --git a/examples/advanced/iterated_dynamical_systems.py b/examples/advanced/plot_iterated_dynamical_systems.py similarity index 90% rename from examples/advanced/iterated_dynamical_systems.py rename to examples/advanced/plot_iterated_dynamical_systems.py index 6d47f33..1dde327 100644 --- a/examples/advanced/iterated_dynamical_systems.py +++ b/examples/advanced/plot_iterated_dynamical_systems.py @@ -74,7 +74,7 @@ associated with discrete dynamical systems. The most famous is the Collatz 3n+1 problem. See the function collatz_problem_digraph below. The Collatz conjecture ---- that every orbit returrns to the fixed point 1 in finite time +--- that every orbit returns to the fixed point 1 in finite time --- is still unproven. Even the great Paul Erdos said "Mathematics is not yet ready for such problems", and offered $500 for its solution. 
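As an aside to the Collatz discussion above: the orbit of a single integer under
the 3n + 1 map already forms a small digraph. A minimal sketch, separate from the
example file itself and assuming only the public networkx API::

    import networkx as nx

    def collatz(n):
        # The 3n + 1 map: halve an even n, otherwise send n to 3n + 1.
        return n // 2 if n % 2 == 0 else 3 * n + 1

    # Follow the orbit of 27 until it reaches the fixed point 1,
    # adding one edge n -> collatz(n) per step.
    G = nx.DiGraph()
    n = 27
    while n != 1:
        G.add_edge(n, collatz(n))
        n = collatz(n)

    print(f"The orbit of 27 visits {len(G)} integers before reaching 1.")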
@@ -96,7 +96,7 @@ def digitsrep(n, b=10): return [0] dlist = [] - while (n > 0): + while n > 0: # Prepend next least-significant digit dlist = [n % b] + dlist # Floor-division @@ -109,7 +109,7 @@ def powersum(n, p, b=10): dlist = digitsrep(n, b) sum = 0 for k in dlist: - sum += k**p + sum += k ** p return sum @@ -146,17 +146,23 @@ def squaring_cycle_graph_old(n, b=10): def sum_of_digits_graph(nmax, b=10): - def f(n): return powersum(n, 1, b) + def f(n): + return powersum(n, 1, b) + return discrete_dynamics_digraph(nmax, f) def squaring_cycle_digraph(nmax, b=10): - def f(n): return powersum(n, 2, b) + def f(n): + return powersum(n, 2, b) + return discrete_dynamics_digraph(nmax, f) def cubing_153_digraph(nmax): - def f(n): return powersum(n, 3, 10) + def f(n): + return powersum(n, 3, 10) + return discrete_dynamics_digraph(nmax, f) @@ -184,6 +190,7 @@ def f(n): return n // 2 else: return 3 * n + 1 + return discrete_dynamics_digraph(nmax, f) @@ -194,12 +201,10 @@ def fixed_points(G): return [n for n in G if G.out_degree(n) == 0] -if __name__ == "__main__": - nmax = 10000 - print("Building cubing_153_digraph(%d)" % nmax) - G = cubing_153_digraph(nmax) - print("Resulting digraph has", len(G), "nodes and", - G.size(), " edges") - print("Shortest path from 177 to 153 is:") - print(nx.shortest_path(G, 177, 153)) - print("fixed points are %s" % fixed_points(G)) +nmax = 10000 +print(f"Building cubing_153_digraph({nmax})") +G = cubing_153_digraph(nmax) +print("Resulting digraph has", len(G), "nodes and", G.size(), " edges") +print("Shortest path from 177 to 153 is:") +print(nx.shortest_path(G, 177, 153)) +print(f"fixed points are {fixed_points(G)}") diff --git a/examples/advanced/plot_parallel_betweenness.py b/examples/advanced/plot_parallel_betweenness.py index 897656f..7a27aba 100644 --- a/examples/advanced/plot_parallel_betweenness.py +++ b/examples/advanced/plot_parallel_betweenness.py @@ -10,11 +10,6 @@ the contribution of those nodes to the betweenness centrality of the whole network. Here we divide the network in chunks of nodes and we compute their contribution to the betweenness centrality of the whole network. - -This doesn't work in python2.7.13. It does work in 3.6, 3.5, 3.4, and 3.3. - -It may be related to this: -https://stackoverflow.com/questions/1816958/cant-pickle-type-instancemethod-when-using-multiprocessing-pool-map """ from multiprocessing import Pool @@ -35,26 +30,22 @@ def chunks(l, n): yield x -def _betmap(G_normalized_weight_sources_tuple): - """Pool for multiprocess only accepts functions with one argument. - This function uses a tuple as its only argument. 
We use a named tuple for - python 3 compatibility, and then unpack it when we send it to - `betweenness_centrality_source` - """ - return nx.betweenness_centrality_source(*G_normalized_weight_sources_tuple) - - def betweenness_centrality_parallel(G, processes=None): """Parallel betweenness centrality function""" p = Pool(processes=processes) node_divisor = len(p._pool) * 4 node_chunks = list(chunks(G.nodes(), int(G.order() / node_divisor))) num_chunks = len(node_chunks) - bt_sc = p.map(_betmap, - zip([G] * num_chunks, - [True] * num_chunks, - [None] * num_chunks, - node_chunks)) + bt_sc = p.starmap( + nx.betweenness_centrality_subset, + zip( + [G] * num_chunks, + node_chunks, + [list(G)] * num_chunks, + [True] * num_chunks, + [None] * num_chunks, + ), + ) # Reduce the partial solutions bt_c = bt_sc[0] @@ -64,25 +55,24 @@ def betweenness_centrality_parallel(G, processes=None): return bt_c -if __name__ == "__main__": - G_ba = nx.barabasi_albert_graph(1000, 3) - G_er = nx.gnp_random_graph(1000, 0.01) - G_ws = nx.connected_watts_strogatz_graph(1000, 4, 0.1) - for G in [G_ba, G_er, G_ws]: - print("") - print("Computing betweenness centrality for:") - print(nx.info(G)) - print("\tParallel version") - start = time.time() - bt = betweenness_centrality_parallel(G) - print("\t\tTime: %.4F" % (time.time() - start)) - print("\t\tBetweenness centrality for node 0: %.5f" % (bt[0])) - print("\tNon-Parallel version") - start = time.time() - bt = nx.betweenness_centrality(G) - print("\t\tTime: %.4F seconds" % (time.time() - start)) - print("\t\tBetweenness centrality for node 0: %.5f" % (bt[0])) +G_ba = nx.barabasi_albert_graph(1000, 3) +G_er = nx.gnp_random_graph(1000, 0.01) +G_ws = nx.connected_watts_strogatz_graph(1000, 4, 0.1) +for G in [G_ba, G_er, G_ws]: print("") - - nx.draw(G_ba) - plt.show() + print("Computing betweenness centrality for:") + print(nx.info(G)) + print("\tParallel version") + start = time.time() + bt = betweenness_centrality_parallel(G) + print(f"\t\tTime: {(time.time() - start):.4F} seconds") + print(f"\t\tBetweenness centrality for node 0: {bt[0]:.5f}") + print("\tNon-Parallel version") + start = time.time() + bt = nx.betweenness_centrality(G) + print(f"\t\tTime: {(time.time() - start):.4F} seconds") + print(f"\t\tBetweenness centrality for node 0: {bt[0]:.5f}") +print("") + +nx.draw(G_ba, node_size=100) +plt.show() diff --git a/examples/algorithms/beam_search.py b/examples/algorithms/plot_beam_search.py similarity index 69% rename from examples/algorithms/beam_search.py rename to examples/algorithms/plot_beam_search.py index 211002b..fd6a21a 100644 --- a/examples/algorithms/beam_search.py +++ b/examples/algorithms/plot_beam_search.py @@ -1,6 +1,3 @@ -# beam_search.py - progressive widening beam search -# -# Copyright 2016-2018 NetworkX developers. """ =========== Beam Search @@ -13,6 +10,7 @@ """ import math +import matplotlib.pyplot as plt import networkx as nx @@ -57,9 +55,7 @@ def progressive_widening_search(G, source, value, condition, initial_width=1): # least the number of nodes in the graph, so the final invocation of # `bfs_beam_edges` is equivalent to a plain old breadth-first # search. Therefore, all nodes will eventually be visited. - # - # TODO In Python 3.3+, this should be `math.log2(len(G))`. 
- log_m = math.ceil(math.log(len(G), 2)) + log_m = math.ceil(math.log2(len(G))) for i in range(log_m): width = initial_width * pow(2, i) # Since we are always starting from the same source node, this @@ -70,32 +66,44 @@ def progressive_widening_search(G, source, value, condition, initial_width=1): return v # At this point, since all nodes have been visited, we know that # none of the nodes satisfied the termination condition. - raise nx.NodeNotFound('no node satisfied the termination condition') + raise nx.NodeNotFound("no node satisfied the termination condition") + +############################################################################### +# Search for a node with high centrality. +# --------------------------------------- +# +# We generate a random graph, compute the centrality of each node, then perform +# the progressive widening search in order to find a node of high centrality. -def main(): - """Search for a node with high centrality. +G = nx.gnp_random_graph(100, 0.5) +centrality = nx.eigenvector_centrality(G) +avg_centrality = sum(centrality.values()) / len(G) - In this example, we generate a random graph, compute the centrality - of each node, then perform the progressive widening search in order - to find a node of high centrality. - """ - G = nx.gnp_random_graph(100, 0.5) - centrality = nx.eigenvector_centrality(G) - avg_centrality = sum(centrality.values()) / len(G) +def has_high_centrality(v): + return centrality[v] >= avg_centrality - def has_high_centrality(v): - return centrality[v] >= avg_centrality - source = 0 - value = centrality.get - condition = has_high_centrality +source = 0 +value = centrality.get +condition = has_high_centrality - found_node = progressive_widening_search(G, source, value, condition) - c = centrality[found_node] - print('found node {0} with centrality {1}'.format(found_node, c)) +found_node = progressive_widening_search(G, source, value, condition) +c = centrality[found_node] +print(f"found node {found_node} with centrality {c}") -if __name__ == '__main__': - main() +# Draw graph +pos = nx.spring_layout(G) +options = { + "node_color": "blue", + "node_size": 20, + "edge_color": "grey", + "linewidths": 0, + "width": 0.1, +} +nx.draw(G, pos, **options) +# Draw node with high centrality as large and red +nx.draw_networkx_nodes(G, pos, nodelist=[found_node], node_size=100, node_color="r") +plt.show() diff --git a/examples/algorithms/plot_blockmodel.py b/examples/algorithms/plot_blockmodel.py index 114ee3f..7b67f21 100644 --- a/examples/algorithms/plot_blockmodel.py +++ b/examples/algorithms/plot_blockmodel.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# encoding: utf-8 """ ========== Blockmodel @@ -22,7 +20,6 @@ } """ -# Authors: Drew Conway <drew.conway@nyu.edu>, Aric Hagberg <hagberg@lanl.gov> from collections import defaultdict @@ -52,32 +49,31 @@ def create_hc(G): return list(partition.values()) -if __name__ == '__main__': - G = nx.read_edgelist("hartford_drug.edgelist") +G = nx.read_edgelist("hartford_drug.edgelist") - # Extract largest connected component into graph H - H = next(nx.connected_component_subgraphs(G)) - # Makes life easier to have consecutively labeled integer nodes - H = nx.convert_node_labels_to_integers(H) - # Create parititions with hierarchical clustering - partitions = create_hc(H) - # Build blockmodel graph - BM = nx.quotient_graph(H, partitions, relabel=True) +# Extract largest connected component into graph H +H = G.subgraph(next(nx.connected_components(G))) +# Makes life easier to have consecutively labeled integer 
nodes
+H = nx.convert_node_labels_to_integers(H)
+# Create partitions with hierarchical clustering
+partitions = create_hc(H)
+# Build blockmodel graph
+BM = nx.quotient_graph(H, partitions, relabel=True)
 
-    # Draw original graph
-    pos = nx.spring_layout(H, iterations=100)
-    plt.subplot(211)
-    nx.draw(H, pos, with_labels=False, node_size=10)
+# Draw original graph
+pos = nx.spring_layout(H, iterations=100)
+plt.subplot(211)
+nx.draw(H, pos, with_labels=False, node_size=10)
 
-    # Draw block model with weighted edges and nodes sized by number of internal nodes
-    node_size = [BM.nodes[x]['nnodes'] * 10 for x in BM.nodes()]
-    edge_width = [(2 * d['weight']) for (u, v, d) in BM.edges(data=True)]
-    # Set positions to mean of positions of internal nodes from original graph
-    posBM = {}
-    for n in BM:
-        xy = numpy.array([pos[u] for u in BM.nodes[n]['graph']])
-        posBM[n] = xy.mean(axis=0)
-    plt.subplot(212)
-    nx.draw(BM, posBM, node_size=node_size, width=edge_width, with_labels=False)
-    plt.axis('off')
-    plt.show()
+# Draw block model with weighted edges and nodes sized by number of internal nodes
+node_size = [BM.nodes[x]["nnodes"] * 10 for x in BM.nodes()]
+edge_width = [(2 * d["weight"]) for (u, v, d) in BM.edges(data=True)]
+# Set positions to mean of positions of internal nodes from original graph
+posBM = {}
+for n in BM:
+    xy = numpy.array([pos[u] for u in BM.nodes[n]["graph"]])
+    posBM[n] = xy.mean(axis=0)
+plt.subplot(212)
+nx.draw(BM, posBM, node_size=node_size, width=edge_width, with_labels=False)
+plt.axis("off")
+plt.show()
diff --git a/examples/algorithms/plot_davis_club.py b/examples/algorithms/plot_davis_club.py
index 2e4a265..57b4c3d 100644
--- a/examples/algorithms/plot_davis_club.py
+++ b/examples/algorithms/plot_davis_club.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 """
 ==========
 Davis Club
 ==========
@@ -18,26 +17,26 @@
 import networkx.algorithms.bipartite as bipartite
 
 G = nx.davis_southern_women_graph()
-women = G.graph['top']
-clubs = G.graph['bottom']
+women = G.graph["top"]
+clubs = G.graph["bottom"]
 
 print("Biadjacency matrix")
 print(bipartite.biadjacency_matrix(G, women, clubs))
 
 # project bipartite graph onto women nodes
 W = bipartite.projected_graph(G, women)
-print('')
+print()
 print("#Friends, Member")
 for w in women:
-    print('%d %s' % (W.degree(w), w))
+    print(f"{W.degree(w)} {w}")
 
 # project bipartite graph onto women nodes keeping number of co-occurence
 # the degree computed is weighted and counts the total number of shared contacts
 W = bipartite.weighted_projected_graph(G, women)
-print('')
+print()
 print("#Friend meetings, Member")
 for w in women:
-    print('%d %s' % (W.degree(w, weight='weight'), w))
+    print(f"{W.degree(w, weight='weight')} {w}")
 
 nx.draw(G)
 plt.show()
diff --git a/examples/algorithms/plot_decomposition.py b/examples/algorithms/plot_decomposition.py
new file mode 100644
index 0000000..7b83a71
--- /dev/null
+++ b/examples/algorithms/plot_decomposition.py
@@ -0,0 +1,40 @@
+"""
+=============
+Decomposition
+=============
+
+Example of creating a junction tree from a directed graph.
+""" + +import networkx as nx +from networkx.algorithms import moral +from networkx.algorithms.tree.decomposition import junction_tree +from networkx.drawing.nx_agraph import graphviz_layout as layout +import matplotlib.pyplot as plt + +B = nx.DiGraph() +B.add_nodes_from(["A", "B", "C", "D", "E", "F"]) +B.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("B", "F"), ("C", "E"), ("E", "F")] +) + +options = {"with_labels": True, "node_color": "white", "edgecolors": "blue"} + +bayes_pos = layout(B, prog="neato") +ax1 = plt.subplot(1, 3, 1) +plt.title("Bayesian Network") +nx.draw_networkx(B, pos=bayes_pos, **options) + +mg = moral.moral_graph(B) +plt.subplot(1, 3, 2, sharex=ax1, sharey=ax1) +plt.title("Moralized Graph") +nx.draw_networkx(mg, pos=bayes_pos, **options) + +jt = junction_tree(B) +plt.subplot(1, 3, 3) +plt.title("Junction Tree") +nsize = [2000 * len(n) for n in list(jt.nodes())] +nx.draw_networkx(jt, pos=layout(jt, prog="neato"), node_size=nsize, **options) + +plt.tight_layout() +plt.show() diff --git a/examples/algorithms/plot_krackhardt_centrality.py b/examples/algorithms/plot_krackhardt_centrality.py index ff64373..5ce838b 100644 --- a/examples/algorithms/plot_krackhardt_centrality.py +++ b/examples/algorithms/plot_krackhardt_centrality.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ===================== Krackhardt Centrality @@ -6,16 +5,6 @@ Centrality measures of Krackhardt social network. """ -# Author: Aric Hagberg (hagberg@lanl.gov) -# Date: 2005-05-12 14:33:11 -0600 (Thu, 12 May 2005) -# Revision: 998 - -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. import matplotlib.pyplot as plt import networkx as nx @@ -25,17 +14,17 @@ print("Betweenness") b = nx.betweenness_centrality(G) for v in G.nodes(): - print("%0.2d %5.3f" % (v, b[v])) + print(f"{v:2} {b[v]:.3f}") print("Degree centrality") d = nx.degree_centrality(G) for v in G.nodes(): - print("%0.2d %5.3f" % (v, d[v])) + print(f"{v:2} {d[v]:.3f}") print("Closeness centrality") c = nx.closeness_centrality(G) for v in G.nodes(): - print("%0.2d %5.3f" % (v, c[v])) + print(f"{v:2} {c[v]:.3f}") nx.draw(G) plt.show() diff --git a/examples/algorithms/rcm.py b/examples/algorithms/plot_rcm.py similarity index 61% rename from examples/algorithms/rcm.py rename to examples/algorithms/plot_rcm.py index 30b2be0..8ea926e 100644 --- a/examples/algorithms/rcm.py +++ b/examples/algorithms/plot_rcm.py @@ -9,9 +9,6 @@ reduces the matrix bandwidth. 
""" -# Copyright (C) 2011-2018 by -# Author: Aric Hagberg <aric.hagberg@gmail.com> -# BSD License import networkx as nx from networkx.utils import reverse_cuthill_mckee_ordering import numpy as np @@ -24,15 +21,15 @@ print("unordered Laplacian matrix") A = nx.laplacian_matrix(G) x, y = np.nonzero(A) -#print("lower bandwidth:",(y-x).max()) -#print("upper bandwidth:",(x-y).max()) -print("bandwidth: %d" % ((y - x).max() + (x - y).max() + 1)) +# print(f"lower bandwidth: {(y - x).max()}") +# print(f"upper bandwidth: {(x - y).max()}") +print(f"bandwidth: {(y - x).max() + (x - y).max() + 1}") print(A) B = nx.laplacian_matrix(G, nodelist=rcm) print("low-bandwidth Laplacian matrix") x, y = np.nonzero(B) -#print("lower bandwidth:",(y-x).max()) -#print("upper bandwidth:",(x-y).max()) -print("bandwidth: %d" % ((y - x).max() + (x - y).max() + 1)) +# print(f"lower bandwidth: {(y - x).max()}") +# print(f"upper bandwidth: {(x - y).max()}") +print(f"bandwidth: {(y - x).max() + (x - y).max() + 1}") print(B) diff --git a/examples/applications/README.txt b/examples/applications/README.txt new file mode 100644 index 0000000..1b557aa --- /dev/null +++ b/examples/applications/README.txt @@ -0,0 +1,2 @@ +Applications +------------ diff --git a/examples/applications/circuits.py b/examples/applications/plot_circuits.py similarity index 50% rename from examples/applications/circuits.py rename to examples/applications/plot_circuits.py index 455c68a..dad487d 100644 --- a/examples/applications/circuits.py +++ b/examples/applications/plot_circuits.py @@ -1,13 +1,9 @@ -#!/usr/bin/env python -# circuits.py - convert a Boolean circuit to an equivalent Boolean formula -# -# Copyright 2016 Jeffrey Finkelstein <jeffrey.finkelstein@gmail.com>. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -"""Convert a Boolean circuit to an equivalent Boolean formula. +""" +======== +Circuits +======== + +Convert a Boolean circuit to an equivalent Boolean formula. A Boolean circuit can be exponentially more expressive than an equivalent formula in the worst case, since the circuit can reuse @@ -27,16 +23,15 @@ def circuit_to_formula(circuit): # Transfer the operator or variable labels for each node from the # circuit to the formula. for v in formula: - source = formula.node[v]['source'] - formula.node[v]['label'] = circuit.node[source]['label'] + source = formula.nodes[v]["source"] + formula.nodes[v]["label"] = circuit.nodes[source]["label"] return formula def formula_to_string(formula): - def _to_string(formula, root): # If there are no children, this is a variable node. - label = formula.node[root]['label'] + label = formula.nodes[root]["label"] if not formula[root]: return label # Otherwise, this is an operator. @@ -44,7 +39,7 @@ def _to_string(formula, root): # If one child, the label must be a NOT operator. if len(children) == 1: child = arbitrary_element(children) - return '{}({})'.format(label, _to_string(formula, child)) + return f"{label}({_to_string(formula, child)})" # NB "left" and "right" here are a little misleading: there is # no order on the children of a node. That's okay because the # Boolean AND and OR operators are symmetric. 
It just means that @@ -54,42 +49,40 @@ def _to_string(formula, root): left, right = formula[root] left_subformula = _to_string(formula, left) right_subformula = _to_string(formula, right) - return '({} {} {})'.format(left_subformula, label, right_subformula) + return f"({left_subformula} {label} {right_subformula})" root = next(v for v, d in formula.in_degree() if d == 0) return _to_string(formula, root) -def main(): - # Create an example Boolean circuit. - # - # This circuit has a ∧ at the output and two ∨s at the next layer. - # The third layer has a variable x that appears in the left ∨, a - # variable y that appears in both the left and right ∨s, and a - # negation for the variable z that appears as the sole node in the - # fourth layer. - circuit = DiGraph() - # Layer 0 - circuit.add_node(0, label='∧') - # Layer 1 - circuit.add_node(1, label='∨') - circuit.add_node(2, label='∨') - circuit.add_edge(0, 1) - circuit.add_edge(0, 2) - # Layer 2 - circuit.add_node(3, label='x') - circuit.add_node(4, label='y') - circuit.add_node(5, label='¬') - circuit.add_edge(1, 3) - circuit.add_edge(1, 4) - circuit.add_edge(2, 4) - circuit.add_edge(2, 5) - # Layer 3 - circuit.add_node(6, label='z') - circuit.add_edge(5, 6) - # Convert the circuit to an equivalent formula. - formula = circuit_to_formula(circuit) - print(formula_to_string(formula)) - -if __name__ == '__main__': - main() +############################################################################### +# Create an example Boolean circuit. +# ---------------------------------- +# +# This circuit has a ∧ at the output and two ∨s at the next layer. +# The third layer has a variable x that appears in the left ∨, a +# variable y that appears in both the left and right ∨s, and a +# negation for the variable z that appears as the sole node in the +# fourth layer. +circuit = DiGraph() +# Layer 0 +circuit.add_node(0, label="∧") +# Layer 1 +circuit.add_node(1, label="∨") +circuit.add_node(2, label="∨") +circuit.add_edge(0, 1) +circuit.add_edge(0, 2) +# Layer 2 +circuit.add_node(3, label="x") +circuit.add_node(4, label="y") +circuit.add_node(5, label="¬") +circuit.add_edge(1, 3) +circuit.add_edge(1, 4) +circuit.add_edge(2, 4) +circuit.add_edge(2, 5) +# Layer 3 +circuit.add_node(6, label="z") +circuit.add_edge(5, 6) +# Convert the circuit to an equivalent formula. +formula = circuit_to_formula(circuit) +print(formula_to_string(formula)) diff --git a/examples/basic/plot_properties.py b/examples/basic/plot_properties.py index 7ee1d5c..0b057df 100644 --- a/examples/basic/plot_properties.py +++ b/examples/basic/plot_properties.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ========== Properties @@ -6,12 +5,6 @@ Compute some network properties for the lollipop graph. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
import matplotlib.pyplot as plt from networkx import nx @@ -23,12 +16,12 @@ print("source vertex {target:length, }") for v in G.nodes(): spl = dict(nx.single_source_shortest_path_length(G, v)) - print('{} {} '.format(v, spl)) + print(f"{v} {spl} ") for p in spl: pathlengths.append(spl[p]) -print('') -print("average shortest path length %s" % (sum(pathlengths) / len(pathlengths))) +print() +print(f"average shortest path length {sum(pathlengths) / len(pathlengths)}") # histogram of path lengths dist = {} @@ -38,18 +31,18 @@ else: dist[p] = 1 -print('') +print() print("length #paths") verts = dist.keys() for d in sorted(verts): - print('%s %d' % (d, dist[d])) - -print("radius: %d" % nx.radius(G)) -print("diameter: %d" % nx.diameter(G)) -print("eccentricity: %s" % nx.eccentricity(G)) -print("center: %s" % nx.center(G)) -print("periphery: %s" % nx.periphery(G)) -print("density: %s" % nx.density(G)) + print(f"{d} {dist[d]}") + +print(f"radius: {nx.radius(G)}") +print(f"diameter: {nx.diameter(G)}") +print(f"eccentricity: {nx.eccentricity(G)}") +print(f"center: {nx.center(G)}") +print(f"periphery: {nx.periphery(G)}") +print(f"density: {nx.density(G)}") nx.draw(G, with_labels=True) plt.show() diff --git a/examples/basic/plot_read_write.py b/examples/basic/plot_read_write.py index 2b2d3c8..f1e6487 100644 --- a/examples/basic/plot_read_write.py +++ b/examples/basic/plot_read_write.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ====================== Read and write graphs. @@ -6,27 +5,17 @@ Read and write graphs. """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. - -import sys import matplotlib.pyplot as plt import networkx as nx G = nx.grid_2d_graph(5, 5) # 5x5 grid -try: # Python 2.6+ - nx.write_adjlist(G, sys.stdout) # write adjacency list to screen -except TypeError: # Python 3.x - nx.write_adjlist(G, sys.stdout.buffer) # write adjacency list to screen + +# print the adjacency list +for line in nx.generate_adjlist(G): + print(line) # write edgelist to grid.edgelist -nx. write_edgelist(G, path="grid.edgelist", delimiter=":") +nx.write_edgelist(G, path="grid.edgelist", delimiter=":") # read edgelist from grid.edgelist H = nx.read_edgelist(path="grid.edgelist", delimiter=":") diff --git a/examples/drawing/plot_atlas.py b/examples/drawing/plot_atlas.py index 3de3ce0..6bf00f1 100644 --- a/examples/drawing/plot_atlas.py +++ b/examples/drawing/plot_atlas.py @@ -1,38 +1,23 @@ -#!/usr/bin/env python """ ===== Atlas ===== Atlas of all graphs of 6 nodes or less. - """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. import random -try: - import pygraphviz - from networkx.drawing.nx_agraph import graphviz_layout -except ImportError: - try: - import pydot - from networkx.drawing.nx_pydot import graphviz_layout - except ImportError: - raise ImportError("This example needs Graphviz and either " - "PyGraphviz or pydot.") +# This example needs Graphviz and either PyGraphviz or pydot. 
+# from networkx.drawing.nx_pydot import graphviz_layout +from networkx.drawing.nx_agraph import graphviz_layout import matplotlib.pyplot as plt import networkx as nx -from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic as isomorphic +from networkx.algorithms.isomorphism.isomorph import ( + graph_could_be_isomorphic as isomorphic, +) from networkx.generators.atlas import graph_atlas_g @@ -50,8 +35,8 @@ def atlas6(): G.remove_node(n) U = nx.disjoint_union(U, G) - # list of graphs of all connected components - C = nx.connected_component_subgraphs(U) + # iterator of graphs of all connected components + C = (U.subgraph(c) for c in nx.connected_components(U)) UU = nx.Graph() # do quick isomorphic-like check, not a true isomorphism checker @@ -72,26 +57,17 @@ def iso(G1, glist): return False -if __name__ == '__main__': - G = atlas6() - - print("graph has %d nodes with %d edges" - % (nx.number_of_nodes(G), nx.number_of_edges(G))) - print(nx.number_connected_components(G), "connected components") - - plt.figure(1, figsize=(8, 8)) - # layout graphs with positions using graphviz neato - pos = graphviz_layout(G, prog="neato") - # color nodes the same in each connected subgraph - C = nx.connected_component_subgraphs(G) - for g in C: - c = [random.random()] * nx.number_of_nodes(g) # random color... - nx.draw(g, - pos, - node_size=40, - node_color=c, - vmin=0.0, - vmax=1.0, - with_labels=False - ) - plt.show() +G = atlas6() + +print(f"graph has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges") +print(nx.number_connected_components(G), "connected components") + +plt.figure(1, figsize=(8, 8)) +# layout graphs with positions using graphviz neato +pos = graphviz_layout(G, prog="neato") +# color nodes the same in each connected subgraph +C = (G.subgraph(c) for c in nx.connected_components(G)) +for g in C: + c = [random.random()] * nx.number_of_nodes(g) # random color... + nx.draw(g, pos, node_size=40, node_color=c, vmin=0.0, vmax=1.0, with_labels=False) +plt.show() diff --git a/examples/drawing/plot_chess_masters.py b/examples/drawing/plot_chess_masters.py index 278b4b1..35f47d7 100644 --- a/examples/drawing/plot_chess_masters.py +++ b/examples/drawing/plot_chess_masters.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - """ ============= Chess Masters @@ -26,23 +24,13 @@ where `game_info` is a `dict` describing each game. """ -# Copyright (C) 2006-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
import matplotlib.pyplot as plt import networkx as nx # tag names specifying what game info should be # stored in the dict on each digraph edge -game_details = ["Event", - "Date", - "Result", - "ECO", - "Site"] +game_details = ["Event", "Date", "Result", "ECO", "Site"] def chess_pgn_graph(pgn_file="chess_masters_WCC.pgn.bz2"): @@ -55,104 +43,105 @@ def chess_pgn_graph(pgn_file="chess_masters_WCC.pgn.bz2"): """ import bz2 + G = nx.MultiDiGraph() game = {} datafile = bz2.BZ2File(pgn_file) - lines = (line.decode().rstrip('\r\n') for line in datafile) + lines = (line.decode().rstrip("\r\n") for line in datafile) for line in lines: - if line.startswith('['): - tag, value = line[1:-1].split(' ', 1) + if line.startswith("["): + tag, value = line[1:-1].split(" ", 1) game[str(tag)] = value.strip('"') else: # empty line after tag set indicates # we finished reading game info if game: - white = game.pop('White') - black = game.pop('Black') + white = game.pop("White") + black = game.pop("Black") G.add_edge(white, black, **game) game = {} return G -if __name__ == '__main__': - G = chess_pgn_graph() - - ngames = G.number_of_edges() - nplayers = G.number_of_nodes() - - print("Loaded %d chess games between %d players\n" - % (ngames, nplayers)) - - # identify connected components - # of the undirected version - Gcc = list(nx.connected_component_subgraphs(G.to_undirected())) - if len(Gcc) > 1: - print("Note the disconnected component consisting of:") - print(Gcc[1].nodes()) - - # find all games with B97 opening (as described in ECO) - openings = set([game_info['ECO'] - for (white, black, game_info) in G.edges(data=True)]) - print("\nFrom a total of %d different openings," % len(openings)) - print('the following games used the Sicilian opening') - print('with the Najdorff 7...Qb6 "Poisoned Pawn" variation.\n') - - for (white, black, game_info) in G.edges(data=True): - if game_info['ECO'] == 'B97': - print(white, "vs", black) - for k, v in game_info.items(): - print(" ", k, ": ", v) - print("\n") - - # make new undirected graph H without multi-edges - H = nx.Graph(G) - - # edge width is proportional number of games played - edgewidth = [] - for (u, v, d) in H.edges(data=True): - edgewidth.append(len(G.get_edge_data(u, v))) - - # node size is proportional to number of games won - wins = dict.fromkeys(G.nodes(), 0.0) - for (u, v, d) in G.edges(data=True): - r = d['Result'].split('-') - if r[0] == '1': - wins[u] += 1.0 - elif r[0] == '1/2': - wins[u] += 0.5 - wins[v] += 0.5 - else: - wins[v] += 1.0 - try: - pos = nx.nx_agraph.graphviz_layout(H) - except: - pos = nx.spring_layout(H, iterations=20) - - plt.rcParams['text.usetex'] = False - plt.figure(figsize=(8, 8)) - nx.draw_networkx_edges(H, pos, alpha=0.3, width=edgewidth, edge_color='m') - nodesize = [wins[v] * 50 for v in H] - nx.draw_networkx_nodes(H, pos, node_size=nodesize, node_color='w', alpha=0.4) - nx.draw_networkx_edges(H, pos, alpha=0.4, node_size=0, width=1, edge_color='k') - nx.draw_networkx_labels(H, pos, fontsize=14) - font = {'fontname': 'Helvetica', - 'color': 'k', - 'fontweight': 'bold', - 'fontsize': 14} - plt.title("World Chess Championship Games: 1886 - 1985", font) - - # change font and write text (using data coordinates) - font = {'fontname': 'Helvetica', - 'color': 'r', - 'fontweight': 'bold', - 'fontsize': 14} - - plt.text(0.5, 0.97, "edge width = # games played", - horizontalalignment='center', - transform=plt.gca().transAxes) - plt.text(0.5, 0.94, "node size = # games won", - horizontalalignment='center', - 
transform=plt.gca().transAxes) - - plt.axis('off') - plt.show() +G = chess_pgn_graph() + +ngames = G.number_of_edges() +nplayers = G.number_of_nodes() + +print(f"Loaded {ngames} chess games between {nplayers} players\n") + +# identify connected components +# of the undirected version +H = G.to_undirected() +Gcc = [H.subgraph(c) for c in nx.connected_components(H)] +if len(Gcc) > 1: + print("Note the disconnected component consisting of:") + print(Gcc[1].nodes()) + +# find all games with B97 opening (as described in ECO) +openings = {game_info["ECO"] for (white, black, game_info) in G.edges(data=True)} +print(f"\nFrom a total of {len(openings)} different openings,") +print("the following games used the Sicilian opening") +print('with the Najdorff 7...Qb6 "Poisoned Pawn" variation.\n') + +for (white, black, game_info) in G.edges(data=True): + if game_info["ECO"] == "B97": + print(white, "vs", black) + for k, v in game_info.items(): + print(" ", k, ": ", v) + print("\n") + +# make new undirected graph H without multi-edges +H = nx.Graph(G) + +# edge width is proportional number of games played +edgewidth = [] +for (u, v, d) in H.edges(data=True): + edgewidth.append(len(G.get_edge_data(u, v))) + +# node size is proportional to number of games won +wins = dict.fromkeys(G.nodes(), 0.0) +for (u, v, d) in G.edges(data=True): + r = d["Result"].split("-") + if r[0] == "1": + wins[u] += 1.0 + elif r[0] == "1/2": + wins[u] += 0.5 + wins[v] += 0.5 + else: + wins[v] += 1.0 +try: + pos = nx.nx_agraph.graphviz_layout(H) +except ImportError: + pos = nx.spring_layout(H, iterations=20) + +plt.rcParams["text.usetex"] = False +plt.figure(figsize=(8, 8)) +nx.draw_networkx_edges(H, pos, alpha=0.3, width=edgewidth, edge_color="m") +nodesize = [wins[v] * 50 for v in H] +nx.draw_networkx_nodes(H, pos, node_size=nodesize, node_color="w", alpha=0.4) +nx.draw_networkx_edges(H, pos, alpha=0.4, node_size=0, width=1, edge_color="k") +nx.draw_networkx_labels(H, pos, font_size=14) +font = {"fontname": "Helvetica", "color": "k", "fontweight": "bold", "fontsize": 14} +plt.title("World Chess Championship Games: 1886 - 1985", font) + +# change font and write text (using data coordinates) +font = {"fontname": "Helvetica", "color": "r", "fontweight": "bold", "fontsize": 14} + +plt.text( + 0.5, + 0.97, + "edge width = # games played", + horizontalalignment="center", + transform=plt.gca().transAxes, +) +plt.text( + 0.5, + 0.94, + "node size = # games won", + horizontalalignment="center", + transform=plt.gca().transAxes, +) + +plt.axis("off") +plt.show() diff --git a/examples/drawing/plot_circular_tree.py b/examples/drawing/plot_circular_tree.py index e6ad066..b7db4b3 100644 --- a/examples/drawing/plot_circular_tree.py +++ b/examples/drawing/plot_circular_tree.py @@ -2,26 +2,19 @@ ============= Circular Tree ============= - -This """ + import matplotlib.pyplot as plt import networkx as nx -try: - import pygraphviz - from networkx.drawing.nx_agraph import graphviz_layout -except ImportError: - try: - import pydot - from networkx.drawing.nx_pydot import graphviz_layout - except ImportError: - raise ImportError("This example needs Graphviz and either " - "PyGraphviz or pydot") +# This example needs Graphviz and either PyGraphviz or pydot +# from networkx.drawing.nx_pydot import graphviz_layout +from networkx.drawing.nx_agraph import graphviz_layout + G = nx.balanced_tree(3, 5) -pos = graphviz_layout(G, prog='twopi', args='') +pos = graphviz_layout(G, prog="twopi", args="") plt.figure(figsize=(8, 8)) nx.draw(G, pos, node_size=20, 
alpha=0.5, node_color="blue", with_labels=False) -plt.axis('equal') +plt.axis("equal") plt.show() diff --git a/examples/drawing/plot_degree_histogram.py b/examples/drawing/plot_degree_histogram.py index 1095533..86d6b7c 100644 --- a/examples/drawing/plot_degree_histogram.py +++ b/examples/drawing/plot_degree_histogram.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ================ Degree histogram @@ -14,12 +13,11 @@ G = nx.gnp_random_graph(100, 0.02) degree_sequence = sorted([d for n, d in G.degree()], reverse=True) # degree sequence -# print "Degree sequence", degree_sequence degreeCount = collections.Counter(degree_sequence) deg, cnt = zip(*degreeCount.items()) fig, ax = plt.subplots() -plt.bar(deg, cnt, width=0.80, color='b') +plt.bar(deg, cnt, width=0.80, color="b") plt.title("Degree Histogram") plt.ylabel("Count") @@ -29,10 +27,9 @@ # draw graph in inset plt.axes([0.4, 0.4, 0.5, 0.5]) -Gcc = sorted(nx.connected_component_subgraphs(G), key=len, reverse=True)[0] +Gcc = G.subgraph(sorted(nx.connected_components(G), key=len, reverse=True)[0]) pos = nx.spring_layout(G) -plt.axis('off') +plt.axis("off") nx.draw_networkx_nodes(G, pos, node_size=20) nx.draw_networkx_edges(G, pos, alpha=0.4) - plt.show() diff --git a/examples/drawing/plot_degree_rank.py b/examples/drawing/plot_degree_rank.py index 950a7d2..61039cc 100644 --- a/examples/drawing/plot_degree_rank.py +++ b/examples/drawing/plot_degree_rank.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ =========== Degree Rank @@ -7,27 +6,24 @@ Random graph from given degree sequence. Draw degree rank plot and graph with matplotlib. """ -# Author: Aric Hagberg <aric.hagberg@gmail.com> import networkx as nx import matplotlib.pyplot as plt G = nx.gnp_random_graph(100, 0.02) degree_sequence = sorted([d for n, d in G.degree()], reverse=True) -# print "Degree sequence", degree_sequence dmax = max(degree_sequence) -plt.loglog(degree_sequence, 'b-', marker='o') +plt.loglog(degree_sequence, "b-", marker="o") plt.title("Degree rank plot") plt.ylabel("degree") plt.xlabel("rank") # draw graph in inset plt.axes([0.45, 0.45, 0.45, 0.45]) -Gcc = sorted(nx.connected_component_subgraphs(G), key=len, reverse=True)[0] +Gcc = G.subgraph(sorted(nx.connected_components(G), key=len, reverse=True)[0]) pos = nx.spring_layout(Gcc) -plt.axis('off') +plt.axis("off") nx.draw_networkx_nodes(Gcc, pos, node_size=20) nx.draw_networkx_edges(Gcc, pos, alpha=0.4) - plt.show() diff --git a/examples/drawing/plot_directed.py b/examples/drawing/plot_directed.py index a881ef9..6c06257 100644 --- a/examples/drawing/plot_directed.py +++ b/examples/drawing/plot_directed.py @@ -1,4 +1,3 @@ -#! /usr/bin/env python """ ============== Directed Graph @@ -8,9 +7,8 @@ Edges have different colors and alphas (opacity). Drawn using matplotlib. 
""" -# Author: Rodrigo Dorantes-Gilardi (rodgdor@gmail.com) -from __future__ import division +import matplotlib as mpl import matplotlib.pyplot as plt import networkx as nx @@ -22,14 +20,25 @@ edge_colors = range(2, M + 2) edge_alphas = [(5 + i) / (M + 4) for i in range(M)] -nodes = nx.draw_networkx_nodes(G, pos, node_size=node_sizes, node_color='blue') -edges = nx.draw_networkx_edges(G, pos, node_size=node_sizes, arrowstyle='->', - arrowsize=10, edge_color=edge_colors, - edge_cmap=plt.cm.Blues, width=2) +nodes = nx.draw_networkx_nodes(G, pos, node_size=node_sizes, node_color="blue") +edges = nx.draw_networkx_edges( + G, + pos, + node_size=node_sizes, + arrowstyle="->", + arrowsize=10, + edge_color=edge_colors, + edge_cmap=plt.cm.Blues, + width=2, +) # set alpha value for each edge for i in range(M): edges[i].set_alpha(edge_alphas[i]) +pc = mpl.collections.PatchCollection(edges, cmap=plt.cm.Blues) +pc.set_array(edge_colors) +plt.colorbar(pc) + ax = plt.gca() ax.set_axis_off() plt.show() diff --git a/examples/drawing/plot_edge_colormap.py b/examples/drawing/plot_edge_colormap.py index ad3fbb6..eb2444b 100644 --- a/examples/drawing/plot_edge_colormap.py +++ b/examples/drawing/plot_edge_colormap.py @@ -1,13 +1,10 @@ -#!/usr/bin/env python """ ============= Edge Colormap ============= Draw a graph with matplotlib, color edges. -You must have matplotlib>=87.7 for this to work. """ -# Author: Aric Hagberg (hagberg@lanl.gov) import matplotlib.pyplot as plt import networkx as nx @@ -15,6 +12,12 @@ G = nx.star_graph(20) pos = nx.spring_layout(G) colors = range(20) -nx.draw(G, pos, node_color='#A0CBE2', edge_color=colors, - width=4, edge_cmap=plt.cm.Blues, with_labels=False) +options = { + "node_color": "#A0CBE2", + "edge_color": colors, + "width": 4, + "edge_cmap": plt.cm.Blues, + "with_labels": False, +} +nx.draw(G, pos, **options) plt.show() diff --git a/examples/drawing/plot_ego_graph.py b/examples/drawing/plot_ego_graph.py index 4099103..002b2fd 100644 --- a/examples/drawing/plot_ego_graph.py +++ b/examples/drawing/plot_ego_graph.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- """ ========= Ego Graph @@ -8,26 +6,29 @@ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. 
""" -# Author: Drew Conway (drew.conway@nyu.edu) from operator import itemgetter import matplotlib.pyplot as plt import networkx as nx -if __name__ == '__main__': - # Create a BA model graph - n = 1000 - m = 2 - G = nx.generators.barabasi_albert_graph(n, m) - # find node with largest degree - node_and_degree = G.degree() - (largest_hub, degree) = sorted(node_and_degree, key=itemgetter(1))[-1] - # Create ego graph of main hub - hub_ego = nx.ego_graph(G, largest_hub) - # Draw graph - pos = nx.spring_layout(hub_ego) - nx.draw(hub_ego, pos, node_color='b', node_size=50, with_labels=False) - # Draw ego as large and red - nx.draw_networkx_nodes(hub_ego, pos, nodelist=[largest_hub], node_size=300, node_color='r') - plt.show() +# Create a BA model graph +n = 1000 +m = 2 +G = nx.generators.barabasi_albert_graph(n, m) + +# find node with largest degree +node_and_degree = G.degree() +(largest_hub, degree) = sorted(node_and_degree, key=itemgetter(1))[-1] + +# Create ego graph of main hub +hub_ego = nx.ego_graph(G, largest_hub) + +# Draw graph +pos = nx.spring_layout(hub_ego) +nx.draw(hub_ego, pos, node_color="b", node_size=50, with_labels=False) + +# Draw ego as large and red +options = {"node_size": 300, "node_color": "r"} +nx.draw_networkx_nodes(hub_ego, pos, nodelist=[largest_hub], **options) +plt.show() diff --git a/examples/drawing/plot_four_grids.py b/examples/drawing/plot_four_grids.py index 9123bb2..deb7096 100644 --- a/examples/drawing/plot_four_grids.py +++ b/examples/drawing/plot_four_grids.py @@ -1,20 +1,10 @@ -#!/usr/bin/env python """ ========== Four Grids ========== Draw a graph with matplotlib. -You must have matplotlib for this to work. """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. import matplotlib.pyplot as plt import networkx as nx @@ -27,13 +17,13 @@ nx.draw(G, pos, font_size=8) plt.subplot(222) -nx.draw(G, pos, node_color='k', node_size=0, with_labels=False) +nx.draw(G, pos, node_color="k", node_size=0, with_labels=False) plt.subplot(223) -nx.draw(G, pos, node_color='g', node_size=250, with_labels=False, width=6) +nx.draw(G, pos, node_color="g", node_size=250, with_labels=False, width=6) plt.subplot(224) H = G.to_directed() -nx.draw(H, pos, node_color='b', node_size=20, with_labels=False) +nx.draw(H, pos, node_color="b", node_size=20, with_labels=False) plt.show() diff --git a/examples/drawing/plot_giant_component.py b/examples/drawing/plot_giant_component.py index 82d05bf..c289bb3 100644 --- a/examples/drawing/plot_giant_component.py +++ b/examples/drawing/plot_giant_component.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ =============== Giant Component @@ -7,32 +6,18 @@ This example illustrates the sudden appearance of a giant connected component in a binomial random graph. """ -# Copyright (C) 2006-2018 -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
 import math

 import matplotlib.pyplot as plt
 import networkx as nx

-try:
-    import pygraphviz
-    from networkx.drawing.nx_agraph import graphviz_layout
-    layout = graphviz_layout
-except ImportError:
-    try:
-        import pydot
-        from networkx.drawing.nx_pydot import graphviz_layout
-        layout = graphviz_layout
-    except ImportError:
-        print("PyGraphviz and pydot not found;\n"
-              "drawing with spring layout;\n"
-              "will be slow.")
-        layout = nx.spring_layout
+# This example needs Graphviz and either PyGraphviz or pydot.
+# from networkx.drawing.nx_pydot import graphviz_layout as layout
+from networkx.drawing.nx_agraph import graphviz_layout as layout
+
+# If you don't have pygraphviz or pydot, you can do this
+# layout = nx.spring_layout

 n = 150  # 150 nodes
@@ -51,26 +36,16 @@
     pos = layout(G)
     region += 1
     plt.subplot(region)
-    plt.title("p = %6.3f" % (p))
-    nx.draw(G, pos,
-            with_labels=False,
-            node_size=10
-            )
+    plt.title(f"p = {p:.3f}")
+    nx.draw(G, pos, with_labels=False, node_size=10)
     # identify largest connected component
-    Gcc = sorted(nx.connected_component_subgraphs(G), key=len, reverse=True)
-    G0 = Gcc[0]
-    nx.draw_networkx_edges(G0, pos,
-                           with_labels=False,
-                           edge_color='r',
-                           width=6.0
-                           )
+    Gcc = sorted(nx.connected_components(G), key=len, reverse=True)
+    G0 = G.subgraph(Gcc[0])
+    nx.draw_networkx_edges(G0, pos, edge_color="r", width=6.0)
     # show other connected components
     for Gi in Gcc[1:]:
         if len(Gi) > 1:
-            nx.draw_networkx_edges(Gi, pos,
-                                   with_labels=False,
-                                   edge_color='r',
-                                   alpha=0.3,
-                                   width=5.0
-                                   )
+            nx.draw_networkx_edges(
+                G.subgraph(Gi), pos, edge_color="r", alpha=0.3, width=5.0,
+            )
 plt.show()
diff --git a/examples/drawing/plot_house_with_colors.py b/examples/drawing/plot_house_with_colors.py
index 1aced4a..add6c0b 100644
--- a/examples/drawing/plot_house_with_colors.py
+++ b/examples/drawing/plot_house_with_colors.py
@@ -1,26 +1,19 @@
-#!/usr/bin/env python
 """
 =================
 House With Colors
 =================
 
 Draw a graph with matplotlib.
-You must have matplotlib for this to work.
 """
-# Author: Aric Hagberg (hagberg@lanl.gov)
 import matplotlib.pyplot as plt
 import networkx as nx
 
 G = nx.house_graph()
 # explicitly set positions
-pos = {0: (0, 0),
-       1: (1, 0),
-       2: (0, 1),
-       3: (1, 1),
-       4: (0.5, 2.0)}
+pos = {0: (0, 0), 1: (1, 0), 2: (0, 1), 3: (1, 1), 4: (0.5, 2.0)}
 
 nx.draw_networkx_nodes(G, pos, node_size=2000, nodelist=[4])
-nx.draw_networkx_nodes(G, pos, node_size=3000, nodelist=[0, 1, 2, 3], node_color='b')
+nx.draw_networkx_nodes(G, pos, node_size=3000, nodelist=[0, 1, 2, 3], node_color="b")
 nx.draw_networkx_edges(G, pos, alpha=0.5, width=6)
-plt.axis('off')
+plt.axis("off")
 plt.show()
diff --git a/examples/drawing/plot_knuth_miles.py b/examples/drawing/plot_knuth_miles.py
index f1ddc25..1a745f1 100644
--- a/examples/drawing/plot_knuth_miles.py
+++ b/examples/drawing/plot_knuth_miles.py
@@ -1,36 +1,25 @@
-#!/usr/bin/env python
 """
 ===========
 Knuth Miles
 ===========
 
-`miles_graph()` returns an undirected graph over the 128 US cities from
-the datafile `miles_dat.txt`. The cities each have location and population
-data. The edges are labeled with the distance between the two cities.
+`miles_graph()` returns an undirected graph over the 128 US cities. The
+cities each have location and population data. The edges are labeled with the
+distance between the two cities.
 
-This example is described in Section 1.1 in Knuth's book (see [1]_ and [2]_).
+This example is described in Section 1.1 of
 
-References.
------------
-
-.. [1] Donald E. Knuth,
-    "The Stanford GraphBase: A Platform for Combinatorial Computing",
-    ACM Press, New York, 1993.
-.. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html
+    Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial
+    Computing", ACM Press, New York, 1993.
+    http://www-cs-faculty.stanford.edu/~knuth/sgb.html
+
+The data file can be found at:
+- https://github.com/networkx/networkx/blob/master/examples/drawing/knuth_miles.txt.gz
 """
-# Author: Aric Hagberg (hagberg@lanl.gov)
-
-# Copyright (C) 2004-2018 by
-# Aric Hagberg <hagberg@lanl.gov>
-# Dan Schult <dschult@colgate.edu>
-# Pieter Swart <swart@lanl.gov>
-# All rights reserved.
-# BSD license.
+import gzip
 import re
-import sys
 
 import matplotlib.pyplot as plt
 import networkx as nx
 
@@ -41,8 +30,8 @@ def miles_graph():
     from the Stanford GraphBase.
     """
     # open file miles_dat.txt.gz (or miles_dat.txt)
-    import gzip
-    fh = gzip.open('knuth_miles.txt.gz', 'r')
+
+    fh = gzip.open("knuth_miles.txt.gz", "r")
 
     G = nx.Graph()
     G.position = {}
@@ -54,7 +43,7 @@ def miles_graph():
         if line.startswith("*"):  # skip comments
             continue
 
-        numfind = re.compile("^\d+")
+        numfind = re.compile(r"^\d+")
 
         if numfind.match(line):  # this line is distances
             dist = line.split()
@@ -75,33 +64,33 @@ def miles_graph():
     return G
 
 
-if __name__ == '__main__':
-
-    G = miles_graph()
-
-    print("Loaded miles_dat.txt containing 128 cities.")
-    print("digraph has %d nodes with %d edges"
-          % (nx.number_of_nodes(G), nx.number_of_edges(G)))
-
-    # make new graph of cites, edge if less then 300 miles between them
-    H = nx.Graph()
-    for v in G:
-        H.add_node(v)
-    for (u, v, d) in G.edges(data=True):
-        if d['weight'] < 300:
-            H.add_edge(u, v)
-
-    # draw with matplotlib/pylab
-    plt.figure(figsize=(8, 8))
-    # with nodes colored by degree sized by population
-    node_color = [float(H.degree(v)) for v in H]
-    nx.draw(H, G.position,
-            node_size=[G.population[v] for v in H],
-            node_color=node_color,
-            with_labels=False)
-
-    # scale the axes equally
-    plt.xlim(-5000, 500)
-    plt.ylim(-2000, 3500)
-
-    plt.show()
+G = miles_graph()
+
+print("Loaded knuth_miles.txt.gz containing 128 cities.")
+print(f"graph has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges")
+
+# make new graph of cities, edge if less than 300 miles between them
+H = nx.Graph()
+for v in G:
+    H.add_node(v)
+for (u, v, d) in G.edges(data=True):
+    if d["weight"] < 300:
+        H.add_edge(u, v)
+
+# draw with matplotlib/pylab
+plt.figure(figsize=(8, 8))
+# with nodes colored by degree, sized by population
+node_color = [float(H.degree(v)) for v in H]
+nx.draw(
+    H,
+    G.position,
+    node_size=[G.population[v] for v in H],
+    node_color=node_color,
+    with_labels=False,
+)
+
+# scale the axes equally
+plt.xlim(-5000, 500)
+plt.ylim(-2000, 3500)
+
+plt.show()
diff --git a/examples/drawing/plot_labels_and_colors.py b/examples/drawing/plot_labels_and_colors.py
index bd84639..633b247 100644
--- a/examples/drawing/plot_labels_and_colors.py
+++ b/examples/drawing/plot_labels_and_colors.py
@@ -1,14 +1,10 @@
-#!/usr/bin/env python
 """
 =================
 Labels And Colors
 =================
 
 Draw a graph with matplotlib, color by degree.
-
-You must have matplotlib for this to work.
""" -# Author: Aric Hagberg (hagberg@lanl.gov) import matplotlib.pyplot as plt import networkx as nx @@ -16,38 +12,41 @@ pos = nx.spring_layout(G) # positions for all nodes # nodes -nx.draw_networkx_nodes(G, pos, - nodelist=[0, 1, 2, 3], - node_color='r', - node_size=500, - alpha=0.8) -nx.draw_networkx_nodes(G, pos, - nodelist=[4, 5, 6, 7], - node_color='b', - node_size=500, - alpha=0.8) +options = {"node_size": 500, "alpha": 0.8} +nx.draw_networkx_nodes(G, pos, nodelist=[0, 1, 2, 3], node_color="r", **options) +nx.draw_networkx_nodes(G, pos, nodelist=[4, 5, 6, 7], node_color="b", **options) # edges nx.draw_networkx_edges(G, pos, width=1.0, alpha=0.5) -nx.draw_networkx_edges(G, pos, - edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], - width=8, alpha=0.5, edge_color='r') -nx.draw_networkx_edges(G, pos, - edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], - width=8, alpha=0.5, edge_color='b') +nx.draw_networkx_edges( + G, + pos, + edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], + width=8, + alpha=0.5, + edge_color="r", +) +nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + width=8, + alpha=0.5, + edge_color="b", +) # some math labels labels = {} -labels[0] = r'$a$' -labels[1] = r'$b$' -labels[2] = r'$c$' -labels[3] = r'$d$' -labels[4] = r'$\alpha$' -labels[5] = r'$\beta$' -labels[6] = r'$\gamma$' -labels[7] = r'$\delta$' +labels[0] = r"$a$" +labels[1] = r"$b$" +labels[2] = r"$c$" +labels[3] = r"$d$" +labels[4] = r"$\alpha$" +labels[5] = r"$\beta$" +labels[6] = r"$\gamma$" +labels[7] = r"$\delta$" nx.draw_networkx_labels(G, pos, labels, font_size=16) -plt.axis('off') +plt.axis("off") plt.show() diff --git a/examples/drawing/plot_lanl_routes.py b/examples/drawing/plot_lanl_routes.py index a326675..b1dfe9e 100644 --- a/examples/drawing/plot_lanl_routes.py +++ b/examples/drawing/plot_lanl_routes.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ =========== Lanl Routes @@ -6,38 +5,25 @@ Routes to LANL from 186 sites on the Internet. -This uses Graphviz for layout so you need PyGraphviz or pydot. +The data file can be found at: +- https://github.com/networkx/networkx/blob/master/examples/drawing/lanl_routes.edgelist """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
- import matplotlib.pyplot as plt import networkx as nx -try: - import pygraphviz - from networkx.drawing.nx_agraph import graphviz_layout -except ImportError: - try: - import pydot - from networkx.drawing.nx_pydot import graphviz_layout - except ImportError: - raise ImportError("This example needs Graphviz and either " - "PyGraphviz or pydot") + +# This example needs Graphviz and either PyGraphviz or pydot +# from networkx.drawing.nx_pydot import graphviz_layout +from networkx.drawing.nx_agraph import graphviz_layout + def lanl_graph(): """ Return the lanl internet view graph from lanl.edges """ try: - fh = open('lanl_routes.edgelist', 'r') - except IOError: + fh = open("lanl_routes.edgelist") + except OSError: print("lanl.edges not found") raise @@ -51,7 +37,8 @@ def lanl_graph(): time[int(head)] = float(rtt) # get largest component and assign ping times to G0time dictionary - G0 = sorted(nx.connected_component_subgraphs(G), key=len, reverse=True)[0] + Gcc = sorted(nx.connected_components(G), key=len, reverse=True)[0] + G0 = G.subgraph(Gcc) G0.rtt = {} for n in G0: G0.rtt[n] = time[n] @@ -59,26 +46,20 @@ def lanl_graph(): return G0 -if __name__ == '__main__': - - G = lanl_graph() +G = lanl_graph() - print("graph has %d nodes with %d edges" - % (nx.number_of_nodes(G), nx.number_of_edges(G))) - print(nx.number_connected_components(G), "connected components") +print(f"graph has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges") +print(nx.number_connected_components(G), "connected components") - plt.figure(figsize=(8, 8)) - # use graphviz to find radial layout - pos = graphviz_layout(G, prog="twopi", root=0) - # draw nodes, coloring by rtt ping time - nx.draw(G, pos, - node_color=[G.rtt[v] for v in G], - with_labels=False, - alpha=0.5, - node_size=15) - # adjust the plot limits - xmax = 1.02 * max(xx for xx, yy in pos.values()) - ymax = 1.02 * max(yy for xx, yy in pos.values()) - plt.xlim(0, xmax) - plt.ylim(0, ymax) - plt.show() +plt.figure(figsize=(8, 8)) +# use graphviz to find radial layout +pos = graphviz_layout(G, prog="twopi", root=0) +# draw nodes, coloring by rtt ping time +options = {"with_labels": False, "alpha": 0.5, "node_size": 15} +nx.draw(G, pos, node_color=[G.rtt[v] for v in G], **options) +# adjust the plot limits +xmax = 1.02 * max(xx for xx, yy in pos.values()) +ymax = 1.02 * max(yy for xx, yy in pos.values()) +plt.xlim(0, xmax) +plt.ylim(0, ymax) +plt.show() diff --git a/examples/drawing/plot_multipartite_graph.py b/examples/drawing/plot_multipartite_graph.py new file mode 100644 index 0000000..4b3c4ce --- /dev/null +++ b/examples/drawing/plot_multipartite_graph.py @@ -0,0 +1,43 @@ +""" +=================== +Multipartite Layout +=================== +""" + +import itertools +import matplotlib.pyplot as plt +import networkx as nx + +from networkx.utils import pairwise + +subset_sizes = [5, 5, 4, 3, 2, 4, 4, 3] +subset_color = [ + "gold", + "violet", + "violet", + "violet", + "violet", + "limegreen", + "limegreen", + "darkorange", +] + + +def multilayered_graph(*subset_sizes): + extents = pairwise(itertools.accumulate((0,) + subset_sizes)) + layers = [range(start, end) for start, end in extents] + G = nx.Graph() + for (i, layer) in enumerate(layers): + G.add_nodes_from(layer, layer=i) + for layer1, layer2 in pairwise(layers): + G.add_edges_from(itertools.product(layer1, layer2)) + return G + + +G = multilayered_graph(*subset_sizes) +color = [subset_color[data["layer"]] for v, data in G.nodes(data=True)] +pos = nx.multipartite_layout(G, subset_key="layer") 
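+# multipartite_layout places each subset (keyed by the "layer" node attribute)
+# in its own column. As a worked example of the layer bookkeeping above,
+# subset sizes (2, 3) give itertools.accumulate -> (0, 2, 5), and pairwise
+# then yields (0, 2) and (2, 5), i.e. layers range(0, 2) and range(2, 5).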
+plt.figure(figsize=(8, 8)) +nx.draw(G, pos, node_color=color, with_labels=False) +plt.axis("equal") +plt.show() diff --git a/examples/drawing/plot_node_colormap.py b/examples/drawing/plot_node_colormap.py index a86258c..d72d80b 100644 --- a/examples/drawing/plot_node_colormap.py +++ b/examples/drawing/plot_node_colormap.py @@ -1,13 +1,10 @@ -#!/usr/bin/env python """ ============= Node Colormap ============= Draw a graph with matplotlib, color by degree. -You must have matplotlib for this to work. """ -# Author: Aric Hagberg (hagberg@lanl.gov) import matplotlib.pyplot as plt import networkx as nx diff --git a/examples/drawing/plot_random_geometric_graph.py b/examples/drawing/plot_random_geometric_graph.py index b658d37..a3cd78c 100644 --- a/examples/drawing/plot_random_geometric_graph.py +++ b/examples/drawing/plot_random_geometric_graph.py @@ -11,14 +11,14 @@ G = nx.random_geometric_graph(200, 0.125) # position is stored as node attribute data for random_geometric_graph -pos = nx.get_node_attributes(G, 'pos') +pos = nx.get_node_attributes(G, "pos") # find node near center (0.5,0.5) dmin = 1 ncenter = 0 for n in pos: x, y = pos[n] - d = (x - 0.5)**2 + (y - 0.5)**2 + d = (x - 0.5) ** 2 + (y - 0.5) ** 2 if d < dmin: ncenter = n dmin = d @@ -28,12 +28,16 @@ plt.figure(figsize=(8, 8)) nx.draw_networkx_edges(G, pos, nodelist=[ncenter], alpha=0.4) -nx.draw_networkx_nodes(G, pos, nodelist=list(p.keys()), - node_size=80, - node_color=list(p.values()), - cmap=plt.cm.Reds_r) +nx.draw_networkx_nodes( + G, + pos, + nodelist=list(p.keys()), + node_size=80, + node_color=list(p.values()), + cmap=plt.cm.Reds_r, +) plt.xlim(-0.05, 1.05) plt.ylim(-0.05, 1.05) -plt.axis('off') +plt.axis("off") plt.show() diff --git a/examples/drawing/plot_sampson.py b/examples/drawing/plot_sampson.py index 9cef98a..9111546 100644 --- a/examples/drawing/plot_sampson.py +++ b/examples/drawing/plot_sampson.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ======= Sampson @@ -8,46 +7,39 @@ Shows how to read data from a zip file and plot multiple frames. -""" -# Author: Aric Hagberg (hagberg@lanl.gov) +The data file can be found at: -# Copyright (C) 2010-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
+- https://github.com/networkx/networkx/blob/master/examples/drawing/sampson_data.zip +""" import zipfile -try: - from cStringIO import StringIO -except ImportError: - from io import BytesIO as StringIO +from io import BytesIO as StringIO import matplotlib.pyplot as plt import networkx as nx -zf = zipfile.ZipFile('sampson_data.zip') # zipfile object -e1 = StringIO(zf.read('samplike1.txt')) # read info file -e2 = StringIO(zf.read('samplike2.txt')) # read info file -e3 = StringIO(zf.read('samplike3.txt')) # read info file -G1 = nx.read_edgelist(e1, delimiter='\t') -G2 = nx.read_edgelist(e2, delimiter='\t') -G3 = nx.read_edgelist(e3, delimiter='\t') +with zipfile.ZipFile("sampson_data.zip") as zf: + e1 = StringIO(zf.read("samplike1.txt")) + e2 = StringIO(zf.read("samplike2.txt")) + e3 = StringIO(zf.read("samplike3.txt")) + +G1 = nx.read_edgelist(e1, delimiter="\t") +G2 = nx.read_edgelist(e2, delimiter="\t") +G3 = nx.read_edgelist(e3, delimiter="\t") pos = nx.spring_layout(G3, iterations=100) plt.clf() plt.subplot(221) -plt.title('samplike1') +plt.title("samplike1") nx.draw(G1, pos, node_size=50, with_labels=False) plt.subplot(222) -plt.title('samplike2') +plt.title("samplike2") nx.draw(G2, pos, node_size=50, with_labels=False) plt.subplot(223) -plt.title('samplike3') +plt.title("samplike3") nx.draw(G3, pos, node_size=50, with_labels=False) plt.subplot(224) -plt.title('samplike1,2,3') +plt.title("samplike1,2,3") nx.draw(G3, pos, edgelist=list(G3.edges()), node_size=50, with_labels=False) nx.draw_networkx_edges(G1, pos, alpha=0.25) nx.draw_networkx_edges(G2, pos, alpha=0.25) diff --git a/examples/drawing/plot_simple_path.py b/examples/drawing/plot_simple_path.py index f80e0fe..0c8b930 100644 --- a/examples/drawing/plot_simple_path.py +++ b/examples/drawing/plot_simple_path.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ =========== Simple Path diff --git a/examples/drawing/plot_spectral_grid.py b/examples/drawing/plot_spectral_grid.py index 3f2bc92..80ef6f1 100644 --- a/examples/drawing/plot_spectral_grid.py +++ b/examples/drawing/plot_spectral_grid.py @@ -25,10 +25,7 @@ import networkx as nx -options = { - 'node_color': 'C0', - 'node_size': 100, -} +options = {"node_color": "C0", "node_size": 100} G = nx.grid_2d_graph(6, 6) plt.subplot(332) diff --git a/examples/drawing/plot_unix_email.py b/examples/drawing/plot_unix_email.py index f8dff4e..3d8e3de 100644 --- a/examples/drawing/plot_unix_email.py +++ b/examples/drawing/plot_unix_email.py @@ -1,63 +1,46 @@ -#!/usr/bin/env python """ ========== Unix Email ========== -Create a directed graph, allowing multiple edges and self loops, from -a unix mailbox. The nodes are email addresses with links -that point from the sender to the receivers. The edge data -is a Python email.Message object which contains all of -the email message data. +Create a directed graph, allowing multiple edges and self loops, from a unix +mailbox. The nodes are email addresses with links that point from the sender +to the receivers. The edge data is a Python email.Message object which +contains all of the email message data. + +This example shows the power of `DiGraph` to hold edge data of arbitrary Python +objects (in this case a list of email messages). -This example shows the power of `DiGraph` to hold edge data -of arbitrary Python objects (in this case a list of email messages). 
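+Each edge stores the corresponding ``email.message.Message`` object under the
+``message`` key, which is how the subject lines are printed below.
+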
The sample unix email mailbox called "unix_email.mbox" may be found here: -https://raw.githubusercontent.com/networkx/networkx/master/examples/drawing/unix_email.mbox +- https://github.com/networkx/networkx/blob/master/examples/drawing/unix_email.mbox """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2005-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. -import email from email.utils import getaddresses, parseaddr import mailbox -import sys import matplotlib.pyplot as plt import networkx as nx # unix mailbox recipe -# see https://docs.python.org/2/library/mailbox.html +# see https://docs.python.org/3/library/mailbox.html def mbox_graph(): - try: - fh = open("unix_email.mbox", 'rb') - except IOError: - print("unix_email.mbox not found") - raise - - mbox = mailbox.UnixMailbox(fh, email.message_from_file) # parse unix mailbox + mbox = mailbox.mbox("unix_email.mbox") # parse unix mailbox G = nx.MultiDiGraph() # create empty graph # parse each messages and build graph for msg in mbox: # msg is python email.Message.Message object - (source_name, source_addr) = parseaddr(msg['From']) # sender + (source_name, source_addr) = parseaddr(msg["From"]) # sender # get all recipients - # see https://docs.python.org/2/library/email.html - tos = msg.get_all('to', []) - ccs = msg.get_all('cc', []) - resent_tos = msg.get_all('resent-to', []) - resent_ccs = msg.get_all('resent-cc', []) + # see https://docs.python.org/3/library/email.html + tos = msg.get_all("to", []) + ccs = msg.get_all("cc", []) + resent_tos = msg.get_all("resent-to", []) + resent_ccs = msg.get_all("resent-cc", []) all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) # now add the edges for this mail message for (target_name, target_addr) in all_recipients: @@ -66,14 +49,12 @@ def mbox_graph(): return G -if __name__ == '__main__': - - G = mbox_graph() +G = mbox_graph() - # print edges with message subject - for (u, v, d) in G.edges(data=True): - print("From: %s To: %s Subject: %s" % (u, v, d['message']["Subject"])) +# print edges with message subject +for (u, v, d) in G.edges(data=True): + print(f"From: {u} To: {v} Subject: {d['message']['Subject']}") - pos = nx.spring_layout(G, iterations=10) - nx.draw(G, pos, node_size=0, alpha=0.4, edge_color='r', font_size=16, with_labels=True) - plt.show() +pos = nx.spring_layout(G, iterations=10) +nx.draw(G, pos, node_size=0, alpha=0.4, edge_color="r", font_size=16, with_labels=True) +plt.show() diff --git a/examples/drawing/plot_weighted_graph.py b/examples/drawing/plot_weighted_graph.py index f178e2a..35200bb 100644 --- a/examples/drawing/plot_weighted_graph.py +++ b/examples/drawing/plot_weighted_graph.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ============== Weighted Graph @@ -6,21 +5,20 @@ An example using Graph as a weighted network. 
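+
+The ``weight`` edge attribute used here is ordinary edge data; ``"weight"`` is
+simply the attribute name that NetworkX algorithms consult by default.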
""" -# Author: Aric Hagberg (hagberg@lanl.gov) import matplotlib.pyplot as plt import networkx as nx G = nx.Graph() -G.add_edge('a', 'b', weight=0.6) -G.add_edge('a', 'c', weight=0.2) -G.add_edge('c', 'd', weight=0.1) -G.add_edge('c', 'e', weight=0.7) -G.add_edge('c', 'f', weight=0.9) -G.add_edge('a', 'd', weight=0.3) +G.add_edge("a", "b", weight=0.6) +G.add_edge("a", "c", weight=0.2) +G.add_edge("c", "d", weight=0.1) +G.add_edge("c", "e", weight=0.7) +G.add_edge("c", "f", weight=0.9) +G.add_edge("a", "d", weight=0.3) -elarge = [(u, v) for (u, v, d) in G.edges(data=True) if d['weight'] > 0.5] -esmall = [(u, v) for (u, v, d) in G.edges(data=True) if d['weight'] <= 0.5] +elarge = [(u, v) for (u, v, d) in G.edges(data=True) if d["weight"] > 0.5] +esmall = [(u, v) for (u, v, d) in G.edges(data=True) if d["weight"] <= 0.5] pos = nx.spring_layout(G) # positions for all nodes @@ -28,13 +26,13 @@ nx.draw_networkx_nodes(G, pos, node_size=700) # edges -nx.draw_networkx_edges(G, pos, edgelist=elarge, - width=6) -nx.draw_networkx_edges(G, pos, edgelist=esmall, - width=6, alpha=0.5, edge_color='b', style='dashed') +nx.draw_networkx_edges(G, pos, edgelist=elarge, width=6) +nx.draw_networkx_edges( + G, pos, edgelist=esmall, width=6, alpha=0.5, edge_color="b", style="dashed" +) # labels -nx.draw_networkx_labels(G, pos, font_size=20, font_family='sans-serif') +nx.draw_networkx_labels(G, pos, font_size=20, font_family="sans-serif") -plt.axis('off') +plt.axis("off") plt.show() diff --git a/examples/graph/atlas2.py b/examples/graph/atlas2.py deleted file mode 100644 index 1c284cb..0000000 --- a/examples/graph/atlas2.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python -""" -====== -Atlas2 -====== - -Write first 20 graphs from the graph atlas as graphviz dot files -Gn.dot where n=0,19. -""" -# Author: Aric Hagberg (hagberg@lanl.gov) -# Date: 2005-05-19 14:23:02 -0600 (Thu, 19 May 2005) - -# Copyright (C) 2006-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. - -import networkx as nx -from networkx.generators.atlas import graph_atlas_g - -atlas = graph_atlas_g()[0:20] - -for G in atlas: - print("graph %s has %d nodes with %d edges" - % (G.name, nx.number_of_nodes(G), nx.number_of_edges(G))) - A = nx.nx_agraph.to_agraph(G) - A.graph_attr['label'] = G.name - # set default node attributes - A.node_attr['color'] = 'red' - A.node_attr['style'] = 'filled' - A.node_attr['shape'] = 'circle' - A.write(G.name + '.dot') diff --git a/examples/graph/dot_atlas.py b/examples/graph/dot_atlas.py new file mode 100644 index 0000000..257336f --- /dev/null +++ b/examples/graph/dot_atlas.py @@ -0,0 +1,25 @@ +""" +====== +Atlas2 +====== + +Write first 20 graphs from the graph atlas as graphviz dot files +Gn.dot where n=0,19. 
+""" + +import networkx as nx +from networkx.generators.atlas import graph_atlas_g + +atlas = graph_atlas_g()[0:20] + +for G in atlas: + print( + f"{G.name} has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges" + ) + A = nx.nx_agraph.to_agraph(G) + A.graph_attr["label"] = G.name + # set default node attributes + A.node_attr["color"] = "red" + A.node_attr["style"] = "filled" + A.node_attr["shape"] = "circle" + A.write(G.name + ".dot") diff --git a/examples/graph/plot_degree_sequence.py b/examples/graph/plot_degree_sequence.py index 72910a4..103be57 100644 --- a/examples/graph/plot_degree_sequence.py +++ b/examples/graph/plot_degree_sequence.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ =============== Degree Sequence @@ -6,17 +5,6 @@ Random graph from given degree sequence. """ -# Author: Aric Hagberg (hagberg@lanl.gov) -# Date: 2004-11-03 08:11:09 -0700 (Wed, 03 Nov 2004) -# Revision: 503 - -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. - import matplotlib.pyplot as plt from networkx import nx @@ -26,7 +14,7 @@ print("Configuration model") G = nx.configuration_model(z) # configuration model degree_sequence = [d for n, d in G.degree()] # degree sequence -print("Degree sequence %s" % degree_sequence) +print(f"Degree sequence {degree_sequence}") print("Degree histogram") hist = {} for d in degree_sequence: @@ -36,7 +24,7 @@ hist[d] = 1 print("degree #nodes") for d in hist: - print('%d %d' % (d, hist[d])) + print(f"{d:4} {hist[d]:6}") nx.draw(G) plt.show() diff --git a/examples/graph/plot_erdos_renyi.py b/examples/graph/plot_erdos_renyi.py index 03ee435..c04765e 100644 --- a/examples/graph/plot_erdos_renyi.py +++ b/examples/graph/plot_erdos_renyi.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -#!/usr/bin/env python """ =========== Erdos Renyi @@ -12,16 +10,6 @@ but is different from G{n,p} or binomial_graph which is also sometimes called the Erdős-Rényi graph. """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. - -import sys import matplotlib.pyplot as plt from networkx import nx @@ -34,13 +22,12 @@ # some properties print("node degree clustering") for v in nx.nodes(G): - print('%s %d %f' % (v, nx.degree(G, v), nx.clustering(G, v))) + print(f"{v} {nx.degree(G, v)} {nx.clustering(G, v)}") -# print the adjacency list to terminal -try: - nx.write_adjlist(G, sys.stdout) -except TypeError: # Python 3.x - nx.write_adjlist(G, sys.stdout.buffer) +print() +print("the adjacency list") +for line in nx.generate_adjlist(G): + print(line) nx.draw(G) plt.show() diff --git a/examples/graph/expected_degree_sequence.py b/examples/graph/plot_expected_degree_sequence.py similarity index 56% rename from examples/graph/expected_degree_sequence.py rename to examples/graph/plot_expected_degree_sequence.py index c7e5d5e..203789f 100644 --- a/examples/graph/expected_degree_sequence.py +++ b/examples/graph/plot_expected_degree_sequence.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ======================== Expected Degree Sequence @@ -6,14 +5,6 @@ Random graph from given degree sequence. """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2006-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
import networkx as nx from networkx.generators.degree_seq import expected_degree_graph @@ -26,7 +17,5 @@ print("Degree histogram") print("degree (#nodes) ****") dh = nx.degree_histogram(G) -low = min(nx.degree(G)) -for i in range(low, len(dh)): - bar = ''.join(dh[i] * ['*']) - print("%2s (%2s) %s" % (i, dh[i], bar)) +for i, d in enumerate(dh): + print(f"{i:2} ({d:2}) {'*'*d}") diff --git a/examples/graph/plot_football.py b/examples/graph/plot_football.py index 94baca0..1d215b1 100644 --- a/examples/graph/plot_football.py +++ b/examples/graph/plot_football.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ======== Football @@ -12,19 +11,8 @@ Requires Internet connection to download the URL http://www-personal.umich.edu/~mejn/netdata/football.zip """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2007-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. - -try: # Python 3.x - import urllib.request as urllib -except ImportError: # Python 2.x - import urllib + +import urllib.request as urllib import io import zipfile @@ -38,23 +26,22 @@ sock.close() zf = zipfile.ZipFile(s) # zipfile object -txt = zf.read('football.txt').decode() # read info file -gml = zf.read('football.gml').decode() # read gml data +txt = zf.read("football.txt").decode() # read info file +gml = zf.read("football.gml").decode() # read gml data # throw away bogus first line with # from mejn files -gml = gml.split('\n')[1:] +gml = gml.split("\n")[1:] G = nx.parse_gml(gml) # parse gml data print(txt) # print degree for each team - number of games for n, d in G.degree(): - print('%s %d' % (n, d)) + print(f"{n:20} {d:2}") options = { - 'node_color': 'black', - 'node_size': 50, - 'line_color': 'grey', - 'linewidths': 0, - 'width': 0.1, + "node_color": "black", + "node_size": 50, + "linewidths": 0, + "width": 0.1, } nx.draw(G, **options) plt.show() diff --git a/examples/graph/plot_karate_club.py b/examples/graph/plot_karate_club.py index a5b1897..c4fe5bc 100644 --- a/examples/graph/plot_karate_club.py +++ b/examples/graph/plot_karate_club.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ =========== Karate Club @@ -9,7 +8,6 @@ Data file from: http://vlado.fmf.uni-lj.si/pub/networks/data/Ucinet/UciData.htm -Reference: Zachary W. (1977). An information flow model for conflict and fission in small groups. Journal of Anthropological Research, 33, 452-473. @@ -21,7 +19,7 @@ G = nx.karate_club_graph() print("Node Degree") for v in G: - print('%s %s' % (v, G.degree(v))) + print(f"{v:4} {G.degree(v):6}") nx.draw_circular(G, with_labels=True) plt.show() diff --git a/examples/graph/plot_napoleon_russian_campaign.py b/examples/graph/plot_napoleon_russian_campaign.py index 0e48610..0ec8a1c 100644 --- a/examples/graph/plot_napoleon_russian_campaign.py +++ b/examples/graph/plot_napoleon_russian_campaign.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ========================= Napoleon Russian Campaign @@ -8,14 +7,6 @@ http://www.math.yorku.ca/SCS/Gallery/minard/minard.txt """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2006-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
import matplotlib.pyplot as plt import networkx as nx @@ -98,8 +89,8 @@ def minard_graph(): 36.5,55.0,Malo-Jarosewii""" c = {} - for line in cities.split('\n'): - x, y, name = line.split(',') + for line in cities.split("\n"): + x, y, name = line.split(",") c[name] = (float(x), float(y)) g = [] @@ -110,8 +101,8 @@ def minard_graph(): G.pos = {} # location G.pop = {} # size last = None - for line in data.split('\n'): - x, y, p, r, n = line.split(',') + for line in data.split("\n"): + x, y, p, r, n = line.split(",") G.pos[i] = (float(x), float(y)) G.pop[i] = int(p) if last is None: @@ -125,21 +116,19 @@ def minard_graph(): return g, c -if __name__ == "__main__": - - (g, city) = minard_graph() +(g, city) = minard_graph() - plt.figure(1, figsize=(11, 5)) - plt.clf() - colors = ['b', 'g', 'r'] - for G in g: - c = colors.pop(0) - node_size = [int(G.pop[n] / 300.0) for n in G] - nx.draw_networkx_edges(G, G.pos, edge_color=c, width=4, alpha=0.5) - nx.draw_networkx_nodes(G, G.pos, node_size=node_size, node_color=c, alpha=0.5) - nx.draw_networkx_nodes(G, G.pos, node_size=5, node_color='k') +plt.figure(1, figsize=(11, 5)) +plt.clf() +colors = ["b", "g", "r"] +for G in g: + c = colors.pop(0) + node_size = [int(G.pop[n] / 300.0) for n in G] + nx.draw_networkx_edges(G, G.pos, edge_color=c, width=4, alpha=0.5) + nx.draw_networkx_nodes(G, G.pos, node_size=node_size, node_color=c, alpha=0.5) + nx.draw_networkx_nodes(G, G.pos, node_size=5, node_color="k") - for c in city: - x, y = city[c] - plt.text(x, y + 0.1, c) - plt.show() +for c in city: + x, y = city[c] + plt.text(x, y + 0.1, c) +plt.show() diff --git a/examples/graph/plot_roget.py b/examples/graph/plot_roget.py index de66f53..41e4347 100644 --- a/examples/graph/plot_roget.py +++ b/examples/graph/plot_roget.py @@ -1,39 +1,24 @@ -#!/usr/bin/env python """ ===== Roget ===== -Build a directed graph of 1022 categories and -5075 cross-references as defined in the 1879 version of Roget's Thesaurus -contained in the datafile roget_dat.txt. This example is described in -Section 1.2 in Knuth's book (see [1]_ and [2]_). +Build a directed graph of 1022 categories and 5075 cross-references as defined +in the 1879 version of Roget's Thesaurus. This example is described in Section +1.2 of -Note that one of the 5075 cross references is a self loop yet -it is included in the graph built here because -the standard networkx `DiGraph` class allows self loops. -(cf. 400pungency:400 401 403 405). + Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial + Computing", ACM Press, New York, 1993. + http://www-cs-faculty.stanford.edu/~knuth/sgb.html -References ----------- +Note that one of the 5075 cross references is a self loop yet it is included in +the graph built here because the standard networkx `DiGraph` class allows self +loops. (cf. 400pungency:400 401 403 405). -.. [1] Donald E. Knuth, - "The Stanford GraphBase: A Platform for Combinatorial Computing", - ACM Press, New York, 1993. -.. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html -""" - -from __future__ import print_function - -# Authors: Brendt Wohlberg, Aric Hagberg (hagberg@lanl.gov) -# Date: 2005-04-01 07:56:22 -0700 (Fri, 01 Apr 2005) +The data file can be found at: -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
+- https://github.com/networkx/networkx/blob/master/examples/graph/roget_dat.txt.gz +""" import gzip import re @@ -42,12 +27,13 @@ import matplotlib.pyplot as plt from networkx import nx + def roget_graph(): """ Return the thesaurus graph from the roget.dat example in the Stanford Graph Base. """ - # open file roget_dat.txt.gz (or roget_dat.txt) - fh = gzip.open('roget_dat.txt.gz', 'r') + # open file roget_dat.txt.gz + fh = gzip.open("roget_dat.txt.gz", "r") G = nx.DiGraph() @@ -64,7 +50,7 @@ def roget_graph(): (headname, tails) = line.split(":") # head - numfind = re.compile("^\d+") # re to find the number of this word + numfind = re.compile(r"^\d+") # re to find the number of this word head = numfind.findall(headname)[0] # get the number G.add_node(head) @@ -77,20 +63,18 @@ def roget_graph(): return G -if __name__ == '__main__': - G = roget_graph() - print("Loaded roget_dat.txt containing 1022 categories.") - print("digraph has %d nodes with %d edges" - % (nx.number_of_nodes(G), nx.number_of_edges(G))) - UG = G.to_undirected() - print(nx.number_connected_components(UG), "connected components") - - options = { - 'node_color': 'black', - 'node_size': 1, - 'line_color': 'grey', - 'linewidths': 0, - 'width': 0.1, - } - nx.draw_circular(UG, **options) - plt.show() +G = roget_graph() +print("Loaded roget_dat.txt containing 1022 categories.") +print(f"digraph has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges") +UG = G.to_undirected() +print(nx.number_connected_components(UG), "connected components") + +options = { + "node_color": "black", + "node_size": 1, + "edge_color": "gray", + "linewidths": 0, + "width": 0.1, +} +nx.draw_circular(UG, **options) +plt.show() diff --git a/examples/graph/plot_words.py b/examples/graph/plot_words.py new file mode 100644 index 0000000..ccaaebb --- /dev/null +++ b/examples/graph/plot_words.py @@ -0,0 +1,74 @@ +""" +================== +Words/Ladder Graph +================== + +Generate an undirected graph over the 5757 5-letter words in the datafile +`words_dat.txt.gz`. Two words are connected by an edge if they differ in one +letter, resulting in 14,135 edges. This example is described in Section 1.1 of + + Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial + Computing", ACM Press, New York, 1993. 
+ http://www-cs-faculty.stanford.edu/~knuth/sgb.html + +The data file can be found at: + +- https://github.com/networkx/networkx/blob/master/examples/graph/words_dat.txt.gz +""" + +import gzip +from string import ascii_lowercase as lowercase + +import networkx as nx + + +def generate_graph(words): + G = nx.Graph(name="words") + lookup = {c: lowercase.index(c) for c in lowercase} + + def edit_distance_one(word): + for i in range(len(word)): + left, c, right = word[0:i], word[i], word[i + 1 :] + j = lookup[c] # lowercase.index(c) + for cc in lowercase[j + 1 :]: + yield left + cc + right + + candgen = ( + (word, cand) + for word in sorted(words) + for cand in edit_distance_one(word) + if cand in words + ) + G.add_nodes_from(words) + for word, cand in candgen: + G.add_edge(word, cand) + return G + + +def words_graph(): + """Return the words example graph from the Stanford GraphBase""" + fh = gzip.open("words_dat.txt.gz", "r") + words = set() + for line in fh.readlines(): + line = line.decode() + if line.startswith("*"): + continue + w = str(line[0:5]) + words.add(w) + return generate_graph(words) + + +G = words_graph() +print("Loaded words_dat.txt containing 5757 five-letter English words.") +print("Two words are connected if they differ in one letter.") +print(f"Graph has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges") +print(f"{nx.number_connected_components(G)} connected components") + +for (source, target) in [("chaos", "order"), ("nodes", "graph"), ("pound", "marks")]: + print(f"Shortest path between {source} and {target} is") + try: + sp = nx.shortest_path(G, source, target) + for n in sp: + print(n) + except nx.NetworkXNoPath: + print("None") diff --git a/examples/graph/words.py b/examples/graph/words.py deleted file mode 100644 index f3ff257..0000000 --- a/examples/graph/words.py +++ /dev/null @@ -1,89 +0,0 @@ -""" -===== -Words -===== - -Words/Ladder Graph ------------------- -Generate an undirected graph over the 5757 5-letter words in the -datafile `words_dat.txt.gz`. Two words are connected by an edge -if they differ in one letter, resulting in 14,135 edges. This example -is described in Section 1.1 in Knuth's book (see [1]_ and [2]_). - -References ----------- -.. [1] Donald E. Knuth, - "The Stanford GraphBase: A Platform for Combinatorial Computing", - ACM Press, New York, 1993. -.. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html -""" -# Authors: Aric Hagberg (hagberg@lanl.gov), -# Brendt Wohlberg, -# hughdbrown@yahoo.com - -# Copyright (C) 2004-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
- -import gzip -from string import ascii_lowercase as lowercase - -import networkx as nx - -#------------------------------------------------------------------- -# The Words/Ladder graph of Section 1.1 -#------------------------------------------------------------------- - - -def generate_graph(words): - G = nx.Graph(name="words") - lookup = dict((c, lowercase.index(c)) for c in lowercase) - - def edit_distance_one(word): - for i in range(len(word)): - left, c, right = word[0:i], word[i], word[i + 1:] - j = lookup[c] # lowercase.index(c) - for cc in lowercase[j + 1:]: - yield left + cc + right - candgen = ((word, cand) for word in sorted(words) - for cand in edit_distance_one(word) if cand in words) - G.add_nodes_from(words) - for word, cand in candgen: - G.add_edge(word, cand) - return G - - -def words_graph(): - """Return the words example graph from the Stanford GraphBase""" - fh = gzip.open('words_dat.txt.gz', 'r') - words = set() - for line in fh.readlines(): - line = line.decode() - if line.startswith('*'): - continue - w = str(line[0:5]) - words.add(w) - return generate_graph(words) - - -if __name__ == '__main__': - G = words_graph() - print("Loaded words_dat.txt containing 5757 five-letter English words.") - print("Two words are connected if they differ in one letter.") - print("Graph has %d nodes with %d edges" - % (nx.number_of_nodes(G), nx.number_of_edges(G))) - print("%d connected components" % nx.number_connected_components(G)) - - for (source, target) in [('chaos', 'order'), - ('nodes', 'graph'), - ('pound', 'marks')]: - print("Shortest path between %s and %s is" % (source, target)) - try: - sp = nx.shortest_path(G, source, target) - for n in sp: - print(n) - except nx.NetworkXNoPath: - print("None") diff --git a/examples/javascript/force.py b/examples/javascript/force.py index abd572a..db2f367 100644 --- a/examples/javascript/force.py +++ b/examples/javascript/force.py @@ -3,16 +3,13 @@ Javascript ========== -Example of writing JSON format graph data and using the D3 Javascript library to produce an HTML/Javascript drawing. +Example of writing JSON format graph data and using the D3 Javascript library +to produce an HTML/Javascript drawing. + +You will need to download the following directory: + +- https://github.com/networkx/networkx/tree/master/examples/javascript/force """ -# Author: Aric Hagberg <aric.hagberg@gmail.com> - -# Copyright (C) 2011-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. 
import json import flask @@ -23,19 +20,21 @@ # this d3 example uses the name attribute for the mouse-hover value, # so add a name to each node for n in G: - G.nodes[n]['name'] = n + G.nodes[n]["name"] = n # write json formatted data d = json_graph.node_link_data(G) # node-link format to serialize # write json -json.dump(d, open('force/force.json', 'w')) -print('Wrote node-link JSON data to force/force.json') +json.dump(d, open("force/force.json", "w")) +print("Wrote node-link JSON data to force/force.json") # Serve the file over http to allow for cross origin requests app = flask.Flask(__name__, static_folder="force") -@app.route('/<path:path>') -def static_proxy(path): - return app.send_static_file(path) -print('\nGo to http://localhost:8000/force.html to see the example\n') +@app.route("/") +def static_proxy(): + return app.send_static_file("force.html") + + +print("\nGo to http://localhost:8000 to see the example\n") app.run(port=8000) diff --git a/examples/javascript/force/README b/examples/javascript/force/README.txt similarity index 100% rename from examples/javascript/force/README rename to examples/javascript/force/README.txt diff --git a/examples/javascript/force/force.css b/examples/javascript/force/force.css index cfa437e..fee3f31 100644 --- a/examples/javascript/force/force.css +++ b/examples/javascript/force/force.css @@ -1,10 +1,12 @@ - -circle.node { - stroke: #fff; - stroke-width: 1.5px; +.nodes circle { + cursor: pointer; + fill: #ff3399; + stroke: #000; + stroke-width: .5px; } -line.link { - stroke: #999; - stroke-opacity: .6; -} +.links line { + fill: none; + stroke: #9ecae1; + stroke-width: .5px; +} \ No newline at end of file diff --git a/examples/javascript/force/force.html b/examples/javascript/force/force.html index 894d6bf..21cc7e3 100644 --- a/examples/javascript/force/force.html +++ b/examples/javascript/force/force.html @@ -2,11 +2,11 @@ <html> <head> <title>Force-Directed Layout - - + + -
- + + diff --git a/examples/javascript/force/force.js b/examples/javascript/force/force.js index 828a8b3..5aba4fc 100644 --- a/examples/javascript/force/force.js +++ b/examples/javascript/force/force.js @@ -1,57 +1,86 @@ +// This is adapted from https://bl.ocks.org/mbostock/2675ff61ea5e063ede2b5d63c08020c7 -var w = 400, - h = 400, - fill = d3.scale.category20(); - -var vis = d3.select("#chart") - .append("svg:svg") - .attr("width", w) - .attr("height", h); - -d3.json("force.json", function(json) { - var force = d3.layout.force() - .charge(-120) - .linkDistance(30) - .nodes(json.nodes) - .links(json.links) - .size([w, h]) - .start(); - - var link = vis.selectAll("line.link") - .data(json.links) - .enter().append("svg:line") - .attr("class", "link") - .style("stroke-width", function(d) { return Math.sqrt(d.value); }) - .attr("x1", function(d) { return d.source.x; }) - .attr("y1", function(d) { return d.source.y; }) - .attr("x2", function(d) { return d.target.x; }) - .attr("y2", function(d) { return d.target.y; }); - - var node = vis.selectAll("circle.node") - .data(json.nodes) - .enter().append("svg:circle") - .attr("class", "node") - .attr("cx", function(d) { return d.x; }) - .attr("cy", function(d) { return d.y; }) - .attr("r", 5) - .style("fill", function(d) { return fill(d.group); }) - .call(force.drag); - - node.append("svg:title") - .text(function(d) { return d.name; }); - - vis.style("opacity", 1e-6) - .transition() - .duration(1000) - .style("opacity", 1); - - force.on("tick", function() { - link.attr("x1", function(d) { return d.source.x; }) - .attr("y1", function(d) { return d.source.y; }) - .attr("x2", function(d) { return d.target.x; }) - .attr("y2", function(d) { return d.target.y; }); - - node.attr("cx", function(d) { return d.x; }) - .attr("cy", function(d) { return d.y; }); - }); +var svg = d3.select("svg"), + width = +svg.attr("width"), + height = +svg.attr("height"); + +var simulation = d3.forceSimulation() + .force("link", d3.forceLink().id(function (d) { + return d.id; + })) + .force("charge", d3.forceManyBody()) + .force("center", d3.forceCenter(width / 2, height / 2)); + +d3.json("force/force.json", function (error, graph) { + if (error) throw error; + + var link = svg.append("g") + .attr("class", "links") + .selectAll("line") + .data(graph.links) + .enter().append("line"); + + var node = svg.append("g") + .attr("class", "nodes") + .selectAll("circle") + .data(graph.nodes) + .enter().append("circle") + .attr("r", 5) + .call(d3.drag() + .on("start", dragstarted) + .on("drag", dragged) + .on("end", dragended)); + + node.append("title") + .text(function (d) { + return d.id; + }); + + simulation + .nodes(graph.nodes) + .on("tick", ticked); + + simulation.force("link") + .links(graph.links); + + function ticked() { + link + .attr("x1", function (d) { + return d.source.x; + }) + .attr("y1", function (d) { + return d.source.y; + }) + .attr("x2", function (d) { + return d.target.x; + }) + .attr("y2", function (d) { + return d.target.y; + }); + + node + .attr("cx", function (d) { + return d.x; + }) + .attr("cy", function (d) { + return d.y; + }); + } }); + +function dragstarted(d) { + if (!d3.event.active) simulation.alphaTarget(0.3).restart(); + d.fx = d.x; + d.fy = d.y; +} + +function dragged(d) { + d.fx = d3.event.x; + d.fy = d3.event.y; +} + +function dragended(d) { + if (!d3.event.active) simulation.alphaTarget(0); + d.fx = null; + d.fy = null; +} \ No newline at end of file diff --git a/examples/jit/plot_rgraph.py b/examples/jit/plot_rgraph.py index a4d4e49..764c061 100644 
--- a/examples/jit/plot_rgraph.py +++ b/examples/jit/plot_rgraph.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ====== Rgraph @@ -10,7 +9,6 @@ See the JIT documentation and examples at http://thejit.org """ -__author__ = """Ollie Glass (ollieglaskovik@gmail.com)""" import json @@ -34,8 +32,8 @@ print(jit_json) X = jit_graph(json.loads(jit_json)) -print("Nodes: %s" % list(X.nodes(data=True))) -print("Edges: %s" % list(X.edges(data=True))) +print(f"Nodes: {list(X.nodes(data=True))}") +print(f"Edges: {list(X.edges(data=True))}") nx.draw(G, with_labels=True) plt.show() diff --git a/examples/pygraphviz/pygraphviz_attributes.py b/examples/pygraphviz/plot_pygraphviz_attributes.py similarity index 67% rename from examples/pygraphviz/pygraphviz_attributes.py rename to examples/pygraphviz/plot_pygraphviz_attributes.py index 4735c37..33dbecd 100644 --- a/examples/pygraphviz/pygraphviz_attributes.py +++ b/examples/pygraphviz/plot_pygraphviz_attributes.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ===================== Pygraphviz Attributes @@ -10,22 +9,14 @@ Also see the pygraphviz documentation and examples at http://pygraphviz.github.io/ """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx # networkx graph G = nx.Graph() # ad edges with red color -G.add_edge(1, 2, color='red') -G.add_edge(2, 3, color='red') +G.add_edge(1, 2, color="red") +G.add_edge(2, 3, color="red") # add nodes 3 and 4 G.add_node(3) G.add_node(4) @@ -34,7 +25,7 @@ A = nx.nx_agraph.to_agraph(G) # write to dot file -A.write('k5_attributes.dot') +A.write("k5_attributes.dot") # convert back to networkx Graph with attributes on edges and # default attributes as dictionary data @@ -44,4 +35,4 @@ print("default graph attributes") print(X.graph) print("node node attributes") -print(X.node) +print(X.nodes.data(True)) diff --git a/examples/pygraphviz/plot_pygraphviz_draw.py b/examples/pygraphviz/plot_pygraphviz_draw.py new file mode 100644 index 0000000..bf64e76 --- /dev/null +++ b/examples/pygraphviz/plot_pygraphviz_draw.py @@ -0,0 +1,19 @@ +""" +=============== +Pygraphviz Draw +=============== + +An example showing how to use the interface to the pygraphviz +AGraph class to draw a graph. + +Also see the pygraphviz documentation and examples at +http://pygraphviz.github.io/ +""" + +import networkx as nx + +# plain graph +G = nx.complete_graph(5) # start with K5 in networkx +A = nx.nx_agraph.to_agraph(G) # convert to a graphviz graph +A.layout() # neato layout +A.draw("k5.ps") # write postscript in k5.ps with neato layout diff --git a/examples/pygraphviz/plot_pygraphviz_simple.py b/examples/pygraphviz/plot_pygraphviz_simple.py new file mode 100644 index 0000000..3b226be --- /dev/null +++ b/examples/pygraphviz/plot_pygraphviz_simple.py @@ -0,0 +1,23 @@ +""" +================= +Pygraphviz Simple +================= + +An example showing how to use the interface to the pygraphviz +AGraph class to convert to and from graphviz. 
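+
+Note that Graphviz stores node names and attribute values as strings, so a
+round trip through an ``AGraph`` returns a graph whose nodes are strings.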
+ +Also see the pygraphviz documentation and examples at +http://pygraphviz.github.io/ +""" + +import networkx as nx + +# plain graph +G = nx.complete_graph(5) # start with K5 in networkx +A = nx.nx_agraph.to_agraph(G) # convert to a graphviz graph +X1 = nx.nx_agraph.from_agraph(A) # convert back to networkx (but as Graph) +X2 = nx.Graph(A) # fancy way to do conversion +G1 = nx.Graph(X1) # now make it a Graph + +A.write("k5.dot") # write to dot file +X3 = nx.nx_agraph.read_dot("k5.dot") # read from dotfile diff --git a/examples/pygraphviz/plot_write_dotfile.py b/examples/pygraphviz/plot_write_dotfile.py new file mode 100644 index 0000000..8b34f98 --- /dev/null +++ b/examples/pygraphviz/plot_write_dotfile.py @@ -0,0 +1,23 @@ +""" +============= +Write Dotfile +============= + + +Write a dot file from a networkx graph for further processing with graphviz. + +You need to have either pygraphviz or pydot for this example. + +See https://networkx.github.io/documentation/latest/reference/drawing.html +""" + +import networkx as nx + +# This example needs Graphviz and either PyGraphviz or pydot. +# from networkx.drawing.nx_pydot import write_dot +from networkx.drawing.nx_agraph import write_dot + + +G = nx.grid_2d_graph(5, 5) # 5x5 grid +write_dot(G, "grid.dot") +print("Now run: neato -Tps grid.dot >grid.ps") diff --git a/examples/pygraphviz/pygraphviz_draw.py b/examples/pygraphviz/pygraphviz_draw.py deleted file mode 100644 index 349a58d..0000000 --- a/examples/pygraphviz/pygraphviz_draw.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python -""" -=============== -Pygraphviz Draw -=============== - -An example showing how to use the interface to the pygraphviz -AGraph class to draw a graph. - -Also see the pygraphviz documentation and examples at -http://pygraphviz.github.io/ -""" -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - -import networkx as nx - -# plain graph - -G = nx.complete_graph(5) # start with K5 in networkx -A = nx.nx_agraph.to_agraph(G) # convert to a graphviz graph -A.layout() # neato layout -A.draw("k5.ps") # write postscript in k5.ps with neato layout diff --git a/examples/pygraphviz/pygraphviz_simple.py b/examples/pygraphviz/pygraphviz_simple.py deleted file mode 100644 index 0d6e9f2..0000000 --- a/examples/pygraphviz/pygraphviz_simple.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -""" -================= -Pygraphviz Simple -================= - -An example showing how to use the interface to the pygraphviz -AGraph class to convert to and from graphviz. - -Also see the pygraphviz documentation and examples at -http://pygraphviz.github.io/ -""" -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
- -import networkx as nx - -# plain graph - -G = nx.complete_graph(5) # start with K5 in networkx -A = nx.nx_agraph.to_agraph(G) # convert to a graphviz graph -X1 = nx.nx_agraph.from_agraph(A) # convert back to networkx (but as Graph) -X2 = nx.Graph(A) # fancy way to do conversion -G1 = nx.Graph(X1) # now make it a Graph - -A.write('k5.dot') # write to dot file -X3 = nx.nx_agraph.read_dot('k5.dot') # read from dotfile diff --git a/examples/pygraphviz/write_dotfile.py b/examples/pygraphviz/write_dotfile.py deleted file mode 100644 index 5f848a4..0000000 --- a/examples/pygraphviz/write_dotfile.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python -""" -============= -Write Dotfile -============= - - -Write a dot file from a networkx graph for further processing with graphviz. - -You need to have either pygraphviz or pydot for this example. - -See https://networkx.github.io/documentation/latest/reference/drawing.html -for more info. - -""" -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - -import networkx as nx - -# and the following code block is not needed -# but we want to see which module is used and -# if and why it fails -try: - import pygraphviz - from networkx.drawing.nx_agraph import write_dot - print("using package pygraphviz") -except ImportError: - try: - import pydot - from networkx.drawing.nx_pydot import write_dot - print("using package pydot") - except ImportError: - print() - print("Both pygraphviz and pydot were not found ") - print("see https://networkx.github.io/documentation/latest/reference/drawing.html") - print() - raise - -G = nx.grid_2d_graph(5, 5) # 5x5 grid -write_dot(G, "grid.dot") -print("Now run: neato -Tps grid.dot >grid.ps") diff --git a/examples/subclass/plot_antigraph.py b/examples/subclass/plot_antigraph.py index 4d94fac..ef80f5c 100644 --- a/examples/subclass/plot_antigraph.py +++ b/examples/subclass/plot_antigraph.py @@ -15,18 +15,10 @@ algorithms. """ -# Author: Jordi Torrents - -# Copyright (C) 2015-2018 by -# Jordi Torrents -# All rights reserved. -# BSD license. import networkx as nx from networkx.exception import NetworkXError import matplotlib.pyplot as plt -__all__ = ['AntiGraph'] - class AntiGraph(nx.Graph): """ @@ -41,10 +33,11 @@ class AntiGraph(nx.Graph): an instance of this class with some of NetworkX functions. """ - all_edge_dict = {'weight': 1} + all_edge_dict = {"weight": 1} def single_edge_dict(self): return self.all_edge_dict + edge_attr_dict_factory = single_edge_dict def __getitem__(self, n): @@ -61,8 +54,9 @@ def __getitem__(self, n): The adjacency dictionary for nodes connected to n. """ - return dict((node, self.all_edge_dict) for node in - set(self.adj) - set(self.adj[n]) - set([n])) + return { + node: self.all_edge_dict for node in set(self.adj) - set(self.adj[n]) - {n} + } def neighbors(self, n): """Return an iterator over all neighbors of node n in the @@ -70,9 +64,9 @@ def neighbors(self, n): """ try: - return iter(set(self.adj) - set(self.adj[n]) - set([n])) - except KeyError: - raise NetworkXError("The node %s is not in the graph." % (n,)) + return iter(set(self.adj) - set(self.adj[n]) - {n}) + except KeyError as e: + raise NetworkXError(f"The node {n} is not in the graph.") from e def degree(self, nbunch=None, weight=None): """Return an iterator for (node, degree) in the dense graph. 
@@ -109,23 +103,39 @@ def degree(self, nbunch=None, weight=None): """ if nbunch is None: - nodes_nbrs = ((n, {v: self.all_edge_dict for v in - set(self.adj) - set(self.adj[n]) - set([n])}) - for n in self.nodes()) + nodes_nbrs = ( + ( + n, + { + v: self.all_edge_dict + for v in set(self.adj) - set(self.adj[n]) - {n} + }, + ) + for n in self.nodes() + ) elif nbunch in self: nbrs = set(self.nodes()) - set(self.adj[nbunch]) - {nbunch} return len(nbrs) else: - nodes_nbrs = ((n, {v: self.all_edge_dict for v in - set(self.nodes()) - set(self.adj[n]) - set([n])}) - for n in self.nbunch_iter(nbunch)) + nodes_nbrs = ( + ( + n, + { + v: self.all_edge_dict + for v in set(self.nodes()) - set(self.adj[n]) - {n} + }, + ) + for n in self.nbunch_iter(nbunch) + ) if weight is None: return ((n, len(nbrs)) for n, nbrs in nodes_nbrs) else: # AntiGraph is a ThinGraph so all edges have weight 1 - return ((n, sum((nbrs[nbr].get(weight, 1)) for nbr in nbrs)) - for n, nbrs in nodes_nbrs) + return ( + (n, sum((nbrs[nbr].get(weight, 1)) for nbr in nbrs)) + for n, nbrs in nodes_nbrs + ) def adjacency_iter(self): """Return an iterator of (node, adjacency set) tuples for all nodes @@ -142,41 +152,40 @@ def adjacency_iter(self): """ for n in self.adj: - yield (n, set(self.adj) - set(self.adj[n]) - set([n])) - - -if __name__ == '__main__': - # Build several pairs of graphs, a regular graph - # and the AntiGraph of it's complement, which behaves - # as if it were the original graph. - Gnp = nx.gnp_random_graph(20, 0.8, seed=42) - Anp = AntiGraph(nx.complement(Gnp)) - Gd = nx.davis_southern_women_graph() - Ad = AntiGraph(nx.complement(Gd)) - Gk = nx.karate_club_graph() - Ak = AntiGraph(nx.complement(Gk)) - pairs = [(Gnp, Anp), (Gd, Ad), (Gk, Ak)] - # test connected components - for G, A in pairs: - gc = [set(c) for c in nx.connected_components(G)] - ac = [set(c) for c in nx.connected_components(A)] - for comp in ac: - assert comp in gc - # test biconnected components - for G, A in pairs: - gc = [set(c) for c in nx.biconnected_components(G)] - ac = [set(c) for c in nx.biconnected_components(A)] - for comp in ac: - assert comp in gc - # test degree - for G, A in pairs: - node = list(G.nodes())[0] - nodes = list(G.nodes())[1:4] - assert G.degree(node) == A.degree(node) - assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree()) - # AntiGraph is a ThinGraph, so all the weights are 1 - assert sum(d for n, d in A.degree()) == sum(d for n, d in A.degree(weight='weight')) - assert sum(d for n, d in G.degree(nodes)) == sum(d for n, d in A.degree(nodes)) - - nx.draw(Gnp) - plt.show() + yield (n, set(self.adj) - set(self.adj[n]) - {n}) + + +# Build several pairs of graphs, a regular graph +# and the AntiGraph of its complement, which behaves +# as if it were the original graph.
+Gnp = nx.gnp_random_graph(20, 0.8, seed=42) +Anp = AntiGraph(nx.complement(Gnp)) +Gd = nx.davis_southern_women_graph() +Ad = AntiGraph(nx.complement(Gd)) +Gk = nx.karate_club_graph() +Ak = AntiGraph(nx.complement(Gk)) +pairs = [(Gnp, Anp), (Gd, Ad), (Gk, Ak)] +# test connected components +for G, A in pairs: + gc = [set(c) for c in nx.connected_components(G)] + ac = [set(c) for c in nx.connected_components(A)] + for comp in ac: + assert comp in gc +# test biconnected components +for G, A in pairs: + gc = [set(c) for c in nx.biconnected_components(G)] + ac = [set(c) for c in nx.biconnected_components(A)] + for comp in ac: + assert comp in gc +# test degree +for G, A in pairs: + node = list(G.nodes())[0] + nodes = list(G.nodes())[1:4] + assert G.degree(node) == A.degree(node) + assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree()) + # AntiGraph is a ThinGraph, so all the weights are 1 + assert sum(d for n, d in A.degree()) == sum(d for n, d in A.degree(weight="weight")) + assert sum(d for n, d in G.degree(nodes)) == sum(d for n, d in A.degree(nodes)) + +nx.draw(Gnp) +plt.show() diff --git a/examples/subclass/plot_printgraph.py b/examples/subclass/plot_printgraph.py index e3ff098..b6a5527 100644 --- a/examples/subclass/plot_printgraph.py +++ b/examples/subclass/plot_printgraph.py @@ -5,23 +5,12 @@ Example subclass of the Graph class. """ -# Author: Aric Hagberg (hagberg@lanl.gov) - -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -__docformat__ = "restructuredtext en" - -from copy import deepcopy import matplotlib.pyplot as plt import networkx as nx from networkx import Graph + class PrintGraph(Graph): """ Example subclass of the Graph class. @@ -29,17 +18,18 @@ class PrintGraph(Graph): Prints activity log to file or standard output. 
""" - def __init__(self, data=None, name='', file=None, **attr): + def __init__(self, data=None, name="", file=None, **attr): Graph.__init__(self, data=data, name=name, **attr) if file is None: import sys + self.fh = sys.stdout else: - self.fh = open(file, 'w') + self.fh = open(file, "w") def add_node(self, n, attr_dict=None, **attr): Graph.add_node(self, n, attr_dict=attr_dict, **attr) - self.fh.write("Add node: %s\n" % n) + self.fh.write(f"Add node: {n}\n") def add_nodes_from(self, nodes, **attr): for n in nodes: @@ -47,7 +37,7 @@ def add_nodes_from(self, nodes, **attr): def remove_node(self, n): Graph.remove_node(self, n) - self.fh.write("Remove node: %s\n" % n) + self.fh.write(f"Remove node: {n}\n") def remove_nodes_from(self, nodes): for n in nodes: @@ -55,7 +45,7 @@ def remove_nodes_from(self, nodes): def add_edge(self, u, v, attr_dict=None, **attr): Graph.add_edge(self, u, v, attr_dict=attr_dict, **attr) - self.fh.write("Add edge: %s-%s\n" % (u, v)) + self.fh.write(f"Add edge: {u}-{v}\n") def add_edges_from(self, ebunch, attr_dict=None, **attr): for e in ebunch: @@ -64,7 +54,7 @@ def add_edges_from(self, ebunch, attr_dict=None, **attr): def remove_edge(self, u, v): Graph.remove_edge(self, u, v) - self.fh.write("Remove edge: %s-%s\n" % (u, v)) + self.fh.write(f"Remove edge: {u}-{v}\n") def remove_edges_from(self, ebunch): for e in ebunch: @@ -76,23 +66,22 @@ def clear(self): self.fh.write("Clear graph\n") -if __name__ == '__main__': - G = PrintGraph() - G.add_node('foo') - G.add_nodes_from('bar', weight=8) - G.remove_node('b') - G.remove_nodes_from('ar') - print("Nodes in G: ", G.nodes(data=True)) - G.add_edge(0, 1, weight=10) - print("Edges in G: ", G.edges(data=True)) - G.remove_edge(0, 1) - G.add_edges_from(zip(range(0, 3), range(1, 4)), weight=10) - print("Edges in G: ", G.edges(data=True)) - G.remove_edges_from(zip(range(0, 3), range(1, 4))) - print("Edges in G: ", G.edges(data=True)) - - G = PrintGraph() - nx.add_path(G, range(10)) - nx.add_star(G, range(9, 13)) - nx.draw(G) - plt.show() +G = PrintGraph() +G.add_node("foo") +G.add_nodes_from("bar", weight=8) +G.remove_node("b") +G.remove_nodes_from("ar") +print("Nodes in G: ", G.nodes(data=True)) +G.add_edge(0, 1, weight=10) +print("Edges in G: ", G.edges(data=True)) +G.remove_edge(0, 1) +G.add_edges_from(zip(range(0, 3), range(1, 4)), weight=10) +print("Edges in G: ", G.edges(data=True)) +G.remove_edges_from(zip(range(0, 3), range(1, 4))) +print("Edges in G: ", G.edges(data=True)) + +G = PrintGraph() +nx.add_path(G, range(10)) +nx.add_star(G, range(9, 13)) +nx.draw(G) +plt.show() diff --git a/networkx.egg-info/dependency_links.txt b/networkx.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/networkx.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/networkx.egg-info/not-zip-safe b/networkx.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/networkx.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/networkx.egg-info/top_level.txt b/networkx.egg-info/top_level.txt new file mode 100644 index 0000000..4d07dfe --- /dev/null +++ b/networkx.egg-info/top_level.txt @@ -0,0 +1 @@ +networkx diff --git a/networkx/__init__.py b/networkx/__init__.py index ffcc540..235a882 100644 --- a/networkx/__init__.py +++ b/networkx/__init__.py @@ -2,81 +2,28 @@ NetworkX ======== -NetworkX is a Python package for the creation, manipulation, -and study of the structure, dynamics, and functions -of complex networks. 
+NetworkX is a Python package for the creation, manipulation, and study of the +structure, dynamics, and functions of complex networks. -Website (including documentation):: - - http://networkx.github.io - -Mailing list:: - - https://groups.google.com/forum/#!forum/networkx-discuss - -Source:: - - https://github.com/networkx/networkx - -Bug reports:: - - https://github.com/networkx/networkx/issues - -Simple example --------------- - -Find the shortest path between two nodes in an undirected graph:: - - >>> import networkx as nx - >>> G = nx.Graph() - >>> G.add_edge('A', 'B', weight=4) - >>> G.add_edge('B', 'D', weight=2) - >>> G.add_edge('A', 'C', weight=3) - >>> G.add_edge('C', 'D', weight=4) - >>> nx.shortest_path(G, 'A', 'D', weight='weight') - ['A', 'B', 'D'] - -Bugs ----- - -Please report any bugs that you find `here `_. -Or, even better, fork the repository on GitHub and create a pull request (PR). - -License -------- - -Released under the 3-Clause BSD license:: - - Copyright (C) 2004-2018 NetworkX Developers - Aric Hagberg - Dan Schult - Pieter Swart +See https://networkx.github.io for complete documentation. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Add platform dependent shared library path to sys.path -# - -from __future__ import absolute_import import sys -if sys.version_info[:2] < (2, 7): - m = "Python 2.7 or later is required for NetworkX (%d.%d detected)." + +if sys.version_info[:2] < (3, 6): + m = "Python 3.6 or later is required for NetworkX (%d.%d detected)." raise ImportError(m % sys.version_info[:2]) del sys # Release data from networkx import release -__author__ = '%s <%s>\n%s <%s>\n%s <%s>' % \ - (release.authors['Hagberg'] + release.authors['Schult'] + - release.authors['Swart']) -__license__ = release.license + +__author__ = ( + f"{release.authors['Hagberg'][0]} <{release.authors['Hagberg'][1]}>\n" + f"{release.authors['Schult'][0]} <{release.authors['Schult'][1]}>\n" + f"{release.authors['Swart'][0]} <{release.authors['Swart'][1]}>" +) __date__ = release.date __version__ = release.version @@ -123,7 +70,7 @@ import networkx.linalg from networkx.linalg import * -from networkx.tests.test import run as test +from networkx.testing.test import run as test import networkx.drawing from networkx.drawing import * diff --git a/networkx/algorithms/__init__.py b/networkx/algorithms/__init__.py index fa2776e..31fb8b2 100644 --- a/networkx/algorithms/__init__.py +++ b/networkx/algorithms/__init__.py @@ -1,4 +1,5 @@ from networkx.algorithms.assortativity import * +from networkx.algorithms.asteroidal import * from networkx.algorithms.boundary import * from networkx.algorithms.bridges import * from networkx.algorithms.chains import * @@ -13,12 +14,13 @@ from networkx.algorithms.covering import * from networkx.algorithms.cycles import * from networkx.algorithms.cuts import * +from networkx.algorithms.d_separation import * from networkx.algorithms.dag import * from networkx.algorithms.distance_measures import * from networkx.algorithms.distance_regular import * from networkx.algorithms.dominance import * from networkx.algorithms.dominating import * -from networkx.algorithms.efficiency import * +from networkx.algorithms.efficiency_measures import * from networkx.algorithms.euler import * from networkx.algorithms.graphical import * from networkx.algorithms.hierarchy import * @@ -30,15 +32,22 @@ from networkx.algorithms.matching import * from networkx.algorithms.minors import * from networkx.algorithms.mis 
import * +from networkx.algorithms.moral import * +from networkx.algorithms.non_randomness import * from networkx.algorithms.operators import * +from networkx.algorithms.planarity import * +from networkx.algorithms.planar_drawing import * from networkx.algorithms.reciprocity import * +from networkx.algorithms.regular import * from networkx.algorithms.richclub import * from networkx.algorithms.shortest_paths import * from networkx.algorithms.similarity import * +from networkx.algorithms.graph_hashing import * from networkx.algorithms.simple_paths import * +from networkx.algorithms.smallworld import * from networkx.algorithms.smetric import * from networkx.algorithms.structuralholes import * -from networkx.algorithms.triads import * +from networkx.algorithms.sparsifiers import * from networkx.algorithms.swap import * from networkx.algorithms.traversal import * from networkx.algorithms.triads import * @@ -110,6 +119,7 @@ from networkx.algorithms.tree.branchings import minimum_branching from networkx.algorithms.tree.branchings import minimum_spanning_arborescence from networkx.algorithms.tree.coding import * +from networkx.algorithms.tree.decomposition import * +from networkx.algorithms.tree.mst import * from networkx.algorithms.tree.operations import * from networkx.algorithms.tree.recognition import * -from networkx.algorithms.tree.mst import * diff --git a/networkx/algorithms/approximation/__init__.py b/networkx/algorithms/approximation/__init__.py index ba055f4..99e8340 100644 --- a/networkx/algorithms/approximation/__init__.py +++ b/networkx/algorithms/approximation/__init__.py @@ -1,11 +1,3 @@ -# __init__.py - package containing heuristics for optimization problems -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Approximations of graph properties and Heuristic functions for optimization problems. @@ -27,3 +19,4 @@ from networkx.algorithms.approximation.ramsey import * from networkx.algorithms.approximation.steinertree import * from networkx.algorithms.approximation.vertex_cover import * +from networkx.algorithms.approximation.treewidth import * diff --git a/networkx/algorithms/approximation/clique.py b/networkx/algorithms/approximation/clique.py index ad67665..9283baf 100644 --- a/networkx/algorithms/approximation/clique.py +++ b/networkx/algorithms/approximation/clique.py @@ -1,17 +1,4 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2011-2018 by -# Nicholas Mancuso -# All rights reserved. -# BSD license. -# Copyright 2016-2018 NetworkX developers. -# NetworkX is distributed under a BSD license -# -# Authors: Nicholas Mancuso (nick.mancuso@gmail.com) -# Jeffery Finkelstein -# Dan Schult """Functions for computing large cliques.""" -from operator import itemgetter - import networkx as nx from networkx.utils import not_implemented_for from networkx.algorithms.approximation import ramsey @@ -68,7 +55,7 @@ def max_clique(G): def clique_removal(G): - """ Repeatedly remove cliques from the graph. + r""" Repeatedly remove cliques from the graph. Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique and independent set. Returns the largest independent set found, along @@ -106,8 +93,8 @@ def clique_removal(G): return maxiset, cliques -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def large_clique_size(G): """Find the size of a large clique in a graph. 
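For orientation, a short usage sketch of the approximation clique API touched by the hunks above; the lollipop graph is an arbitrary choice, but clique_removal, max_clique and large_clique_size are the entry points re-exported by the approximation package in this patch:

import networkx as nx
from networkx.algorithms import approximation as approx

G = nx.lollipop_graph(10, 5)  # a K10 with a 5-node tail
iset, cliques = approx.clique_removal(G)
# The first return value is an independent set, the second a cover by cliques.
assert G.subgraph(iset).number_of_edges() == 0
assert all(
    G.subgraph(c).number_of_edges() == len(c) * (len(c) - 1) // 2 for c in cliques
)
print(len(approx.max_clique(G)), approx.large_clique_size(G))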
diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py index 352dd5d..56a8f83 100644 --- a/networkx/algorithms/approximation/clustering_coefficient.py +++ b/networkx/algorithms/approximation/clustering_coefficient.py @@ -1,19 +1,12 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2013 by -# Fred Morstatter -# Jordi Torrents -# All rights reserved. -# BSD license. -import random from networkx.utils import not_implemented_for +from networkx.utils import py_random_state -__all__ = ['average_clustering'] -__author__ = """\n""".join(['Fred Morstatter ', - 'Jordi Torrents ']) +__all__ = ["average_clustering"] -@not_implemented_for('directed') -def average_clustering(G, trials=1000): +@py_random_state(2) +@not_implemented_for("directed") +def average_clustering(G, trials=1000, seed=None): r"""Estimates the average clustering coefficient of G. The local clustering of each node in `G` is the fraction of triangles @@ -35,6 +28,10 @@ def average_clustering(G, trials=1000): trials : integer Number of trials to perform (default 1000). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- c : float @@ -51,11 +48,11 @@ def average_clustering(G, trials=1000): n = len(G) triangles = 0 nodes = list(G) - for i in [int(random.random() * n) for i in range(trials)]: + for i in [int(seed.random() * n) for i in range(trials)]: nbrs = list(G[nodes[i]]) if len(nbrs) < 2: continue - u, v = random.sample(nbrs, 2) + u, v = seed.sample(nbrs, 2) if u in G[v]: triangles += 1 return triangles / float(trials) diff --git a/networkx/algorithms/approximation/connectivity.py b/networkx/algorithms/approximation/connectivity.py index 7701968..96f613b 100644 --- a/networkx/algorithms/approximation/connectivity.py +++ b/networkx/algorithms/approximation/connectivity.py @@ -1,34 +1,30 @@ """ Fast approximation for node connectivity """ -# Copyright (C) 2015 by -# Jordi Torrents -# All rights reserved. -# BSD license. import itertools from operator import itemgetter import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ']) +__all__ = [ + "local_node_connectivity", + "node_connectivity", + "all_pairs_node_connectivity", +] -__all__ = ['local_node_connectivity', - 'node_connectivity', - 'all_pairs_node_connectivity'] - -INF = float('inf') +INF = float("inf") def local_node_connectivity(G, source, target, cutoff=None): """Compute node connectivity between source and target. - Pairwise or local node connectivity between two distinct and nonadjacent - nodes is the minimum number of nodes that must be removed (minimum - separating cutset) to disconnect them. By Menger's theorem, this is equal + Pairwise or local node connectivity between two distinct and nonadjacent + nodes is the minimum number of nodes that must be removed (minimum + separating cutset) to disconnect them. By Menger's theorem, this is equal to the number of node independent paths (paths that share no nodes other than source and target). Which is what we compute in this function. This algorithm is a fast approximation that gives an strict lower - bound on the actual number of node independent paths between two nodes [1]_. + bound on the actual number of node independent paths between two nodes [1]_. It works for both directed and undirected graphs. 
Parameters @@ -60,17 +56,17 @@ def local_node_connectivity(G, source, target, cutoff=None): >>> approx.local_node_connectivity(G, 0, 5) 4 - Notes ----- - This algorithm [1]_ finds node independents paths between two nodes by - computing their shortest path using BFS, marking the nodes of the path - found as 'used' and then searching other shortest paths excluding the - nodes marked as used until no more paths exist. It is not exact because + This algorithm [1]_ finds node independent paths between two nodes by + computing their shortest path using BFS, marking the nodes of the path + found as 'used' and then searching other shortest paths excluding the + nodes marked as used until no more paths exist. It is not exact because a shortest path could use nodes that, if the path were longer, may belong to two different node independent paths. Thus it only guarantees an strict lower bound on node connectivity. - Note that the authors propose a further refinement, losing accuracy and + Note that the authors propose a further refinement, losing accuracy and gaining speed, which is not implemented yet. See also -------- @@ -80,7 +76,7 @@ def local_node_connectivity(G, source, target, cutoff=None): References ---------- - .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 http://eclectic.ss.uci.edu/~drwhite/working.pdf @@ -118,15 +114,15 @@ def node_connectivity(G, s=None, t=None): Node connectivity is equal to the minimum number of nodes that must be removed to disconnect G or render it trivial. By Menger's theorem, - this is equal to the number of node independent paths (paths that + this is equal to the number of node independent paths (paths that share no nodes other than source and target). - If source and target nodes are provided, this function returns the - local node connectivity: the minimum number of nodes that must be + If source and target nodes are provided, this function returns the + local node connectivity: the minimum number of nodes that must be removed to break all paths from source to target in G. This algorithm is based on a fast approximation that gives an strict lower - bound on the actual number of node independent paths between two nodes [1]_. + bound on the actual number of node independent paths between two nodes [1]_. It works for both directed and undirected graphs. Parameters @@ -148,7 +144,7 @@ def node_connectivity(G, s=None, t=None): Examples -------- - >>> # Platonic octahedral graph is 4-node-connected + >>> # Platonic octahedral graph is 4-node-connected >>> from networkx.algorithms import approximation as approx >>> G = nx.octahedral_graph() >>> approx.node_connectivity(G) @@ -156,10 +152,10 @@ def node_connectivity(G, s=None, t=None): Notes ----- - This algorithm [1]_ finds node independents paths between two nodes by - computing their shortest path using BFS, marking the nodes of the path - found as 'used' and then searching other shortest paths excluding the - nodes marked as used until no more paths exist. It is not exact because + This algorithm [1]_ finds node independent paths between two nodes by + computing their shortest path using BFS, marking the nodes of the path + found as 'used' and then searching other shortest paths excluding the + nodes marked as used until no more paths exist.
It is not exact because a shortest path could use nodes that, if the path were longer, may belong to two different node independent paths. Thus it only guarantees an strict lower bound on node connectivity. @@ -171,20 +167,20 @@ def node_connectivity(G, s=None, t=None): References ---------- - .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 http://eclectic.ss.uci.edu/~drwhite/working.pdf """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # Local node connectivity if s is not None and t is not None: if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) + raise nx.NetworkXError(f"node {t} not in graph") return local_node_connectivity(G, s, t) # Global node connectivity @@ -194,6 +190,7 @@ def node_connectivity(G, s=None, t=None): def neighbors(v): return itertools.chain(G.predecessors(v), G.successors(v)) + else: connected_func = nx.is_connected iter_func = itertools.combinations @@ -208,7 +205,7 @@ def neighbors(v): K = minimum_degree # compute local node connectivity with all non-neighbors nodes # and store the minimum - for w in set(G) - set(neighbors(v)) - set([v]): + for w in set(G) - set(neighbors(v)) - {v}: K = min(K, local_node_connectivity(G, v, w, cutoff=K)) # Same for non adjacent pairs of neighbors of v for x, y in iter_func(neighbors(v), 2): @@ -220,14 +217,14 @@ def neighbors(v): def all_pairs_node_connectivity(G, nbunch=None, cutoff=None): """ Compute node connectivity between all pairs of nodes. - Pairwise or local node connectivity between two distinct and nonadjacent - nodes is the minimum number of nodes that must be removed (minimum - separating cutset) to disconnect them. By Menger's theorem, this is equal + Pairwise or local node connectivity between two distinct and nonadjacent + nodes is the minimum number of nodes that must be removed (minimum + separating cutset) to disconnect them. By Menger's theorem, this is equal to the number of node independent paths (paths that share no nodes other than source and target). Which is what we compute in this function. This algorithm is a fast approximation that gives an strict lower - bound on the actual number of node independent paths between two nodes [1]_. + bound on the actual number of node independent paths between two nodes [1]_. It works for both directed and undirected graphs. @@ -252,11 +249,11 @@ def all_pairs_node_connectivity(G, nbunch=None, cutoff=None): See Also -------- local_node_connectivity - all_pairs_node_connectivity + node_connectivity References ---------- - .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 http://eclectic.ss.uci.edu/~drwhite/working.pdf """ @@ -283,7 +280,7 @@ def all_pairs_node_connectivity(G, nbunch=None, cutoff=None): def _bidirectional_shortest_path(G, source, target, exclude): - """Return shortest path between source and target ignoring nodes in the + """Returns shortest path between source and target ignoring nodes in the container 'exclude'. 
Parameters @@ -307,20 +304,20 @@ def _bidirectional_shortest_path(G, source, target, exclude): Raises ------ - NetworkXNoPath: exception + NetworkXNoPath If there is no path or if nodes are adjacent and have only one path between them Notes ----- This function and its helper are originally from - networkx.algorithms.shortest_paths.unweighted and are modified to - accept the extra parameter 'exclude', which is a container for nodes + networkx.algorithms.shortest_paths.unweighted and are modified to + accept the extra parameter 'exclude', which is a container for nodes already used in other paths that should be ignored. References ---------- - .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 http://eclectic.ss.uci.edu/~drwhite/working.pdf @@ -350,7 +347,8 @@ def _bidirectional_pred_succ(G, source, target, exclude): # excludes nodes in the container "exclude" from the search if source is None or target is None: raise nx.NetworkXException( - "Bidirectional shortest path called without source or target") + "Bidirectional shortest path called without source or target" + ) if target == source: return ({target: None}, {source: None}, source) @@ -402,4 +400,4 @@ def _bidirectional_pred_succ(G, source, target, exclude): if w in pred: return pred, succ, w # found path - raise nx.NetworkXNoPath("No path between %s and %s." % (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") diff --git a/networkx/algorithms/approximation/dominating_set.py b/networkx/algorithms/approximation/dominating_set.py index 3b47ca1..548e21d 100644 --- a/networkx/algorithms/approximation/dominating_set.py +++ b/networkx/algorithms/approximation/dominating_set.py @@ -1,8 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2011-2012 by -# Nicholas Mancuso -# All rights reserved. -# BSD license. """Functions for finding node and edge dominating sets. A `dominating set`_ for an undirected graph *G* with vertex set *V* @@ -15,21 +10,17 @@ .. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set """ -from __future__ import division from ..matching import maximal_matching from ...utils import not_implemented_for -__all__ = ["min_weighted_dominating_set", - "min_edge_dominating_set"] - -__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" +__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"] # TODO Why doesn't this algorithm work for directed graphs? -@not_implemented_for('directed') +@not_implemented_for("directed") def min_weighted_dominating_set(G, weight=None): - """Returns a dominating set that approximates the minimum weight node + r"""Returns a dominating set that approximates the minimum weight node dominating set. Parameters ---------- G : NetworkX graph Undirected graph. weight : string - The node attribute storing the weight of an edge. If provided, + The node attribute storing the weight of a node. If provided, the node attribute with this key must be a number for each node. If not provided, each node is assumed to have weight one. @@ -109,7 +100,7 @@ def _cost(node_and_neighborhood): def min_edge_dominating_set(G): - r"""Return minimum cardinality edge dominating set. + r"""Returns minimum cardinality edge dominating set.
Parameters ---------- diff --git a/networkx/algorithms/approximation/independent_set.py b/networkx/algorithms/approximation/independent_set.py index f86d2ac..35ad8f7 100644 --- a/networkx/algorithms/approximation/independent_set.py +++ b/networkx/algorithms/approximation/independent_set.py @@ -1,5 +1,4 @@ -# -*- coding: utf-8 -*- -""" +r""" Independent Set Independent set or stable set is a set of vertices in a graph, no two of @@ -9,7 +8,7 @@ set is the number of vertices it contains. A maximum independent set is a largest independent set for a given graph G -and its size is denoted α(G). The problem of finding such a set is called +and its size is denoted $\alpha(G)$. The problem of finding such a set is called the maximum independent set problem and is an NP-hard optimization problem. As such, it is unlikely that there exists an efficient algorithm for finding a maximum independent set of a graph. @@ -26,17 +25,13 @@ doi:10.1007/BF01994876 """ -# Copyright (C) 2011-2012 by -# Nicholas Mancuso -# All rights reserved. -# BSD license. from networkx.algorithms.approximation import clique_removal + __all__ = ["maximum_independent_set"] -__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def maximum_independent_set(G): - """Return an approximate maximum independent set. + """Returns an approximate maximum independent set. Parameters ---------- diff --git a/networkx/algorithms/approximation/kcomponents.py b/networkx/algorithms/approximation/kcomponents.py index ffc8ae0..dfa8a45 100644 --- a/networkx/algorithms/approximation/kcomponents.py +++ b/networkx/algorithms/approximation/kcomponents.py @@ -1,27 +1,20 @@ """ Fast approximation for k-component structure """ -# Copyright (C) 2015 by -# Jordi Torrents -# All rights reserved. -# BSD license. import itertools -from collections import defaultdict, Mapping +from collections import defaultdict +from collections.abc import Mapping import networkx as nx from networkx.exception import NetworkXError from networkx.utils import not_implemented_for from networkx.algorithms.approximation import local_node_connectivity -from networkx.algorithms.connectivity import \ - local_node_connectivity as exact_local_node_connectivity -__author__ = """\n""".join(['Jordi Torrents ']) +__all__ = ["k_components"] -__all__ = ['k_components'] - -not_implemented_for('directed') +not_implemented_for("directed") def k_components(G, min_density=0.95): @@ -165,14 +158,14 @@ def k_components(G, min_density=0.95): def _cliques_heuristic(G, H, k, min_density): h_cnumber = nx.core_number(H) for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)): - cands = set(n for n, c in h_cnumber.items() if c == c_value) + cands = {n for n, c in h_cnumber.items() if c == c_value} # Skip checking for overlap for the highest core value if i == 0: overlap = False else: - overlap = set.intersection(*[ - set(x for x in H[n] if x not in cands) - for n in cands]) + overlap = set.intersection( + *[{x for x in H[n] if x not in cands} for n in cands] + ) if overlap and len(overlap) < k: SH = H.subgraph(cands | overlap) else: @@ -180,7 +173,7 @@ def _cliques_heuristic(G, H, k, min_density): sh_cnumber = nx.core_number(SH) SG = nx.k_core(G.subgraph(SH), k) while not (_same(sh_cnumber) and nx.density(SH) >= min_density): - #!! 
This subgraph must be writable => .copy() + # This subgraph must be writable => .copy() SH = H.subgraph(SG).copy() if len(SH) <= k: break @@ -214,14 +207,15 @@ class _AntiGraph(nx.Graph): case we only use k-core, connected_components, and biconnected_components. """ - all_edge_dict = {'weight': 1} + all_edge_dict = {"weight": 1} def single_edge_dict(self): return self.all_edge_dict + edge_attr_dict_factory = single_edge_dict def __getitem__(self, n): - """Return a dict of neighbors of node n in the dense graph. + """Returns a dict of neighbors of node n in the dense graph. Parameters ---------- @@ -235,17 +229,18 @@ def __getitem__(self, n): """ all_edge_dict = self.all_edge_dict - return {node: all_edge_dict for node in - set(self._adj) - set(self._adj[n]) - set([n])} + return { + node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n} + } def neighbors(self, n): - """Return an iterator over all neighbors of node n in the + """Returns an iterator over all neighbors of node n in the dense graph. """ try: - return iter(set(self._adj) - set(self._adj[n]) - set([n])) - except KeyError: - raise NetworkXError("The node %s is not in the graph." % (n,)) + return iter(set(self._adj) - set(self._adj[n]) - {n}) + except KeyError as e: + raise NetworkXError(f"The node {n} is not in the graph.") from e class AntiAtlasView(Mapping): """An adjacency inner dict for AntiGraph""" @@ -262,7 +257,7 @@ def __iter__(self): return (n for n in self._graph if n not in self._atlas and n != self._node) def __getitem__(self, nbr): - nbrs = set(self._graph._adj) - set(self._atlas) - set([self._node]) + nbrs = set(self._graph._adj) - set(self._atlas) - {self._node} if nbr in nbrs: return self._graph.all_edge_dict raise KeyError(nbr) @@ -308,17 +303,17 @@ class AntiDegreeView(nx.reportviews.DegreeView): def __iter__(self): all_nodes = set(self._succ) for n in self._nodes: - nbrs = all_nodes - set(self._succ[n]) - set([n]) + nbrs = all_nodes - set(self._succ[n]) - {n} yield (n, len(nbrs)) def __getitem__(self, n): - nbrs = set(self._succ) - set(self._succ[n]) - set([n]) + nbrs = set(self._succ) - set(self._succ[n]) - {n} # AntiGraph is a ThinGraph so all edges have weight 1 return len(nbrs) + (n in nbrs) @property def degree(self): - """Return an iterator for (node, degree) and degree for single node. + """Returns an iterator for (node, degree) and degree for single node. The node degree is the number of edges adjacent to the node. @@ -347,16 +342,16 @@ def degree(self): Examples -------- >>> G = nx.path_graph(4) - >>> G.degree(0) # node 0 with degree 1 + >>> G.degree(0) # node 0 with degree 1 1 - >>> list(G.degree([0,1])) + >>> list(G.degree([0, 1])) [(0, 1), (1, 2)] """ return self.AntiDegreeView(self) def adjacency(self): - """Return an iterator of (node, adjacency set) tuples for all nodes + """Returns an iterator of (node, adjacency set) tuples for all nodes in the dense graph. This is the fastest way to look at every edge. 
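A brief usage sketch for the approximate k_components that these _AntiGraph helpers support; the graph is an arbitrary example, and the return shape (a dict mapping each k to a list of node sets) follows the docstrings in this patch:

import networkx as nx
from networkx.algorithms import approximation as approx

G = nx.karate_club_graph()
k_comps = approx.k_components(G)  # dict: k -> list of sets of nodes
for k, comps in sorted(k_comps.items()):
    print(k, sorted(len(c) for c in comps))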
@@ -370,4 +365,4 @@ def adjacency(self): """ for n in self._adj: - yield (n, set(self._adj) - set(self._adj[n]) - set([n])) + yield (n, set(self._adj) - set(self._adj[n]) - {n}) diff --git a/networkx/algorithms/approximation/matching.py b/networkx/algorithms/approximation/matching.py index d7fc4c4..17a52ed 100644 --- a/networkx/algorithms/approximation/matching.py +++ b/networkx/algorithms/approximation/matching.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ ************** Graph Matching @@ -9,13 +8,9 @@ `Wikipedia: Matching `_ """ -# Copyright (C) 2011-2012 by -# Nicholas Mancuso -# All rights reserved. -# BSD license. import networkx as nx + __all__ = ["min_maximal_matching"] -__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def min_maximal_matching(G): diff --git a/networkx/algorithms/approximation/ramsey.py b/networkx/algorithms/approximation/ramsey.py index 48a9df7..39762ad 100644 --- a/networkx/algorithms/approximation/ramsey.py +++ b/networkx/algorithms/approximation/ramsey.py @@ -1,20 +1,20 @@ -# -*- coding: utf-8 -*- """ Ramsey numbers. """ -# Copyright (C) 2011 by -# Nicholas Mancuso -# All rights reserved. -# BSD license. import networkx as nx from ...utils import arbitrary_element __all__ = ["ramsey_R2"] -__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def ramsey_R2(G): - r"""Approximately computes the Ramsey number `R(2;s,t)` for graph. + r"""Compute the largest clique and largest independent set in `G`. + + This can be used to estimate bounds for the 2-color + Ramsey number `R(2;s,t)` for `G`. + + This is a recursive implementation which could run into trouble + for large recursions. Note that self-loop edges are ignored. Parameters ---------- @@ -30,7 +30,7 @@ def ramsey_R2(G): return set(), set() node = arbitrary_element(G) - nbrs = nx.all_neighbors(G, node) + nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node) nnbrs = nx.non_neighbors(G, node) c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy()) c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy()) diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index e71ad3d..b1e2488 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -1,13 +1,13 @@ -from itertools import combinations, chain +from itertools import chain from networkx.utils import pairwise, not_implemented_for import networkx as nx -__all__ = ['metric_closure', 'steiner_tree'] +__all__ = ["metric_closure", "steiner_tree"] -@not_implemented_for('directed') -def metric_closure(G, weight='weight'): +@not_implemented_for("directed") +def metric_closure(G, weight="weight"): """ Return the metric closure of a graph. The metric closure of a graph *G* is the complete graph in which each edge @@ -25,20 +25,44 @@ def metric_closure(G, weight='weight'): """ M = nx.Graph() - seen = set() Gnodes = set(G) - for u, (distance, path) in nx.all_pairs_dijkstra(G, weight=weight): - seen.add(u) - for v in Gnodes - seen: + + # check for connected graph while processing first node + all_paths_iter = nx.all_pairs_dijkstra(G, weight=weight) + u, (distance, path) = next(all_paths_iter) + if Gnodes - set(distance): + msg = "G is not a connected graph. metric_closure is not defined." 
+        raise nx.NetworkXError(msg) + Gnodes.remove(u) + for v in Gnodes: + M.add_edge(u, v, distance=distance[v], path=path[v]) + + # first node done -- now process the rest + for u, (distance, path) in all_paths_iter: + Gnodes.remove(u) + for v in Gnodes: M.add_edge(u, v, distance=distance[v], path=path[v]) return M -@not_implemented_for('directed') -def steiner_tree(G, terminal_nodes, weight='weight'): +@not_implemented_for("directed") +def steiner_tree(G, terminal_nodes, weight="weight"): """ Return an approximation to the minimum Steiner tree of a graph. + The minimum Steiner tree of `G` w.r.t. a set of `terminal_nodes` + is a tree within `G` that spans those nodes and has minimum size + (sum of edge weights) among all such trees. + + The minimum Steiner tree can be approximated by computing the minimum + spanning tree of the subgraph of the metric closure of *G* induced by the + terminal nodes, where the metric closure of *G* is the complete graph in + which each edge is weighted by the shortest path distance between the + nodes in *G*. + This algorithm produces a tree whose weight is within a (2 - (2 / t)) + factor of the weight of the optimal Steiner tree where *t* is the number of + terminal nodes. + Parameters ---------- G : NetworkX graph @@ -55,24 +79,26 @@ Notes ----- - Steiner tree can be approximated by computing the minimum spanning - tree of the subgraph of the metric closure of the graph induced by the - terminal nodes, where the metric closure of *G* is the complete graph in - which each edge is weighted by the shortest path distance between the - nodes in *G* . - This algorithm produces a tree whose weight is within a (2 - (2 / t)) - factor of the weight of the optimal Steiner tree where *t* is number of - terminal nodes. + For multigraphs, the edge between two nodes with minimum weight is the + edge put into the Steiner tree. + + References + ---------- + .. [1] Steiner_tree_problem on Wikipedia. + https://en.wikipedia.org/wiki/Steiner_tree_problem """ - # M is the subgraph of the metric closure induced by the terminal nodes of - # G. + # H is the subgraph induced by terminal_nodes in the metric closure M of G. M = metric_closure(G, weight=weight) - # Use the 'distance' attribute of each edge provided by the metric closure - # graph. H = M.subgraph(terminal_nodes) - mst_edges = nx.minimum_spanning_edges(H, weight='distance', data=True) + # Use the 'distance' attribute of each edge provided by M.
+ mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True) # Create an iterator over each edge in each shortest path; repeats are okay - edges = chain.from_iterable(pairwise(d['path']) for u, v, d in mst_edges) + edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges) + # For multigraph we should add the minimal weight edge keys + if G.is_multigraph(): + edges = ( + (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in edges + ) T = G.edge_subgraph(edges) return T diff --git a/networkx/algorithms/approximation/tests/__init__.py b/networkx/algorithms/approximation/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py index b2cae93..1bb1779 100644 --- a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py +++ b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py @@ -1,4 +1,3 @@ -from nose.tools import assert_equal import networkx as nx from networkx.algorithms.approximation import average_clustering @@ -9,31 +8,36 @@ def test_petersen(): # Actual coefficient is 0 G = nx.petersen_graph() - assert_equal(average_clustering(G, trials=int(len(G) / 2)), - nx.average_clustering(G)) + assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G) + + +def test_petersen_seed(): + # Actual coefficient is 0 + G = nx.petersen_graph() + assert average_clustering( + G, trials=int(len(G) / 2), seed=1 + ) == nx.average_clustering(G) def test_tetrahedral(): # Actual coefficient is 1 G = nx.tetrahedral_graph() - assert_equal(average_clustering(G, trials=int(len(G) / 2)), - nx.average_clustering(G)) + assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G) def test_dodecahedral(): # Actual coefficient is 0 G = nx.dodecahedral_graph() - assert_equal(average_clustering(G, trials=int(len(G) / 2)), - nx.average_clustering(G)) + assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G) def test_empty(): G = nx.empty_graph(5) - assert_equal(average_clustering(G, trials=int(len(G) / 2)), 0) + assert average_clustering(G, trials=int(len(G) / 2)) == 0 def test_complete(): G = nx.complete_graph(5) - assert_equal(average_clustering(G, trials=int(len(G) / 2)), 1) + assert average_clustering(G, trials=int(len(G) / 2)) == 1 G = nx.complete_graph(7) - assert_equal(average_clustering(G, trials=int(len(G) / 2)), 1) + assert average_clustering(G, trials=int(len(G) / 2)) == 1 diff --git a/networkx/algorithms/approximation/tests/test_clique.py b/networkx/algorithms/approximation/tests/test_clique.py index 7b2ce12..9cec86c 100644 --- a/networkx/algorithms/approximation/tests/test_clique.py +++ b/networkx/algorithms/approximation/tests/test_clique.py @@ -1,20 +1,8 @@ -# test_clique.py - unit tests for the approximation.clique module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.approximation.clique` module. 
""" -from __future__ import division -from nose.tools import assert_greater -from nose.tools import assert_true -from nose.tools import assert_equal import networkx as nx from networkx.algorithms.approximation import max_clique @@ -45,7 +33,7 @@ def is_clique(G, nodes): return H.number_of_edges() == n * (n - 1) // 2 -class TestCliqueRemoval(object): +class TestCliqueRemoval: """Unit tests for the :func:`~networkx.algorithms.approximation.clique_removal` function. @@ -54,25 +42,25 @@ class TestCliqueRemoval(object): def test_trivial_graph(self): G = nx.trivial_graph() independent_set, cliques = clique_removal(G) - assert_true(is_independent_set(G, independent_set)) - assert_true(all(is_clique(G, clique) for clique in cliques)) + assert is_independent_set(G, independent_set) + assert all(is_clique(G, clique) for clique in cliques) # In fact, we should only have 1-cliques, that is, singleton nodes. - assert_true(all(len(clique) == 1 for clique in cliques)) + assert all(len(clique) == 1 for clique in cliques) def test_complete_graph(self): G = nx.complete_graph(10) independent_set, cliques = clique_removal(G) - assert_true(is_independent_set(G, independent_set)) - assert_true(all(is_clique(G, clique) for clique in cliques)) + assert is_independent_set(G, independent_set) + assert all(is_clique(G, clique) for clique in cliques) def test_barbell_graph(self): G = nx.barbell_graph(10, 5) independent_set, cliques = clique_removal(G) - assert_true(is_independent_set(G, independent_set)) - assert_true(all(is_clique(G, clique) for clique in cliques)) + assert is_independent_set(G, independent_set) + assert all(is_clique(G, clique) for clique in cliques) -class TestMaxClique(object): +class TestMaxClique: """Unit tests for the :func:`networkx.algorithms.approximation.max_clique` function. 
@@ -80,13 +68,13 @@ class TestMaxClique(object): def test_null_graph(self): G = nx.null_graph() - assert_equal(len(max_clique(G)), 0) + assert len(max_clique(G)) == 0 def test_complete_graph(self): graph = nx.complete_graph(30) # this should return the entire graph mc = max_clique(graph) - assert_equal(30, len(mc)) + assert 30 == len(mc) def test_maximal_by_cardinality(self): """Tests that the maximal clique is computed according to maximum @@ -98,11 +86,11 @@ def test_maximal_by_cardinality(self): G = nx.complete_graph(5) G.add_edge(4, 5) clique = max_clique(G) - assert_greater(len(clique), 1) + assert len(clique) > 1 G = nx.lollipop_graph(30, 2) clique = max_clique(G) - assert_greater(len(clique), 2) + assert len(clique) > 2 def test_large_clique_size(): @@ -112,8 +100,8 @@ def test_large_clique_size(): G.add_edge(1, 12) G.add_node(13) - assert_equal(large_clique_size(G), 9) + assert large_clique_size(G) == 9 G.remove_node(5) - assert_equal(large_clique_size(G), 8) + assert large_clique_size(G) == 8 G.remove_edge(2, 3) - assert_equal(large_clique_size(G), 7) + assert large_clique_size(G) == 7 diff --git a/networkx/algorithms/approximation/tests/test_connectivity.py b/networkx/algorithms/approximation/tests/test_connectivity.py index 057ec17..887db20 100644 --- a/networkx/algorithms/approximation/tests/test_connectivity.py +++ b/networkx/algorithms/approximation/tests/test_connectivity.py @@ -1,5 +1,4 @@ -import itertools -from nose.tools import assert_true, assert_equal, assert_raises +import pytest import networkx as nx from networkx.algorithms import approximation as approx @@ -8,12 +7,32 @@ def test_global_node_connectivity(): # Figure 1 chapter on Connectivity G = nx.Graph() - G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4), - (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8), - (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)]) - assert_equal(2, approx.local_node_connectivity(G, 1, 11)) - assert_equal(2, approx.node_connectivity(G)) - assert_equal(2, approx.node_connectivity(G, 1, 11)) + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) + assert 2 == approx.local_node_connectivity(G, 1, 11) + assert 2 == approx.node_connectivity(G) + assert 2 == approx.node_connectivity(G, 1, 11) def test_white_harary1(): @@ -28,27 +47,28 @@ def test_white_harary1(): G.remove_node(G.order() - 1) for i in range(7, 10): G.add_edge(0, i) - assert_equal(1, approx.node_connectivity(G)) + assert 1 == approx.node_connectivity(G) def test_complete_graphs(): for n in range(5, 25, 5): G = nx.complete_graph(n) - assert_equal(n - 1, approx.node_connectivity(G)) - assert_equal(n - 1, approx.node_connectivity(G, 0, 3)) + assert n - 1 == approx.node_connectivity(G) + assert n - 1 == approx.node_connectivity(G, 0, 3) def test_empty_graphs(): for k in range(5, 25, 5): G = nx.empty_graph(k) - assert_equal(0, approx.node_connectivity(G)) - assert_equal(0, approx.node_connectivity(G, 0, 3)) + assert 0 == approx.node_connectivity(G) + assert 0 == approx.node_connectivity(G, 0, 3) def test_petersen(): G = nx.petersen_graph() - assert_equal(3, approx.node_connectivity(G)) - assert_equal(3, approx.node_connectivity(G, 0, 5)) + assert 3 == approx.node_connectivity(G) + assert 3 == approx.node_connectivity(G, 0, 5) + # Approximation fails with tutte graph # def test_tutte(): @@ -58,14 +78,15 @@ def test_petersen(): def 
test_dodecahedral(): G = nx.dodecahedral_graph() - assert_equal(3, approx.node_connectivity(G)) - assert_equal(3, approx.node_connectivity(G, 0, 5)) + assert 3 == approx.node_connectivity(G) + assert 3 == approx.node_connectivity(G, 0, 5) def test_octahedral(): G = nx.octahedral_graph() - assert_equal(4, approx.node_connectivity(G)) - assert_equal(4, approx.node_connectivity(G, 0, 5)) + assert 4 == approx.node_connectivity(G) + assert 4 == approx.node_connectivity(G, 0, 5) + # Approximation can fail with icosahedral graph depending # on iteration order. @@ -77,83 +98,91 @@ def test_octahedral(): def test_only_source(): G = nx.complete_graph(5) - assert_raises(nx.NetworkXError, approx.node_connectivity, G, s=0) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, s=0) def test_only_target(): G = nx.complete_graph(5) - assert_raises(nx.NetworkXError, approx.node_connectivity, G, t=0) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, t=0) def test_missing_source(): G = nx.path_graph(4) - assert_raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1) def test_missing_target(): G = nx.path_graph(4) - assert_raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10) def test_source_equals_target(): G = nx.complete_graph(5) - assert_raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0) + pytest.raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0) def test_directed_node_connectivity(): G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges - assert_equal(1, approx.node_connectivity(G)) - assert_equal(1, approx.node_connectivity(G, 1, 4)) - assert_equal(2, approx.node_connectivity(D)) - assert_equal(2, approx.node_connectivity(D, 1, 4)) + assert 1 == approx.node_connectivity(G) + assert 1 == approx.node_connectivity(G, 1, 4) + assert 2 == approx.node_connectivity(D) + assert 2 == approx.node_connectivity(D, 1, 4) class TestAllPairsNodeConnectivityApprox: - - def setUp(self): - self.path = nx.path_graph(7) - self.directed_path = nx.path_graph(7, create_using=nx.DiGraph()) - self.cycle = nx.cycle_graph(7) - self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) - self.gnp = nx.gnp_random_graph(30, 0.1) - self.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True) - self.K20 = nx.complete_graph(20) - self.K10 = nx.complete_graph(10) - self.K5 = nx.complete_graph(5) - self.G_list = [self.path, self.directed_path, self.cycle, - self.directed_cycle, self.gnp, self.directed_gnp, self.K10, - self.K5, self.K20] + @classmethod + def setup_class(cls): + cls.path = nx.path_graph(7) + cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph()) + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + cls.gnp = nx.gnp_random_graph(30, 0.1) + cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True) + cls.K20 = nx.complete_graph(20) + cls.K10 = nx.complete_graph(10) + cls.K5 = nx.complete_graph(5) + cls.G_list = [ + cls.path, + cls.directed_path, + cls.cycle, + cls.directed_cycle, + cls.gnp, + cls.directed_gnp, + cls.K10, + cls.K5, + cls.K20, + ] def test_cycles(self): K_undir = approx.all_pairs_node_connectivity(self.cycle) for source in K_undir: for target, k in K_undir[source].items(): - assert_true(k == 2) + assert k == 2 K_dir = 
approx.all_pairs_node_connectivity(self.directed_cycle) for source in K_dir: for target, k in K_dir[source].items(): - assert_true(k == 1) + assert k == 1 def test_complete(self): for G in [self.K10, self.K5, self.K20]: K = approx.all_pairs_node_connectivity(G) for source in K: for target, k in K[source].items(): - assert_true(k == len(G) - 1) + assert k == len(G) - 1 def test_paths(self): K_undir = approx.all_pairs_node_connectivity(self.path) for source in K_undir: for target, k in K_undir[source].items(): - assert_true(k == 1) + assert k == 1 K_dir = approx.all_pairs_node_connectivity(self.directed_path) for source in K_dir: for target, k in K_dir[source].items(): if source < target: - assert_true(k == 1) + assert k == 1 else: - assert_true(k == 0) + assert k == 0 def test_cutoff(self): for G in [self.K10, self.K5, self.K20]: @@ -161,10 +190,10 @@ def test_cutoff(self): paths = approx.all_pairs_node_connectivity(G, cutoff=mp) for source in paths: for target, K in paths[source].items(): - assert_true(K == mp) + assert K == mp def test_all_pairs_connectivity_nbunch(self): G = nx.complete_graph(5) nbunch = [0, 2, 3] C = approx.all_pairs_node_connectivity(G, nbunch=nbunch) - assert_equal(len(C), len(nbunch)) + assert len(C) == len(nbunch) diff --git a/networkx/algorithms/approximation/tests/test_dominating_set.py b/networkx/algorithms/approximation/tests/test_dominating_set.py index 2960cb5..da1abdc 100644 --- a/networkx/algorithms/approximation/tests/test_dominating_set.py +++ b/networkx/algorithms/approximation/tests/test_dominating_set.py @@ -1,13 +1,9 @@ -#!/usr/bin/env python -from nose.tools import ok_ -from nose.tools import eq_ import networkx as nx from networkx.algorithms.approximation import min_weighted_dominating_set from networkx.algorithms.approximation import min_edge_dominating_set class TestMinWeightDominatingSet: - def test_min_weighted_dominating_set(self): graph = nx.Graph() graph.add_edge(1, 2) @@ -18,12 +14,12 @@ def test_min_weighted_dominating_set(self): graph.add_edge(3, 6) graph.add_edge(5, 6) - vertices = set([1, 2, 3, 4, 5, 6]) + vertices = {1, 2, 3, 4, 5, 6} # due to ties, this might be hard to test tight bounds dom_set = min_weighted_dominating_set(graph) for vertex in vertices - dom_set: neighbors = set(graph.neighbors(vertex)) - ok_(len(neighbors & dom_set) > 0, "Non dominating set found!") + assert len(neighbors & dom_set) > 0, "Non dominating set found!" def test_star_graph(self): """Tests that an approximate dominating set for the star graph, @@ -37,7 +33,7 @@ def test_star_graph(self): # label instead of the lowest. G = nx.star_graph(10) G = nx.relabel_nodes(G, {0: 9, 9: 0}) - eq_(min_weighted_dominating_set(G), {9}) + assert min_weighted_dominating_set(G) == {9} def test_min_edge_dominating_set(self): graph = nx.path_graph(5) @@ -52,7 +48,7 @@ def test_min_edge_dominating_set(self): found = False for dom_edge in dom_set: found |= u == dom_edge[0] or u == dom_edge[1] - ok_(found, "Non adjacent edge found!") + assert found, "Non adjacent edge found!" graph = nx.complete_graph(10) dom_set = min_edge_dominating_set(graph) @@ -66,4 +62,4 @@ def test_min_edge_dominating_set(self): found = False for dom_edge in dom_set: found |= u == dom_edge[0] or u == dom_edge[1] - ok_(found, "Non adjacent edge found!") + assert found, "Non adjacent edge found!" 
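The property the dominating-set tests above exercise can be checked directly on any graph; a compact sketch (the Petersen graph is my choice, not the test suite's):

import networkx as nx
from networkx.algorithms.approximation import min_weighted_dominating_set

G = nx.petersen_graph()
D = min_weighted_dominating_set(G)
# Dominating set property: every vertex is in D or adjacent to a member of D.
assert all(v in D or set(G[v]) & D for v in G)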
diff --git a/networkx/algorithms/approximation/tests/test_independent_set.py b/networkx/algorithms/approximation/tests/test_independent_set.py index 199152d..4b7c452 100644 --- a/networkx/algorithms/approximation/tests/test_independent_set.py +++ b/networkx/algorithms/approximation/tests/test_independent_set.py @@ -1,4 +1,3 @@ -from nose.tools import * import networkx as nx import networkx.algorithms.approximation as a @@ -6,4 +5,4 @@ def test_independent_set(): # smoke test G = nx.Graph() - assert_equal(len(a.maximum_independent_set(G)), 0) + assert len(a.maximum_independent_set(G)) == 0 diff --git a/networkx/algorithms/approximation/tests/test_kcomponents.py b/networkx/algorithms/approximation/tests/test_kcomponents.py index 71fb8fe..60a90e8 100644 --- a/networkx/algorithms/approximation/tests/test_kcomponents.py +++ b/networkx/algorithms/approximation/tests/test_kcomponents.py @@ -1,6 +1,5 @@ # Test for approximation to k-components algorithm -from nose.tools import assert_equal, assert_true, assert_false, assert_in -from nose.tools import assert_raises, raises, assert_greater_equal +import pytest import networkx as nx from networkx.algorithms.approximation import k_components from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same @@ -14,21 +13,25 @@ def build_k_number_dict(k_components): k_num[node] = k return k_num + ## # Some nice synthetic graphs ## def graph_example_1(): - G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]), - label_attribute='labels') - rlabels = nx.get_node_attributes(G, 'labels') + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 0)], labels[(4, 0)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [ + (labels[(0, 0)], labels[(1, 0)]), + (labels[(0, 4)], labels[(1, 4)]), + (labels[(3, 0)], labels[(4, 0)]), + (labels[(3, 4)], labels[(4, 4)]), + ]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -54,13 +57,13 @@ def graph_example_1(): def torrents_and_ferraro_graph(): - G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]), - label_attribute='labels') - rlabels = nx.get_node_attributes(G, 'labels') + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -85,8 +88,7 @@ def torrents_and_ferraro_graph(): # This stupid mistake make one reviewer very angry :P G.add_edge(new_node + 16, new_node + 8) - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(3, 0)], labels[(4, 0)])]: + for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -113,6 +115,7 @@ def torrents_and_ferraro_graph(): G.add_edge(new_node + 18, nbr) return G + # Helper function @@ -124,7 +127,7 @@ def _check_connectivity(G): for component in components: C = G.subgraph(component) K = nx.node_connectivity(C) - assert_greater_equal(K, k) + assert K >= k def test_torrents_and_ferraro_graph(): @@ 
-143,17 +146,49 @@ def test_karate_0(): def test_karate_1(): - karate_k_num = {0: 4, 1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 3, 7: 4, 8: 4, 9: 2, - 10: 3, 11: 1, 12: 2, 13: 4, 14: 2, 15: 2, 16: 2, 17: 2, 18: 2, - 19: 3, 20: 2, 21: 2, 22: 2, 23: 3, 24: 3, 25: 3, 26: 2, 27: 3, - 28: 3, 29: 3, 30: 4, 31: 3, 32: 4, 33: 4} + karate_k_num = { + 0: 4, + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 3, + 7: 4, + 8: 4, + 9: 2, + 10: 3, + 11: 1, + 12: 2, + 13: 4, + 14: 2, + 15: 2, + 16: 2, + 17: 2, + 18: 2, + 19: 3, + 20: 2, + 21: 2, + 22: 2, + 23: 3, + 24: 3, + 25: 3, + 26: 2, + 27: 3, + 28: 3, + 29: 3, + 30: 4, + 31: 3, + 32: 4, + 33: 4, + } approx_karate_k_num = karate_k_num.copy() approx_karate_k_num[24] = 2 approx_karate_k_num[25] = 2 G = nx.karate_club_graph() k_comps = k_components(G) k_num = build_k_number_dict(k_comps) - assert_in(k_num, (karate_k_num, approx_karate_k_num)) + assert k_num in (karate_k_num, approx_karate_k_num) def test_example_1_detail_3_and_4(): @@ -161,12 +196,12 @@ def test_example_1_detail_3_and_4(): result = k_components(G) # In this example graph there are 8 3-components, 4 with 15 nodes # and 4 with 5 nodes. - assert_equal(len(result[3]), 8) - assert_equal(len([c for c in result[3] if len(c) == 15]), 4) - assert_equal(len([c for c in result[3] if len(c) == 5]), 4) + assert len(result[3]) == 8 + assert len([c for c in result[3] if len(c) == 15]) == 4 + assert len([c for c in result[3] if len(c) == 5]) == 4 # There are also 8 4-components all with 5 nodes. - assert_equal(len(result[4]), 8) - assert_true(all(len(c) == 5 for c in result[4])) + assert len(result[4]) == 8 + assert all(len(c) == 5 for c in result[4]) # Finally check that the k-components detected have actually node # connectivity >= k. for k, components in result.items(): @@ -174,91 +209,92 @@ def test_example_1_detail_3_and_4(): continue for component in components: K = nx.node_connectivity(G.subgraph(component)) - assert_greater_equal(K, k) + assert K >= k -@raises(nx.NetworkXNotImplemented) def test_directed(): - G = nx.gnp_random_graph(10, 0.4, directed=True) - kc = k_components(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.gnp_random_graph(10, 0.4, directed=True) + kc = k_components(G) def test_same(): - equal = {'A': 2, 'B': 2, 'C': 2} - slightly_different = {'A': 2, 'B': 1, 'C': 2} - different = {'A': 2, 'B': 8, 'C': 18} - assert_true(_same(equal)) - assert_false(_same(slightly_different)) - assert_true(_same(slightly_different, tol=1)) - assert_false(_same(different)) - assert_false(_same(different, tol=4)) + equal = {"A": 2, "B": 2, "C": 2} + slightly_different = {"A": 2, "B": 1, "C": 2} + different = {"A": 2, "B": 8, "C": 18} + assert _same(equal) + assert not _same(slightly_different) + assert _same(slightly_different, tol=1) + assert not _same(different) + assert not _same(different, tol=4) class TestAntiGraph: - def setUp(self): - self.Gnp = nx.gnp_random_graph(20, 0.8) - self.Anp = _AntiGraph(nx.complement(self.Gnp)) - self.Gd = nx.davis_southern_women_graph() - self.Ad = _AntiGraph(nx.complement(self.Gd)) - self.Gk = nx.karate_club_graph() - self.Ak = _AntiGraph(nx.complement(self.Gk)) - self.GA = [(self.Gnp, self.Anp), - (self.Gd, self.Ad), - (self.Gk, self.Ak)] + @classmethod + def setup_class(cls): + cls.Gnp = nx.gnp_random_graph(20, 0.8) + cls.Anp = _AntiGraph(nx.complement(cls.Gnp)) + cls.Gd = nx.davis_southern_women_graph() + cls.Ad = _AntiGraph(nx.complement(cls.Gd)) + cls.Gk = nx.karate_club_graph() + cls.Ak = _AntiGraph(nx.complement(cls.Gk)) + cls.GA = [(cls.Gnp, cls.Anp), 
(cls.Gd, cls.Ad), (cls.Gk, cls.Ak)] def test_size(self): for G, A in self.GA: n = G.order() s = len(list(G.edges())) + len(list(A.edges())) - assert_true(s == (n * (n - 1)) / 2) + assert s == (n * (n - 1)) / 2 def test_degree(self): for G, A in self.GA: - assert_equal(sorted(G.degree()), sorted(A.degree())) + assert sorted(G.degree()) == sorted(A.degree()) def test_core_number(self): for G, A in self.GA: - assert_equal(nx.core_number(G), nx.core_number(A)) + assert nx.core_number(G) == nx.core_number(A) def test_connected_components(self): for G, A in self.GA: gc = [set(c) for c in nx.connected_components(G)] ac = [set(c) for c in nx.connected_components(A)] for comp in ac: - assert_true(comp in gc) + assert comp in gc def test_adj(self): for G, A in self.GA: for n, nbrs in G.adj.items(): a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items()) g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items()) - assert_equal(a_adj, g_adj) + assert a_adj == g_adj def test_adjacency(self): for G, A in self.GA: a_adj = list(A.adjacency()) for n, nbrs in G.adjacency(): - assert_true((n, set(nbrs)) in a_adj) + assert (n, set(nbrs)) in a_adj def test_neighbors(self): for G, A in self.GA: node = list(G.nodes())[0] - assert_equal(set(G.neighbors(node)), set(A.neighbors(node))) + assert set(G.neighbors(node)) == set(A.neighbors(node)) def test_node_not_in_graph(self): for G, A in self.GA: - node = 'non_existent_node' - assert_raises(nx.NetworkXError, A.neighbors, node) - assert_raises(nx.NetworkXError, G.neighbors, node) + node = "non_existent_node" + pytest.raises(nx.NetworkXError, A.neighbors, node) + pytest.raises(nx.NetworkXError, G.neighbors, node) def test_degree_thingraph(self): for G, A in self.GA: node = list(G.nodes())[0] nodes = list(G.nodes())[1:4] - assert_equal(G.degree(node), A.degree(node)) - assert_equal(sum(d for n, d in G.degree()), sum(d for n, d in A.degree())) + assert G.degree(node) == A.degree(node) + assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree()) # AntiGraph is a ThinGraph, so all the weights are 1 - assert_equal(sum(d for n, d in A.degree()), - sum(d for n, d in A.degree(weight='weight'))) - assert_equal(sum(d for n, d in G.degree(nodes)), - sum(d for n, d in A.degree(nodes))) + assert sum(d for n, d in A.degree()) == sum( + d for n, d in A.degree(weight="weight") + ) + assert sum(d for n, d in G.degree(nodes)) == sum( + d for n, d in A.degree(nodes) + ) diff --git a/networkx/algorithms/approximation/tests/test_matching.py b/networkx/algorithms/approximation/tests/test_matching.py index 5286b31..f50da3d 100644 --- a/networkx/algorithms/approximation/tests/test_matching.py +++ b/networkx/algorithms/approximation/tests/test_matching.py @@ -1,4 +1,3 @@ -from nose.tools import * import networkx as nx import networkx.algorithms.approximation as a @@ -6,4 +5,4 @@ def test_min_maximal_matching(): # smoke test G = nx.Graph() - assert_equal(len(a.min_maximal_matching(G)), 0) + assert len(a.min_maximal_matching(G)) == 0 diff --git a/networkx/algorithms/approximation/tests/test_ramsey.py b/networkx/algorithms/approximation/tests/test_ramsey.py index ae5fd23..856a8ef 100644 --- a/networkx/algorithms/approximation/tests/test_ramsey.py +++ b/networkx/algorithms/approximation/tests/test_ramsey.py @@ -1,4 +1,3 @@ -from nose.tools import * import networkx as nx import networkx.algorithms.approximation as apxa @@ -8,21 +7,25 @@ def test_ramsey(): graph = nx.complete_graph(10) c, i = apxa.ramsey_R2(graph) cdens = nx.density(graph.subgraph(c)) - eq_(cdens, 1.0, "clique not 
found by ramsey!") + assert cdens == 1.0, "clique not correctly found by ramsey!" idens = nx.density(graph.subgraph(i)) - eq_(idens, 0.0, "i-set not found by ramsey!") + assert idens == 0.0, "i-set not correctly found by ramsey!" # this trival graph has no cliques. should just find i-sets - graph = nx.trivial_graph(nx.Graph()) + graph = nx.trivial_graph() c, i = apxa.ramsey_R2(graph) - cdens = nx.density(graph.subgraph(c)) - eq_(cdens, 0.0, "clique not found by ramsey!") - idens = nx.density(graph.subgraph(i)) - eq_(idens, 0.0, "i-set not found by ramsey!") + assert c == {0}, "clique not correctly found by ramsey!" + assert i == {0}, "i-set not correctly found by ramsey!" graph = nx.barbell_graph(10, 5, nx.Graph()) c, i = apxa.ramsey_R2(graph) cdens = nx.density(graph.subgraph(c)) - eq_(cdens, 1.0, "clique not found by ramsey!") + assert cdens == 1.0, "clique not correctly found by ramsey!" idens = nx.density(graph.subgraph(i)) - eq_(idens, 0.0, "i-set not found by ramsey!") + assert idens == 0.0, "i-set not correctly found by ramsey!" + + # add self-loops and test again + graph.add_edges_from([(n, n) for n in range(0, len(graph), 2)]) + cc, ii = apxa.ramsey_R2(graph) + assert cc == c + assert ii == i diff --git a/networkx/algorithms/approximation/tests/test_steinertree.py b/networkx/algorithms/approximation/tests/test_steinertree.py index f29c2e9..7fe2919 100644 --- a/networkx/algorithms/approximation/tests/test_steinertree.py +++ b/networkx/algorithms/approximation/tests/test_steinertree.py @@ -1,3 +1,4 @@ +import pytest import networkx as nx from networkx.algorithms.approximation.steinertree import metric_closure from networkx.algorithms.approximation.steinertree import steiner_tree @@ -5,7 +6,8 @@ class TestSteinerTree: - def setUp(self): + @classmethod + def setup_class(cls): G = nx.Graph() G.add_edge(1, 2, weight=10) G.add_edge(2, 3, weight=10) @@ -14,39 +16,68 @@ def setUp(self): G.add_edge(5, 6, weight=10) G.add_edge(2, 7, weight=1) G.add_edge(7, 5, weight=1) - self.G = G - self.term_nodes = [1, 2, 3, 4, 5] + cls.G = G + cls.term_nodes = [1, 2, 3, 4, 5] + + def test_connected_metric_closure(self): + G = self.G.copy() + G.add_node(100) + pytest.raises(nx.NetworkXError, metric_closure, G) def test_metric_closure(self): M = metric_closure(self.G) - mc = [(1, 2, {'distance': 10, 'path': [1, 2]}), - (1, 3, {'distance': 20, 'path': [1, 2, 3]}), - (1, 4, {'distance': 22, 'path': [1, 2, 7, 5, 4]}), - (1, 5, {'distance': 12, 'path': [1, 2, 7, 5]}), - (1, 6, {'distance': 22, 'path': [1, 2, 7, 5, 6]}), - (1, 7, {'distance': 11, 'path': [1, 2, 7]}), - (2, 3, {'distance': 10, 'path': [2, 3]}), - (2, 4, {'distance': 12, 'path': [2, 7, 5, 4]}), - (2, 5, {'distance': 2, 'path': [2, 7, 5]}), - (2, 6, {'distance': 12, 'path': [2, 7, 5, 6]}), - (2, 7, {'distance': 1, 'path': [2, 7]}), - (3, 4, {'distance': 10, 'path': [3, 4]}), - (3, 5, {'distance': 12, 'path': [3, 2, 7, 5]}), - (3, 6, {'distance': 22, 'path': [3, 2, 7, 5, 6]}), - (3, 7, {'distance': 11, 'path': [3, 2, 7]}), - (4, 5, {'distance': 10, 'path': [4, 5]}), - (4, 6, {'distance': 20, 'path': [4, 5, 6]}), - (4, 7, {'distance': 11, 'path': [4, 5, 7]}), - (5, 6, {'distance': 10, 'path': [5, 6]}), - (5, 7, {'distance': 1, 'path': [5, 7]}), - (6, 7, {'distance': 11, 'path': [6, 5, 7]})] + mc = [ + (1, 2, {"distance": 10, "path": [1, 2]}), + (1, 3, {"distance": 20, "path": [1, 2, 3]}), + (1, 4, {"distance": 22, "path": [1, 2, 7, 5, 4]}), + (1, 5, {"distance": 12, "path": [1, 2, 7, 5]}), + (1, 6, {"distance": 22, "path": [1, 2, 7, 5, 6]}), + (1, 
7, {"distance": 11, "path": [1, 2, 7]}), + (2, 3, {"distance": 10, "path": [2, 3]}), + (2, 4, {"distance": 12, "path": [2, 7, 5, 4]}), + (2, 5, {"distance": 2, "path": [2, 7, 5]}), + (2, 6, {"distance": 12, "path": [2, 7, 5, 6]}), + (2, 7, {"distance": 1, "path": [2, 7]}), + (3, 4, {"distance": 10, "path": [3, 4]}), + (3, 5, {"distance": 12, "path": [3, 2, 7, 5]}), + (3, 6, {"distance": 22, "path": [3, 2, 7, 5, 6]}), + (3, 7, {"distance": 11, "path": [3, 2, 7]}), + (4, 5, {"distance": 10, "path": [4, 5]}), + (4, 6, {"distance": 20, "path": [4, 5, 6]}), + (4, 7, {"distance": 11, "path": [4, 5, 7]}), + (5, 6, {"distance": 10, "path": [5, 6]}), + (5, 7, {"distance": 1, "path": [5, 7]}), + (6, 7, {"distance": 11, "path": [6, 5, 7]}), + ] assert_edges_equal(list(M.edges(data=True)), mc) def test_steiner_tree(self): S = steiner_tree(self.G, self.term_nodes) - expected_steiner_tree = [(1, 2, {'weight': 10}), - (2, 3, {'weight': 10}), - (2, 7, {'weight': 1}), - (3, 4, {'weight': 10}), - (5, 7, {'weight': 1})] + expected_steiner_tree = [ + (1, 2, {"weight": 10}), + (2, 3, {"weight": 10}), + (2, 7, {"weight": 1}), + (3, 4, {"weight": 10}), + (5, 7, {"weight": 1}), + ] assert_edges_equal(list(S.edges(data=True)), expected_steiner_tree) + + def test_multigraph_steiner_tree(self): + G = nx.MultiGraph() + G.add_edges_from( + [ + (1, 2, 0, {"weight": 1}), + (2, 3, 0, {"weight": 999}), + (2, 3, 1, {"weight": 1}), + (3, 4, 0, {"weight": 1}), + (3, 5, 0, {"weight": 1}), + ] + ) + terminal_nodes = [2, 4, 5] + expected_edges = [ + (2, 3, 1, {"weight": 1}), # edge with key 1 has lower weight + (3, 4, 0, {"weight": 1}), + (3, 5, 0, {"weight": 1}), + ] + T = steiner_tree(G, terminal_nodes) + assert_edges_equal(T.edges(data=True, keys=True), expected_edges) diff --git a/networkx/algorithms/approximation/tests/test_treewidth.py b/networkx/algorithms/approximation/tests/test_treewidth.py new file mode 100644 index 0000000..5389b94 --- /dev/null +++ b/networkx/algorithms/approximation/tests/test_treewidth.py @@ -0,0 +1,269 @@ +import networkx as nx +from networkx.algorithms.approximation import treewidth_min_degree +from networkx.algorithms.approximation import treewidth_min_fill_in +from networkx.algorithms.approximation.treewidth import min_fill_in_heuristic +from networkx.algorithms.approximation.treewidth import MinDegreeHeuristic +import itertools + + +def is_tree_decomp(graph, decomp): + """Check if the given tree decomposition is valid.""" + for x in graph.nodes(): + appear_once = False + for bag in decomp.nodes(): + if x in bag: + appear_once = True + break + assert appear_once + + # Check if each connected pair of nodes are at least once together in a bag + for (x, y) in graph.edges(): + appear_together = False + for bag in decomp.nodes(): + if x in bag and y in bag: + appear_together = True + break + assert appear_together + + # Check if the nodes associated with vertex v form a connected subset of T + for v in graph.nodes(): + subset = [] + for bag in decomp.nodes(): + if v in bag: + subset.append(bag) + sub_graph = decomp.subgraph(subset) + assert nx.is_connected(sub_graph) + + +class TestTreewidthMinDegree: + """Unit tests for the min_degree function""" + + @classmethod + def setup_class(cls): + """Setup for different kinds of trees""" + cls.complete = nx.Graph() + cls.complete.add_edge(1, 2) + cls.complete.add_edge(2, 3) + cls.complete.add_edge(1, 3) + + cls.small_tree = nx.Graph() + cls.small_tree.add_edge(1, 3) + cls.small_tree.add_edge(4, 3) + cls.small_tree.add_edge(2, 3) + 
cls.small_tree.add_edge(3, 5)
+ cls.small_tree.add_edge(5, 6)
+ cls.small_tree.add_edge(5, 7)
+ cls.small_tree.add_edge(6, 7)
+
+ cls.deterministic_graph = nx.Graph()
+ cls.deterministic_graph.add_edge(0, 1) # deg(0) = 1
+
+ cls.deterministic_graph.add_edge(1, 2) # deg(1) = 2
+
+ cls.deterministic_graph.add_edge(2, 3)
+ cls.deterministic_graph.add_edge(2, 4) # deg(2) = 3
+
+ cls.deterministic_graph.add_edge(3, 4)
+ cls.deterministic_graph.add_edge(3, 5)
+ cls.deterministic_graph.add_edge(3, 6) # deg(3) = 4
+
+ cls.deterministic_graph.add_edge(4, 5)
+ cls.deterministic_graph.add_edge(4, 6)
+ cls.deterministic_graph.add_edge(4, 7) # deg(4) = 5
+
+ cls.deterministic_graph.add_edge(5, 6)
+ cls.deterministic_graph.add_edge(5, 7)
+ cls.deterministic_graph.add_edge(5, 8)
+ cls.deterministic_graph.add_edge(5, 9) # deg(5) = 6
+
+ cls.deterministic_graph.add_edge(6, 7)
+ cls.deterministic_graph.add_edge(6, 8)
+ cls.deterministic_graph.add_edge(6, 9) # deg(6) = 6
+
+ cls.deterministic_graph.add_edge(7, 8)
+ cls.deterministic_graph.add_edge(7, 9) # deg(7) = 5
+
+ cls.deterministic_graph.add_edge(8, 9) # deg(8) = 4
+
+ def test_petersen_graph(self):
+ """Test Petersen graph tree decomposition result"""
+ G = nx.petersen_graph()
+ _, decomp = treewidth_min_degree(G)
+ is_tree_decomp(G, decomp)
+
+ def test_small_tree_treewidth(self):
+ """Test small tree
+
+ Test if the computed treewidth of the known self.small_tree is 2.
+ As we know which value we can expect from our heuristic, values other
+ than two are regressions.
+ """
+ G = self.small_tree
+ # the order of removal should be [1,2,4]3[5,6,7]
+ # (with [] denoting any order of the containing nodes)
+ # resulting in treewidth 2 for the heuristic
+ treewidth, _ = treewidth_min_degree(G)
+ assert treewidth == 2
+
+ def test_heuristic_abort(self):
+ """Test heuristic abort condition for fully connected graph"""
+ graph = {}
+ for u in self.complete:
+ graph[u] = set()
+ for v in self.complete[u]:
+ if u != v: # ignore self-loop
+ graph[u].add(v)
+
+ deg_heuristic = MinDegreeHeuristic(graph)
+ node = deg_heuristic.best_node(graph)
+ # on a fully connected graph the heuristic must abort with None
+ assert node is None
+
+ def test_empty_graph(self):
+ """Test empty graph"""
+ G = nx.Graph()
+ _, _ = treewidth_min_degree(G)
+
+ def test_two_component_graph(self):
+ """Test a graph with two connected components"""
+ G = nx.Graph()
+ G.add_node(1)
+ G.add_node(2)
+ treewidth, _ = treewidth_min_degree(G)
+ assert treewidth == 0
+
+ def test_heuristic_first_steps(self):
+ """Test first steps of min_degree heuristic"""
+ graph = {
+ n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
+ }
+ deg_heuristic = MinDegreeHeuristic(graph)
+ elim_node = deg_heuristic.best_node(graph)
+ print(f"Graph {graph}:")
+ steps = []
+
+ while elim_node is not None:
+ print(f"Removing {elim_node}:")
+ steps.append(elim_node)
+ nbrs = graph[elim_node]
+
+ for u, v in itertools.permutations(nbrs, 2):
+ if v not in graph[u]:
+ graph[u].add(v)
+
+ for u in graph:
+ if elim_node in graph[u]:
+ graph[u].remove(elim_node)
+
+ del graph[elim_node]
+ print(f"Graph {graph}:")
+ elim_node = deg_heuristic.best_node(graph)
+
+ # check only the first 5 elements for equality
+ assert steps[:5] == [0, 1, 2, 3, 4]
+
+
+class TestTreewidthMinFillIn:
+ """Unit tests for the treewidth_min_fill_in function."""
+
+ @classmethod
+ def setup_class(cls):
+ """Setup for different kinds of trees"""
+ cls.complete = nx.Graph()
+ cls.complete.add_edge(1, 2)
+ cls.complete.add_edge(2, 3)
+ cls.complete.add_edge(1, 3)
+
+ cls.small_tree = nx.Graph()
+ cls.small_tree.add_edge(1, 2)
+ cls.small_tree.add_edge(2, 3)
+ cls.small_tree.add_edge(3, 4)
+ cls.small_tree.add_edge(1, 4)
+ cls.small_tree.add_edge(2, 4)
+ cls.small_tree.add_edge(4, 5)
+ cls.small_tree.add_edge(5, 6)
+ cls.small_tree.add_edge(5, 7)
+ cls.small_tree.add_edge(6, 7)
+
+ cls.deterministic_graph = nx.Graph()
+ cls.deterministic_graph.add_edge(1, 2)
+ cls.deterministic_graph.add_edge(1, 3)
+ cls.deterministic_graph.add_edge(3, 4)
+ cls.deterministic_graph.add_edge(2, 4)
+ cls.deterministic_graph.add_edge(3, 5)
+ cls.deterministic_graph.add_edge(4, 5)
+ cls.deterministic_graph.add_edge(3, 6)
+ cls.deterministic_graph.add_edge(5, 6)
+
+ def test_petersen_graph(self):
+ """Test Petersen graph tree decomposition result"""
+ G = nx.petersen_graph()
+ _, decomp = treewidth_min_fill_in(G)
+ is_tree_decomp(G, decomp)
+
+ def test_small_tree_treewidth(self):
+ """Test if the computed treewidth of the known self.small_tree is 2"""
+ G = self.small_tree
+ # the order of removal should be [1,2,4]3[5,6,7]
+ # (with [] denoting any order of the containing nodes)
+ # resulting in treewidth 2 for the heuristic
+ treewidth, _ = treewidth_min_fill_in(G)
+ assert treewidth == 2
+
+ def test_heuristic_abort(self):
+ """Test if min_fill_in returns None for fully connected graph"""
+ graph = {}
+ for u in self.complete:
+ graph[u] = set()
+ for v in self.complete[u]:
+ if u != v: # ignore self-loop
+ graph[u].add(v)
+ next_node = min_fill_in_heuristic(graph)
+ # the heuristic must abort with None on a fully connected graph
+ assert next_node is None
+
+ def test_empty_graph(self):
+ """Test empty graph"""
+ G = nx.Graph()
+ _, _ = treewidth_min_fill_in(G)
+
+ def test_two_component_graph(self):
+ """Test a graph with two connected components"""
+ G = nx.Graph()
+ G.add_node(1)
+ G.add_node(2)
+ treewidth, _ = treewidth_min_fill_in(G)
+ assert treewidth == 0
+
+ def test_heuristic_first_steps(self):
+ """Test first steps of min_fill_in heuristic"""
+ graph = {
+ n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
+ }
+ print(f"Graph {graph}:")
+ elim_node = min_fill_in_heuristic(graph)
+ steps = []
+
+ while elim_node is not None:
+ print(f"Removing {elim_node}:")
+ steps.append(elim_node)
+ nbrs = graph[elim_node]
+
+ for u, v in itertools.permutations(nbrs, 2):
+ if v not in graph[u]:
+ graph[u].add(v)
+
+ for u in graph:
+ if elim_node in graph[u]:
+ graph[u].remove(elim_node)
+
+ del graph[elim_node]
+ print(f"Graph {graph}:")
+ elim_node = min_fill_in_heuristic(graph)
+
+ # check only the first 2 elements for equality
+ assert steps[:2] == [6, 5]
diff --git a/networkx/algorithms/approximation/tests/test_vertex_cover.py b/networkx/algorithms/approximation/tests/test_vertex_cover.py
index b010ea8..8beb40a 100644
--- a/networkx/algorithms/approximation/tests/test_vertex_cover.py
+++ b/networkx/algorithms/approximation/tests/test_vertex_cover.py
@@ -1,5 +1,3 @@
-from nose.tools import assert_equals
-from nose.tools import ok_
import networkx as nx
from networkx.algorithms.approximation import min_weighted_vertex_cover
@@ -8,7 +6,7 @@ def is_cover(G, node_cover):
return all({u, v} & node_cover for u, v in G.edges())
-class TestMWVC(object):
+class TestMWVC:
"""Unit tests for the approximate minimum weighted vertex cover
function,
:func:`~networkx.algorithms.approximation.vertex_cover.min_weighted_vertex_cover`.
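The is_cover helper above states the invariant every test in this class asserts: each edge must have at least one endpoint in the cover. A minimal sketch of the same check against the public API (illustrative only; the documented guarantee is a cover weight within twice the optimum):

    import networkx as nx
    from networkx.algorithms.approximation import min_weighted_vertex_cover

    G = nx.petersen_graph()
    cover = min_weighted_vertex_cover(G)
    # every edge must touch the cover
    assert all({u, v} & cover for u, v in G.edges())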
@@ -22,16 +20,16 @@ def test_unweighted_directed(self):
G.add_edges_from((0, v) for v in range(1, 26))
G.add_edges_from((v, 0) for v in range(26, 51))
cover = min_weighted_vertex_cover(G)
- assert_equals(2, len(cover))
- ok_(is_cover(G, cover))
+ assert 2 == len(cover)
+ assert is_cover(G, cover)
def test_unweighted_undirected(self):
# create a simple star graph
size = 50
sg = nx.star_graph(size)
cover = min_weighted_vertex_cover(sg)
- assert_equals(2, len(cover))
- ok_(is_cover(sg, cover))
+ assert 2 == len(cover)
+ assert is_cover(sg, cover)
def test_weighted(self):
wg = nx.Graph()
@@ -53,5 +51,5 @@ def test_weighted(self):
cover = min_weighted_vertex_cover(wg, weight="weight")
csum = sum(wg.nodes[node]["weight"] for node in cover)
- assert_equals(4, csum)
- ok_(is_cover(wg, cover))
+ assert 4 == csum
+ assert is_cover(wg, cover)
diff --git a/networkx/algorithms/approximation/treewidth.py b/networkx/algorithms/approximation/treewidth.py
new file mode 100644
index 0000000..11716c7
--- /dev/null
+++ b/networkx/algorithms/approximation/treewidth.py
@@ -0,0 +1,249 @@
+"""Functions for computing treewidth decomposition.
+
+Treewidth of an undirected graph is a number associated with the graph.
+It can be defined as the size of the largest vertex set (bag) in a tree
+decomposition of the graph minus one.
+
+`Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
+
+The notions of treewidth and tree decomposition have gained attention
+partly because many graph and network problems that are
+intractable (e.g., NP-hard) on arbitrary graphs become efficiently
+solvable (e.g., with a linear time algorithm) when the treewidth of the
+input graphs is bounded by a constant [1]_ [2]_.
+
+There are two different functions for computing a tree decomposition:
+:func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
+
+.. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
+ computations I. Upper bounds". Inf. Comput. 208, 3 (March 2010), 259-275.
+ http://dx.doi.org/10.1016/j.ic.2009.03.008
+
+.. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
+ and Computing Sciences, Utrecht University.
+ Technical Report UU-CS-2005-018.
+ http://www.cs.uu.nl
+
+.. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
+ http://web.eecs.utk.edu/~cphillip/cs594_spring2015_projects/treewidth.pdf
+
+"""
+
+import sys
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+from heapq import heappush, heappop, heapify
+import itertools
+
+__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def treewidth_min_degree(G):
+ """ Returns a treewidth decomposition using the Minimum Degree heuristic.
+
+ The heuristic chooses the nodes according to their degree, i.e., first
+ the node with the lowest degree is chosen, then the graph is updated
+ and the corresponding node is removed. Next, a new node with the lowest
+ degree is chosen, and so on.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+
+ Returns
+ -------
+ Treewidth decomposition : (int, Graph) tuple
+ 2-tuple with treewidth and the corresponding decomposed tree.
+ """
+ deg_heuristic = MinDegreeHeuristic(G)
+ return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def treewidth_min_fill_in(G):
+ """ Returns a treewidth decomposition using the Minimum Fill-in heuristic.
+
+ The heuristic chooses a node from the graph, where the number of edges
+ added when turning the neighbourhood of the chosen node into a clique
+ is as small as possible.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+
+ Returns
+ -------
+ Treewidth decomposition : (int, Graph) tuple
+ 2-tuple with treewidth and the corresponding decomposed tree.
+ """
+ return treewidth_decomp(G, min_fill_in_heuristic)
+
+
+class MinDegreeHeuristic:
+ """ Implements the Minimum Degree heuristic.
+
+ The heuristic chooses the nodes according to their degree
+ (number of neighbours), i.e., first the node with the lowest degree is
+ chosen, then the graph is updated and the corresponding node is
+ removed. Next, a new node with the lowest degree is chosen, and so on.
+ """
+
+ def __init__(self, graph):
+ self._graph = graph
+
+ # nodes that have to be updated in the heap before each iteration
+ self._update_nodes = []
+
+ self._degreeq = [] # a heapq with 2-tuples (degree,node)
+
+ # build heap with initial degrees
+ for n in graph:
+ self._degreeq.append((len(graph[n]), n))
+ heapify(self._degreeq)
+
+ def best_node(self, graph):
+ # update nodes in self._update_nodes
+ for n in self._update_nodes:
+ # insert changed degrees into degreeq
+ heappush(self._degreeq, (len(graph[n]), n))
+
+ # get the next valid (minimum degree) node
+ while self._degreeq:
+ (min_degree, elim_node) = heappop(self._degreeq)
+ if elim_node not in graph or len(graph[elim_node]) != min_degree:
+ # outdated entry in degreeq
+ continue
+ elif min_degree == len(graph) - 1:
+ # fully connected: abort condition
+ return None
+
+ # remember to update nodes in the heap before getting the next node
+ self._update_nodes = graph[elim_node]
+ return elim_node
+
+ # the heap is empty: abort
+ return None
+
+
+def min_fill_in_heuristic(graph):
+ """ Implements the Minimum Fill-in heuristic.
+
+ Returns the node from the graph, where the number of edges added when
+ turning the neighbourhood of the chosen node into a clique is as small as
+ possible. This algorithm chooses the nodes using the Minimum Fill-In
+ heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses
+ additional constant memory."""
+
+ if len(graph) == 0:
+ return None
+
+ min_fill_in_node = None
+
+ min_fill_in = sys.maxsize
+
+ # create sorted list of (degree, node)
+ degree_list = [(len(graph[node]), node) for node in graph]
+ degree_list.sort()
+
+ # abort condition
+ min_degree = degree_list[0][0]
+ if min_degree == len(graph) - 1:
+ return None
+
+ for (_, node) in degree_list:
+ num_fill_in = 0
+ nbrs = graph[node]
+ for nbr in nbrs:
+ # count how many nodes in nbrs current nbr is not connected to
+ # subtract 1 for the node itself
+ num_fill_in += len(nbrs - graph[nbr]) - 1
+ if num_fill_in >= 2 * min_fill_in:
+ break
+
+ num_fill_in /= 2 # divide by 2 because of double counting
+
+ if num_fill_in < min_fill_in: # update min-fill-in node
+ if num_fill_in == 0:
+ return node
+ min_fill_in = num_fill_in
+ min_fill_in_node = node
+
+ return min_fill_in_node
+
+
+def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
+ """ Returns a treewidth decomposition using the passed heuristic.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+ heuristic : heuristic function
+
+ Returns
+ -------
+ Treewidth decomposition : (int, Graph) tuple
+ 2-tuple with treewidth and the corresponding decomposed tree.
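+
+ Examples
+ --------
+ A minimal sketch of the return shape (bags are frozensets of nodes;
+ the exact bags depend on the chosen heuristic):
+
+ >>> G = nx.petersen_graph()
+ >>> treewidth, decomp = treewidth_decomp(G, min_fill_in_heuristic)
+ >>> isinstance(treewidth, int) and all(isinstance(bag, frozenset) for bag in decomp)
+ True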
+ """ + + # make dict-of-sets structure + graph = {n: set(G[n]) - {n} for n in G} + + # stack containing nodes and neighbors in the order from the heuristic + node_stack = [] + + # get first node from heuristic + elim_node = heuristic(graph) + while elim_node is not None: + # connect all neighbours with each other + nbrs = graph[elim_node] + for u, v in itertools.permutations(nbrs, 2): + if v not in graph[u]: + graph[u].add(v) + + # push node and its current neighbors on stack + node_stack.append((elim_node, nbrs)) + + # remove node from graph + for u in graph[elim_node]: + graph[u].remove(elim_node) + + del graph[elim_node] + elim_node = heuristic(graph) + + # the abort condition is met; put all remaining nodes into one bag + decomp = nx.Graph() + first_bag = frozenset(graph.keys()) + decomp.add_node(first_bag) + + treewidth = len(first_bag) - 1 + + while node_stack: + # get node and its neighbors from the stack + (curr_node, nbrs) = node_stack.pop() + + # find a bag all neighbors are in + old_bag = None + for bag in decomp.nodes: + if nbrs <= bag: + old_bag = bag + break + + if old_bag is None: + # no old_bag was found: just connect to the first_bag + old_bag = first_bag + + # create new node for decomposition + nbrs.add(curr_node) + new_bag = frozenset(nbrs) + + # update treewidth + treewidth = max(treewidth, len(new_bag) - 1) + + # add edge to decomposition (implicitly also adds the new node) + decomp.add_edge(old_bag, new_bag) + + return treewidth, decomp diff --git a/networkx/algorithms/approximation/vertex_cover.py b/networkx/algorithms/approximation/vertex_cover.py index 02b9ab8..943de64 100644 --- a/networkx/algorithms/approximation/vertex_cover.py +++ b/networkx/algorithms/approximation/vertex_cover.py @@ -1,11 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2011-2012 by -# Nicholas Mancuso -# All rights reserved. -# BSD license. -# -# Author: -# Nicholas Mancuso """Functions for computing an approximate minimum weight vertex cover. A |vertex cover|_ is a subset of nodes such that each edge in the graph diff --git a/networkx/algorithms/assortativity/connectivity.py b/networkx/algorithms/assortativity/connectivity.py index 5c0d574..391477c 100644 --- a/networkx/algorithms/assortativity/connectivity.py +++ b/networkx/algorithms/assortativity/connectivity.py @@ -1,26 +1,11 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2011 by -# Jordi Torrents -# Aric Hagberg -# All rights reserved. -# BSD license. -# -# -# Authors: Jordi Torrents -# Aric Hagberg -from __future__ import division - from collections import defaultdict -import networkx as nx - -__all__ = ['average_degree_connectivity', - 'k_nearest_neighbors'] +__all__ = ["average_degree_connectivity", "k_nearest_neighbors"] -def average_degree_connectivity(G, source="in+out", target="in+out", - nodes=None, weight=None): +def average_degree_connectivity( + G, source="in+out", target="in+out", nodes=None, weight=None +): r"""Compute the average degree connectivity of graph. 
The average degree connectivity is the average nearest neighbor degree of @@ -67,11 +52,11 @@ def average_degree_connectivity(G, source="in+out", target="in+out", Examples -------- - >>> G=nx.path_graph(4) - >>> G.edges[1, 2]['weight'] = 3 + >>> G = nx.path_graph(4) + >>> G.edges[1, 2]["weight"] = 3 >>> nx.k_nearest_neighbors(G) {1: 2.0, 2: 1.5} - >>> nx.k_nearest_neighbors(G, weight='weight') + >>> nx.k_nearest_neighbors(G, weight="weight") {1: 2.0, 2: 1.75} See also @@ -91,22 +76,22 @@ def average_degree_connectivity(G, source="in+out", target="in+out", """ # First, determine the type of neighbors and the type of degree to use. if G.is_directed(): - if source not in ('in', 'out', 'in+out'): + if source not in ("in", "out", "in+out"): raise ValueError('source must be one of "in", "out", or "in+out"') - if target not in ('in', 'out', 'in+out'): + if target not in ("in", "out", "in+out"): raise ValueError('target must be one of "in", "out", or "in+out"') - direction = {'out': G.out_degree, - 'in': G.in_degree, - 'in+out': G.degree} - neighbor_funcs = {'out': G.successors, - 'in': G.predecessors, - 'in+out': G.neighbors} + direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree} + neighbor_funcs = { + "out": G.successors, + "in": G.predecessors, + "in+out": G.neighbors, + } source_degree = direction[source] target_degree = direction[target] neighbors = neighbor_funcs[source] # `reverse` indicates whether to look at the in-edge when # computing the weight of an edge. - reverse = (source == 'in') + reverse = source == "in" else: source_degree = G.degree target_degree = G.degree diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py index feec0a3..17b3331 100644 --- a/networkx/algorithms/assortativity/correlation.py +++ b/networkx/algorithms/assortativity/correlation.py @@ -1,21 +1,21 @@ -#-*- coding: utf-8 -*- """Node assortativity coefficients and correlation measures. """ -import networkx as nx -from networkx.algorithms.assortativity.mixing import degree_mixing_matrix, \ - attribute_mixing_matrix, numeric_mixing_matrix -from networkx.algorithms.assortativity.pairs import node_degree_xy, \ - node_attribute_xy -__author__ = ' '.join(['Aric Hagberg ', - 'Oleguer Sagarra ']) -__all__ = ['degree_pearson_correlation_coefficient', - 'degree_assortativity_coefficient', - 'attribute_assortativity_coefficient', - 'numeric_assortativity_coefficient'] - - -def degree_assortativity_coefficient(G, x='out', y='in', weight=None, - nodes=None): +from networkx.algorithms.assortativity.mixing import ( + degree_mixing_matrix, + attribute_mixing_matrix, + numeric_mixing_matrix, +) +from networkx.algorithms.assortativity.pairs import node_degree_xy + +__all__ = [ + "degree_pearson_correlation_coefficient", + "degree_assortativity_coefficient", + "attribute_assortativity_coefficient", + "numeric_assortativity_coefficient", +] + + +def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. Assortativity measures the similarity of connections @@ -32,12 +32,12 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None, The degree type for target node (directed graphs only). weight: string or None, optional (default=None) - The edge attribute that holds the numerical value used + The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. 
nodes: list or iterable (optional) - Compute degree assortativity only for nodes in container. + Compute degree assortativity only for nodes in container. The default is all nodes. Returns @@ -47,9 +47,9 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None, Examples -------- - >>> G=nx.path_graph(4) - >>> r=nx.degree_assortativity_coefficient(G) - >>> print("%3.1f"%r) + >>> G = nx.path_graph(4) + >>> r = nx.degree_assortativity_coefficient(G) + >>> print(f"{r:3.1f}") -0.5 See Also @@ -64,28 +64,27 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None, ----- This computes Eq. (21) in Ref. [1]_ , where e is the joint probability distribution (mixing matrix) of the degrees. If G is - directed than the matrix e is the joint probability of the + directed than the matrix e is the joint probability of the user-specified degree type for the source and target. References ---------- .. [1] M. E. J. Newman, Mixing patterns in networks, Physical Review E, 67 026126, 2003 - .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. + .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). """ M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight) return numeric_ac(M) -def degree_pearson_correlation_coefficient(G, x='out', y='in', - weight=None, nodes=None): - """Compute degree assortativity of graph. +def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None): + """Compute degree assortativity of graph. Assortativity measures the similarity of connections in the graph with respect to the node degree. - This is the same as degree_assortativity_coefficient but uses the + This is the same as degree_assortativity_coefficient but uses the potentially faster scipy.stats.pearsonr function. Parameters @@ -99,7 +98,7 @@ def degree_pearson_correlation_coefficient(G, x='out', y='in', The degree type for target node (directed graphs only). weight: string or None, optional (default=None) - The edge attribute that holds the numerical value used + The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. @@ -114,9 +113,9 @@ def degree_pearson_correlation_coefficient(G, x='out', y='in', Examples -------- - >>> G=nx.path_graph(4) - >>> r=nx.degree_pearson_correlation_coefficient(G) - >>> print("%3.1f"%r) + >>> G = nx.path_graph(4) + >>> r = nx.degree_pearson_correlation_coefficient(G) + >>> print(f"{r:3.1f}") -0.5 Notes @@ -127,14 +126,13 @@ def degree_pearson_correlation_coefficient(G, x='out', y='in', ---------- .. [1] M. E. J. Newman, Mixing patterns in networks Physical Review E, 67 026126, 2003 - .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. + .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). 
""" try: import scipy.stats as stats - except ImportError: - raise ImportError( - "Assortativity requires SciPy: http://scipy.org/ ") + except ImportError as e: + raise ImportError("Assortativity requires SciPy:" "http://scipy.org/ ") from e xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) x, y = zip(*xy) return stats.pearsonr(x, y)[0] @@ -150,12 +148,12 @@ def attribute_assortativity_coefficient(G, attribute, nodes=None): ---------- G : NetworkX graph - attribute : string + attribute : string Node attribute key nodes: list or iterable (optional) - Compute attribute assortativity for nodes in container. - The default is all nodes. + Compute attribute assortativity for nodes in container. + The default is all nodes. Returns ------- @@ -164,16 +162,16 @@ def attribute_assortativity_coefficient(G, attribute, nodes=None): Examples -------- - >>> G=nx.Graph() - >>> G.add_nodes_from([0,1],color='red') - >>> G.add_nodes_from([2,3],color='blue') - >>> G.add_edges_from([(0,1),(2,3)]) - >>> print(nx.attribute_assortativity_coefficient(G,'color')) + >>> G = nx.Graph() + >>> G.add_nodes_from([0, 1], color="red") + >>> G.add_nodes_from([2, 3], color="blue") + >>> G.add_edges_from([(0, 1), (2, 3)]) + >>> print(nx.attribute_assortativity_coefficient(G, "color")) 1.0 Notes ----- - This computes Eq. (2) in Ref. [1]_ , trace(M)-sum(M))/(1-sum(M), + This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)), where M is the joint probability distribution (mixing matrix) of the specified attribute. @@ -197,12 +195,12 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None): ---------- G : NetworkX graph - attribute : string + attribute : string Node attribute key. The corresponding attribute value must be an integer. nodes: list or iterable (optional) - Compute numeric assortativity only for attributes of nodes in + Compute numeric assortativity only for attributes of nodes in container. The default is all nodes. Returns @@ -212,16 +210,16 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None): Examples -------- - >>> G=nx.Graph() - >>> G.add_nodes_from([0,1],size=2) - >>> G.add_nodes_from([2,3],size=3) - >>> G.add_edges_from([(0,1),(2,3)]) - >>> print(nx.numeric_assortativity_coefficient(G,'size')) + >>> G = nx.Graph() + >>> G.add_nodes_from([0, 1], size=2) + >>> G.add_nodes_from([2, 3], size=3) + >>> G.add_edges_from([(0, 1), (2, 3)]) + >>> print(nx.numeric_assortativity_coefficient(G, "size")) 1.0 Notes ----- - This computes Eq. (21) in Ref. [1]_ , for the mixing matrix of + This computes Eq. (21) in Ref. [1]_ , for the mixing matrix of of the specified attribute. References @@ -238,12 +236,12 @@ def attribute_ac(M): Parameters ---------- - M : numpy array or matrix - Attribute mixing matrix. + M : numpy.ndarray + 2D ndarray representing the attribute mixing matrix. Notes ----- - This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e))/(1-sum(e)), + This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)), where e is the joint probability distribution (mixing matrix) of the specified attribute. 
@@ -254,16 +252,16 @@ def attribute_ac(M): """ try: import numpy - except ImportError: + except ImportError as e: raise ImportError( - "attribute_assortativity requires NumPy: http://scipy.org/ ") + "attribute_assortativity requires " "NumPy: http://scipy.org/" + ) from e if M.sum() != 1.0: - M = M / float(M.sum()) - M = numpy.asmatrix(M) - s = (M * M).sum() + M = M / M.sum() + s = (M @ M).sum() t = M.trace() r = (t - s) / (1 - s) - return float(r) + return r def numeric_ac(M): @@ -271,9 +269,10 @@ def numeric_ac(M): # numeric assortativity coefficient, pearsonr try: import numpy - except ImportError: - raise ImportError('numeric_assortativity requires ', - 'NumPy: http://scipy.org/') + except ImportError as e: + raise ImportError( + "numeric_assortativity requires " "NumPy: http://scipy.org/" + ) from e if M.sum() != 1.0: M = M / float(M.sum()) nx, ny = M.shape # nx=ny @@ -281,21 +280,8 @@ def numeric_ac(M): y = numpy.arange(ny) a = M.sum(axis=0) b = M.sum(axis=1) - vara = (a * x**2).sum() - ((a * x).sum())**2 - varb = (b * x**2).sum() - ((b * x).sum())**2 + vara = (a * x ** 2).sum() - ((a * x).sum()) ** 2 + varb = (b * x ** 2).sum() - ((b * x).sum()) ** 2 xy = numpy.outer(x, y) ab = numpy.outer(a, b) return (xy * (M - ab)).sum() / numpy.sqrt(vara * varb) - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/assortativity/mixing.py b/networkx/algorithms/assortativity/mixing.py index 27acc45..8e76a14 100644 --- a/networkx/algorithms/assortativity/mixing.py +++ b/networkx/algorithms/assortativity/mixing.py @@ -1,22 +1,21 @@ -#-*- coding: utf-8 -*- """ Mixing matrices for node attributes and degree. """ -import networkx as nx from networkx.utils import dict_to_numpy_array -from networkx.algorithms.assortativity.pairs import node_degree_xy, \ - node_attribute_xy -__author__ = ' '.join(['Aric Hagberg ']) -__all__ = ['attribute_mixing_matrix', - 'attribute_mixing_dict', - 'degree_mixing_matrix', - 'degree_mixing_dict', - 'numeric_mixing_matrix', - 'mixing_dict'] +from networkx.algorithms.assortativity.pairs import node_degree_xy, node_attribute_xy + +__all__ = [ + "attribute_mixing_matrix", + "attribute_mixing_dict", + "degree_mixing_matrix", + "degree_mixing_dict", + "numeric_mixing_matrix", + "mixing_dict", +] def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): - """Return dictionary representation of mixing matrix for attribute. + """Returns dictionary representation of mixing matrix for attribute. 
Parameters ---------- @@ -34,14 +33,14 @@ def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): Examples -------- - >>> G=nx.Graph() - >>> G.add_nodes_from([0,1],color='red') - >>> G.add_nodes_from([2,3],color='blue') - >>> G.add_edge(1,3) - >>> d=nx.attribute_mixing_dict(G,'color') - >>> print(d['red']['blue']) + >>> G = nx.Graph() + >>> G.add_nodes_from([0, 1], color="red") + >>> G.add_nodes_from([2, 3], color="blue") + >>> G.add_edge(1, 3) + >>> d = nx.attribute_mixing_dict(G, "color") + >>> print(d["red"]["blue"]) 1 - >>> print(d['blue']['red']) # d symmetric for undirected graphs + >>> print(d["blue"]["red"]) # d symmetric for undirected graphs 1 Returns @@ -53,9 +52,8 @@ def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): return mixing_dict(xy_iter, normalized=normalized) -def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, - normalized=True): - """Return mixing matrix for attribute. +def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True): + """Returns mixing matrix for attribute. Parameters ---------- @@ -73,7 +71,7 @@ def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, Mapping from node attribute to integer index in matrix. If not specified, an arbitrary ordering will be used. - normalized : bool (default=False) + normalized : bool (default=True) Return counts if False or probabilities if True. Returns @@ -88,9 +86,8 @@ def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, return a -def degree_mixing_dict(G, x='out', y='in', weight=None, - nodes=None, normalized=False): - """Return dictionary representation of mixing matrix for degree. +def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False): + """Returns dictionary representation of mixing matrix for degree. Parameters ---------- @@ -120,9 +117,8 @@ def degree_mixing_dict(G, x='out', y='in', weight=None, return mixing_dict(xy_iter, normalized=normalized) -def degree_mixing_matrix(G, x='out', y='in', weight=None, - nodes=None, normalized=True): - """Return mixing matrix for attribute. +def degree_mixing_matrix(G, x="out", y="in", weight=None, nodes=None, normalized=True): + """Returns mixing matrix for attribute. Parameters ---------- @@ -144,7 +140,7 @@ def degree_mixing_matrix(G, x='out', y='in', weight=None, as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. - normalized : bool (default=False) + normalized : bool (default=True) Return counts if False or probabilities if True. Returns @@ -165,7 +161,7 @@ def degree_mixing_matrix(G, x='out', y='in', weight=None, def numeric_mixing_matrix(G, attribute, nodes=None, normalized=True): - """Return numeric mixing matrix for attribute. + """Returns numeric mixing matrix for attribute. The attribute must be an integer. @@ -180,7 +176,7 @@ def numeric_mixing_matrix(G, attribute, nodes=None, normalized=True): nodes: list or iterable (optional) Build the matrix only with nodes in container. The default is all nodes. - normalized : bool (default=False) + normalized : bool (default=True) Return counts if False or probabilities if True. Returns @@ -201,7 +197,7 @@ def numeric_mixing_matrix(G, attribute, nodes=None, normalized=True): def mixing_dict(xy, normalized=False): - """Return a dictionary representation of mixing matrix. + """Returns a dictionary representation of mixing matrix. 
Parameters ---------- @@ -235,16 +231,3 @@ def mixing_dict(xy, normalized=False): for j in jdict: jdict[j] /= psum return d - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/assortativity/neighbor_degree.py b/networkx/algorithms/assortativity/neighbor_degree.py index 1a0dbeb..84dffd8 100644 --- a/networkx/algorithms/assortativity/neighbor_degree.py +++ b/networkx/algorithms/assortativity/neighbor_degree.py @@ -1,12 +1,3 @@ -#-*- coding: utf-8 -*- -# Copyright (C) 2011 by -# Jordi Torrents -# Aric Hagberg -# All rights reserved. -# BSD license. -import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ', - 'Aric Hagberg (hagberg@lanl.gov)']) __all__ = ["average_neighbor_degree"] @@ -21,13 +12,13 @@ def _average_nbr_deg(G, source_degree, target_degree, nodes=None, weight=None): if weight is None: avg[n] = sum(d for n, d in nbrdeg) / float(deg) else: - avg[n] = sum((G[n][nbr].get(weight, 1) * d - for nbr, d in nbrdeg)) / float(deg) + avg[n] = sum((G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)) / float( + deg + ) return avg -def average_neighbor_degree(G, source='out', target='out', - nodes=None, weight=None): +def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None): r"""Returns the average degree of the neighborhood of each node. The average neighborhood degree of a node `i` is @@ -37,7 +28,7 @@ def average_neighbor_degree(G, source='out', target='out', k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j where `N(i)` are the neighbors of node `i` and `k_j` is - the degree of node `j` which belongs to `N(i)`. For weighted + the degree of node `j` which belongs to `N(i)`. For weighted graphs, an analogous measure can be defined [1]_, .. math:: @@ -55,13 +46,13 @@ def average_neighbor_degree(G, source='out', target='out', source : string ("in"|"out") Directed graphs only. - Use "in"- or "out"-degree for source node. + Use "in"- or "out"-degree for source node. target : string ("in"|"out") Directed graphs only. Use "in"- or "out"-degree for target node. - nodes : list or iterable, optional + nodes : list or iterable, optional Compute neighbor degree for specified nodes. The default is all nodes in the graph. @@ -76,47 +67,46 @@ def average_neighbor_degree(G, source='out', target='out', Examples -------- - >>> G=nx.path_graph(4) - >>> G.edges[0, 1]['weight'] = 5 - >>> G.edges[2, 3]['weight'] = 3 + >>> G = nx.path_graph(4) + >>> G.edges[0, 1]["weight"] = 5 + >>> G.edges[2, 3]["weight"] = 3 >>> nx.average_neighbor_degree(G) {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0} - >>> nx.average_neighbor_degree(G, weight='weight') + >>> nx.average_neighbor_degree(G, weight="weight") {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0} - >>> G=nx.DiGraph() + >>> G = nx.DiGraph() >>> nx.add_path(G, [0, 1, 2, 3]) - >>> nx.average_neighbor_degree(G, source='in', target='in') + >>> nx.average_neighbor_degree(G, source="in", target="in") {0: 1.0, 1: 1.0, 2: 1.0, 3: 0.0} - >>> nx.average_neighbor_degree(G, source='out', target='out') + >>> nx.average_neighbor_degree(G, source="out", target="out") {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0} Notes ----- - For directed graphs you can also specify in-degree or out-degree + For directed graphs you can also specify in-degree or out-degree by passing keyword arguments. 
See Also -------- - average_degree_connectivity + average_degree_connectivity References - ---------- - .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani, - "The architecture of complex weighted networks". + ---------- + .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani, + "The architecture of complex weighted networks". PNAS 101 (11): 3747–3752 (2004). """ source_degree = G.degree target_degree = G.degree if G.is_directed(): - direction = {'out': G.out_degree, - 'in': G.in_degree} + direction = {"out": G.out_degree, "in": G.in_degree} source_degree = direction[source] target_degree = direction[target] - return _average_nbr_deg(G, source_degree, target_degree, - nodes=nodes, weight=weight) + return _average_nbr_deg(G, source_degree, target_degree, nodes=nodes, weight=weight) + # obsolete # def average_neighbor_in_degree(G, nodes=None, weight=None): diff --git a/networkx/algorithms/assortativity/pairs.py b/networkx/algorithms/assortativity/pairs.py index 73d3be5..83bde34 100644 --- a/networkx/algorithms/assortativity/pairs.py +++ b/networkx/algorithms/assortativity/pairs.py @@ -1,13 +1,9 @@ -#-*- coding: utf-8 -*- """Generators of x-y pairs of node data.""" -import networkx as nx -__author__ = ' '.join(['Aric Hagberg ']) -__all__ = ['node_attribute_xy', - 'node_degree_xy'] +__all__ = ["node_attribute_xy", "node_degree_xy"] def node_attribute_xy(G, attribute, nodes=None): - """Return iterator of node-attribute pairs for all edges in G. + """Returns iterator of node-attribute pairs for all edges in G. Parameters ---------- @@ -28,16 +24,16 @@ def node_attribute_xy(G, attribute, nodes=None): Examples -------- >>> G = nx.DiGraph() - >>> G.add_node(1,color='red') - >>> G.add_node(2,color='blue') - >>> G.add_edge(1,2) - >>> list(nx.node_attribute_xy(G,'color')) + >>> G.add_node(1, color="red") + >>> G.add_node(2, color="blue") + >>> G.add_edge(1, 2) + >>> list(nx.node_attribute_xy(G, "color")) [('red', 'blue')] Notes ----- - For undirected graphs each edge is produced twice, once for each edge - representation (u,v) and (v,u), with the exception of self-loop edges + For undirected graphs each edge is produced twice, once for each edge + representation (u,v) and (v,u), with the exception of self-loop edges which only appear once. """ if nodes is None: @@ -60,7 +56,7 @@ def node_attribute_xy(G, attribute, nodes=None): yield (uattr, vattr) -def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): +def node_degree_xy(G, x="out", y="in", weight=None, nodes=None): """Generate node degree-degree pairs for edges in G. Parameters @@ -74,7 +70,7 @@ def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): The degree type for target node (directed graphs only). weight: string or None, optional (default=None) - The edge attribute that holds the numerical value used + The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. 
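The Notes section below points out that for undirected graphs each edge is produced twice, once per orientation. A minimal sketch of that behavior, assuming only the public nx.node_degree_xy generator:

    import networkx as nx

    G = nx.Graph()
    G.add_edge(1, 2)
    # a single undirected edge yields one degree pair per orientation
    assert sorted(nx.node_degree_xy(G)) == [(1, 1), (1, 1)]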
@@ -91,16 +87,16 @@ def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): Examples -------- >>> G = nx.DiGraph() - >>> G.add_edge(1,2) - >>> list(nx.node_degree_xy(G,x='out',y='in')) + >>> G.add_edge(1, 2) + >>> list(nx.node_degree_xy(G, x="out", y="in")) [(1, 1)] - >>> list(nx.node_degree_xy(G,x='in',y='out')) + >>> list(nx.node_degree_xy(G, x="in", y="out")) [(0, 0)] Notes ----- - For undirected graphs each edge is produced twice, once for each edge - representation (u,v) and (v,u), with the exception of self-loop edges + For undirected graphs each edge is produced twice, once for each edge + representation (u,v) and (v,u), with the exception of self-loop edges which only appear once. """ if nodes is None: @@ -110,8 +106,7 @@ def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): xdeg = G.degree ydeg = G.degree if G.is_directed(): - direction = {'out': G.out_degree, - 'in': G.in_degree} + direction = {"out": G.out_degree, "in": G.in_degree} xdeg = direction[x] ydeg = direction[y] @@ -119,16 +114,3 @@ def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes) for v, degv in ydeg(neighbors, weight=weight): yield degu, degv - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/assortativity/tests/__init__.py b/networkx/algorithms/assortativity/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/assortativity/tests/base_test.py b/networkx/algorithms/assortativity/tests/base_test.py index d053912..9a8a1e7 100644 --- a/networkx/algorithms/assortativity/tests/base_test.py +++ b/networkx/algorithms/assortativity/tests/base_test.py @@ -1,51 +1,51 @@ import networkx as nx -class BaseTestAttributeMixing(object): - - def setUp(self): +class BaseTestAttributeMixing: + @classmethod + def setup_class(cls): G = nx.Graph() - G.add_nodes_from([0, 1], fish='one') - G.add_nodes_from([2, 3], fish='two') - G.add_nodes_from([4], fish='red') - G.add_nodes_from([5], fish='blue') + G.add_nodes_from([0, 1], fish="one") + G.add_nodes_from([2, 3], fish="two") + G.add_nodes_from([4], fish="red") + G.add_nodes_from([5], fish="blue") G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) - self.G = G + cls.G = G D = nx.DiGraph() - D.add_nodes_from([0, 1], fish='one') - D.add_nodes_from([2, 3], fish='two') - D.add_nodes_from([4], fish='red') - D.add_nodes_from([5], fish='blue') + D.add_nodes_from([0, 1], fish="one") + D.add_nodes_from([2, 3], fish="two") + D.add_nodes_from([4], fish="red") + D.add_nodes_from([5], fish="blue") D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) - self.D = D + cls.D = D M = nx.MultiGraph() - M.add_nodes_from([0, 1], fish='one') - M.add_nodes_from([2, 3], fish='two') - M.add_nodes_from([4], fish='red') - M.add_nodes_from([5], fish='blue') + M.add_nodes_from([0, 1], fish="one") + M.add_nodes_from([2, 3], fish="two") + M.add_nodes_from([4], fish="red") + M.add_nodes_from([5], fish="blue") M.add_edges_from([(0, 1), (0, 1), (2, 3)]) - self.M = M + cls.M = M S = nx.Graph() - S.add_nodes_from([0, 1], fish='one') - S.add_nodes_from([2, 3], fish='two') - S.add_nodes_from([4], fish='red') - S.add_nodes_from([5], fish='blue') + S.add_nodes_from([0, 1], fish="one") + S.add_nodes_from([2, 3], fish="two") + S.add_nodes_from([4], fish="red") + S.add_nodes_from([5], fish="blue") 
S.add_edge(0, 0) S.add_edge(2, 2) - self.S = S - - -class BaseTestDegreeMixing(object): - - def setUp(self): - self.P4 = nx.path_graph(4) - self.D = nx.DiGraph() - self.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) - self.M = nx.MultiGraph() - nx.add_path(self.M, range(4)) - self.M.add_edge(0, 1) - self.S = nx.Graph() - self.S.add_edges_from([(0, 0), (1, 1)]) + cls.S = S + + +class BaseTestDegreeMixing: + @classmethod + def setup_class(cls): + cls.P4 = nx.path_graph(4) + cls.D = nx.DiGraph() + cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) + cls.M = nx.MultiGraph() + nx.add_path(cls.M, range(4)) + cls.M.add_edge(0, 1) + cls.S = nx.Graph() + cls.S.add_edges_from([(0, 0), (1, 1)]) diff --git a/networkx/algorithms/assortativity/tests/test_connectivity.py b/networkx/algorithms/assortativity/tests/test_connectivity.py index 701d30f..b1b0ac8 100644 --- a/networkx/algorithms/assortativity/tests/test_connectivity.py +++ b/networkx/algorithms/assortativity/tests/test_connectivity.py @@ -1,96 +1,94 @@ from itertools import permutations -from nose.tools import assert_almost_equal -from nose.tools import assert_equal -from nose.tools import raises +import pytest import networkx as nx +from networkx.testing import almost_equal -class TestNeighborConnectivity(object): - +class TestNeighborConnectivity: def test_degree_p4(self): G = nx.path_graph(4) answer = {1: 2.0, 2: 1.5} nd = nx.average_degree_connectivity(G) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() answer = {2: 2.0, 4: 1.5} nd = nx.average_degree_connectivity(D) - assert_equal(nd, answer) + assert nd == answer answer = {1: 2.0, 2: 1.5} D = G.to_directed() - nd = nx.average_degree_connectivity(D, source='in', target='in') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(D, source="in", target="in") + assert nd == answer D = G.to_directed() - nd = nx.average_degree_connectivity(D, source='in', target='in') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(D, source="in", target="in") + assert nd == answer def test_degree_p4_weighted(self): G = nx.path_graph(4) - G[1][2]['weight'] = 4 + G[1][2]["weight"] = 4 answer = {1: 2.0, 2: 1.8} - nd = nx.average_degree_connectivity(G, weight='weight') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(G, weight="weight") + assert nd == answer answer = {1: 2.0, 2: 1.5} nd = nx.average_degree_connectivity(G) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() answer = {2: 2.0, 4: 1.8} - nd = nx.average_degree_connectivity(D, weight='weight') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(D, weight="weight") + assert nd == answer answer = {1: 2.0, 2: 1.8} D = G.to_directed() - nd = nx.average_degree_connectivity(D, weight='weight', source='in', - target='in') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity( + D, weight="weight", source="in", target="in" + ) + assert nd == answer D = G.to_directed() - nd = nx.average_degree_connectivity(D, source='in', target='out', - weight='weight') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity( + D, source="in", target="out", weight="weight" + ) + assert nd == answer def test_weight_keyword(self): G = nx.path_graph(4) - G[1][2]['other'] = 4 + G[1][2]["other"] = 4 answer = {1: 2.0, 2: 1.8} - nd = nx.average_degree_connectivity(G, weight='other') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(G, weight="other") + assert nd == answer answer = {1: 2.0, 2: 1.5} nd = nx.average_degree_connectivity(G, weight=None) - 
assert_equal(nd, answer) + assert nd == answer D = G.to_directed() answer = {2: 2.0, 4: 1.8} - nd = nx.average_degree_connectivity(D, weight='other') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(D, weight="other") + assert nd == answer answer = {1: 2.0, 2: 1.8} D = G.to_directed() - nd = nx.average_degree_connectivity(D, weight='other', source='in', - target='in') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in") + assert nd == answer D = G.to_directed() - nd = nx.average_degree_connectivity(D, weight='other', source='in', - target='in') - assert_equal(nd, answer) + nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in") + assert nd == answer def test_degree_barrat(self): G = nx.star_graph(5) G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)]) - G[0][5]['weight'] = 5 + G[0][5]["weight"] = 5 nd = nx.average_degree_connectivity(G)[5] - assert_equal(nd, 1.8) - nd = nx.average_degree_connectivity(G, weight='weight')[5] - assert_almost_equal(nd, 3.222222, places=5) - nd = nx.k_nearest_neighbors(G, weight='weight')[5] - assert_almost_equal(nd, 3.222222, places=5) + assert nd == 1.8 + nd = nx.average_degree_connectivity(G, weight="weight")[5] + assert almost_equal(nd, 3.222222, places=5) + nd = nx.k_nearest_neighbors(G, weight="weight")[5] + assert almost_equal(nd, 3.222222, places=5) def test_zero_deg(self): G = nx.DiGraph() @@ -98,40 +96,39 @@ def test_zero_deg(self): G.add_edge(1, 3) G.add_edge(1, 4) c = nx.average_degree_connectivity(G) - assert_equal(c, {1: 0, 3: 1}) - c = nx.average_degree_connectivity(G, source='in', target='in') - assert_equal(c, {0: 0, 1: 0}) - c = nx.average_degree_connectivity(G, source='in', target='out') - assert_equal(c, {0: 0, 1: 3}) - c = nx.average_degree_connectivity(G, source='in', target='in+out') - assert_equal(c, {0: 0, 1: 3}) - c = nx.average_degree_connectivity(G, source='out', target='out') - assert_equal(c, {0: 0, 3: 0}) - c = nx.average_degree_connectivity(G, source='out', target='in') - assert_equal(c, {0: 0, 3: 1}) - c = nx.average_degree_connectivity(G, source='out', target='in+out') - assert_equal(c, {0: 0, 3: 1}) + assert c == {1: 0, 3: 1} + c = nx.average_degree_connectivity(G, source="in", target="in") + assert c == {0: 0, 1: 0} + c = nx.average_degree_connectivity(G, source="in", target="out") + assert c == {0: 0, 1: 3} + c = nx.average_degree_connectivity(G, source="in", target="in+out") + assert c == {0: 0, 1: 3} + c = nx.average_degree_connectivity(G, source="out", target="out") + assert c == {0: 0, 3: 0} + c = nx.average_degree_connectivity(G, source="out", target="in") + assert c == {0: 0, 3: 1} + c = nx.average_degree_connectivity(G, source="out", target="in+out") + assert c == {0: 0, 3: 1} def test_in_out_weight(self): G = nx.DiGraph() G.add_edge(1, 2, weight=1) G.add_edge(1, 3, weight=1) G.add_edge(3, 1, weight=1) - for s, t in permutations(['in', 'out', 'in+out'], 2): + for s, t in permutations(["in", "out", "in+out"], 2): c = nx.average_degree_connectivity(G, source=s, target=t) - cw = nx.average_degree_connectivity(G, source=s, target=t, - weight='weight') - assert_equal(c, cw) + cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight") + assert c == cw - @raises(ValueError) def test_invalid_source(self): - G = nx.DiGraph() - nx.average_degree_connectivity(G, source='bogus') + with pytest.raises(ValueError): + G = nx.DiGraph() + nx.average_degree_connectivity(G, source="bogus") - @raises(ValueError) def 
test_invalid_target(self): - G = nx.DiGraph() - nx.average_degree_connectivity(G, target='bogus') + with pytest.raises(ValueError): + G = nx.DiGraph() + nx.average_degree_connectivity(G, target="bogus") def test_single_node(self): # TODO Is this really the intended behavior for providing a @@ -139,4 +136,4 @@ def test_single_node(self): # just return the connectivity value itself? G = nx.trivial_graph() conn = nx.average_degree_connectivity(G, nodes=0) - assert_equal(conn, {0: 0}) + assert conn == {0: 0} diff --git a/networkx/algorithms/assortativity/tests/test_correlation.py b/networkx/algorithms/assortativity/tests/test_correlation.py index 5f966de..42aa1d0 100644 --- a/networkx/algorithms/assortativity/tests/test_correlation.py +++ b/networkx/algorithms/assortativity/tests/test_correlation.py @@ -1,27 +1,16 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") + + import networkx as nx -from base_test import BaseTestAttributeMixing, BaseTestDegreeMixing +from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing from networkx.algorithms.assortativity.correlation import attribute_ac class TestDegreeMixingCorrelation(BaseTestDegreeMixing): - @classmethod - def setupClass(cls): - global np - global npt - try: - import numpy as np - import numpy.testing as npt - except ImportError: - raise SkipTest('NumPy not available.') - try: - import scipy - import scipy.stats - except ImportError: - raise SkipTest('SciPy not available.') - def test_degree_assortativity_undirected(self): r = nx.degree_assortativity_coefficient(self.P4) npt.assert_almost_equal(r, -1.0 / 2, decimal=4) @@ -48,44 +37,36 @@ def test_degree_pearson_assortativity_multigraph(self): class TestAttributeMixingCorrelation(BaseTestAttributeMixing): - @classmethod - def setupClass(cls): - global np - global npt - try: - import numpy as np - import numpy.testing as npt - - except ImportError: - raise SkipTest('NumPy not available.') - def test_attribute_assortativity_undirected(self): - r = nx.attribute_assortativity_coefficient(self.G, 'fish') - assert_equal(r, 6.0 / 22.0) + r = nx.attribute_assortativity_coefficient(self.G, "fish") + assert r == 6.0 / 22.0 def test_attribute_assortativity_directed(self): - r = nx.attribute_assortativity_coefficient(self.D, 'fish') - assert_equal(r, 1.0 / 3.0) + r = nx.attribute_assortativity_coefficient(self.D, "fish") + assert r == 1.0 / 3.0 def test_attribute_assortativity_multigraph(self): - r = nx.attribute_assortativity_coefficient(self.M, 'fish') - assert_equal(r, 1.0) + r = nx.attribute_assortativity_coefficient(self.M, "fish") + assert r == 1.0 def test_attribute_assortativity_coefficient(self): # from "Mixing patterns in networks" + # fmt: off a = np.array([[0.258, 0.016, 0.035, 0.013], [0.012, 0.157, 0.058, 0.019], [0.013, 0.023, 0.306, 0.035], [0.005, 0.007, 0.024, 0.016]]) + # fmt: on r = attribute_ac(a) npt.assert_almost_equal(r, 0.623, decimal=3) def test_attribute_assortativity_coefficient2(self): + # fmt: off a = np.array([[0.18, 0.02, 0.01, 0.03], [0.02, 0.20, 0.03, 0.02], [0.01, 0.03, 0.16, 0.01], [0.03, 0.02, 0.01, 0.22]]) - + # fmt: on r = attribute_ac(a) npt.assert_almost_equal(r, 0.68, decimal=2) diff --git a/networkx/algorithms/assortativity/tests/test_mixing.py b/networkx/algorithms/assortativity/tests/test_mixing.py index 703c019..1b21eeb 100644 --- a/networkx/algorithms/assortativity/tests/test_mixing.py +++ 
b/networkx/algorithms/assortativity/tests/test_mixing.py @@ -1,94 +1,82 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") + + import networkx as nx -from base_test import BaseTestAttributeMixing, BaseTestDegreeMixing +from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing class TestDegreeMixingDict(BaseTestDegreeMixing): - def test_degree_mixing_dict_undirected(self): d = nx.degree_mixing_dict(self.P4) - d_result = {1: {2: 2}, - 2: {1: 2, 2: 2}, - } - assert_equal(d, d_result) + d_result = {1: {2: 2}, 2: {1: 2, 2: 2}} + assert d == d_result def test_degree_mixing_dict_undirected_normalized(self): d = nx.degree_mixing_dict(self.P4, normalized=True) - d_result = {1: {2: 1.0 / 3}, - 2: {1: 1.0 / 3, 2: 1.0 / 3}, - } - assert_equal(d, d_result) + d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}} + assert d == d_result def test_degree_mixing_dict_directed(self): d = nx.degree_mixing_dict(self.D) print(d) - d_result = {1: {3: 2}, - 2: {1: 1, 3: 1}, - 3: {} - } - assert_equal(d, d_result) + d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}} + assert d == d_result def test_degree_mixing_dict_multigraph(self): d = nx.degree_mixing_dict(self.M) - d_result = {1: {2: 1}, - 2: {1: 1, 3: 3}, - 3: {2: 3} - } - assert_equal(d, d_result) + d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}} + assert d == d_result class TestDegreeMixingMatrix(BaseTestDegreeMixing): - - @classmethod - def setupClass(cls): - global np - global npt - try: - import numpy as np - import numpy.testing as npt - - except ImportError: - raise SkipTest('NumPy not available.') - def test_degree_mixing_matrix_undirected(self): + # fmt: off a_result = np.array([[0, 0, 0], [0, 0, 2], [0, 2, 2]] ) + # fmt: on a = nx.degree_mixing_matrix(self.P4, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.P4) npt.assert_equal(a, a_result / float(a_result.sum())) def test_degree_mixing_matrix_directed(self): + # fmt: off a_result = np.array([[0, 0, 0, 0], [0, 0, 0, 2], [0, 1, 0, 1], [0, 0, 0, 0]] ) + # fmt: on a = nx.degree_mixing_matrix(self.D, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.D) npt.assert_equal(a, a_result / float(a_result.sum())) def test_degree_mixing_matrix_multigraph(self): + # fmt: off a_result = np.array([[0, 0, 0, 0], [0, 0, 1, 0], [0, 1, 0, 3], [0, 0, 3, 0]] ) + # fmt: on a = nx.degree_mixing_matrix(self.M, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.M) npt.assert_equal(a, a_result / float(a_result.sum())) def test_degree_mixing_matrix_selfloop(self): + # fmt: off a_result = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 2]] ) + # fmt: on a = nx.degree_mixing_matrix(self.S, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.S) @@ -96,86 +84,59 @@ def test_degree_mixing_matrix_selfloop(self): class TestAttributeMixingDict(BaseTestAttributeMixing): - def test_attribute_mixing_dict_undirected(self): - d = nx.attribute_mixing_dict(self.G, 'fish') - d_result = {'one': {'one': 2, 'red': 1}, - 'two': {'two': 2, 'blue': 1}, - 'red': {'one': 1}, - 'blue': {'two': 1} - } - assert_equal(d, d_result) + d = nx.attribute_mixing_dict(self.G, "fish") + d_result = { + "one": {"one": 2, "red": 1}, + "two": {"two": 2, "blue": 1}, + "red": {"one": 1}, + "blue": {"two": 1}, + } + assert d == d_result def test_attribute_mixing_dict_directed(self): - d = nx.attribute_mixing_dict(self.D, 
'fish') - d_result = {'one': {'one': 1, 'red': 1}, - 'two': {'two': 1, 'blue': 1}, - 'red': {}, - 'blue': {} - } - assert_equal(d, d_result) + d = nx.attribute_mixing_dict(self.D, "fish") + d_result = { + "one": {"one": 1, "red": 1}, + "two": {"two": 1, "blue": 1}, + "red": {}, + "blue": {}, + } + assert d == d_result def test_attribute_mixing_dict_multigraph(self): - d = nx.attribute_mixing_dict(self.M, 'fish') - d_result = {'one': {'one': 4}, - 'two': {'two': 2}, - } - assert_equal(d, d_result) + d = nx.attribute_mixing_dict(self.M, "fish") + d_result = {"one": {"one": 4}, "two": {"two": 2}} + assert d == d_result class TestAttributeMixingMatrix(BaseTestAttributeMixing): - @classmethod - def setupClass(cls): - global np - global npt - try: - import numpy as np - import numpy.testing as npt - - except ImportError: - raise SkipTest('NumPy not available.') - def test_attribute_mixing_matrix_undirected(self): - mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3} - a_result = np.array([[2, 0, 1, 0], - [0, 2, 0, 1], - [1, 0, 0, 0], - [0, 1, 0, 0]] - ) - a = nx.attribute_mixing_matrix(self.G, 'fish', - mapping=mapping, - normalized=False) + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.G, "fish", mapping=mapping, normalized=False + ) npt.assert_equal(a, a_result) - a = nx.attribute_mixing_matrix(self.G, 'fish', - mapping=mapping) + a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) def test_attribute_mixing_matrix_directed(self): - mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3} - a_result = np.array([[1, 0, 1, 0], - [0, 1, 0, 1], - [0, 0, 0, 0], - [0, 0, 0, 0]] - ) - a = nx.attribute_mixing_matrix(self.D, 'fish', - mapping=mapping, - normalized=False) + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.D, "fish", mapping=mapping, normalized=False + ) npt.assert_equal(a, a_result) - a = nx.attribute_mixing_matrix(self.D, 'fish', - mapping=mapping) + a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) def test_attribute_mixing_matrix_multigraph(self): - mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3} - a_result = np.array([[4, 0, 0, 0], - [0, 2, 0, 0], - [0, 0, 0, 0], - [0, 0, 0, 0]] - ) - a = nx.attribute_mixing_matrix(self.M, 'fish', - mapping=mapping, - normalized=False) + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.M, "fish", mapping=mapping, normalized=False + ) npt.assert_equal(a, a_result) - a = nx.attribute_mixing_matrix(self.M, 'fish', - mapping=mapping) + a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) diff --git a/networkx/algorithms/assortativity/tests/test_neighbor_degree.py b/networkx/algorithms/assortativity/tests/test_neighbor_degree.py index c294de4..51b4aa8 100644 --- a/networkx/algorithms/assortativity/tests/test_neighbor_degree.py +++ b/networkx/algorithms/assortativity/tests/test_neighbor_degree.py @@ -1,80 +1,76 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx +from networkx.testing import almost_equal -class TestAverageNeighbor(object): - +class TestAverageNeighbor: def 
test_degree_p4(self): G = nx.path_graph(4) answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2} nd = nx.average_neighbor_degree(G) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() nd = nx.average_neighbor_degree(D) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() nd = nx.average_neighbor_degree(D) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, source='in', target='in') - assert_equal(nd, answer) + nd = nx.average_neighbor_degree(D, source="in", target="in") + assert nd == answer def test_degree_p4_weighted(self): G = nx.path_graph(4) - G[1][2]['weight'] = 4 + G[1][2]["weight"] = 4 answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2} - nd = nx.average_neighbor_degree(G, weight='weight') - assert_equal(nd, answer) + nd = nx.average_neighbor_degree(G, weight="weight") + assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, weight='weight') - assert_equal(nd, answer) + nd = nx.average_neighbor_degree(D, weight="weight") + assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, weight='weight') - assert_equal(nd, answer) - nd = nx.average_neighbor_degree(D, source='out', target='out', - weight='weight') - assert_equal(nd, answer) + nd = nx.average_neighbor_degree(D, weight="weight") + assert nd == answer + nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight") + assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, source='in', target='in', - weight='weight') - assert_equal(nd, answer) + nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight") + assert nd == answer def test_degree_k4(self): G = nx.complete_graph(4) answer = {0: 3, 1: 3, 2: 3, 3: 3} nd = nx.average_neighbor_degree(G) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() nd = nx.average_neighbor_degree(D) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() nd = nx.average_neighbor_degree(D) - assert_equal(nd, answer) + assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, source='in', target='in') - assert_equal(nd, answer) + nd = nx.average_neighbor_degree(D, source="in", target="in") + assert nd == answer def test_degree_k4_nodes(self): G = nx.complete_graph(4) answer = {1: 3.0, 2: 3.0} nd = nx.average_neighbor_degree(G, nodes=[1, 2]) - assert_equal(nd, answer) + assert nd == answer def test_degree_barrat(self): G = nx.star_graph(5) G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)]) - G[0][5]['weight'] = 5 + G[0][5]["weight"] = 5 nd = nx.average_neighbor_degree(G)[5] - assert_equal(nd, 1.8) - nd = nx.average_neighbor_degree(G, weight='weight')[5] - assert_almost_equal(nd, 3.222222, places=5) + assert nd == 1.8 + nd = nx.average_neighbor_degree(G, weight="weight")[5] + assert almost_equal(nd, 3.222222, places=5) diff --git a/networkx/algorithms/assortativity/tests/test_pairs.py b/networkx/algorithms/assortativity/tests/test_pairs.py index b3d8012..e99f51f 100644 --- a/networkx/algorithms/assortativity/tests/test_pairs.py +++ b/networkx/algorithms/assortativity/tests/test_pairs.py @@ -1,110 +1,86 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx -from base_test import BaseTestAttributeMixing, BaseTestDegreeMixing +from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing class TestAttributeMixingXY(BaseTestAttributeMixing): - def test_node_attribute_xy_undirected(self): - attrxy = sorted(nx.node_attribute_xy(self.G, 'fish')) - attrxy_result = sorted([('one', 
'one'), - ('one', 'one'), - ('two', 'two'), - ('two', 'two'), - ('one', 'red'), - ('red', 'one'), - ('blue', 'two'), - ('two', 'blue') - ]) - assert_equal(attrxy, attrxy_result) + attrxy = sorted(nx.node_attribute_xy(self.G, "fish")) + attrxy_result = sorted( + [ + ("one", "one"), + ("one", "one"), + ("two", "two"), + ("two", "two"), + ("one", "red"), + ("red", "one"), + ("blue", "two"), + ("two", "blue"), + ] + ) + assert attrxy == attrxy_result def test_node_attribute_xy_undirected_nodes(self): - attrxy = sorted(nx.node_attribute_xy(self.G, 'fish', - nodes=['one', 'yellow'])) - attrxy_result = sorted([ - ]) - assert_equal(attrxy, attrxy_result) + attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"])) + attrxy_result = sorted([]) + assert attrxy == attrxy_result def test_node_attribute_xy_directed(self): - attrxy = sorted(nx.node_attribute_xy(self.D, 'fish')) - attrxy_result = sorted([('one', 'one'), - ('two', 'two'), - ('one', 'red'), - ('two', 'blue') - ]) - assert_equal(attrxy, attrxy_result) + attrxy = sorted(nx.node_attribute_xy(self.D, "fish")) + attrxy_result = sorted( + [("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")] + ) + assert attrxy == attrxy_result def test_node_attribute_xy_multigraph(self): - attrxy = sorted(nx.node_attribute_xy(self.M, 'fish')) - attrxy_result = [('one', 'one'), - ('one', 'one'), - ('one', 'one'), - ('one', 'one'), - ('two', 'two'), - ('two', 'two') - ] - assert_equal(attrxy, attrxy_result) + attrxy = sorted(nx.node_attribute_xy(self.M, "fish")) + attrxy_result = [ + ("one", "one"), + ("one", "one"), + ("one", "one"), + ("one", "one"), + ("two", "two"), + ("two", "two"), + ] + assert attrxy == attrxy_result def test_node_attribute_xy_selfloop(self): - attrxy = sorted(nx.node_attribute_xy(self.S, 'fish')) - attrxy_result = [('one', 'one'), - ('two', 'two') - ] - assert_equal(attrxy, attrxy_result) + attrxy = sorted(nx.node_attribute_xy(self.S, "fish")) + attrxy_result = [("one", "one"), ("two", "two")] + assert attrxy == attrxy_result class TestDegreeMixingXY(BaseTestDegreeMixing): - def test_node_degree_xy_undirected(self): xy = sorted(nx.node_degree_xy(self.P4)) - xy_result = sorted([(1, 2), - (2, 1), - (2, 2), - (2, 2), - (1, 2), - (2, 1)]) - assert_equal(xy, xy_result) + xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)]) + assert xy == xy_result def test_node_degree_xy_undirected_nodes(self): xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1])) - xy_result = sorted([(1, 2), - (2, 1), ]) - assert_equal(xy, xy_result) + xy_result = sorted([(1, 2), (2, 1)]) + assert xy == xy_result def test_node_degree_xy_directed(self): xy = sorted(nx.node_degree_xy(self.D)) - xy_result = sorted([(2, 1), - (2, 3), - (1, 3), - (1, 3)]) - assert_equal(xy, xy_result) + xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)]) + assert xy == xy_result def test_node_degree_xy_multigraph(self): xy = sorted(nx.node_degree_xy(self.M)) - xy_result = sorted([(2, 3), - (2, 3), - (3, 2), - (3, 2), - (2, 3), - (3, 2), - (1, 2), - (2, 1)]) - assert_equal(xy, xy_result) + xy_result = sorted( + [(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)] + ) + assert xy == xy_result def test_node_degree_xy_selfloop(self): xy = sorted(nx.node_degree_xy(self.S)) - xy_result = sorted([(2, 2), - (2, 2)]) - assert_equal(xy, xy_result) + xy_result = sorted([(2, 2), (2, 2)]) + assert xy == xy_result def test_node_degree_xy_weighted(self): G = nx.Graph() G.add_edge(1, 2, weight=7) G.add_edge(2, 3, weight=10) - xy = 
sorted(nx.node_degree_xy(G, weight='weight'))
-        xy_result = sorted([(7, 17),
-                            (17, 10),
-                            (17, 7),
-                            (10, 17)])
-        assert_equal(xy, xy_result)
+        xy = sorted(nx.node_degree_xy(G, weight="weight"))
+        xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)])
+        assert xy == xy_result
diff --git a/networkx/algorithms/asteroidal.py b/networkx/algorithms/asteroidal.py
new file mode 100644
index 0000000..c1bc718
--- /dev/null
+++ b/networkx/algorithms/asteroidal.py
@@ -0,0 +1,168 @@
+"""
+Algorithms for asteroidal triples and asteroidal numbers in graphs.
+
+An asteroidal triple in a graph G is a set of three non-adjacent vertices
+u, v and w such that there exists a path between any two of them that avoids
+the closed neighborhood of the third. More formally, v_j and v_k belong to
+the same connected component of G - N[v_i], where N[v_i] denotes the closed
+neighborhood of v_i. A graph which does not contain any asteroidal triples
+is called an AT-free graph. The class of AT-free graphs is a graph class for
+which many NP-complete problems are solvable in polynomial time, among them
+independent set and coloring.
+"""
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["is_at_free", "find_asteroidal_triple"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def find_asteroidal_triple(G):
+    r"""Find an asteroidal triple in the given graph.
+
+    An asteroidal triple is a triple of non-adjacent vertices such that
+    there exists a path between any two of them which avoids the closed
+    neighborhood of the third. The function checks each independent triple
+    of vertices and reports whether it forms an asteroidal triple. This is
+    done with the help of a data structure called a component structure.
+    A component structure encodes information about which vertices belong
+    to the same connected component when the closed neighborhood of a given
+    vertex is removed from the graph. The algorithm used is the trivial
+    one, outlined in [1]_, which has a runtime of
+    :math:`O(|V||\overline{E}| + |V||E|)`, where the second term is the
+    creation of the component structure.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to search for an asteroidal triple.
+
+    Returns
+    -------
+    list or None
+        An asteroidal triple is returned as a list of nodes. If no
+        asteroidal triple exists, i.e. the graph is AT-free, then None is
+        returned.
+
+    Notes
+    -----
+    The component structure and the algorithm are described in [1]_. The
+    current implementation implements the trivial algorithm for simple
+    graphs.
+
+    References
+    ----------
+    .. [1] Ekkehard Köhler,
+       "Recognizing Graphs without asteroidal triples",
+       Journal of Discrete Algorithms 2, pages 439-452, 2004.
+       https://www.sciencedirect.com/science/article/pii/S157086670400019X
+    """
+    V = set(G.nodes)
+
+    if len(V) < 6:
+        # An asteroidal triple cannot exist in a graph with fewer than six
+        # vertices.
+        return None
+
+    component_structure = create_component_structure(G)
+    E_complement = set(nx.complement(G).edges)
+
+    for e in E_complement:
+        u = e[0]
+        v = e[1]
+        u_neighborhood = set(G[u]).union([u])
+        v_neighborhood = set(G[v]).union([v])
+        union_of_neighborhoods = u_neighborhood.union(v_neighborhood)
+        for w in V - union_of_neighborhoods:
+            # Check for each pair of vertices whether they belong to the
+            # same connected component when the closed neighborhood of the
+            # third is removed.
+            if (
+                component_structure[u][v] == component_structure[u][w]
+                and component_structure[v][u] == component_structure[v][w]
+                and component_structure[w][u] == component_structure[w][v]
+            ):
+                return [u, v, w]
+
+    return None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def is_at_free(G):
+    """Check if a graph is AT-free.
+
+    The function uses `find_asteroidal_triple` to recognize an AT-free
+    graph. If no asteroidal triple is found the graph is AT-free and True
+    is returned. If at least one asteroidal triple is found the graph is
+    not AT-free and False is returned.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check for asteroidal triples.
+
+    Returns
+    -------
+    bool
+        True if G is AT-free and False otherwise.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    >>> nx.is_at_free(G)
+    True
+
+    >>> G = nx.cycle_graph(6)
+    >>> nx.is_at_free(G)
+    False
+    """
+    return find_asteroidal_triple(G) is None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def create_component_structure(G):
+    r"""Create component structure for G.
+
+    A *component structure* is an `n` by `n` array, denoted `c`, where `n`
+    is the number of vertices, and where each row and column corresponds to
+    a vertex.
+
+    .. math::
+
+        c_{uv} = \begin{cases}
+            0, & \text{if } v \in N[u] \\
+            k, & \text{if } v \text{ is in component } k \text{ of } G \setminus N[u]
+        \end{cases}
+
+    where `k` is an arbitrary label for each component. The structure is
+    used to simplify the detection of asteroidal triples.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        Undirected, simple graph.
+
+    Returns
+    -------
+    component_structure : dictionary
+        A dictionary of dictionaries, keyed by pairs of vertices.
+
+    """
+    V = set(G.nodes)
+    component_structure = {}
+    for v in V:
+        label = 0
+        closed_neighborhood = set(G[v]).union({v})
+        row_dict = {}
+        for u in closed_neighborhood:
+            row_dict[u] = 0
+
+        G_reduced = G.subgraph(set(G.nodes) - closed_neighborhood)
+        for cc in nx.connected_components(G_reduced):
+            label += 1
+            for u in cc:
+                row_dict[u] = label
+
+        component_structure[v] = row_dict
+
+    return component_structure
diff --git a/networkx/algorithms/bipartite/__init__.py b/networkx/algorithms/bipartite/__init__.py
index 23c6fc3..09e5a38 100644
--- a/networkx/algorithms/bipartite/__init__.py
+++ b/networkx/algorithms/bipartite/__init__.py
@@ -6,7 +6,6 @@ The bipartite algorithms are not imported into the networkx namespace
 at the top level so the easiest way to use them is with:
 
->>> import networkx as nx
 >>> from networkx.algorithms import bipartite
 
 NetworkX does not have a custom bipartite graph class but the Graph()
@@ -16,22 +15,22 @@ in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
 identify the sets each node belongs to. This convention is not enforced in
 the source code of bipartite functions, it's only a recommendation.
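Stepping outside the patch for a moment, a usage sketch for the new asteroidal module added above (plain Python; which triple is reported depends on iteration order, so the printed nodes are one valid answer):

import networkx as nx

# A 6-cycle is the docstring's example of a graph that is not AT-free:
# the pairwise non-adjacent vertices 0, 2 and 4 form an asteroidal
# triple, since each pair is joined by a path avoiding the closed
# neighborhood of the third.
G = nx.cycle_graph(6)
print(nx.is_at_free(G))              # False
print(nx.find_asteroidal_triple(G))  # e.g. [0, 2, 4]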
- + For example: >>> B = nx.Graph() >>> # Add nodes with the node attribute "bipartite" >>> B.add_nodes_from([1, 2, 3, 4], bipartite=0) ->>> B.add_nodes_from(['a', 'b', 'c'], bipartite=1) +>>> B.add_nodes_from(["a", "b", "c"], bipartite=1) >>> # Add edges only between nodes of opposite node sets ->>> B.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), (2, 'c'), (3, 'c'), (4, 'a')]) +>>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) Many algorithms of the bipartite module of NetworkX require, as an argument, a container with all the nodes that belong to one set, in addition to the bipartite graph `B`. The functions in the bipartite package do not check that the node set is actually correct nor that the input graph is actually bipartite. -If `B` is connected, you can find the two node sets using a two-coloring -algorithm: +If `B` is connected, you can find the two node sets using a two-coloring +algorithm: >>> nx.is_connected(B) True @@ -48,7 +47,7 @@ Using the `bipartite` node attribute, you can easily get the two node sets: ->>> top_nodes = {n for n, d in B.nodes(data=True) if d['bipartite']==0} +>>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0} >>> bottom_nodes = set(B) - top_nodes So you can easily use the bipartite algorithms that require, as an argument, a @@ -58,11 +57,11 @@ 0.5 >>> G = bipartite.projected_graph(B, top_nodes) -All bipartite graph generators in NetworkX build bipartite graphs with the +All bipartite graph generators in NetworkX build bipartite graphs with the `bipartite` node attribute. Thus, you can use the same approach: >>> RB = bipartite.random_graph(5, 7, 0.2) ->>> RB_top = {n for n, d in RB.nodes(data=True) if d['bipartite']==0} +>>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0} >>> RB_bottom = set(RB) - RB_top >>> list(RB_top) [0, 1, 2, 3, 4] diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py index 6dc0de5..3c3d485 100644 --- a/networkx/algorithms/bipartite/basic.py +++ b/networkx/algorithms/bipartite/basic.py @@ -1,24 +1,19 @@ -# -*- coding: utf-8 -*- """ ========================== Bipartite Graph Algorithms ========================== """ -# Copyright (C) 2013-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ', - 'Aric Hagberg ']) -__all__ = ['is_bipartite', - 'is_bipartite_node_set', - 'color', - 'sets', - 'density', - 'degrees'] +from networkx.algorithms.components import connected_components + +__all__ = [ + "is_bipartite", + "is_bipartite_node_set", + "color", + "sets", + "density", + "degrees", +] def color(G): @@ -33,11 +28,12 @@ def color(G): Returns ------- color : dictionary - A dictionary keyed by node with a 1 or 0 as data for each node color. + A dictionary keyed by node with a 1 or 0 as data for each node color. Raises ------ - exc:`NetworkXError` if the graph is not two-colorable. + NetworkXError + If the graph is not two-colorable. 
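As a brief aside to the two-coloring machinery this docstring describes, a sketch of how `color` and `sets` fit together (plain Python, outside the diff):

import networkx as nx
from networkx.algorithms import bipartite

# A path on four nodes is bipartite; color() labels the two parts with
# 1/0 and sets() splits the node set accordingly.
G = nx.path_graph(4)
c = bipartite.color(G)       # {0: 1, 1: 0, 2: 1, 3: 0}
X, Y = bipartite.sets(G)
print(sorted(X), sorted(Y))  # [0, 2] [1, 3]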
    Examples
    --------
@@ -49,18 +45,18 @@ def color(G):
 
     You can use this to set a node attribute indicating the bipartite set:
 
-    >>> nx.set_node_attributes(G, c, 'bipartite')
-    >>> print(G.nodes[0]['bipartite'])
+    >>> nx.set_node_attributes(G, c, "bipartite")
+    >>> print(G.nodes[0]["bipartite"])
     1
-    >>> print(G.nodes[1]['bipartite'])
+    >>> print(G.nodes[1]["bipartite"])
     0
     """
     if G.is_directed():
         import itertools
 
         def neighbors(v):
-            return itertools.chain.from_iterable([G.predecessors(v),
-                                                  G.successors(v)])
+            return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])
+
     else:
         neighbors = G.neighbors
 
@@ -124,8 +120,8 @@ def is_bipartite_node_set(G, nodes):
     --------
     >>> from networkx.algorithms import bipartite
     >>> G = nx.path_graph(4)
-    >>> X = set([1,3])
-    >>> bipartite.is_bipartite_node_set(G,X)
+    >>> X = set([1, 3])
+    >>> bipartite.is_bipartite_node_set(G, X)
     True
 
     Notes
@@ -134,10 +130,11 @@ def is_bipartite_node_set(G, nodes):
     disconnected graphs.
     """
     S = set(nodes)
-    for CC in nx.connected_component_subgraphs(G):
+    for CC in (G.subgraph(c).copy() for c in connected_components(G)):
         X, Y = sets(CC)
-        if not ((X.issubset(S) and Y.isdisjoint(S)) or
-                (Y.issubset(S) and X.isdisjoint(S))):
+        if not (
+            (X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S))
+        ):
             return False
     return True
 
@@ -154,28 +151,26 @@ def sets(G, top_nodes=None):
    ----------
    G : NetworkX graph

-    top_nodes : container
-
+    top_nodes : container, optional
      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
-    (X,Y) : two-tuple of sets
-       One set of nodes for each part of the bipartite graph.
+    X : set
+      Nodes from one side of the bipartite graph.
+    Y : set
+      Nodes from the other side.

    Raises
    ------
-    AmbiguousSolution : Exception
-
+    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
          with all nodes in one bipartite set is provided. When determining
          the nodes in each bipartite set more than one valid solution is
          possible if the input graph is disconnected.
-
-    NetworkXError: Exception
-
+    NetworkXError
      Raised if the input graph is not bipartite.

    Examples
@@ -202,7 +197,7 @@ def sets(G, top_nodes=None):
        Y = set(G) - X
    else:
        if not is_connected(G):
-            msg = 'Disconnected graph: Ambiguous solution for bipartite sets.'
+            msg = "Disconnected graph: Ambiguous solution for bipartite sets."
            raise nx.AmbiguousSolution(msg)
        c = color(G)
        X = {n for n, is_top in c.items() if is_top}
@@ -211,7 +206,7 @@
 
 
 def density(B, nodes):
-    """Return density of bipartite graph B.
+    """Returns density of bipartite graph B.
 
     Parameters
     ----------
@@ -228,12 +223,12 @@ def density(B, nodes):
     Examples
     --------
     >>> from networkx.algorithms import bipartite
-    >>> G = nx.complete_bipartite_graph(3,2)
-    >>> X=set([0,1,2])
-    >>> bipartite.density(G,X)
+    >>> G = nx.complete_bipartite_graph(3, 2)
+    >>> X = set([0, 1, 2])
+    >>> bipartite.density(G, X)
     1.0
-    >>> Y=set([3,4])
-    >>> bipartite.density(G,Y)
+    >>> Y = set([3, 4])
+    >>> bipartite.density(G, Y)
     1.0
 
     Notes
@@ -263,7 +258,7 @@ def density(B, nodes):
 
 
 def degrees(B, nodes, weight=None):
-    """Return the degrees of the two node sets in the bipartite graph B.
+    """Returns the degrees of the two node sets in the bipartite graph B.
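A quick worked number for the density function above, assuming the usual undirected bipartite definition |E| / (|X| * |Y|) (sketch, outside the diff):

import networkx as nx
from networkx.algorithms import bipartite

# path_graph(4) is bipartite with parts {0, 2} and {1, 3}; it has 3 of
# the 2 * 2 = 4 edges the complete bipartite graph on those parts has.
G = nx.path_graph(4)
print(bipartite.density(G, {0, 2}))  # 0.75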
Parameters ---------- @@ -285,9 +280,9 @@ def degrees(B, nodes, weight=None): Examples -------- >>> from networkx.algorithms import bipartite - >>> G = nx.complete_bipartite_graph(3,2) - >>> Y=set([3,4]) - >>> degX,degY=bipartite.degrees(G,Y) + >>> G = nx.complete_bipartite_graph(3, 2) + >>> Y = set([3, 4]) + >>> degX, degY = bipartite.degrees(G, Y) >>> dict(degX) {0: 2, 1: 2, 2: 2} diff --git a/networkx/algorithms/bipartite/centrality.py b/networkx/algorithms/bipartite/centrality.py index b1ce57b..ef76a1f 100644 --- a/networkx/algorithms/bipartite/centrality.py +++ b/networkx/algorithms/bipartite/centrality.py @@ -1,15 +1,6 @@ -#-*- coding: utf-8 -*- -# Copyright (C) 2011 by -# Jordi Torrents -# Aric Hagberg -# All rights reserved. -# BSD license. import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ', - 'Aric Hagberg (hagberg@lanl.gov)']) -__all__ = ['degree_centrality', - 'betweenness_centrality', - 'closeness_centrality'] + +__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"] def degree_centrality(G, nodes): @@ -73,9 +64,9 @@ def degree_centrality(G, nodes): top = set(nodes) bottom = set(G) - top s = 1.0 / len(bottom) - centrality = dict((n, d * s) for n, d in G.degree(top)) + centrality = {n: d * s for n, d in G.degree(top)} s = 1.0 / len(top) - centrality.update(dict((n, d * s) for n, d in G.degree(bottom))) + centrality.update({n: d * s for n, d in G.degree(bottom)}) return centrality @@ -157,16 +148,19 @@ def betweenness_centrality(G, nodes): m = float(len(bottom)) s = (n - 1) // m t = (n - 1) % m - bet_max_top = (((m**2) * ((s + 1)**2)) + - (m * (s + 1) * (2 * t - s - 1)) - - (t * ((2 * s) - t + 3))) / 2.0 + bet_max_top = ( + ((m ** 2) * ((s + 1) ** 2)) + + (m * (s + 1) * (2 * t - s - 1)) + - (t * ((2 * s) - t + 3)) + ) / 2.0 p = (m - 1) // n r = (m - 1) % n - bet_max_bot = (((n**2) * ((p + 1)**2)) + - (n * (p + 1) * (2 * r - p - 1)) - - (r * ((2 * p) - r + 3))) / 2.0 - betweenness = nx.betweenness_centrality(G, normalized=False, - weight=None) + bet_max_bot = ( + ((n ** 2) * ((p + 1) ** 2)) + + (n * (p + 1) * (2 * r - p - 1)) + - (r * ((2 * p) - r + 3)) + ) / 2.0 + betweenness = nx.betweenness_centrality(G, normalized=False, weight=None) for node in top: betweenness[node] /= bet_max_top for node in bottom: diff --git a/networkx/algorithms/bipartite/cluster.py b/networkx/algorithms/bipartite/cluster.py index f29bd15..64343d7 100644 --- a/networkx/algorithms/bipartite/cluster.py +++ b/networkx/algorithms/bipartite/cluster.py @@ -1,19 +1,16 @@ -#-*- coding: utf-8 -*- -# Copyright (C) 2011 by -# Jordi Torrents -# Aric Hagberg -# All rights reserved. -# BSD license. +"""Functions for computing clustering of pairs + +""" + import itertools import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ', - 'Aric Hagberg (hagberg@lanl.gov)']) -__all__ = ['clustering', - 'average_clustering', - 'latapy_clustering', - 'robins_alexander_clustering'] -# functions for computing clustering of pairs +__all__ = [ + "clustering", + "average_clustering", + "latapy_clustering", + "robins_alexander_clustering", +] def cc_dot(nu, nv): @@ -28,12 +25,10 @@ def cc_min(nu, nv): return float(len(nu & nv)) / min(len(nu), len(nv)) -modes = {'dot': cc_dot, - 'min': cc_min, - 'max': cc_max} +modes = {"dot": cc_dot, "min": cc_min, "max": cc_max} -def latapy_clustering(G, nodes=None, mode='dot'): +def latapy_clustering(G, nodes=None, mode="dot"): r"""Compute a bipartite clustering coefficient for nodes. 
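Before the clustering docstring continues below, a concrete instance of the normalization in the centrality hunk above, where each side is scaled by the size of the opposite side (sketch, outside the diff):

import networkx as nx
from networkx.algorithms import bipartite

# In K_{3,2} every top node (0-2) has degree 2 with |bottom| = 2, and
# every bottom node (3-4) has degree 3 with |top| = 3, so all
# centralities come out as 1.0.
G = nx.complete_bipartite_graph(3, 2)
print(bipartite.degree_centrality(G, nodes=[0, 1, 2]))
# {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}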
    The bipartite clustering coefficient is a measure of local density
@@ -43,25 +38,25 @@ def latapy_clustering(G, nodes=None, mode="dot"):
 
       c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}
 
-    where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, 
-    and `c_{uv}` is the pairwise clustering coefficient between nodes 
+    where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
+    and `c_{uv}` is the pairwise clustering coefficient between nodes
     `u` and `v`.
 
     The mode selects the function for `c_{uv}` which can be:
 
-    `dot`: 
+    `dot`:
 
     .. math::
 
       c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}
 
-    `min`: 
+    `min`:
 
    .. math::
 
       c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}
 
-    `max`: 
+    `max`:
 
    .. math::
 
@@ -74,12 +69,12 @@ def latapy_clustering(G, nodes=None, mode='dot'):
        A bipartite graph
 
     nodes : list or iterable (optional)
-        Compute bipartite clustering for these nodes. The default 
+        Compute bipartite clustering for these nodes. The default
         is all nodes in G.
 
     mode : string
         The pairwise bipartite clustering method to be used in the computation.
-        It must be "dot", "max", or "min". 
+        It must be "dot", "max", or "min".
 
     Returns
     -------
@@ -90,11 +85,11 @@ def latapy_clustering(G, nodes=None, mode="dot"):
     Examples
     --------
     >>> from networkx.algorithms import bipartite
-    >>> G = nx.path_graph(4) # path graphs are bipartite
-    >>> c = bipartite.clustering(G) 
+    >>> G = nx.path_graph(4)  # path graphs are bipartite
+    >>> c = bipartite.clustering(G)
     >>> c[0]
     0.5
-    >>> c = bipartite.clustering(G,mode='min') 
+    >>> c = bipartite.clustering(G, mode="min")
     >>> c[0]
     1.0
 
@@ -107,7 +102,7 @@ def latapy_clustering(G, nodes=None, mode="dot"):
     References
     ----------
     .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
-       Basic notions for the analysis of large two-mode networks. 
+       Basic notions for the analysis of large two-mode networks.
        Social Networks 30(1), 31--48.
     """
     if not nx.algorithms.bipartite.is_bipartite(G):
@@ -115,16 +110,17 @@ def latapy_clustering(G, nodes=None, mode="dot"):
 
     try:
         cc_func = modes[mode]
-    except KeyError:
+    except KeyError as e:
         raise nx.NetworkXError(
-            "Mode for bipartite clustering must be: dot, min or max")
+            "Mode for bipartite clustering must be: dot, min or max"
+        ) from e
 
     if nodes is None:
         nodes = G
     ccs = {}
     for v in nodes:
         cc = 0.0
-        nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])
+        nbrs2 = {u for nbr in G[v] for u in G[nbr]} - {v}
         for u in nbrs2:
             cc += cc_func(set(G[u]), set(G[v]))
         if cc > 0.0:  # len(nbrs2)>0
@@ -136,10 +132,10 @@ def latapy_clustering(G, nodes=None, mode="dot"):
 clustering = latapy_clustering
 
 
-def average_clustering(G, nodes=None, mode='dot'):
+def average_clustering(G, nodes=None, mode="dot"):
     r"""Compute the average bipartite clustering coefficient.
 
-    A clustering coefficient for the whole graph is the average, 
+    A clustering coefficient for the whole graph is the average,
 
     .. math::
 
@@ -161,40 +157,40 @@ def average_clustering(G, nodes=None, mode="dot"):
        a bipartite graph
 
     nodes : list or iterable, optional
-        A container of nodes to use in computing the average.  
-        The nodes should be either the entire graph (the default) or one of the 
+        A container of nodes to use in computing the average.
+        The nodes should be either the entire graph (the default) or one of the
         bipartite sets.
 
     mode : string
-        The pairwise bipartite clustering method. 
-        It must be "dot", "max", or "min" 
+        The pairwise bipartite clustering method.
+ It must be "dot", "max", or "min" Returns ------- clustering : float - The average bipartite clustering for the given set of nodes or the + The average bipartite clustering for the given set of nodes or the entire graph if no nodes are specified. Examples -------- >>> from networkx.algorithms import bipartite - >>> G=nx.star_graph(3) # star graphs are bipartite - >>> bipartite.average_clustering(G) + >>> G = nx.star_graph(3) # star graphs are bipartite + >>> bipartite.average_clustering(G) 0.75 - >>> X,Y=bipartite.sets(G) - >>> bipartite.average_clustering(G,X) + >>> X, Y = bipartite.sets(G) + >>> bipartite.average_clustering(G, X) 0.0 - >>> bipartite.average_clustering(G,Y) + >>> bipartite.average_clustering(G, Y) 1.0 See Also -------- clustering - Notes + Notes ----- The container of nodes passed to this function must contain all of the nodes - in one of the bipartite sets ("top" or "bottom") in order to compute + in one of the bipartite sets ("top" or "bottom") in order to compute the correct average bipartite clustering coefficients. See :mod:`bipartite documentation ` for further details on how bipartite graphs are handled in NetworkX. @@ -203,7 +199,7 @@ def average_clustering(G, nodes=None, mode='dot'): References ---------- .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). - Basic notions for the analysis of large two-mode networks. + Basic notions for the analysis of large two-mode networks. Social Networks 30(1), 31--48. """ if nodes is None: @@ -247,8 +243,8 @@ def robins_alexander_clustering(G): References ---------- - .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking - directors: Network structure and distance in bipartite graphs. + .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking + directors: Network structure and distance in bipartite graphs. Computational & Mathematical Organization Theory 10(1), 69–94. """ @@ -258,14 +254,14 @@ def robins_alexander_clustering(G): if L_3 == 0: return 0 C_4 = _four_cycles(G) - return (4. * C_4) / L_3 + return (4.0 * C_4) / L_3 def _four_cycles(G): cycles = 0 for v in G: for u, w in itertools.combinations(G[v], 2): - cycles += len((set(G[u]) & set(G[w])) - set([v])) + cycles += len((set(G[u]) & set(G[w])) - {v}) return cycles / 4 @@ -273,8 +269,8 @@ def _threepaths(G): paths = 0 for v in G: for u in G[v]: - for w in set(G[u]) - set([v]): - paths += len(set(G[w]) - set([v, u])) + for w in set(G[u]) - {v}: + paths += len(set(G[w]) - {v, u}) # Divide by two because we count each three path twice # one for each possible starting point return paths / 2 diff --git a/networkx/algorithms/bipartite/covering.py b/networkx/algorithms/bipartite/covering.py index 551dbbe..c8460d7 100644 --- a/networkx/algorithms/bipartite/covering.py +++ b/networkx/algorithms/bipartite/covering.py @@ -1,20 +1,14 @@ -# Copyright 2016-2018 NetworkX developers. -# Copyright (C) 2016 by -# Nishant Nikhil -# All rights reserved. -# BSD license. 
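Before moving on to the covering module, the star-graph figure asserted in the clustering doctest above can be reproduced by hand (sketch, outside the diff):

import networkx as nx
from networkx.algorithms import bipartite

# star_graph(3): centre 0 with leaves 1-3. The centre has no
# second-order neighbours, so its coefficient is 0; each leaf shares
# the centre with the other two leaves, giving 1.0 each. The average
# is therefore (0 + 1 + 1 + 1) / 4 = 0.75.
G = nx.star_graph(3)
print(bipartite.average_clustering(G))  # 0.75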
- """ Functions related to graph covers.""" from networkx.utils import not_implemented_for from networkx.algorithms.bipartite.matching import hopcroft_karp_matching from networkx.algorithms.covering import min_edge_cover as _min_edge_cover -__all__ = ['min_edge_cover'] +__all__ = ["min_edge_cover"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def min_edge_cover(G, matching_algorithm=None): """Returns a set of edges which constitutes the minimum edge cover of the graph. diff --git a/networkx/algorithms/bipartite/edgelist.py b/networkx/algorithms/bipartite/edgelist.py index ca0b708..d480bb5 100644 --- a/networkx/algorithms/bipartite/edgelist.py +++ b/networkx/algorithms/bipartite/edgelist.py @@ -1,7 +1,7 @@ """ -********** +******************** Bipartite Edge Lists -********** +******************** Read and write NetworkX graphs as bipartite edge lists. Format @@ -22,25 +22,14 @@ For each edge (u, v) the node u is assigned to part 0 and the node v to part 1. """ -# Copyright (C) 2015 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -__all__ = ['generate_edgelist', - 'write_edgelist', - 'parse_edgelist', - 'read_edgelist'] +__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"] import networkx as nx -from networkx.utils import open_file, make_str, not_implemented_for -from networkx.convert import _prep_create_using +from networkx.utils import open_file, not_implemented_for -@open_file(1, mode='wb') -def write_edgelist(G, path, comments="#", delimiter=' ', data=True, - encoding='utf-8'): +@open_file(1, mode="wb") +def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"): """Write a bipartite graph as a list of edges. Parameters @@ -64,20 +53,20 @@ def write_edgelist(G, path, comments="#", delimiter=' ', data=True, Examples -------- - >>> G=nx.path_graph(4) - >>> G.add_nodes_from([0,2], bipartite=0) - >>> G.add_nodes_from([1,3], bipartite=1) + >>> G = nx.path_graph(4) + >>> G.add_nodes_from([0, 2], bipartite=0) + >>> G.add_nodes_from([1, 3], bipartite=1) >>> nx.write_edgelist(G, "test.edgelist") - >>> fh=open("test.edgelist",'wb') + >>> fh = open("test.edgelist", "wb") >>> nx.write_edgelist(G, fh) >>> nx.write_edgelist(G, "test.edgelist.gz") >>> nx.write_edgelist(G, "test.edgelist.gz", data=False) - >>> G=nx.Graph() - >>> G.add_edge(1,2,weight=7,color='red') - >>> nx.write_edgelist(G,'test.edgelist',data=False) - >>> nx.write_edgelist(G,'test.edgelist',data=['color']) - >>> nx.write_edgelist(G,'test.edgelist',data=['color','weight']) + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=7, color="red") + >>> nx.write_edgelist(G, "test.edgelist", data=False) + >>> nx.write_edgelist(G, "test.edgelist", data=["color"]) + >>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"]) See Also -------- @@ -85,12 +74,12 @@ def write_edgelist(G, path, comments="#", delimiter=' ', data=True, generate_edgelist() """ for line in generate_edgelist(G, delimiter, data): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -@not_implemented_for('directed') -def generate_edgelist(G, delimiter=' ', data=True): +@not_implemented_for("directed") +def generate_edgelist(G, delimiter=" ", data=True): """Generate a single line of the bipartite graph G in edge list format. 
Parameters @@ -116,10 +105,10 @@ def generate_edgelist(G, delimiter=' ', data=True): -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) - >>> G.add_nodes_from([0,2], bipartite=0) - >>> G.add_nodes_from([1,3], bipartite=1) - >>> G[1][2]['weight'] = 3 - >>> G[2][3]['capacity'] = 12 + >>> G.add_nodes_from([0, 2], bipartite=0) + >>> G.add_nodes_from([1, 3], bipartite=1) + >>> G[1][2]["weight"] = 3 + >>> G[2][3]["capacity"] = 12 >>> for line in bipartite.generate_edgelist(G, data=False): ... print(line) 0 1 @@ -132,20 +121,20 @@ def generate_edgelist(G, delimiter=' ', data=True): 2 1 {'weight': 3} 2 3 {'capacity': 12} - >>> for line in bipartite.generate_edgelist(G,data=['weight']): + >>> for line in bipartite.generate_edgelist(G, data=["weight"]): ... print(line) 0 1 2 1 3 2 3 """ try: - part0 = [n for n, d in G.nodes.items() if d['bipartite'] == 0] - except: - raise AttributeError("Missing node attribute `bipartite`") + part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0] + except BaseException as e: + raise AttributeError("Missing node attribute `bipartite`") from e if data is True or data is False: for n in part0: for e in G.edges(n, data=data): - yield delimiter.join(map(make_str, e)) + yield delimiter.join(map(str, e)) else: for n in part0: for u, v, d in G.edges(n, data=True): @@ -154,11 +143,12 @@ def generate_edgelist(G, delimiter=' ', data=True): e.extend(d[k] for k in data) except KeyError: pass # missing data for this edge, should warn? - yield delimiter.join(map(make_str, e)) + yield delimiter.join(map(str, e)) -def parse_edgelist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None, data=True): +def parse_edgelist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True +): """Parse lines of an edge list representation of a bipartite graph. Parameters @@ -188,10 +178,8 @@ def parse_edgelist(lines, comments='#', delimiter=None, Edgelist with no data: >>> from networkx.algorithms import bipartite - >>> lines = ["1 2", - ... "2 3", - ... "3 4"] - >>> G = bipartite.parse_edgelist(lines, nodetype = int) + >>> lines = ["1 2", "2 3", "3 4"] + >>> G = bipartite.parse_edgelist(lines, nodetype=int) >>> sorted(G.nodes()) [1, 2, 3, 4] >>> sorted(G.nodes(data=True)) @@ -201,31 +189,28 @@ def parse_edgelist(lines, comments='#', delimiter=None, Edgelist with data in Python dictionary representation: - >>> lines = ["1 2 {'weight':3}", - ... "2 3 {'weight':27}", - ... "3 4 {'weight':3.0}"] - >>> G = bipartite.parse_edgelist(lines, nodetype = int) + >>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"] + >>> G = bipartite.parse_edgelist(lines, nodetype=int) >>> sorted(G.nodes()) [1, 2, 3, 4] - >>> sorted(G.edges(data = True)) + >>> sorted(G.edges(data=True)) [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})] Edgelist with data in a list: - >>> lines = ["1 2 3", - ... "2 3 27", - ... 
"3 4 3.0"] - >>> G = bipartite.parse_edgelist(lines, nodetype = int, data=(('weight',float),)) + >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"] + >>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),)) >>> sorted(G.nodes()) [1, 2, 3, 4] - >>> sorted(G.edges(data = True)) + >>> sorted(G.edges(data=True)) [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})] See Also -------- """ from ast import literal_eval - G = _prep_create_using(create_using) + + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) if p >= 0: @@ -243,9 +228,10 @@ def parse_edgelist(lines, comments='#', delimiter=None, try: u = nodetype(u) v = nodetype(v) - except: - raise TypeError("Failed to convert nodes %s,%s to type %s." - % (u, v, nodetype)) + except BaseException as e: + raise TypeError( + f"Failed to convert nodes {u},{v} " f"to type {nodetype}." + ) from e if len(d) == 0 or data is False: # no data or data type specified @@ -253,24 +239,26 @@ def parse_edgelist(lines, comments='#', delimiter=None, elif data is True: # no edge types specified try: # try to evaluate as dictionary - edgedata = dict(literal_eval(' '.join(d))) - except: + edgedata = dict(literal_eval(" ".join(d))) + except BaseException as e: raise TypeError( - "Failed to convert edge data (%s) to dictionary." % (d)) + f"Failed to convert edge data ({d})" f"to dictionary." + ) from e else: # convert edge data to dictionary with specified keys and type if len(d) != len(data): raise IndexError( - "Edge data %s and data_keys %s are not the same length" % - (d, data)) + f"Edge data {d} and data_keys {data} are not the same length" + ) edgedata = {} for (edge_key, edge_type), edge_value in zip(data, d): try: edge_value = edge_type(edge_value) - except: + except BaseException as e: raise TypeError( - "Failed to convert %s data %s to type %s." - % (edge_key, edge_value, edge_type)) + f"Failed to convert {edge_key} data " + f"{edge_value} to type {edge_type}." + ) from e edgedata.update({edge_key: edge_value}) G.add_node(u, bipartite=0) G.add_node(v, bipartite=1) @@ -278,11 +266,17 @@ def parse_edgelist(lines, comments='#', delimiter=None, return G -@open_file(0, mode='rb') -def read_edgelist(path, comments="#", - delimiter=None, create_using=None, - nodetype=None, data=True, edgetype=None, - encoding='utf-8'): +@open_file(0, mode="rb") +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): """Read a bipartite graph from a list of edges. 
Parameters @@ -316,24 +310,26 @@ def read_edgelist(path, comments="#", -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) - >>> G.add_nodes_from([0,2], bipartite=0) - >>> G.add_nodes_from([1,3], bipartite=1) + >>> G.add_nodes_from([0, 2], bipartite=0) + >>> G.add_nodes_from([1, 3], bipartite=1) >>> bipartite.write_edgelist(G, "test.edgelist") >>> G = bipartite.read_edgelist("test.edgelist") - >>> fh = open("test.edgelist", 'rb') + >>> fh = open("test.edgelist", "rb") >>> G = bipartite.read_edgelist(fh) >>> fh.close() - >>> G=bipartite.read_edgelist("test.edgelist", nodetype=int) + >>> G = bipartite.read_edgelist("test.edgelist", nodetype=int) Edgelist with data in a list: - >>> textline = '1 2 3' - >>> fh = open('test.edgelist','w') + >>> textline = "1 2 3" + >>> fh = open("test.edgelist", "w") >>> d = fh.write(textline) >>> fh.close() - >>> G = bipartite.read_edgelist('test.edgelist', nodetype=int, data=(('weight',float),)) + >>> G = bipartite.read_edgelist( + ... "test.edgelist", nodetype=int, data=(("weight", float),) + ... ) >>> list(G) [1, 2] >>> list(G.edges(data=True)) @@ -351,8 +347,11 @@ def read_edgelist(path, comments="#", types (e.g. int, float, str, frozenset - or tuples of those, etc.) """ lines = (line.decode(encoding) for line in path) - return parse_edgelist(lines, comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype, - data=data) + return parse_edgelist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=data, + ) diff --git a/networkx/algorithms/bipartite/generators.py b/networkx/algorithms/bipartite/generators.py index b2f31e2..faf84c6 100644 --- a/networkx/algorithms/bipartite/generators.py +++ b/networkx/algorithms/bipartite/generators.py @@ -1,43 +1,31 @@ -# -*- coding: utf-8 -*- """ Generators and functions for bipartite graphs. - """ -# Copyright (C) 2006-2011 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import math import numbers -import random -import networkx from functools import reduce import networkx as nx -from networkx.utils import nodes_or_number - -__author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult(dschult@colgate.edu)']) -__all__ = ['configuration_model', - 'havel_hakimi_graph', - 'reverse_havel_hakimi_graph', - 'alternating_havel_hakimi_graph', - 'preferential_attachment_graph', - 'random_graph', - 'gnmk_random_graph', - 'complete_bipartite_graph', - ] +from networkx.utils import nodes_or_number, py_random_state + +__all__ = [ + "configuration_model", + "havel_hakimi_graph", + "reverse_havel_hakimi_graph", + "alternating_havel_hakimi_graph", + "preferential_attachment_graph", + "random_graph", + "gnmk_random_graph", + "complete_bipartite_graph", +] @nodes_or_number([0, 1]) def complete_bipartite_graph(n1, n2, create_using=None): - """Return the complete bipartite graph `K_{n_1,n_2}`. + """Returns the complete bipartite graph `K_{n_1,n_2}`. - Composed of two partitions with `n_1` nodes in the first - and `n_2` nodes in the second. Each node in the first is - connected to each node in the second. + The graph is composed of two partitions with nodes 0 to (n1 - 1) + in the first and nodes n1 to (n1 + n2 - 1) in the second. + Each node in the first is connected to each node in the second. 
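The numbering convention just stated, spelled out for K_{3,2} before the parameter list below (sketch, outside the diff):

import networkx as nx
from networkx.algorithms import bipartite

# First part is 0..n1-1, second part is n1..n1+n2-1.
K = bipartite.complete_bipartite_graph(3, 2)
print(sorted(K.edges()))
# [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4)]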
    Parameters
    ----------
@@ -54,14 +42,13 @@ def complete_bipartite_graph(n1, n2, create_using=None):
     The nodes are assigned the attribute 'bipartite' with the value 0 or 1
     to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it, use nx.bipartite.complete_bipartite_graph
     """
-    if create_using is None:
-        G = nx.Graph()
-    else:
-        if create_using.is_directed():
-            raise nx.NetworkXError("Directed Graph not supported")
-        G = create_using
-    G.clear()
+    G = nx.empty_graph(0, create_using)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
 
     n1, top = n1
     n2, bottom = n2
@@ -70,12 +57,13 @@ def complete_bipartite_graph(n1, n2, create_using=None):
     G.add_nodes_from(top, bipartite=0)
     G.add_nodes_from(bottom, bipartite=1)
     G.add_edges_from((u, v) for u in top for v in bottom)
-    G.graph['name'] = "complete_bipartite_graph(%s,%s)" % (n1, n2)
+    G.graph["name"] = f"complete_bipartite_graph({n1},{n2})"
     return G
 
 
+@py_random_state(3)
 def configuration_model(aseq, bseq, create_using=None, seed=None):
-    """Return a random bipartite graph from two given degree sequences.
+    """Returns a random bipartite graph from two given degree sequences.
 
     Parameters
     ----------
@@ -85,12 +73,14 @@ def configuration_model(aseq, bseq, create_using=None, seed=None):
       Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
       Return graph of this type.
-    seed : integer, optional
-       Seed for random number generator.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
 
-    Nodes from the set A are connected to nodes in the set B by
-    choosing randomly from the possible free stubs, one in A and
-    one in B.
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
+    Nodes from set A are connected to nodes in set B by choosing
+    randomly from the possible free stubs, one in A and one in B.
 
     Notes
     -----
@@ -103,18 +93,11 @@ def configuration_model(aseq, bseq, create_using=None, seed=None):
     to indicate which bipartite set the node belongs to.
 
     This function is not imported in the main namespace.
-    To use it you have to explicitly import the bipartite package.
+ To use it use nx.bipartite.configuration_model """ - if create_using is None: - create_using = networkx.MultiGraph() - elif create_using.is_directed(): - raise networkx.NetworkXError( - "Directed Graph not supported") - - G = networkx.empty_graph(0, create_using) - - if not seed is None: - random.seed(seed) + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") # length and sum of each sequence lena = len(aseq) @@ -123,13 +106,13 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): sumb = sum(bseq) if not suma == sumb: - raise networkx.NetworkXError( - 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s' - % (suma, sumb)) + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, lena, lenb) - if max(aseq) == 0: + if len(aseq) == 0 or max(aseq) == 0: return G # done if no edges # build lists of degree-repeated vertex numbers @@ -144,8 +127,8 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): bstubs = [x for subseq in stubs for x in subseq] # shuffle lists - random.shuffle(astubs) - random.shuffle(bstubs) + seed.shuffle(astubs) + seed.shuffle(bstubs) G.add_edges_from([[astubs[i], bstubs[i]] for i in range(suma)]) @@ -154,9 +137,11 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): def havel_hakimi_graph(aseq, bseq, create_using=None): - """Return a bipartite graph from two given degree sequences using a + """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1). Nodes from the set A are connected to nodes in the set B by connecting the highest degree nodes in set A to the highest degree nodes in set B until all stubs are connected. @@ -172,9 +157,6 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): Notes ----- - This function is not imported in the main namespace. - To use it you have to explicitly import the bipartite package. - The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() @@ -182,14 +164,13 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. - """ - if create_using is None: - create_using = networkx.MultiGraph() - elif create_using.is_directed(): - raise networkx.NetworkXError( - "Directed Graph not supported") - G = networkx.empty_graph(0, create_using) + This function is not imported in the main namespace.
+ To use it use nx.bipartite.havel_hakimi_graph + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") # length of each sequence naseq = len(aseq) @@ -199,13 +180,13 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): sumb = sum(bseq) if not suma == sumb: - raise networkx.NetworkXError( - 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s' - % (suma, sumb)) + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, naseq, nbseq) - if max(aseq) == 0: + if len(aseq) == 0 or max(aseq) == 0: return G # done if no edges # build list of degree-repeated vertex numbers @@ -230,9 +211,11 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): - """Return a bipartite graph from two given degree sequences using a + """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1). Nodes from set A are connected to nodes in the set B by connecting the highest degree nodes in set A to the lowest degree nodes in set B until all stubs are connected. @@ -248,9 +231,6 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): Notes ----- - This function is not imported in the main namespace. - To use it you have to explicitly import the bipartite package. - The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() @@ -258,14 +238,13 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. - """ - if create_using is None: - create_using = networkx.MultiGraph() - elif create_using.is_directed(): - raise networkx.NetworkXError( - "Directed Graph not supported") - G = networkx.empty_graph(0, create_using) + This function is not imported in the main namespace. + To use it use nx.bipartite.reverse_havel_hakimi_graph + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") # length of each sequence lena = len(aseq) @@ -274,13 +253,13 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): sumb = sum(bseq) if not suma == sumb: - raise networkx.NetworkXError( - 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s' - % (suma, sumb)) + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, lena, lenb) - if max(aseq) == 0: + if len(aseq) == 0 or max(aseq) == 0: return G # done if no edges # build list of degree-repeated vertex numbers @@ -305,9 +284,11 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): - """Return a bipartite graph from two given degree sequences using + """Returns a bipartite graph from two given degree sequences using an alternating Havel-Hakimi style construction. + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
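A sketch contrasting the three Havel-Hakimi variants in this file: they differ only in which B stubs each high-degree A node is wired to, so all of them realize both degree sequences exactly:

    >>> import networkx as nx
    >>> aseq, bseq = [3, 1], [2, 1, 1]  # sums must match: 4 == 4
    >>> for build in (nx.bipartite.havel_hakimi_graph,
    ...               nx.bipartite.reverse_havel_hakimi_graph,
    ...               nx.bipartite.alternating_havel_hakimi_graph):
    ...     G = build(aseq, bseq)
    ...     assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 3]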
Nodes from the set A are connected to nodes in the set B by connecting the highest degree nodes in set A to alternately the highest and the lowest degree nodes in set B until all stubs are @@ -324,9 +305,6 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): Notes ----- - This function is not imported in the main namespace. - To use it you have to explicitly import the bipartite package. - The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() @@ -334,14 +312,13 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. - """ - if create_using is None: - create_using = networkx.MultiGraph() - elif create_using.is_directed(): - raise networkx.NetworkXError( - "Directed Graph not supported") - G = networkx.empty_graph(0, create_using) + This function is not imported in the main namespace. + To use it use nx.bipartite.alternating_havel_hakimi_graph + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") # length of each sequence naseq = len(aseq) @@ -350,13 +327,13 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): sumb = sum(bseq) if not suma == sumb: - raise networkx.NetworkXError( - 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s' - % (suma, sumb)) + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, naseq, nbseq) - if max(aseq) == 0: + if len(aseq) == 0 or max(aseq) == 0: return G # done if no edges # build list of degree-repeated vertex numbers astubs = [[aseq[v], v] for v in range(0, naseq)] @@ -367,8 +344,8 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): if degree == 0: break # done, all are zero bstubs.sort() - small = bstubs[0:degree // 2] # add these low degree targets - large = bstubs[(-degree + degree // 2):] # and these high degree targets + small = bstubs[0 : degree // 2] # add these low degree targets + large = bstubs[(-degree + degree // 2) :] # now high degree targets stubs = [x for z in zip(large, small) for x in z] # combine, sorry if len(stubs) < len(small) + len(large): # check for zip truncation stubs.append(large.pop()) @@ -383,10 +360,15 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): return G +@py_random_state(3) def preferential_attachment_graph(aseq, p, create_using=None, seed=None): """Create a bipartite graph with a preferential attachment model from a given single degree sequence. + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes starting with node len(aseq). + The number of nodes in set B is random. + Parameters ---------- aseq : list @@ -395,35 +377,35 @@ def preferential_attachment_graph(aseq, p, create_using=None, seed=None): Probability that a new bottom node is added. create_using : NetworkX graph instance, optional Return graph of this type. - seed : integer, optional - Seed for random number generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. References ---------- - .. [1] Jean-Loup Guillaume and Matthieu Latapy, + .. [1] Guillaume, J.L. 
and Latapy, M., + Bipartite graphs as models of complex networks. + Physica A: Statistical Mechanics and its Applications, + 2006, 371(2), pp.795-813. + .. [2] Jean-Loup Guillaume and Matthieu Latapy, Bipartite structure of all complex networks, Inf. Process. Lett. 90, 2004, pg. 215-221 https://doi.org/10.1016/j.ipl.2004.03.007 Notes ----- + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. This function is not imported in the main namespace. - To use it you have to explicitly import the bipartite package. + To use it use nx.bipartite.preferential_attachment_graph """ - if create_using is None: - create_using = networkx.MultiGraph() - elif create_using.is_directed(): - raise networkx.NetworkXError( - "Directed Graph not supported") + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") if p > 1: - raise networkx.NetworkXError("probability %s > 1" % (p)) - - G = networkx.empty_graph(0, create_using) - - if not seed is None: - random.seed(seed) + raise nx.NetworkXError(f"probability {p} > 1") naseq = len(aseq) G = _add_nodes_with_bipartite_label(G, naseq, 0) @@ -432,16 +414,16 @@ def preferential_attachment_graph(aseq, p, create_using=None, seed=None): while vv[0]: source = vv[0][0] vv[0].remove(source) - if random.random() < p or G.number_of_nodes() == naseq: - target = G.number_of_nodes() + if seed.random() < p or len(G) == naseq: + target = len(G) G.add_node(target, bipartite=1) G.add_edge(source, target) else: - bb = [[b] * G.degree(b) for b in range(naseq, G.number_of_nodes())] + bb = [[b] * G.degree(b) for b in range(naseq, len(G))] # flatten the list of lists into a list. bbstubs = reduce(lambda x, y: x + y, bb) # choose preferentially a bottom node. - target = random.choice(bbstubs) + target = seed.choice(bbstubs) G.add_node(target, bipartite=1) G.add_edge(source, target) vv.remove(vv[0]) @@ -449,10 +431,13 @@ def preferential_attachment_graph(aseq, p, create_using=None, seed=None): return G +@py_random_state(3) def random_graph(n, m, p, seed=None, directed=False): - """Return a bipartite random graph. + """Returns a bipartite random graph. This is a bipartite version of the binomial (Erdős-Rényi) graph. + The graph is composed of two partitions. Set A has nodes 0 to + (n - 1) and set B has nodes n to (n + m - 1). Parameters ---------- @@ -462,16 +447,14 @@ def random_graph(n, m, p, seed=None, directed=False): The number of nodes in the second bipartite set. p : float Probability for edge creation. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. directed : bool, optional (default=False) If True return a directed graph Notes ----- - This function is not imported in the main namespace. - To use it you have to explicitly import the bipartite package. - The bipartite random graph algorithm chooses each of the n*m (undirected) or 2*nm (directed) possible edges with probability p. @@ -480,6 +463,9 @@ def random_graph(n, m, p, seed=None, directed=False): The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. + This function is not imported in the main namespace. 
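A sketch of preferential_attachment_graph as described above: only the top partition's size is fixed by aseq, while the bottom partition grows at random as edges are attached:

    >>> import networkx as nx
    >>> G = nx.bipartite.preferential_attachment_graph([2, 2, 1], 0.5, seed=1)
    >>> [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]  # top nodes are fixed
    [0, 1, 2]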
+ To use it use nx.bipartite.random_graph + See Also -------- gnp_random_graph, configuration_model @@ -494,10 +480,7 @@ G = _add_nodes_with_bipartite_label(G, n, m) if directed: G = nx.DiGraph(G) - G.name = "fast_gnp_random_graph(%s,%s,%s)" % (n, m, p) - - if not seed is None: - random.seed(seed) + G.name = f"fast_gnp_random_graph({n},{m},{p})" if p <= 0: return G @@ -509,7 +492,7 @@ v = 0 w = -1 while v < n: - lr = math.log(1.0 - random.random()) + lr = math.log(1.0 - seed.random()) w = w + 1 + int(lr / lp) while w >= m and v < n: w = w - m @@ -523,7 +506,7 @@ v = 0 w = -1 while v < n: - lr = math.log(1.0 - random.random()) + lr = math.log(1.0 - seed.random()) w = w + 1 + int(lr / lp) while w >= m and v < n: w = w - m @@ -534,11 +517,14 @@ return G +@py_random_state(3) def gnmk_random_graph(n, m, k, seed=None, directed=False): - """Return a random bipartite graph G_{n,m,k}. + """Returns a random bipartite graph G_{n,m,k}. Produces a bipartite graph chosen randomly out of the set of all graphs with n top nodes, m bottom nodes, and k edges. + The graph is composed of two sets of nodes. + Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1). Parameters ---------- @@ -548,14 +534,15 @@ The number of nodes in the second bipartite set. k : int The number of edges - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. directed : bool, optional (default=False) If True return a directed graph Examples -------- from networkx.algorithms import bipartite G = bipartite.gnmk_random_graph(10,20,50) See Also -------- @@ -564,33 +551,34 @@ Notes ----- - This function is not imported in the main namespace. - To use it you have to explicitly import the bipartite package. - If k > m * n then a complete bipartite graph is returned. This graph is a bipartite version of the `G_{nm}` random graph model. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. 
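A sketch of gnmk_random_graph as documented above; for k < n*m and n, m > 1 the loop places exactly k distinct edges:

    >>> import networkx as nx
    >>> G = nx.bipartite.gnmk_random_graph(3, 4, 6, seed=2)
    >>> G.number_of_nodes(), G.number_of_edges()
    (7, 6)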
+ To use it use nx.bipartite.gnmk_random_graph """ - G = networkx.Graph() + G = nx.Graph() G = _add_nodes_with_bipartite_label(G, n, m) if directed: G = nx.DiGraph(G) - G.name = "bipartite_gnm_random_graph(%s,%s,%s)" % (n, m, k) - if seed is not None: - random.seed(seed) + G.name = f"bipartite_gnm_random_graph({n},{m},{k})" if n == 1 or m == 1: return G max_edges = n * m # max_edges for bipartite networks if k >= max_edges: # Maybe we should raise an exception here - return networkx.complete_bipartite_graph(n, m, create_using=G) + return nx.complete_bipartite_graph(n, m, create_using=G) - top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0] + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] bottom = list(set(G) - set(top)) edge_count = 0 while edge_count < k: # generate random edge,u,v - u = random.choice(top) - v = random.choice(bottom) + u = seed.choice(top) + v = seed.choice(bottom) if v in G[u]: continue else: @@ -603,5 +591,5 @@ def _add_nodes_with_bipartite_label(G, lena, lenb): G.add_nodes_from(range(0, lena + lenb)) b = dict(zip(range(0, lena), [0] * lena)) b.update(dict(zip(range(lena, lena + lenb), [1] * lenb))) - nx.set_node_attributes(G, b, 'bipartite') + nx.set_node_attributes(G, b, "bipartite") return G diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py index eaf7188..e8a3e02 100644 --- a/networkx/algorithms/bipartite/matching.py +++ b/networkx/algorithms/bipartite/matching.py @@ -1,12 +1,3 @@ -# matching.py - bipartite graph maximum matching algorithms -# -# Copyright 2015 Jeffrey Finkelstein . -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# # This module uses material from the Wikipedia article Hopcroft--Karp algorithm # , accessed on # January 3, 2015, which is released under the Creative Commons @@ -17,8 +8,8 @@ # Portions of this module use code from David Eppstein's Python Algorithms and # Data Structures (PADS) library, which is dedicated to the public domain (for # proof, see ). -"""Provides functions for computing a maximum cardinality matching in a -bipartite graph. +"""Provides functions for computing maximum cardinality matchings and minimum +weight full matchings in a bipartite graph. If you don't care about the particular implementation of the maximum matching algorithm, simply use the :func:`maximum_matching`. If you do care, you can @@ -27,7 +18,6 @@ For example, to find a maximum matching in the complete bipartite graph with two vertices on the left and three vertices on the right: ->>> import networkx as nx >>> G = nx.complete_bipartite_graph(2, 3) >>> left, right = nx.bipartite.sets(G) >>> list(left) @@ -40,29 +30,48 @@ The dictionary returned by :func:`maximum_matching` includes a mapping for vertices in both the left and right vertex sets. +Similarly, :func:`minimum_weight_full_matching` produces, for a complete +weighted bipartite graph, a matching whose cardinality is the cardinality of +the smaller of the two partitions, and for which the sum of the weights of the +edges included in the matching is minimal. 
+ """ import collections import itertools +from networkx.algorithms.bipartite.matrix import biadjacency_matrix from networkx.algorithms.bipartite import sets as bipartite_sets import networkx as nx -__all__ = ['maximum_matching', 'hopcroft_karp_matching', 'eppstein_matching', - 'to_vertex_cover'] +__all__ = [ + "maximum_matching", + "hopcroft_karp_matching", + "eppstein_matching", + "to_vertex_cover", + "minimum_weight_full_matching", +] -INFINITY = float('inf') +INFINITY = float("inf") def hopcroft_karp_matching(G, top_nodes=None): """Returns the maximum cardinality matching of the bipartite graph `G`. + A matching is a set of edges that do not share any nodes. A maximum + cardinality matching is a matching with the most edges possible. It + is not always unique. Finding a matching in a bipartite graph can be + treated as a networkx flow problem. + + The functions ``hopcroft_karp_matching`` and ``maximum_matching`` + are aliases of the same function. + Parameters ---------- G : NetworkX graph Undirected bipartite graph - top_nodes : container + top_nodes : container of nodes Container with all nodes in one bipartite node set. If not supplied it will be computed. But if more than one solution exists an exception @@ -74,12 +83,11 @@ def hopcroft_karp_matching(G, top_nodes=None): The matching is returned as a dictionary, `matches`, such that ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched - nodes do not occur as a key in mate. + nodes do not occur as a key in `matches`. Raises ------ - AmbiguousSolution : Exception - + AmbiguousSolution Raised if the input bipartite graph is disconnected and no container with all nodes in one bipartite set is provided. When determining the nodes in each bipartite set more than one valid solution is @@ -87,7 +95,6 @@ def hopcroft_karp_matching(G, top_nodes=None): Notes ----- - This function is implemented with the `Hopcroft--Karp matching algorithm `_ for bipartite graphs. @@ -97,7 +104,8 @@ def hopcroft_karp_matching(G, top_nodes=None): See Also -------- - + maximum_matching + hopcroft_karp_matching eppstein_matching References @@ -192,12 +200,11 @@ def eppstein_matching(G, top_nodes=None): The matching is returned as a dictionary, `matching`, such that ``matching[v] == w`` if node `v` is matched to node `w`. Unmatched - nodes do not occur as a key in mate. + nodes do not occur as a key in `matching`. Raises ------ - AmbiguousSolution : Exception - + AmbiguousSolution Raised if the input bipartite graph is disconnected and no container with all nodes in one bipartite set is provided. When determining the nodes in each bipartite set more than one valid solution is @@ -205,7 +212,6 @@ def eppstein_matching(G, top_nodes=None): Notes ----- - This function is implemented with David Eppstein's version of the algorithm Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which originally appeared in the `Python Algorithms and Data Structures library @@ -306,8 +312,7 @@ def recurse(v): recurse(v) -def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, - targets): +def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets): """Returns True if and only if the vertex `v` is connected to one of the target vertices by an alternating path in `G`. @@ -329,6 +334,7 @@ def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, `targets` is a set of vertices. 
""" + def _alternating_dfs(u, along_matched=True): """Returns True if and only if `u` is connected to one of the targets by an alternating path. @@ -351,8 +357,7 @@ def _alternating_dfs(u, along_matched=True): try: child = next(children) if child not in visited: - if ((parent, child) in valid_edges - or (child, parent) in valid_edges): + if (parent, child) in valid_edges or (child, parent) in valid_edges: if child in targets: return True visited.add(child) @@ -364,8 +369,9 @@ def _alternating_dfs(u, along_matched=True): # Check for alternating paths starting with edges in the matching, then # check for alternating paths starting with edges not in the # matching. - return (_alternating_dfs(v, along_matched=True) or - _alternating_dfs(v, along_matched=False)) + return _alternating_dfs(v, along_matched=True) or _alternating_dfs( + v, along_matched=False + ) def _connected_by_alternating_paths(G, matching, targets): @@ -392,12 +398,18 @@ def _connected_by_alternating_paths(G, matching, targets): # require nodes to be orderable. edge_sets = {frozenset((u, v)) for u, v in matching.items()} matched_edges = {tuple(edge) for edge in edge_sets} - unmatched_edges = {(u, v) for (u, v) in G.edges() - if frozenset((u, v)) not in edge_sets} + unmatched_edges = { + (u, v) for (u, v) in G.edges() if frozenset((u, v)) not in edge_sets + } - return {v for v in G if v in targets or - _is_connected_by_alternating_path(G, v, matched_edges, - unmatched_edges, targets)} + return { + v + for v in G + if v in targets + or _is_connected_by_alternating_path( + G, v, matched_edges, unmatched_edges, targets + ) + } def to_vertex_cover(G, matching, top_nodes=None): @@ -406,7 +418,6 @@ def to_vertex_cover(G, matching, top_nodes=None): Parameters ---------- - G : NetworkX graph Undirected bipartite graph @@ -426,15 +437,13 @@ def to_vertex_cover(G, matching, top_nodes=None): Returns ------- - vertex_cover : :class:`set` The minimum vertex cover in `G`. Raises ------ - AmbiguousSolution : Exception - + AmbiguousSolution Raised if the input bipartite graph is disconnected and no container with all nodes in one bipartite set is provided. When determining the nodes in each bipartite set more than one valid solution is @@ -442,7 +451,6 @@ def to_vertex_cover(G, matching, top_nodes=None): Notes ----- - This function is implemented using the procedure guaranteed by `Konig's theorem `_, @@ -453,7 +461,6 @@ def to_vertex_cover(G, matching, top_nodes=None): for any graph, one can compute the maximum independent set of a bipartite graph this way: - >>> import networkx as nx >>> G = nx.complete_bipartite_graph(2, 3) >>> matching = nx.bipartite.maximum_matching(G) >>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching) @@ -483,3 +490,93 @@ def to_vertex_cover(G, matching, top_nodes=None): #: #: This function is simply an alias for :func:`hopcroft_karp_matching`. maximum_matching = hopcroft_karp_matching + + +def minimum_weight_full_matching(G, top_nodes=None, weight="weight"): + r"""Returns a minimum weight full matching of the bipartite graph `G`. + + Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights + :math:`w : E \to \mathbb{R}`. This function then produces a matching + :math:`M \subseteq E` with cardinality + + .. math:: + \lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert), + + which minimizes the sum of the weights of the edges included in the + matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such + matching exists. 
+ + When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly + referred to as a perfect matching; here, since we allow + :math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we + follow Karp [1]_ and refer to the matching as *full*. + + Parameters + ---------- + G : NetworkX graph + + Undirected bipartite graph + + top_nodes : container + + Container with all nodes in one bipartite node set. If not supplied + it will be computed. + + weight : string, optional (default='weight') + + The edge data key used to provide each value in the matrix. + + Returns + ------- + matches : dictionary + + The matching is returned as a dictionary, `matches`, such that + ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched + nodes do not occur as a key in `matches`. + + Raises + ------ + ValueError + Raised if no full matching exists. + + ImportError + Raised if SciPy is not available. + + Notes + ----- + The problem of determining a minimum weight full matching is also known as + the rectangular linear assignment problem. This implementation defers the + calculation of the assignment to SciPy. + + References + ---------- + .. [1] Richard Manning Karp: + An algorithm to Solve the m x n Assignment Problem in Expected Time + O(mn log n). + Networks, 10(2):143–152, 1980. + + """ + try: + import numpy as np + import scipy.optimize + except ImportError as e: + raise ImportError( + "minimum_weight_full_matching requires SciPy: " + "https://scipy.org/" + ) from e + left, right = nx.bipartite.sets(G, top_nodes) + U = list(left) + V = list(right) + # We explicitly create the biadjacency matrix having infinities + # where edges are missing (as opposed to zeros, which is what one would + # get by using toarray on the sparse matrix). + weights_sparse = biadjacency_matrix( + G, row_order=U, column_order=V, weight=weight, format="coo" + ) + weights = np.full(weights_sparse.shape, np.inf) + weights[weights_sparse.row, weights_sparse.col] = weights_sparse.data + left_matches = scipy.optimize.linear_sum_assignment(weights) + d = {U[u]: V[v] for u, v in zip(*left_matches)} + # d will contain the matching from edges in left to right; we need to + # add the ones from right to left as well. + d.update({v: u for u, v in d.items()}) + return d diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py index 9e57d7e..5261a91 100644 --- a/networkx/algorithms/bipartite/matrix.py +++ b/networkx/algorithms/bipartite/matrix.py @@ -1,27 +1,19 @@ -# -*- coding: utf-8 -*- """ ==================== Biadjacency matrices ==================== """ -# Copyright (C) 2013-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import itertools -from networkx.convert import _prep_create_using from networkx.convert_matrix import _generate_weighted_edges import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg ']) -__all__ = ['biadjacency_matrix', 'from_biadjacency_matrix'] +__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"] -def biadjacency_matrix(G, row_order, column_order=None, - dtype=None, weight='weight', format='csr'): - r"""Return the biadjacency matrix of the bipartite graph G. + +def biadjacency_matrix( + G, row_order, column_order=None, dtype=None, weight="weight", format="csr" +): + r"""Returns the biadjacency matrix of the bipartite graph G. Let `G = (U, V, E)` be a bipartite graph with node sets `U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. 
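A sketch of minimum_weight_full_matching on a small weighted graph (SciPy must be installed, per the Raises section above):

    >>> import networkx as nx
    >>> G = nx.Graph()
    >>> G.add_weighted_edges_from([("a", "x", 3), ("a", "y", 1), ("b", "x", 2), ("b", "y", 4)])
    >>> M = nx.bipartite.minimum_weight_full_matching(G, top_nodes=["a", "b"])
    >>> M["a"], M["b"]  # total weight 1 + 2 = 3, beating the 3 + 4 alternative
    ('y', 'x')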
The biadjacency @@ -82,6 +74,7 @@ def biadjacency_matrix(G, row_order, column_order=None, https://docs.scipy.org/doc/scipy/reference/sparse.html """ from scipy import sparse + nlen = len(row_order) if nlen == 0: raise nx.NetworkXError("row_order is empty list") @@ -101,18 +94,23 @@ if G.number_of_edges() == 0: row, col, data = [], [], [] else: - row, col, data = zip(*((row_index[u], col_index[v], d.get(weight, 1)) - for u, v, d in G.edges(row_order, data=True) - if u in row_index and v in col_index)) - M = sparse.coo_matrix((data, (row, col)), - shape=(nlen, mlen), dtype=dtype) + row, col, data = zip( + *( + (row_index[u], col_index[v], d.get(weight, 1)) + for u, v, d in G.edges(row_order, data=True) + if u in row_index and v in col_index + ) + ) + M = sparse.coo_matrix((data, (row, col)), shape=(nlen, mlen), dtype=dtype) try: return M.asformat(format) - except AttributeError: - raise nx.NetworkXError("Unknown sparse matrix format: %s" % format) + # From Scipy 1.1.0, asformat will throw a ValueError instead of an + # AttributeError if the format is not recognized. + except (AttributeError, ValueError) as e: + raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from e -def from_biadjacency_matrix(A, create_using=None, edge_attribute='weight'): +def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"): r"""Creates a new bipartite graph from a biadjacency matrix given as a SciPy sparse matrix. @@ -142,13 +140,13 @@ See Also -------- biadjacency_matrix - from_numpy_matrix + from_numpy_array References ---------- [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph """ - G = _prep_create_using(create_using) + G = nx.empty_graph(0, create_using) n, m = A.shape # Make sure we get even the isolated nodes of the graph. G.add_nodes_from(range(n), bipartite=0) @@ -161,18 +159,8 @@ # entry in the adjacency matrix. Otherwise, create one edge for each # positive entry in the adjacency matrix and set the weight of that edge to # be the entry in the matrix. - if A.dtype.kind in ('i', 'u') and G.is_multigraph(): + if A.dtype.kind in ("i", "u") and G.is_multigraph(): chain = itertools.chain.from_iterable triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples) G.add_weighted_edges_from(triples, weight=edge_attribute) return G - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py index 92bade4..af42073 100644 --- a/networkx/algorithms/bipartite/projection.py +++ b/networkx/algorithms/bipartite/projection.py @@ -1,23 +1,15 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2017-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
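A round-trip sketch for the two matrix helpers above; format="coo" mirrors the usage in minimum_weight_full_matching, though the default "csr" is also accepted by from_biadjacency_matrix:

    >>> import networkx as nx
    >>> G = nx.complete_bipartite_graph(2, 3)
    >>> A = nx.bipartite.biadjacency_matrix(G, row_order=[0, 1], format="coo")
    >>> A.shape
    (2, 3)
    >>> H = nx.bipartite.from_biadjacency_matrix(A)
    >>> nx.is_isomorphic(G, H)
    True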
-# -# Authors: Aric Hagberg -# Jordi Torrents """One-mode (unipartite) projections of bipartite graphs.""" import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['project', - 'projected_graph', - 'weighted_projected_graph', - 'collaboration_weighted_projected_graph', - 'overlap_weighted_projected_graph', - 'generic_weighted_projected_graph'] +__all__ = [ + "project", + "projected_graph", + "weighted_projected_graph", + "collaboration_weighted_projected_graph", + "overlap_weighted_projected_graph", + "generic_weighted_projected_graph", +] def projected_graph(B, nodes, multigraph=False): @@ -60,8 +52,8 @@ def projected_graph(B, nodes, multigraph=False): [`a`, `b`]: >>> B = nx.Graph() - >>> B.add_edges_from([('a', 1), ('b', 1), ('a', 2), ('b', 2)]) - >>> G = bipartite.projected_graph(B, ['a', 'b'], multigraph=True) + >>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)]) + >>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True) >>> print([sorted((u, v)) for u, v in G.edges()]) [['a', 'b'], ['a', 'b']] @@ -108,7 +100,7 @@ def projected_graph(B, nodes, multigraph=False): G.graph.update(B.graph) G.add_nodes_from((n, B.nodes[n]) for n in nodes) for u in nodes: - nbrs2 = set(v for nbr in B[u] for v in B[nbr] if v != u) + nbrs2 = {v for nbr in B[u] for v in B[nbr] if v != u} if multigraph: for n in nbrs2: if directed: @@ -123,7 +115,7 @@ def projected_graph(B, nodes, multigraph=False): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def weighted_projected_graph(B, nodes, ratio=False): r"""Returns a weighted projection of B onto one of its node sets. @@ -144,8 +136,8 @@ def weighted_projected_graph(B, nodes, ratio=False): ratio: Bool (default=False) If True, edge weight is the ratio between actual shared neighbors - and possible shared neighbors. If False, edges weight is the number - of shared neighbors. + and maximum possible shared neighbors (i.e., the size of the other + node set). If False, edge weight is the number of shared neighbors. Returns ------- @@ -200,7 +192,7 @@ def weighted_projected_graph(B, nodes, ratio=False): n_top = float(len(B) - len(nodes)) for u in nodes: unbrs = set(B[u]) - nbrs2 = set((n for nbr in unbrs for n in B[nbr])) - set([u]) + nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u} for v in nbrs2: vnbrs = set(pred[v]) common = unbrs & vnbrs @@ -212,7 +204,7 @@ def weighted_projected_graph(B, nodes, ratio=False): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def collaboration_weighted_projected_graph(B, nodes): r"""Newman's weighted projection of B onto one of its node sets. @@ -255,7 +247,8 @@ def collaboration_weighted_projected_graph(B, nodes): >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5]) >>> list(G) [0, 2, 4, 5] - >>> for edge in G.edges(data=True): print(edge) + >>> for edge in sorted(G.edges(data=True)): + ... print(edge) ... 
(0, 2, {'weight': 0.5}) (0, 5, {'weight': 0.5}) @@ -296,7 +289,7 @@ def collaboration_weighted_projected_graph(B, nodes): G.add_nodes_from((n, B.nodes[n]) for n in nodes) for u in nodes: unbrs = set(B[u]) - nbrs2 = set(n for nbr in unbrs for n in B[nbr] if n != u) + nbrs2 = {n for nbr in unbrs for n in B[nbr] if n != u} for v in nbrs2: vnbrs = set(pred[v]) common_degree = (len(B[n]) for n in unbrs & vnbrs) @@ -305,7 +298,7 @@ def collaboration_weighted_projected_graph(B, nodes): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def overlap_weighted_projected_graph(B, nodes, jaccard=True): r"""Overlap weighted projection of B onto one of its node sets. @@ -393,7 +386,7 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True): G.add_nodes_from((n, B.nodes[n]) for n in nodes) for u in nodes: unbrs = set(B[u]) - nbrs2 = set((n for nbr in unbrs for n in B[nbr])) - set([u]) + nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u} for v in nbrs2: vnbrs = set(pred[v]) if jaccard: @@ -404,7 +397,7 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def generic_weighted_projected_graph(B, nodes, weight_function=None): r"""Weighted projection of B with a user-specified weight function. @@ -443,7 +436,7 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): ... vnbrs = set(G[v]) ... return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) ... - >>> def my_weight(G, u, v, weight='weight'): + >>> def my_weight(G, u, v, weight="weight"): ... w = 0 ... for nbr in set(G[u]) & set(G[v]): ... w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1) @@ -452,9 +445,9 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): >>> # A complete bipartite graph with 4 nodes and 4 edges >>> B = nx.complete_bipartite_graph(2, 2) >>> # Add some arbitrary weight to the edges - >>> for i,(u,v) in enumerate(B.edges()): - ... B.edges[u, v]['weight'] = i + 1 - ... + >>> for i, (u, v) in enumerate(B.edges()): + ... B.edges[u, v]["weight"] = i + 1 + ... >>> for edge in B.edges(data=True): ... print(edge) ... @@ -467,10 +460,14 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): >>> print(list(G.edges(data=True))) [(0, 1, {'weight': 2})] >>> # To specify a custom weight function use the weight_function parameter - >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=jaccard) + >>> G = bipartite.generic_weighted_projected_graph( + ... B, [0, 1], weight_function=jaccard + ... ) >>> print(list(G.edges(data=True))) [(0, 1, {'weight': 1.0})] - >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=my_weight) + >>> G = bipartite.generic_weighted_projected_graph( + ... B, [0, 1], weight_function=my_weight + ... ) >>> print(list(G.edges(data=True))) [(0, 1, {'weight': 10})] @@ -500,13 +497,15 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): pred = B.adj G = nx.Graph() if weight_function is None: + def weight_function(G, u, v): # Notice that we use set(pred[v]) for handling the directed case. 
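To make the ratio semantics clarified above concrete, a sketch comparing the raw shared-neighbor count with the ratio form (three possible shared neighbors here, since the other node set has size 3):

    >>> import networkx as nx
    >>> B = nx.complete_bipartite_graph(2, 3)
    >>> nx.bipartite.weighted_projected_graph(B, [0, 1])[0][1]["weight"]
    3
    >>> nx.bipartite.weighted_projected_graph(B, [0, 1], ratio=True)[0][1]["weight"]
    1.0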
return len(set(G[u]) & set(pred[v])) + G.graph.update(B.graph) G.add_nodes_from((n, B.nodes[n]) for n in nodes) for u in nodes: - nbrs2 = set((n for nbr in set(B[u]) for n in B[nbr])) - set([u]) + nbrs2 = {n for nbr in set(B[u]) for n in B[nbr]} - {u} for v in nbrs2: weight = weight_function(B, u, v) G.add_edge(u, v, weight=weight) diff --git a/networkx/algorithms/bipartite/redundancy.py b/networkx/algorithms/bipartite/redundancy.py index 2c1b32b..55de063 100644 --- a/networkx/algorithms/bipartite/redundancy.py +++ b/networkx/algorithms/bipartite/redundancy.py @@ -1,19 +1,9 @@ -# -*- coding: utf-8 -*- """Node redundancy for bipartite graphs.""" -# Copyright (C) 2011 by -# Jordi Torrents -# Aric Hagberg -# All rights reserved. -# BSD license. -from __future__ import division - from itertools import combinations from networkx import NetworkXError -__author__ = """\n""".join(['Jordi Torrents ', - 'Aric Hagberg (hagberg@lanl.gov)']) -__all__ = ['node_redundancy'] +__all__ = ["node_redundancy"] def node_redundancy(G, nodes=None): @@ -53,7 +43,6 @@ def node_redundancy(G, nodes=None): -------- Compute the redundancy coefficient of each node in a graph:: - >>> import networkx as nx >>> from networkx.algorithms import bipartite >>> G = nx.cycle_graph(4) >>> rc = bipartite.node_redundancy(G) @@ -62,7 +51,6 @@ def node_redundancy(G, nodes=None): Compute the average redundancy for the graph:: - >>> import networkx as nx >>> from networkx.algorithms import bipartite >>> G = nx.cycle_graph(4) >>> rc = bipartite.node_redundancy(G) @@ -71,7 +59,6 @@ def node_redundancy(G, nodes=None): Compute the average redundancy for a set of nodes:: - >>> import networkx as nx >>> from networkx.algorithms import bipartite >>> G = nx.cycle_graph(4) >>> rc = bipartite.node_redundancy(G) @@ -96,8 +83,10 @@ def node_redundancy(G, nodes=None): if nodes is None: nodes = G if any(len(G[v]) < 2 for v in nodes): - raise NetworkXError('Cannot compute redundancy coefficient for a node' - ' that has fewer than two neighbors.') + raise NetworkXError( + "Cannot compute redundancy coefficient for a node" + " that has fewer than two neighbors." + ) # TODO This can be trivially parallelized. return {v: _node_redundancy(G, v) for v in nodes} @@ -116,6 +105,7 @@ def _node_redundancy(G, v): n = len(G[v]) # TODO On Python 3, we could just use `G[u].keys() & G[w].keys()` instead # of instantiating the entire sets. - overlap = sum(1 for (u, w) in combinations(G[v], 2) - if (set(G[u]) & set(G[w])) - {v}) + overlap = sum( + 1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v} + ) return (2 * overlap) / (n * (n - 1)) diff --git a/networkx/algorithms/bipartite/spectral.py b/networkx/algorithms/bipartite/spectral.py index ad6cd9b..57dea5a 100644 --- a/networkx/algorithms/bipartite/spectral.py +++ b/networkx/algorithms/bipartite/spectral.py @@ -1,24 +1,17 @@ -# -*- coding: utf-8 -*- """ Spectral bipartivity measure. """ import networkx as nx -__author__ = """Aric Hagberg (hagberg@lanl.gov)""" -# Copyright (C) 2011 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -__all__ = ['spectral_bipartivity'] +__all__ = ["spectral_bipartivity"] -def spectral_bipartivity(G, nodes=None, weight='weight'): + +def spectral_bipartivity(G, nodes=None, weight="weight"): """Returns the spectral bipartivity. Parameters ---------- - G : NetworkX graph + G : NetworkX graph nodes : list or container optional(default is all nodes) Nodes to return value of spectral bipartivity contribution. 
@@ -43,7 +36,7 @@ def spectral_bipartivity(G, nodes=None, weight='weight'): Notes ----- This implementation uses Numpy (dense) matrices which are not efficient - for storing large sparse graphs. + for storing large sparse graphs. See Also -------- @@ -56,11 +49,12 @@ def spectral_bipartivity(G, nodes=None, weight='weight'): """ try: import scipy.linalg - except ImportError: - raise ImportError('spectral_bipartivity() requires SciPy: ', - 'http://scipy.org/') + except ImportError as e: + raise ImportError( + "spectral_bipartivity() requires SciPy: ", "http://scipy.org/" + ) from e nodelist = list(G) # ordering of nodes in matrix - A = nx.to_numpy_matrix(G, nodelist, weight=weight) + A = nx.to_numpy_array(G, nodelist, weight=weight) expA = scipy.linalg.expm(A) expmA = scipy.linalg.expm(-A) coshA = 0.5 * (expA + expmA) @@ -75,16 +69,3 @@ def spectral_bipartivity(G, nodes=None, weight='weight'): i = index[n] sb[n] = coshA[i, i] / expA[i, i] return sb - - -def setup_module(module): - """Fixture for nose tests.""" - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/bipartite/tests/__init__.py b/networkx/algorithms/bipartite/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/bipartite/tests/test_basic.py b/networkx/algorithms/bipartite/tests/test_basic.py index cbd0738..1df1f07 100644 --- a/networkx/algorithms/bipartite/tests/test_basic.py +++ b/networkx/algorithms/bipartite/tests/test_basic.py @@ -1,136 +1,121 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest -from nose.plugins.attrib import attr +import pytest + import networkx as nx from networkx.algorithms import bipartite class TestBipartiteBasic: - def test_is_bipartite(self): - assert_true(bipartite.is_bipartite(nx.path_graph(4))) - assert_true(bipartite.is_bipartite(nx.DiGraph([(1, 0)]))) - assert_false(bipartite.is_bipartite(nx.complete_graph(3))) + assert bipartite.is_bipartite(nx.path_graph(4)) + assert bipartite.is_bipartite(nx.DiGraph([(1, 0)])) + assert not bipartite.is_bipartite(nx.complete_graph(3)) def test_bipartite_color(self): G = nx.path_graph(4) c = bipartite.color(G) - assert_equal(c, {0: 1, 1: 0, 2: 1, 3: 0}) + assert c == {0: 1, 1: 0, 2: 1, 3: 0} - @raises(nx.NetworkXError) def test_not_bipartite_color(self): - c = bipartite.color(nx.complete_graph(4)) + with pytest.raises(nx.NetworkXError): + c = bipartite.color(nx.complete_graph(4)) def test_bipartite_directed(self): G = bipartite.random_graph(10, 10, 0.1, directed=True) - assert_true(bipartite.is_bipartite(G)) + assert bipartite.is_bipartite(G) def test_bipartite_sets(self): G = nx.path_graph(4) X, Y = bipartite.sets(G) - assert_equal(X, {0, 2}) - assert_equal(Y, {1, 3}) + assert X == {0, 2} + assert Y == {1, 3} def test_bipartite_sets_directed(self): G = nx.path_graph(4) D = G.to_directed() X, Y = bipartite.sets(D) - assert_equal(X, {0, 2}) - assert_equal(Y, {1, 3}) + assert X == {0, 2} + assert Y == {1, 3} def test_bipartite_sets_given_top_nodes(self): G = nx.path_graph(4) top_nodes = [0, 2] X, Y = bipartite.sets(G, top_nodes) - assert_equal(X, {0, 2}) - assert_equal(Y, {1, 3}) + assert X == {0, 2} + assert Y == {1, 3} - @raises(nx.AmbiguousSolution) def test_bipartite_sets_disconnected(self): - G = nx.path_graph(4) - G.add_edges_from([(5, 6), (6, 7)]) - X, Y = bipartite.sets(G) + with pytest.raises(nx.AmbiguousSolution): + G = nx.path_graph(4) 
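A sketch of the spectral_bipartivity measure ported above (SciPy required); the measure equals 1 for a bipartite graph and falls below 1 once odd cycles appear:

    >>> import networkx as nx
    >>> round(nx.bipartite.spectral_bipartivity(nx.path_graph(4)), 2)
    1.0
    >>> nx.bipartite.spectral_bipartivity(nx.complete_graph(3)) < 1
    True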
+ G.add_edges_from([(5, 6), (6, 7)]) + X, Y = bipartite.sets(G) def test_is_bipartite_node_set(self): G = nx.path_graph(4) - assert_true(bipartite.is_bipartite_node_set(G, [0, 2])) - assert_true(bipartite.is_bipartite_node_set(G, [1, 3])) - assert_false(bipartite.is_bipartite_node_set(G, [1, 2])) + assert bipartite.is_bipartite_node_set(G, [0, 2]) + assert bipartite.is_bipartite_node_set(G, [1, 3]) + assert not bipartite.is_bipartite_node_set(G, [1, 2]) G.add_edge(10, 20) - assert_true(bipartite.is_bipartite_node_set(G, [0, 2, 10])) - assert_true(bipartite.is_bipartite_node_set(G, [0, 2, 20])) - assert_true(bipartite.is_bipartite_node_set(G, [1, 3, 10])) - assert_true(bipartite.is_bipartite_node_set(G, [1, 3, 20])) + assert bipartite.is_bipartite_node_set(G, [0, 2, 10]) + assert bipartite.is_bipartite_node_set(G, [0, 2, 20]) + assert bipartite.is_bipartite_node_set(G, [1, 3, 10]) + assert bipartite.is_bipartite_node_set(G, [1, 3, 20]) def test_bipartite_density(self): G = nx.path_graph(5) X, Y = bipartite.sets(G) density = float(len(list(G.edges()))) / (len(X) * len(Y)) - assert_equal(bipartite.density(G, X), density) + assert bipartite.density(G, X) == density D = nx.DiGraph(G.edges()) - assert_equal(bipartite.density(D, X), density / 2.0) - assert_equal(bipartite.density(nx.Graph(), {}), 0.0) + assert bipartite.density(D, X) == density / 2.0 + assert bipartite.density(nx.Graph(), {}) == 0.0 def test_bipartite_degrees(self): G = nx.path_graph(5) - X = set([1, 3]) - Y = set([0, 2, 4]) + X = {1, 3} + Y = {0, 2, 4} u, d = bipartite.degrees(G, Y) - assert_equal(dict(u), {1: 2, 3: 2}) - assert_equal(dict(d), {0: 1, 2: 2, 4: 1}) + assert dict(u) == {1: 2, 3: 2} + assert dict(d) == {0: 1, 2: 2, 4: 1} def test_bipartite_weighted_degrees(self): G = nx.path_graph(5) G.add_edge(0, 1, weight=0.1, other=0.2) - X = set([1, 3]) - Y = set([0, 2, 4]) - u, d = bipartite.degrees(G, Y, weight='weight') - assert_equal(dict(u), {1: 1.1, 3: 2}) - assert_equal(dict(d), {0: 0.1, 2: 2, 4: 1}) - u, d = bipartite.degrees(G, Y, weight='other') - assert_equal(dict(u), {1: 1.2, 3: 2}) - assert_equal(dict(d), {0: 0.2, 2: 2, 4: 1}) - - @attr('numpy') + X = {1, 3} + Y = {0, 2, 4} + u, d = bipartite.degrees(G, Y, weight="weight") + assert dict(u) == {1: 1.1, 3: 2} + assert dict(d) == {0: 0.1, 2: 2, 4: 1} + u, d = bipartite.degrees(G, Y, weight="other") + assert dict(u) == {1: 1.2, 3: 2} + assert dict(d) == {0: 0.2, 2: 2, 4: 1} + def test_biadjacency_matrix_weight(self): - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') + scipy = pytest.importorskip("scipy") G = nx.path_graph(5) G.add_edge(0, 1, weight=2, other=4) X = [1, 3] Y = [0, 2, 4] - M = bipartite.biadjacency_matrix(G, X, weight='weight') - assert_equal(M[0, 0], 2) - M = bipartite.biadjacency_matrix(G, X, weight='other') - assert_equal(M[0, 0], 4) + M = bipartite.biadjacency_matrix(G, X, weight="weight") + assert M[0, 0] == 2 + M = bipartite.biadjacency_matrix(G, X, weight="other") + assert M[0, 0] == 4 - @attr('numpy') def test_biadjacency_matrix(self): - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') + scipy = pytest.importorskip("scipy") tops = [2, 5, 10] bots = [5, 10, 15] for i in range(len(tops)): G = bipartite.random_graph(tops[i], bots[i], 0.2) - top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0] + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] M = bipartite.biadjacency_matrix(G, top) - assert_equal(M.shape[0], tops[i]) - assert_equal(M.shape[1], bots[i]) + 
assert M.shape[0] == tops[i] + assert M.shape[1] == bots[i] - @attr('numpy') def test_biadjacency_matrix_order(self): - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') + scipy = pytest.importorskip("scipy") G = nx.path_graph(5) G.add_edge(0, 1, weight=2) X = [3, 1] Y = [4, 2, 0] - M = bipartite.biadjacency_matrix(G, X, Y, weight='weight') - assert_equal(M[1, 2], 2) + M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") + assert M[1, 2] == 2 diff --git a/networkx/algorithms/bipartite/tests/test_centrality.py b/networkx/algorithms/bipartite/tests/test_centrality.py index 2745915..48a5abd 100644 --- a/networkx/algorithms/bipartite/tests/test_centrality.py +++ b/networkx/algorithms/bipartite/tests/test_centrality.py @@ -1,168 +1,175 @@ -from nose.tools import * import networkx as nx from networkx.algorithms import bipartite +from networkx.testing import almost_equal -class TestBipartiteCentrality(object): - - def setUp(self): - self.P4 = nx.path_graph(4) - self.K3 = nx.complete_bipartite_graph(3, 3) - self.C4 = nx.cycle_graph(4) - self.davis = nx.davis_southern_women_graph() - self.top_nodes = [n for n, d in self.davis.nodes(data=True) - if d['bipartite'] == 0] +class TestBipartiteCentrality: + @classmethod + def setup_class(cls): + cls.P4 = nx.path_graph(4) + cls.K3 = nx.complete_bipartite_graph(3, 3) + cls.C4 = nx.cycle_graph(4) + cls.davis = nx.davis_southern_women_graph() + cls.top_nodes = [ + n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0 + ] def test_degree_centrality(self): d = bipartite.degree_centrality(self.P4, [1, 3]) - answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5} - assert_equal(d, answer) + answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5} + assert d == answer d = bipartite.degree_centrality(self.K3, [0, 1, 2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} - assert_equal(d, answer) + assert d == answer d = bipartite.degree_centrality(self.C4, [0, 2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0} - assert_equal(d, answer) + assert d == answer def test_betweenness_centrality(self): c = bipartite.betweenness_centrality(self.P4, [1, 3]) answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0} - assert_equal(c, answer) + assert c == answer c = bipartite.betweenness_centrality(self.K3, [0, 1, 2]) answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125} - assert_equal(c, answer) + assert c == answer c = bipartite.betweenness_centrality(self.C4, [0, 2]) answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} - assert_equal(c, answer) + assert c == answer def test_closeness_centrality(self): c = bipartite.closeness_centrality(self.P4, [1, 3]) answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3} - assert_equal(c, answer) + assert c == answer c = bipartite.closeness_centrality(self.K3, [0, 1, 2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} - assert_equal(c, answer) + assert c == answer c = bipartite.closeness_centrality(self.C4, [0, 2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0} - assert_equal(c, answer) + assert c == answer G = nx.Graph() G.add_node(0) G.add_node(1) c = bipartite.closeness_centrality(G, [0]) - assert_equal(c, {1: 0.0}) + assert c == {1: 0.0} c = bipartite.closeness_centrality(G, [1]) - assert_equal(c, {1: 0.0}) + assert c == {1: 0.0} def test_davis_degree_centrality(self): G = self.davis deg = bipartite.degree_centrality(G, self.top_nodes) - answer = {'E8': 0.78, - 'E9': 0.67, - 'E7': 0.56, - 'Nora Fayette': 0.57, - 'Evelyn Jefferson': 0.57, - 'Theresa Anderson': 0.57, - 'E6': 0.44, - 'Sylvia Avondale': 0.50, - 'Laura 
Mandeville': 0.50, - 'Brenda Rogers': 0.50, - 'Katherina Rogers': 0.43, - 'E5': 0.44, - 'Helen Lloyd': 0.36, - 'E3': 0.33, - 'Ruth DeSand': 0.29, - 'Verne Sanderson': 0.29, - 'E12': 0.33, - 'Myra Liddel': 0.29, - 'E11': 0.22, - 'Eleanor Nye': 0.29, - 'Frances Anderson': 0.29, - 'Pearl Oglethorpe': 0.21, - 'E4': 0.22, - 'Charlotte McDowd': 0.29, - 'E10': 0.28, - 'Olivia Carleton': 0.14, - 'Flora Price': 0.14, - 'E2': 0.17, - 'E1': 0.17, - 'Dorothy Murchison': 0.14, - 'E13': 0.17, - 'E14': 0.17} + answer = { + "E8": 0.78, + "E9": 0.67, + "E7": 0.56, + "Nora Fayette": 0.57, + "Evelyn Jefferson": 0.57, + "Theresa Anderson": 0.57, + "E6": 0.44, + "Sylvia Avondale": 0.50, + "Laura Mandeville": 0.50, + "Brenda Rogers": 0.50, + "Katherina Rogers": 0.43, + "E5": 0.44, + "Helen Lloyd": 0.36, + "E3": 0.33, + "Ruth DeSand": 0.29, + "Verne Sanderson": 0.29, + "E12": 0.33, + "Myra Liddel": 0.29, + "E11": 0.22, + "Eleanor Nye": 0.29, + "Frances Anderson": 0.29, + "Pearl Oglethorpe": 0.21, + "E4": 0.22, + "Charlotte McDowd": 0.29, + "E10": 0.28, + "Olivia Carleton": 0.14, + "Flora Price": 0.14, + "E2": 0.17, + "E1": 0.17, + "Dorothy Murchison": 0.14, + "E13": 0.17, + "E14": 0.17, + } for node, value in answer.items(): - assert_almost_equal(value, deg[node], places=2) + assert almost_equal(value, deg[node], places=2) def test_davis_betweenness_centrality(self): G = self.davis bet = bipartite.betweenness_centrality(G, self.top_nodes) - answer = {'E8': 0.24, - 'E9': 0.23, - 'E7': 0.13, - 'Nora Fayette': 0.11, - 'Evelyn Jefferson': 0.10, - 'Theresa Anderson': 0.09, - 'E6': 0.07, - 'Sylvia Avondale': 0.07, - 'Laura Mandeville': 0.05, - 'Brenda Rogers': 0.05, - 'Katherina Rogers': 0.05, - 'E5': 0.04, - 'Helen Lloyd': 0.04, - 'E3': 0.02, - 'Ruth DeSand': 0.02, - 'Verne Sanderson': 0.02, - 'E12': 0.02, - 'Myra Liddel': 0.02, - 'E11': 0.02, - 'Eleanor Nye': 0.01, - 'Frances Anderson': 0.01, - 'Pearl Oglethorpe': 0.01, - 'E4': 0.01, - 'Charlotte McDowd': 0.01, - 'E10': 0.01, - 'Olivia Carleton': 0.01, - 'Flora Price': 0.01, - 'E2': 0.00, - 'E1': 0.00, - 'Dorothy Murchison': 0.00, - 'E13': 0.00, - 'E14': 0.00} + answer = { + "E8": 0.24, + "E9": 0.23, + "E7": 0.13, + "Nora Fayette": 0.11, + "Evelyn Jefferson": 0.10, + "Theresa Anderson": 0.09, + "E6": 0.07, + "Sylvia Avondale": 0.07, + "Laura Mandeville": 0.05, + "Brenda Rogers": 0.05, + "Katherina Rogers": 0.05, + "E5": 0.04, + "Helen Lloyd": 0.04, + "E3": 0.02, + "Ruth DeSand": 0.02, + "Verne Sanderson": 0.02, + "E12": 0.02, + "Myra Liddel": 0.02, + "E11": 0.02, + "Eleanor Nye": 0.01, + "Frances Anderson": 0.01, + "Pearl Oglethorpe": 0.01, + "E4": 0.01, + "Charlotte McDowd": 0.01, + "E10": 0.01, + "Olivia Carleton": 0.01, + "Flora Price": 0.01, + "E2": 0.00, + "E1": 0.00, + "Dorothy Murchison": 0.00, + "E13": 0.00, + "E14": 0.00, + } for node, value in answer.items(): - assert_almost_equal(value, bet[node], places=2) + assert almost_equal(value, bet[node], places=2) def test_davis_closeness_centrality(self): G = self.davis clos = bipartite.closeness_centrality(G, self.top_nodes) - answer = {'E8': 0.85, - 'E9': 0.79, - 'E7': 0.73, - 'Nora Fayette': 0.80, - 'Evelyn Jefferson': 0.80, - 'Theresa Anderson': 0.80, - 'E6': 0.69, - 'Sylvia Avondale': 0.77, - 'Laura Mandeville': 0.73, - 'Brenda Rogers': 0.73, - 'Katherina Rogers': 0.73, - 'E5': 0.59, - 'Helen Lloyd': 0.73, - 'E3': 0.56, - 'Ruth DeSand': 0.71, - 'Verne Sanderson': 0.71, - 'E12': 0.56, - 'Myra Liddel': 0.69, - 'E11': 0.54, - 'Eleanor Nye': 0.67, - 'Frances Anderson': 0.67, - 'Pearl Oglethorpe': 0.67, - 'E4': 
0.54, - 'Charlotte McDowd': 0.60, - 'E10': 0.55, - 'Olivia Carleton': 0.59, - 'Flora Price': 0.59, - 'E2': 0.52, - 'E1': 0.52, - 'Dorothy Murchison': 0.65, - 'E13': 0.52, - 'E14': 0.52} + answer = { + "E8": 0.85, + "E9": 0.79, + "E7": 0.73, + "Nora Fayette": 0.80, + "Evelyn Jefferson": 0.80, + "Theresa Anderson": 0.80, + "E6": 0.69, + "Sylvia Avondale": 0.77, + "Laura Mandeville": 0.73, + "Brenda Rogers": 0.73, + "Katherina Rogers": 0.73, + "E5": 0.59, + "Helen Lloyd": 0.73, + "E3": 0.56, + "Ruth DeSand": 0.71, + "Verne Sanderson": 0.71, + "E12": 0.56, + "Myra Liddel": 0.69, + "E11": 0.54, + "Eleanor Nye": 0.67, + "Frances Anderson": 0.67, + "Pearl Oglethorpe": 0.67, + "E4": 0.54, + "Charlotte McDowd": 0.60, + "E10": 0.55, + "Olivia Carleton": 0.59, + "Flora Price": 0.59, + "E2": 0.52, + "E1": 0.52, + "Dorothy Murchison": 0.65, + "E13": 0.52, + "E14": 0.52, + } for node, value in answer.items(): - assert_almost_equal(value, clos[node], places=2) + assert almost_equal(value, clos[node], places=2) diff --git a/networkx/algorithms/bipartite/tests/test_cluster.py b/networkx/algorithms/bipartite/tests/test_cluster.py index ce0c1e8..84403bc 100644 --- a/networkx/algorithms/bipartite/tests/test_cluster.py +++ b/networkx/algorithms/bipartite/tests/test_cluster.py @@ -1,5 +1,5 @@ import networkx as nx -from nose.tools import * +import pytest from networkx.algorithms.bipartite.cluster import cc_dot, cc_min, cc_max import networkx.algorithms.bipartite as bipartite @@ -10,70 +10,74 @@ def test_pairwise_bipartite_cc_functions(): # Latapy et al (2008) G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)]) G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)]) - G3 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]) - result = {0: [1 / 3.0, 2 / 3.0, 2 / 5.0], - 1: [1 / 2.0, 2 / 3.0, 2 / 3.0], - 2: [2 / 8.0, 2 / 5.0, 2 / 5.0]} + G3 = nx.Graph( + [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)] + ) + result = { + 0: [1 / 3.0, 2 / 3.0, 2 / 5.0], + 1: [1 / 2.0, 2 / 3.0, 2 / 3.0], + 2: [2 / 8.0, 2 / 5.0, 2 / 5.0], + } for i, G in enumerate([G1, G2, G3]): - assert(bipartite.is_bipartite(G)) - assert(cc_dot(set(G[0]), set(G[1])) == result[i][0]) - assert(cc_min(set(G[0]), set(G[1])) == result[i][1]) - assert(cc_max(set(G[0]), set(G[1])) == result[i][2]) + assert bipartite.is_bipartite(G) + assert cc_dot(set(G[0]), set(G[1])) == result[i][0] + assert cc_min(set(G[0]), set(G[1])) == result[i][1] + assert cc_max(set(G[0]), set(G[1])) == result[i][2] def test_star_graph(): G = nx.star_graph(3) # all modes are the same answer = {0: 0, 1: 1, 2: 1, 3: 1} - assert_equal(bipartite.clustering(G, mode='dot'), answer) - assert_equal(bipartite.clustering(G, mode='min'), answer) - assert_equal(bipartite.clustering(G, mode='max'), answer) + assert bipartite.clustering(G, mode="dot") == answer + assert bipartite.clustering(G, mode="min") == answer + assert bipartite.clustering(G, mode="max") == answer -@raises(nx.NetworkXError) def test_not_bipartite(): - bipartite.clustering(nx.complete_graph(4)) + with pytest.raises(nx.NetworkXError): + bipartite.clustering(nx.complete_graph(4)) -@raises(nx.NetworkXError) def test_bad_mode(): - bipartite.clustering(nx.path_graph(4), mode='foo') + with pytest.raises(nx.NetworkXError): + bipartite.clustering(nx.path_graph(4), mode="foo") def test_path_graph(): G = nx.path_graph(4) answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5} - assert_equal(bipartite.clustering(G, mode='dot'), answer) - 
assert_equal(bipartite.clustering(G, mode='max'), answer) + assert bipartite.clustering(G, mode="dot") == answer + assert bipartite.clustering(G, mode="max") == answer answer = {0: 1, 1: 1, 2: 1, 3: 1} - assert_equal(bipartite.clustering(G, mode='min'), answer) + assert bipartite.clustering(G, mode="min") == answer def test_average_path_graph(): G = nx.path_graph(4) - assert_equal(bipartite.average_clustering(G, mode='dot'), 0.5) - assert_equal(bipartite.average_clustering(G, mode='max'), 0.5) - assert_equal(bipartite.average_clustering(G, mode='min'), 1) + assert bipartite.average_clustering(G, mode="dot") == 0.5 + assert bipartite.average_clustering(G, mode="max") == 0.5 + assert bipartite.average_clustering(G, mode="min") == 1 def test_ra_clustering_davis(): G = nx.davis_southern_women_graph() cc4 = round(bipartite.robins_alexander_clustering(G), 3) - assert_equal(cc4, 0.468) + assert cc4 == 0.468 def test_ra_clustering_square(): G = nx.path_graph(4) G.add_edge(0, 3) - assert_equal(bipartite.robins_alexander_clustering(G), 1.0) + assert bipartite.robins_alexander_clustering(G) == 1.0 def test_ra_clustering_zero(): G = nx.Graph() - assert_equal(bipartite.robins_alexander_clustering(G), 0) + assert bipartite.robins_alexander_clustering(G) == 0 G.add_nodes_from(range(4)) - assert_equal(bipartite.robins_alexander_clustering(G), 0) + assert bipartite.robins_alexander_clustering(G) == 0 G.add_edges_from([(0, 1), (2, 3), (3, 4)]) - assert_equal(bipartite.robins_alexander_clustering(G), 0) + assert bipartite.robins_alexander_clustering(G) == 0 G.add_edge(1, 2) - assert_equal(bipartite.robins_alexander_clustering(G), 0) + assert bipartite.robins_alexander_clustering(G) == 0 diff --git a/networkx/algorithms/bipartite/tests/test_covering.py b/networkx/algorithms/bipartite/tests/test_covering.py index fd45816..2f1b02e 100644 --- a/networkx/algorithms/bipartite/tests/test_covering.py +++ b/networkx/algorithms/bipartite/tests/test_covering.py @@ -1,10 +1,3 @@ -# Copyright 2016-2018 NetworkX developers. -# Copyright (C) 2016 by -# Nishant Nikhil -# All rights reserved. -# BSD license. 
- -from nose.tools import assert_equal, assert_true import networkx as nx import networkx.algorithms.bipartite as bipartite @@ -14,31 +7,27 @@ class TestMinEdgeCover: def test_empty_graph(self): G = nx.Graph() - assert_equal(bipartite.min_edge_cover(G), set()) + assert bipartite.min_edge_cover(G) == set() def test_graph_single_edge(self): G = nx.Graph() G.add_edge(0, 1) - assert_equal(bipartite.min_edge_cover(G), - {(0, 1), (1, 0)}) + assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)} def test_bipartite_default(self): G = nx.Graph() G.add_nodes_from([1, 2, 3, 4], bipartite=0) - G.add_nodes_from(['a', 'b', 'c'], bipartite=1) - G.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 'a')]) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) min_cover = bipartite.min_edge_cover(G) - assert_true(nx.is_edge_cover(G, min_cover)) - assert_equal(len(min_cover), 8) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 8 def test_bipartite_explicit(self): G = nx.Graph() G.add_nodes_from([1, 2, 3, 4], bipartite=0) - G.add_nodes_from(['a', 'b', 'c'], bipartite=1) - G.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 'a')]) - min_cover = bipartite.min_edge_cover(G, - bipartite.eppstein_matching) - assert_true(nx.is_edge_cover(G, min_cover)) - assert_equal(len(min_cover), 8) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 8 diff --git a/networkx/algorithms/bipartite/tests/test_edgelist.py b/networkx/algorithms/bipartite/tests/test_edgelist.py index 896a818..4df378a 100644 --- a/networkx/algorithms/bipartite/tests/test_edgelist.py +++ b/networkx/algorithms/bipartite/tests/test_edgelist.py @@ -1,31 +1,30 @@ """ Unit tests for bipartite edgelists. 
""" -from nose.tools import assert_equal, assert_raises, assert_not_equal, raises +import pytest import io import tempfile import os import networkx as nx -from networkx.testing import (assert_edges_equal, assert_nodes_equal, - assert_graphs_equal) +from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal from networkx.algorithms import bipartite class TestEdgelist: - - def setUp(self): - self.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] - self.G.add_edges_from(e) - self.G.add_nodes_from(['a', 'c', 'e'], bipartite=0) - self.G.add_nodes_from(['b', 'd', 'f'], bipartite=1) - self.G.add_node('g', bipartite=0) - self.DG = nx.DiGraph(self.G) - self.MG = nx.MultiGraph() - self.MG.add_edges_from([(1, 2), (1, 2), (1, 2)]) - self.MG.add_node(1, bipartite=0) - self.MG.add_node(2, bipartite=1) + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_nodes_from(["a", "c", "e"], bipartite=0) + cls.G.add_nodes_from(["b", "d", "f"], bipartite=1) + cls.G.add_node("g", bipartite=0) + cls.DG = nx.DiGraph(cls.G) + cls.MG = nx.MultiGraph() + cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)]) + cls.MG.add_node(1, bipartite=0) + cls.MG.add_node(2, bipartite=1) def test_read_edgelist_1(self): s = b"""\ @@ -51,8 +50,9 @@ def test_read_edgelist_3(self): bytesIO = io.BytesIO(s) G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) def test_write_edgelist_1(self): fh = io.BytesIO() @@ -63,7 +63,7 @@ def test_write_edgelist_1(self): G.add_node(3, bipartite=0) bipartite.write_edgelist(G, fh, data=False) fh.seek(0) - assert_equal(fh.read(), b"1 2\n3 2\n") + assert fh.read() == b"1 2\n3 2\n" def test_write_edgelist_2(self): fh = io.BytesIO() @@ -74,7 +74,7 @@ def test_write_edgelist_2(self): G.add_node(3, bipartite=0) bipartite.write_edgelist(G, fh, data=True) fh.seek(0) - assert_equal(fh.read(), b"1 2 {}\n3 2 {}\n") + assert fh.read() == b"1 2 {}\n3 2 {}\n" def test_write_edgelist_3(self): fh = io.BytesIO() @@ -86,7 +86,7 @@ def test_write_edgelist_3(self): G.add_node(3, bipartite=0) bipartite.write_edgelist(G, fh, data=True) fh.seek(0) - assert_equal(fh.read(), b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n") + assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n" def test_write_edgelist_4(self): fh = io.BytesIO() @@ -96,21 +96,17 @@ def test_write_edgelist_4(self): G.add_node(1, bipartite=0) G.add_node(2, bipartite=1) G.add_node(3, bipartite=0) - bipartite.write_edgelist(G, fh, data=[('weight')]) + bipartite.write_edgelist(G, fh, data=[("weight")]) fh.seek(0) - assert_equal(fh.read(), b"1 2 2.0\n3 2 3.0\n") + assert fh.read() == b"1 2 2.0\n3 2 3.0\n" def test_unicode(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) - G.add_node('Radiohead', bipartite=1) + 
G.add_node("Radiohead", bipartite=1) fd, fname = tempfile.mkstemp() bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) @@ -120,37 +116,28 @@ def test_unicode(self): def test_latin1_issue(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) - G.add_node('Radiohead', bipartite=1) + G.add_node("Radiohead", bipartite=1) fd, fname = tempfile.mkstemp() - assert_raises(UnicodeEncodeError, - bipartite.write_edgelist, - G, fname, encoding='latin-1') + pytest.raises( + UnicodeEncodeError, bipartite.write_edgelist, G, fname, encoding="latin-1" + ) os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() - try: # Python 3.x - blurb = chr(1245) # just to trigger the exception - name1 = 'Bj' + chr(246) + 'rk' - name2 = chr(220) + 'ber' - except ValueError: # Python 2.6+ - name1 = 'Bj' + unichr(246) + 'rk' - name2 = unichr(220) + 'ber' - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) - G.add_node('Radiohead', bipartite=1) + G.add_node("Radiohead", bipartite=1) fd, fname = tempfile.mkstemp() - bipartite.write_edgelist(G, fname, encoding='latin-1') - H = bipartite.read_edgelist(fname, encoding='latin-1') + bipartite.write_edgelist(G, fname, encoding="latin-1") + H = bipartite.read_edgelist(fname, encoding="latin-1") assert_graphs_equal(G, H) os.close(fd) os.unlink(fname) @@ -161,8 +148,8 @@ def test_edgelist_graph(self): bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) H2 = bipartite.read_edgelist(fname) - assert_not_equal(H, H2) # they should be different graphs - G.remove_node('g') # isolated nodes are not written in edgelist + assert H != H2 # they should be different graphs + G.remove_node("g") # isolated nodes are not written in edgelist assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -186,19 +173,19 @@ def test_edgelist_multigraph(self): bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) os.unlink(fname) - @raises(nx.NetworkXNotImplemented) def test_empty_digraph(self): - bytesIO = io.BytesIO() - bipartite.write_edgelist(nx.DiGraph(), bytesIO) + with pytest.raises(nx.NetworkXNotImplemented): + bytesIO = io.BytesIO() + bipartite.write_edgelist(nx.DiGraph(), bytesIO) - @raises(AttributeError) def test_raise_attribute(self): - G = nx.path_graph(4) - bytesIO = io.BytesIO() - bipartite.write_edgelist(G, bytesIO) + with pytest.raises(AttributeError): + G = nx.path_graph(4) + bytesIO = io.BytesIO() + bipartite.write_edgelist(G, bytesIO) diff --git a/networkx/algorithms/bipartite/tests/test_generators.py b/networkx/algorithms/bipartite/tests/test_generators.py index 5551c08..f6ffcf4 100644 --- 
a/networkx/algorithms/bipartite/tests/test_generators.py +++ b/networkx/algorithms/bipartite/tests/test_generators.py @@ -1,215 +1,399 @@ -#!/usr/bin/env python +import pytest +import networkx as nx +from ..generators import ( + alternating_havel_hakimi_graph, + complete_bipartite_graph, + configuration_model, + gnmk_random_graph, + havel_hakimi_graph, + preferential_attachment_graph, + random_graph, + reverse_havel_hakimi_graph, +) -from nose.tools import * -from networkx import * -from networkx.algorithms.bipartite.generators import * - -"""Generators - Bipartite +""" +Generators - Bipartite ---------------------- """ -class TestGeneratorsBipartite(): +class TestGeneratorsBipartite: def test_complete_bipartite_graph(self): G = complete_bipartite_graph(0, 0) - assert_true(is_isomorphic(G, null_graph())) + assert nx.is_isomorphic(G, nx.null_graph()) for i in [1, 5]: G = complete_bipartite_graph(i, 0) - assert_true(is_isomorphic(G, empty_graph(i))) + assert nx.is_isomorphic(G, nx.empty_graph(i)) G = complete_bipartite_graph(0, i) - assert_true(is_isomorphic(G, empty_graph(i))) + assert nx.is_isomorphic(G, nx.empty_graph(i)) G = complete_bipartite_graph(2, 2) - assert_true(is_isomorphic(G, cycle_graph(4))) + assert nx.is_isomorphic(G, nx.cycle_graph(4)) G = complete_bipartite_graph(1, 5) - assert_true(is_isomorphic(G, star_graph(5))) + assert nx.is_isomorphic(G, nx.star_graph(5)) G = complete_bipartite_graph(5, 1) - assert_true(is_isomorphic(G, star_graph(5))) + assert nx.is_isomorphic(G, nx.star_graph(5)) # complete_bipartite_graph(m1,m2) is a connected graph with # m1+m2 nodes and m1*m2 edges for m1, m2 in [(5, 11), (7, 3)]: G = complete_bipartite_graph(m1, m2) - assert_equal(number_of_nodes(G), m1 + m2) - assert_equal(number_of_edges(G), m1 * m2) - - assert_raises(networkx.exception.NetworkXError, - complete_bipartite_graph, 7, 3, create_using=DiGraph()) - - mG = complete_bipartite_graph(7, 3, create_using=MultiGraph()) - assert_equal(sorted(mG.edges()), sorted(G.edges())) + assert nx.number_of_nodes(G) == m1 + m2 + assert nx.number_of_edges(G) == m1 * m2 + + pytest.raises( + nx.NetworkXError, complete_bipartite_graph, 7, 3, create_using=nx.DiGraph() + ) + pytest.raises( + nx.NetworkXError, complete_bipartite_graph, 7, 3, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + complete_bipartite_graph, + 7, + 3, + create_using=nx.MultiDiGraph, + ) + + mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph) + assert mG.is_multigraph() + assert sorted(mG.edges()) == sorted(G.edges()) + + mG = complete_bipartite_graph(7, 3) # default to Graph + assert sorted(mG.edges()) == sorted(G.edges()) + assert not mG.is_multigraph() + assert not mG.is_directed() # specify nodes rather than number of nodes - G = complete_bipartite_graph([1, 2], ['a', 'b']) - has_edges = G.has_edge(1, 'a') & G.has_edge(1, 'b') &\ - G.has_edge(2, 'a') & G.has_edge(2, 'b') - assert_true(has_edges) - assert_equal(G.size(), 4) + G = complete_bipartite_graph([1, 2], ["a", "b"]) + has_edges = ( + G.has_edge(1, "a") + & G.has_edge(1, "b") + & G.has_edge(2, "a") + & G.has_edge(2, "b") + ) + assert has_edges + assert G.size() == 4 def test_configuration_model(self): + aseq = [] + bseq = [] + G = configuration_model(aseq, bseq) + assert len(G) == 0 + + aseq = [0, 0] + bseq = [0, 0] + G = configuration_model(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 
0 + aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - assert_raises(networkx.exception.NetworkXError, - configuration_model, aseq, bseq) + pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq) aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2, 2] G = configuration_model(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = configuration_model(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 1, 1, 1] bseq = [3, 3, 3] G = configuration_model(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [1, 1, 1, 2, 2, 2, 3, 3, 3]) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] + + GU = nx.project(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 3 + + G = configuration_model(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph() + ) + pytest.raises( + nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + configuration_model, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) - GU = project(Graph(G), range(len(aseq))) - assert_equal(GU.number_of_nodes(), 6) + def test_havel_hakimi_graph(self): + aseq = [] + bseq = [] + G = havel_hakimi_graph(aseq, bseq) + assert len(G) == 0 - GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq))) - assert_equal(GD.number_of_nodes(), 3) + aseq = [0, 0] + bseq = [0, 0] + G = havel_hakimi_graph(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 - assert_raises(networkx.exception.NetworkXError, - configuration_model, aseq, bseq, - create_using=DiGraph()) + aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - assert_raises(networkx.exception.NetworkXError, - havel_hakimi_graph, aseq, bseq) + pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq) bseq = [2, 2, 2, 2, 2, 2] G = havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = havel_hakimi_graph(aseq, bseq) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + GU = nx.project(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 4 + + G = havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph() + ) + pytest.raises( + nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) - GU = project(Graph(G), 
range(len(aseq))) - assert_equal(GU.number_of_nodes(), 6) + def test_reverse_havel_hakimi_graph(self): + aseq = [] + bseq = [] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert len(G) == 0 - GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq))) - assert_equal(GD.number_of_nodes(), 4) - assert_raises(networkx.exception.NetworkXError, - havel_hakimi_graph, aseq, bseq, - create_using=DiGraph()) + aseq = [0, 0] + bseq = [0, 0] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 - def test_reverse_havel_hakimi_graph(self): aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - assert_raises(networkx.exception.NetworkXError, - reverse_havel_hakimi_graph, aseq, bseq) + pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq) bseq = [2, 2, 2, 2, 2, 2] G = reverse_havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = reverse_havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 1, 1, 1] bseq = [3, 3, 3] G = reverse_havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [1, 1, 1, 2, 2, 2, 3, 3, 3]) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] + + GU = nx.project(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 3 + + G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) - GU = project(Graph(G), range(len(aseq))) - assert_equal(GU.number_of_nodes(), 6) + def test_alternating_havel_hakimi_graph(self): + aseq = [] + bseq = [] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert len(G) == 0 - GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq))) - assert_equal(GD.number_of_nodes(), 3) - assert_raises(networkx.exception.NetworkXError, - reverse_havel_hakimi_graph, aseq, bseq, - create_using=DiGraph()) + aseq = [0, 0] + bseq = [0, 0] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 - def test_alternating_havel_hakimi_graph(self): aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - assert_raises(networkx.exception.NetworkXError, - alternating_havel_hakimi_graph, aseq, bseq) + pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq) bseq = [2, 2, 2, 2, 2, 2] G = alternating_havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = alternating_havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [2, 2, 2, 
2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 1, 1, 1] bseq = [3, 3, 3] G = alternating_havel_hakimi_graph(aseq, bseq) - assert_equal(sorted(d for n, d in G.degree()), - [1, 1, 1, 2, 2, 2, 3, 3, 3]) - - GU = project(Graph(G), range(len(aseq))) - assert_equal(GU.number_of_nodes(), 6) - - GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq))) - assert_equal(GD.number_of_nodes(), 3) - - assert_raises(networkx.exception.NetworkXError, - alternating_havel_hakimi_graph, aseq, bseq, - create_using=DiGraph()) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] + + GU = nx.project(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.project(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 3 + + G = alternating_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) def test_preferential_attachment(self): aseq = [3, 2, 1, 1] G = preferential_attachment_graph(aseq, 0.5) - assert_raises(networkx.exception.NetworkXError, - preferential_attachment_graph, aseq, 0.5, - create_using=DiGraph()) + assert G.is_multigraph() + assert not G.is_directed() + + G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.MultiDiGraph, + ) def test_random_graph(self): n = 10 m = 20 G = random_graph(n, m, 0.9) - assert_equal(len(G), 30) - assert_true(is_bipartite(G)) + assert len(G) == 30 + assert nx.is_bipartite(G) X, Y = nx.algorithms.bipartite.sets(G) - assert_equal(set(range(n)), X) - assert_equal(set(range(n, n + m)), Y) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y def test_random_digraph(self): n = 10 m = 20 G = random_graph(n, m, 0.9, directed=True) - assert_equal(len(G), 30) - assert_true(is_bipartite(G)) + assert len(G) == 30 + assert nx.is_bipartite(G) X, Y = nx.algorithms.bipartite.sets(G) - assert_equal(set(range(n)), X) - assert_equal(set(range(n, n + m)), Y) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y def test_gnmk_random_graph(self): + n = 10 + m = 20 + edges = 100 + # set seed because sometimes it is not connected + # which raises an error in bipartite.sets(G) below. 
+ G = gnmk_random_graph(n, m, edges, seed=1234) + assert len(G) == n + m + assert nx.is_bipartite(G) + X, Y = nx.algorithms.bipartite.sets(G) + # print(X) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y + assert edges == len(list(G.edges())) + + def test_gnmk_random_graph_complete(self): n = 10 m = 20 edges = 200 G = gnmk_random_graph(n, m, edges) - assert_equal(len(G), 30) - assert_true(is_bipartite(G)) + assert len(G) == n + m + assert nx.is_bipartite(G) X, Y = nx.algorithms.bipartite.sets(G) - print(X) - assert_equal(set(range(n)), X) - assert_equal(set(range(n, n + m)), Y) - assert_equal(edges, len(list(G.edges()))) + # print(X) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y + assert edges == len(list(G.edges())) diff --git a/networkx/algorithms/bipartite/tests/test_matching.py b/networkx/algorithms/bipartite/tests/test_matching.py index bd4653d..2c6c84d 100644 --- a/networkx/algorithms/bipartite/tests/test_matching.py +++ b/networkx/algorithms/bipartite/tests/test_matching.py @@ -1,25 +1,18 @@ -# test_matching.py - unit tests for bipartite matching algorithms -# -# Copyright 2015 Jeffrey Finkelstein . -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module.""" import itertools import networkx as nx -from nose.tools import assert_true, assert_equal, raises +import pytest from networkx.algorithms.bipartite.matching import eppstein_matching from networkx.algorithms.bipartite.matching import hopcroft_karp_matching from networkx.algorithms.bipartite.matching import maximum_matching +from networkx.algorithms.bipartite.matching import minimum_weight_full_matching from networkx.algorithms.bipartite.matching import to_vertex_cover -class TestMatching(): +class TestMatching: """Tests for bipartite matching algorithms.""" def setup(self): @@ -33,8 +26,7 @@ def setup(self): self.simple_graph = nx.complete_bipartite_graph(2, 3) self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1} - edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), - (5, 11)] + edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)] self.top_nodes = set(range(6)) self.graph = nx.Graph() self.graph.add_nodes_from(range(12)) @@ -42,33 +34,48 @@ def setup(self): # Example bipartite graph from issue 2127 G = nx.Graph() - G.add_nodes_from([ - (1, 'C'), (1, 'B'), (0, 'G'), (1, 'F'), - (1, 'E'), (0, 'C'), (1, 'D'), (1, 'I'), - (0, 'A'), (0, 'D'), (0, 'F'), (0, 'E'), - (0, 'H'), (1, 'G'), (1, 'A'), (0, 'I'), - (0, 'B'), (1, 'H'), - ]) - G.add_edge((1, 'C'), (0, 'A')) - G.add_edge((1, 'B'), (0, 'A')) - G.add_edge((0, 'G'), (1, 'I')) - G.add_edge((0, 'G'), (1, 'H')) - G.add_edge((1, 'F'), (0, 'A')) - G.add_edge((1, 'F'), (0, 'C')) - G.add_edge((1, 'F'), (0, 'E')) - G.add_edge((1, 'E'), (0, 'A')) - G.add_edge((1, 'E'), (0, 'C')) - G.add_edge((0, 'C'), (1, 'D')) - G.add_edge((0, 'C'), (1, 'I')) - G.add_edge((0, 'C'), (1, 'G')) - G.add_edge((0, 'C'), (1, 'H')) - G.add_edge((1, 'D'), (0, 'A')) - G.add_edge((1, 'I'), (0, 'A')) - G.add_edge((1, 'I'), (0, 'E')) - G.add_edge((0, 'A'), (1, 'G')) - G.add_edge((0, 'A'), (1, 'H')) - G.add_edge((0, 'E'), (1, 'G')) - G.add_edge((0, 'E'), (1, 'H')) + G.add_nodes_from( + [ + (1, "C"), + (1, "B"), + (0, "G"), + (1, "F"), + (1, "E"), + (0, "C"), + (1, "D"), + (1, "I"), + (0, "A"), + (0, "D"), + (0, "F"), + (0, "E"), + (0, "H"), + (1, "G"), + (1, "A"), + (0, "I"), + (0, "B"), + (1, "H"), + ] + ) + 
G.add_edge((1, "C"), (0, "A")) + G.add_edge((1, "B"), (0, "A")) + G.add_edge((0, "G"), (1, "I")) + G.add_edge((0, "G"), (1, "H")) + G.add_edge((1, "F"), (0, "A")) + G.add_edge((1, "F"), (0, "C")) + G.add_edge((1, "F"), (0, "E")) + G.add_edge((1, "E"), (0, "A")) + G.add_edge((1, "E"), (0, "C")) + G.add_edge((0, "C"), (1, "D")) + G.add_edge((0, "C"), (1, "I")) + G.add_edge((0, "C"), (1, "G")) + G.add_edge((0, "C"), (1, "H")) + G.add_edge((1, "D"), (0, "A")) + G.add_edge((1, "I"), (0, "A")) + G.add_edge((1, "I"), (0, "E")) + G.add_edge((0, "A"), (1, "G")) + G.add_edge((0, "A"), (1, "H")) + G.add_edge((0, "E"), (1, "G")) + G.add_edge((0, "E"), (1, "H")) self.disconnected_graph = G def check_match(self, matching): @@ -122,19 +129,19 @@ def test_to_vertex_cover(self): def test_eppstein_matching_simple(self): match = eppstein_matching(self.simple_graph) - assert_equal(match, self.simple_solution) + assert match == self.simple_solution def test_hopcroft_karp_matching_simple(self): match = hopcroft_karp_matching(self.simple_graph) - assert_equal(match, self.simple_solution) + assert match == self.simple_solution - @raises(nx.AmbiguousSolution) def test_eppstein_matching_disconnected(self): - match = eppstein_matching(self.disconnected_graph) + with pytest.raises(nx.AmbiguousSolution): + match = eppstein_matching(self.disconnected_graph) - @raises(nx.AmbiguousSolution) def test_hopcroft_karp_matching_disconnected(self): - match = hopcroft_karp_matching(self.disconnected_graph) + with pytest.raises(nx.AmbiguousSolution): + match = hopcroft_karp_matching(self.disconnected_graph) def test_issue_2127(self): """Test from issue 2127""" @@ -164,14 +171,14 @@ def test_issue_2127(self): matching = hopcroft_karp_matching(btc, top_nodes) vertex_cover = to_vertex_cover(btc, matching, top_nodes) independent_set = set(G) - {v for _, v in vertex_cover} - assert_equal({'B', 'D', 'F', 'I', 'H'}, independent_set) + assert {"B", "D", "F", "I", "H"} == independent_set def test_vertex_cover_issue_2384(self): G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)]) matching = maximum_matching(G) vertex_cover = to_vertex_cover(G, matching) for u, v in G.edges(): - assert_true(u in vertex_cover or v in vertex_cover) + assert u in vertex_cover or v in vertex_cover def test_unorderable_nodes(self): a = object() @@ -183,16 +190,127 @@ def test_unorderable_nodes(self): matching = maximum_matching(G) vertex_cover = to_vertex_cover(G, matching) for u, v in G.edges(): - assert_true(u in vertex_cover or v in vertex_cover) + assert u in vertex_cover or v in vertex_cover def test_eppstein_matching(): """Test in accordance to issue #1927""" G = nx.Graph() - G.add_nodes_from(['a', 2, 3, 4], bipartite=0) - G.add_nodes_from([1, 'b', 'c'], bipartite=1) - G.add_edges_from([('a', 1), ('a', 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 1)]) + G.add_nodes_from(["a", 2, 3, 4], bipartite=0) + G.add_nodes_from([1, "b", "c"], bipartite=1) + G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)]) matching = eppstein_matching(G) - assert_true(len(matching) == len(maximum_matching(G))) + assert len(matching) == len(maximum_matching(G)) assert all(x in set(matching.keys()) for x in set(matching.values())) + + +class TestMinimumWeightFullMatching: + @classmethod + def setup_class(cls): + global scipy + scipy = pytest.importorskip("scipy") + + def test_minimum_weight_full_matching_incomplete_graph(self): + B = nx.Graph() + B.add_nodes_from([1, 2], bipartite=0) + B.add_nodes_from([3, 4], bipartite=1) + B.add_edge(1, 4, weight=100) + 
B.add_edge(2, 3, weight=100) + B.add_edge(2, 4, weight=50) + matching = minimum_weight_full_matching(B) + assert matching == {1: 4, 2: 3, 4: 1, 3: 2} + + def test_minimum_weight_full_matching_with_no_full_matching(self): + B = nx.Graph() + B.add_nodes_from([1, 2, 3], bipartite=0) + B.add_nodes_from([4, 5, 6], bipartite=1) + B.add_edge(1, 4, weight=100) + B.add_edge(2, 4, weight=100) + B.add_edge(3, 4, weight=50) + B.add_edge(3, 5, weight=50) + B.add_edge(3, 6, weight=50) + with pytest.raises(ValueError): + minimum_weight_full_matching(B) + + def test_minimum_weight_full_matching_square(self): + G = nx.complete_bipartite_graph(3, 3) + G.add_edge(0, 3, weight=400) + G.add_edge(0, 4, weight=150) + G.add_edge(0, 5, weight=400) + G.add_edge(1, 3, weight=400) + G.add_edge(1, 4, weight=450) + G.add_edge(1, 5, weight=600) + G.add_edge(2, 3, weight=300) + G.add_edge(2, 4, weight=225) + G.add_edge(2, 5, weight=300) + matching = minimum_weight_full_matching(G) + assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2} + + def test_minimum_weight_full_matching_smaller_left(self): + G = nx.complete_bipartite_graph(3, 4) + G.add_edge(0, 3, weight=400) + G.add_edge(0, 4, weight=150) + G.add_edge(0, 5, weight=400) + G.add_edge(0, 6, weight=1) + G.add_edge(1, 3, weight=400) + G.add_edge(1, 4, weight=450) + G.add_edge(1, 5, weight=600) + G.add_edge(1, 6, weight=2) + G.add_edge(2, 3, weight=300) + G.add_edge(2, 4, weight=225) + G.add_edge(2, 5, weight=290) + G.add_edge(2, 6, weight=3) + matching = minimum_weight_full_matching(G) + assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1} + + def test_minimum_weight_full_matching_smaller_top_nodes_right(self): + G = nx.complete_bipartite_graph(3, 4) + G.add_edge(0, 3, weight=400) + G.add_edge(0, 4, weight=150) + G.add_edge(0, 5, weight=400) + G.add_edge(0, 6, weight=1) + G.add_edge(1, 3, weight=400) + G.add_edge(1, 4, weight=450) + G.add_edge(1, 5, weight=600) + G.add_edge(1, 6, weight=2) + G.add_edge(2, 3, weight=300) + G.add_edge(2, 4, weight=225) + G.add_edge(2, 5, weight=290) + G.add_edge(2, 6, weight=3) + matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6]) + assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1} + + def test_minimum_weight_full_matching_smaller_right(self): + G = nx.complete_bipartite_graph(4, 3) + G.add_edge(0, 4, weight=400) + G.add_edge(0, 5, weight=400) + G.add_edge(0, 6, weight=300) + G.add_edge(1, 4, weight=150) + G.add_edge(1, 5, weight=450) + G.add_edge(1, 6, weight=225) + G.add_edge(2, 4, weight=400) + G.add_edge(2, 5, weight=600) + G.add_edge(2, 6, weight=290) + G.add_edge(3, 4, weight=1) + G.add_edge(3, 5, weight=2) + G.add_edge(3, 6, weight=3) + matching = minimum_weight_full_matching(G) + assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2} + + def test_minimum_weight_full_matching_negative_weights(self): + G = nx.complete_bipartite_graph(2, 2) + G.add_edge(0, 2, weight=-2) + G.add_edge(0, 3, weight=0.2) + G.add_edge(1, 2, weight=-2) + G.add_edge(1, 3, weight=0.3) + matching = minimum_weight_full_matching(G) + assert matching == {0: 3, 1: 2, 2: 1, 3: 0} + + def test_minimum_weight_full_matching_different_weight_key(self): + G = nx.complete_bipartite_graph(2, 2) + G.add_edge(0, 2, mass=2) + G.add_edge(0, 3, mass=0.2) + G.add_edge(1, 2, mass=1) + G.add_edge(1, 3, mass=2) + matching = minimum_weight_full_matching(G, weight="mass") + assert matching == {0: 3, 1: 2, 2: 1, 3: 0} diff --git a/networkx/algorithms/bipartite/tests/test_matrix.py b/networkx/algorithms/bipartite/tests/test_matrix.py index 
dbe5d52..176741a 100644 --- a/networkx/algorithms/bipartite/tests/test_matrix.py +++ b/networkx/algorithms/bipartite/tests/test_matrix.py @@ -1,87 +1,76 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest + +np = pytest.importorskip("numpy") +sp = pytest.importorskip("scipy") +sparse = pytest.importorskip("scipy.sparse") + + import networkx as nx from networkx.algorithms import bipartite from networkx.testing.utils import assert_edges_equal class TestBiadjacencyMatrix: - @classmethod - def setupClass(cls): - global np, sp, sparse, np_assert_equal - try: - import numpy as np - import scipy as sp - import scipy.sparse as sparse - np_assert_equal = np.testing.assert_equal - except ImportError: - raise SkipTest('SciPy sparse library not available.') - def test_biadjacency_matrix_weight(self): G = nx.path_graph(5) G.add_edge(0, 1, weight=2, other=4) X = [1, 3] Y = [0, 2, 4] - M = bipartite.biadjacency_matrix(G, X, weight='weight') - assert_equal(M[0, 0], 2) - M = bipartite.biadjacency_matrix(G, X, weight='other') - assert_equal(M[0, 0], 4) + M = bipartite.biadjacency_matrix(G, X, weight="weight") + assert M[0, 0] == 2 + M = bipartite.biadjacency_matrix(G, X, weight="other") + assert M[0, 0] == 4 def test_biadjacency_matrix(self): tops = [2, 5, 10] bots = [5, 10, 15] for i in range(len(tops)): G = bipartite.random_graph(tops[i], bots[i], 0.2) - top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0] + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] M = bipartite.biadjacency_matrix(G, top) - assert_equal(M.shape[0], tops[i]) - assert_equal(M.shape[1], bots[i]) + assert M.shape[0] == tops[i] + assert M.shape[1] == bots[i] def test_biadjacency_matrix_order(self): G = nx.path_graph(5) G.add_edge(0, 1, weight=2) X = [3, 1] Y = [4, 2, 0] - M = bipartite.biadjacency_matrix(G, X, Y, weight='weight') - assert_equal(M[1, 2], 2) + M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") + assert M[1, 2] == 2 - @raises(nx.NetworkXError) def test_null_graph(self): - bipartite.biadjacency_matrix(nx.Graph(), []) + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph(), []) - @raises(nx.NetworkXError) def test_empty_graph(self): - bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), []) + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), []) - @raises(nx.NetworkXError) def test_duplicate_row(self): - bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1]) - - @raises(nx.NetworkXError) - def test_duplicate_col(self): - bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1]) + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1]) - @raises(nx.NetworkXError) def test_duplicate_col(self): - bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1]) + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1]) - @raises(nx.NetworkXError) def test_format_keyword(self): - bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format='foo') + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo") def test_from_biadjacency_roundtrip(self): B1 = nx.path_graph(5) M = bipartite.biadjacency_matrix(B1, [0, 2, 4]) B2 = bipartite.from_biadjacency_matrix(M) - assert_true(nx.is_isomorphic(B1, B2)) + assert nx.is_isomorphic(B1, B2) def test_from_biadjacency_weight(self): M = sparse.csc_matrix([[1, 2], [0, 3]]) B = bipartite.from_biadjacency_matrix(M) 
assert_edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)]) - B = bipartite.from_biadjacency_matrix(M, edge_attribute='weight') - e = [(0, 2, {'weight': 1}), (0, 3, {'weight': 2}), (1, 3, {'weight': 3})] + B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight") + e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})] assert_edges_equal(B.edges(data=True), e) def test_from_biadjacency_multigraph(self): diff --git a/networkx/algorithms/bipartite/tests/test_project.py b/networkx/algorithms/bipartite/tests/test_project.py index 327c576..dbe7589 100644 --- a/networkx/algorithms/bipartite/tests/test_project.py +++ b/networkx/algorithms/bipartite/tests/test_project.py @@ -1,12 +1,9 @@ -#!/usr/bin/env python -from nose.tools import assert_equal import networkx as nx from networkx.algorithms import bipartite from networkx.testing import assert_edges_equal, assert_nodes_equal class TestBipartiteProject: - def test_path_projected_graph(self): G = nx.path_graph(4) P = bipartite.projected_graph(G, [1, 3]) @@ -18,27 +15,27 @@ def test_path_projected_graph(self): def test_path_projected_properties_graph(self): G = nx.path_graph(4) - G.add_node(1, name='one') - G.add_node(2, name='two') + G.add_node(1, name="one") + G.add_node(2, name="two") P = bipartite.projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - assert_equal(P.nodes[1]['name'], G.nodes[1]['name']) + assert P.nodes[1]["name"] == G.nodes[1]["name"] P = bipartite.projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - assert_equal(P.nodes[2]['name'], G.nodes[2]['name']) + assert P.nodes[2]["name"] == G.nodes[2]["name"] def test_path_collaboration_projected_graph(self): G = nx.path_graph(4) P = bipartite.collaboration_weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.collaboration_weighted_projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_directed_path_collaboration_projected_graph(self): G = nx.DiGraph() @@ -46,22 +43,22 @@ def test_directed_path_collaboration_projected_graph(self): P = bipartite.collaboration_weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.collaboration_weighted_projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_path_weighted_projected_graph(self): G = nx.path_graph(4) P = bipartite.weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.weighted_projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_path_weighted_projected_directed_graph(self): G = nx.DiGraph() @@ -69,11 +66,11 @@ def test_path_weighted_projected_directed_graph(self): P = bipartite.weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.weighted_projected_graph(G, [0, 2]) 
assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_star_projected_graph(self): G = nx.star_graph(3) @@ -90,255 +87,283 @@ def test_star_projected_graph(self): def test_project_multigraph(self): G = nx.Graph() - G.add_edge('a', 1) - G.add_edge('b', 1) - G.add_edge('a', 2) - G.add_edge('b', 2) - P = bipartite.projected_graph(G, 'ab') - assert_edges_equal(list(P.edges()), [('a', 'b')]) - P = bipartite.weighted_projected_graph(G, 'ab') - assert_edges_equal(list(P.edges()), [('a', 'b')]) - P = bipartite.projected_graph(G, 'ab', multigraph=True) - assert_edges_equal(list(P.edges()), [('a', 'b'), ('a', 'b')]) + G.add_edge("a", 1) + G.add_edge("b", 1) + G.add_edge("a", 2) + G.add_edge("b", 2) + P = bipartite.projected_graph(G, "ab") + assert_edges_equal(list(P.edges()), [("a", "b")]) + P = bipartite.weighted_projected_graph(G, "ab") + assert_edges_equal(list(P.edges()), [("a", "b")]) + P = bipartite.projected_graph(G, "ab", multigraph=True) + assert_edges_equal(list(P.edges()), [("a", "b"), ("a", "b")]) def test_project_collaboration(self): G = nx.Graph() - G.add_edge('a', 1) - G.add_edge('b', 1) - G.add_edge('b', 2) - G.add_edge('c', 2) - G.add_edge('c', 3) - G.add_edge('c', 4) - G.add_edge('b', 4) - P = bipartite.collaboration_weighted_projected_graph(G, 'abc') - assert_equal(P['a']['b']['weight'], 1) - assert_equal(P['b']['c']['weight'], 2) + G.add_edge("a", 1) + G.add_edge("b", 1) + G.add_edge("b", 2) + G.add_edge("c", 2) + G.add_edge("c", 3) + G.add_edge("c", 4) + G.add_edge("b", 4) + P = bipartite.collaboration_weighted_projected_graph(G, "abc") + assert P["a"]["b"]["weight"] == 1 + assert P["b"]["c"]["weight"] == 2 def test_directed_projection(self): G = nx.DiGraph() - G.add_edge('A', 1) - G.add_edge(1, 'B') - G.add_edge('A', 2) - G.add_edge('B', 2) - P = bipartite.projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - P = bipartite.weighted_projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - assert_equal(P['A']['B']['weight'], 1) - - P = bipartite.projected_graph(G, 'AB', multigraph=True) - assert_edges_equal(list(P.edges()), [('A', 'B')]) + G.add_edge("A", 1) + G.add_edge(1, "B") + G.add_edge("A", 2) + G.add_edge("B", 2) + P = bipartite.projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + P = bipartite.weighted_projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + assert P["A"]["B"]["weight"] == 1 + + P = bipartite.projected_graph(G, "AB", multigraph=True) + assert_edges_equal(list(P.edges()), [("A", "B")]) G = nx.DiGraph() - G.add_edge('A', 1) - G.add_edge(1, 'B') - G.add_edge('A', 2) - G.add_edge(2, 'B') - P = bipartite.projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - P = bipartite.weighted_projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - assert_equal(P['A']['B']['weight'], 2) + G.add_edge("A", 1) + G.add_edge(1, "B") + G.add_edge("A", 2) + G.add_edge(2, "B") + P = bipartite.projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + P = bipartite.weighted_projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + assert P["A"]["B"]["weight"] == 2 - P = bipartite.projected_graph(G, 'AB', multigraph=True) - assert_edges_equal(list(P.edges()), [('A', 'B'), ('A', 'B')]) + P = bipartite.projected_graph(G, "AB", multigraph=True) + assert_edges_equal(list(P.edges()), [("A", "B"), ("A", "B")]) class 
TestBipartiteWeightedProjection: - - def setUp(self): + @classmethod + def setup_class(cls): # Tore Opsahl's example # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/ - self.G = nx.Graph() - self.G.add_edge('A', 1) - self.G.add_edge('A', 2) - self.G.add_edge('B', 1) - self.G.add_edge('B', 2) - self.G.add_edge('B', 3) - self.G.add_edge('B', 4) - self.G.add_edge('B', 5) - self.G.add_edge('C', 1) - self.G.add_edge('D', 3) - self.G.add_edge('E', 4) - self.G.add_edge('E', 5) - self.G.add_edge('E', 6) - self.G.add_edge('F', 6) + cls.G = nx.Graph() + cls.G.add_edge("A", 1) + cls.G.add_edge("A", 2) + cls.G.add_edge("B", 1) + cls.G.add_edge("B", 2) + cls.G.add_edge("B", 3) + cls.G.add_edge("B", 4) + cls.G.add_edge("B", 5) + cls.G.add_edge("C", 1) + cls.G.add_edge("D", 3) + cls.G.add_edge("E", 4) + cls.G.add_edge("E", 5) + cls.G.add_edge("E", 6) + cls.G.add_edge("F", 6) # Graph based on figure 6 from Newman (2001) - self.N = nx.Graph() - self.N.add_edge('A', 1) - self.N.add_edge('A', 2) - self.N.add_edge('A', 3) - self.N.add_edge('B', 1) - self.N.add_edge('B', 2) - self.N.add_edge('B', 3) - self.N.add_edge('C', 1) - self.N.add_edge('D', 1) - self.N.add_edge('E', 3) + cls.N = nx.Graph() + cls.N.add_edge("A", 1) + cls.N.add_edge("A", 2) + cls.N.add_edge("A", 3) + cls.N.add_edge("B", 1) + cls.N.add_edge("B", 2) + cls.N.add_edge("B", 3) + cls.N.add_edge("C", 1) + cls.N.add_edge("D", 1) + cls.N.add_edge("E", 3) def test_project_weighted_shared(self): - edges = [('A', 'B', 2), - ('A', 'C', 1), - ('B', 'C', 1), - ('B', 'D', 1), - ('B', 'E', 2), - ('E', 'F', 1)] + edges = [ + ("A", "B", 2), + ("A", "C", 1), + ("B", "C", 1), + ("B", "D", 1), + ("B", "E", 2), + ("E", "F", 1), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.G, 'ABCDEF') + P = bipartite.weighted_projected_graph(self.G, "ABCDEF") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) - - edges = [('A', 'B', 3), - ('A', 'E', 1), - ('A', 'C', 1), - ('A', 'D', 1), - ('B', 'E', 1), - ('B', 'C', 1), - ('B', 'D', 1), - ('C', 'D', 1)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3), + ("A", "E", 1), + ("A", "C", 1), + ("A", "D", 1), + ("B", "E", 1), + ("B", "C", 1), + ("B", "D", 1), + ("C", "D", 1), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.N, 'ABCDE') + P = bipartite.weighted_projected_graph(self.N, "ABCDE") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_newman(self): - edges = [('A', 'B', 1.5), - ('A', 'C', 0.5), - ('B', 'C', 0.5), - ('B', 'D', 1), - ('B', 'E', 2), - ('E', 'F', 1)] + edges = [ + ("A", "B", 1.5), + ("A", "C", 0.5), + ("B", "C", 0.5), + ("B", "D", 1), + ("B", "E", 2), + ("E", "F", 1), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.collaboration_weighted_projected_graph(self.G, 'ABCDEF') + P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) - - edges = [('A', 'B', 11 / 6.0), - ('A', 'E', 1 / 2.0), - ('A', 'C', 1 / 3.0), - ('A', 'D', 1 / 3.0), - ('B', 'E', 1 / 2.0), - ('B', 'C', 1 / 
3.0), - ('B', 'D', 1 / 3.0), - ('C', 'D', 1 / 3.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 11 / 6.0), + ("A", "E", 1 / 2.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 2.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 3.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.collaboration_weighted_projected_graph(self.N, 'ABCDE') + P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_ratio(self): - edges = [('A', 'B', 2 / 6.0), - ('A', 'C', 1 / 6.0), - ('B', 'C', 1 / 6.0), - ('B', 'D', 1 / 6.0), - ('B', 'E', 2 / 6.0), - ('E', 'F', 1 / 6.0)] + edges = [ + ("A", "B", 2 / 6.0), + ("A", "C", 1 / 6.0), + ("B", "C", 1 / 6.0), + ("B", "D", 1 / 6.0), + ("B", "E", 2 / 6.0), + ("E", "F", 1 / 6.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.G, 'ABCDEF', ratio=True) + P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) - - edges = [('A', 'B', 3 / 3.0), - ('A', 'E', 1 / 3.0), - ('A', 'C', 1 / 3.0), - ('A', 'D', 1 / 3.0), - ('B', 'E', 1 / 3.0), - ('B', 'C', 1 / 3.0), - ('B', 'D', 1 / 3.0), - ('C', 'D', 1 / 3.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 3.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 3.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 3.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.N, 'ABCDE', ratio=True) + P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_overlap(self): - edges = [('A', 'B', 2 / 2.0), - ('A', 'C', 1 / 1.0), - ('B', 'C', 1 / 1.0), - ('B', 'D', 1 / 1.0), - ('B', 'E', 2 / 3.0), - ('E', 'F', 1 / 1.0)] + edges = [ + ("A", "B", 2 / 2.0), + ("A", "C", 1 / 1.0), + ("B", "C", 1 / 1.0), + ("B", "D", 1 / 1.0), + ("B", "E", 2 / 3.0), + ("E", "F", 1 / 1.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.G, 'ABCDEF', jaccard=False) + P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) - - edges = [('A', 'B', 3 / 3.0), - ('A', 'E', 1 / 1.0), - ('A', 'C', 1 / 1.0), - ('A', 'D', 1 / 1.0), - ('B', 'E', 1 / 1.0), - ('B', 'C', 1 / 1.0), - ('B', 'D', 1 / 1.0), - ('C', 'D', 1 / 1.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 1.0), + ("A", "C", 1 / 1.0), + ("A", "D", 1 / 1.0), + ("B", "E", 1 / 1.0), + ("B", "C", 1 / 1.0), + ("B", "D", 1 / 1.0), + ("C", "D", 1 / 1.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.N, 'ABCDE', jaccard=False) + P = 
bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_jaccard(self): - edges = [('A', 'B', 2 / 5.0), - ('A', 'C', 1 / 2.0), - ('B', 'C', 1 / 5.0), - ('B', 'D', 1 / 5.0), - ('B', 'E', 2 / 6.0), - ('E', 'F', 1 / 3.0)] + edges = [ + ("A", "B", 2 / 5.0), + ("A", "C", 1 / 2.0), + ("B", "C", 1 / 5.0), + ("B", "D", 1 / 5.0), + ("B", "E", 2 / 6.0), + ("E", "F", 1 / 3.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.G, 'ABCDEF') + P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) - - edges = [('A', 'B', 3 / 3.0), - ('A', 'E', 1 / 3.0), - ('A', 'C', 1 / 3.0), - ('A', 'D', 1 / 3.0), - ('B', 'E', 1 / 3.0), - ('B', 'C', 1 / 3.0), - ('B', 'D', 1 / 3.0), - ('C', 'D', 1 / 1.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 3.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 3.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 1.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.N, 'ABCDE') + P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in P.edges(): - assert_equal(P[u][v]['weight'], Panswer[u][v]['weight']) + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_generic_weighted_projected_graph_simple(self): def shared(G, u, v): return len(set(G[u]) & set(G[v])) + B = nx.path_graph(5) - G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4], weight_function=shared) + G = bipartite.generic_weighted_projected_graph( + B, [0, 2, 4], weight_function=shared + ) assert_nodes_equal(list(G), [0, 2, 4]) - assert_edges_equal(list(list(G.edges(data=True))), - [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})]) + assert_edges_equal( + list(list(G.edges(data=True))), + [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})], + ) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) assert_nodes_equal(list(G), [0, 2, 4]) - assert_edges_equal(list(list(G.edges(data=True))), - [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})]) + assert_edges_equal( + list(list(G.edges(data=True))), + [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})], + ) B = nx.DiGraph() nx.add_path(B, range(5)) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) assert_nodes_equal(list(G), [0, 2, 4]) - assert_edges_equal(list(G.edges(data=True)), - [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})]) + assert_edges_equal( + list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})] + ) def test_generic_weighted_projected_graph_custom(self): def jaccard(G, u, v): @@ -346,19 +371,22 @@ def jaccard(G, u, v): vnbrs = set(G[v]) return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) - def my_weight(G, u, v, weight='weight'): + def my_weight(G, u, v, weight="weight"): w = 0 for nbr in set(G[u]) & set(G[v]): w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1) return w + B = nx.bipartite.complete_bipartite_graph(2, 2) for i, (u, v) in enumerate(B.edges()): - B.edges[u, v]['weight'] = i + 1 - G = 
bipartite.generic_weighted_projected_graph(B, [0, 1], - weight_function=jaccard) - assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 1.0})]) - G = bipartite.generic_weighted_projected_graph(B, [0, 1], - weight_function=my_weight) - assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 10})]) + B.edges[u, v]["weight"] = i + 1 + G = bipartite.generic_weighted_projected_graph( + B, [0, 1], weight_function=jaccard + ) + assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})]) + G = bipartite.generic_weighted_projected_graph( + B, [0, 1], weight_function=my_weight + ) + assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})]) G = bipartite.generic_weighted_projected_graph(B, [0, 1]) - assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 2})]) + assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})]) diff --git a/networkx/algorithms/bipartite/tests/test_redundancy.py b/networkx/algorithms/bipartite/tests/test_redundancy.py index c75dc7d..cabc602 100644 --- a/networkx/algorithms/bipartite/tests/test_redundancy.py +++ b/networkx/algorithms/bipartite/tests/test_redundancy.py @@ -1,19 +1,8 @@ -# test_redundancy.py - unit tests for the bipartite.redundancy module -# -# Copyright 2015 Jeffrey Finkelstein . -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.bipartite.redundancy` module. """ -from __future__ import division -from nose.tools import assert_equal -from nose.tools import assert_true -from nose.tools import raises +import pytest from networkx import cycle_graph from networkx import NetworkXError @@ -24,7 +13,7 @@ def test_no_redundant_nodes(): G = complete_bipartite_graph(2, 2) rc = node_redundancy(G) - assert_true(all(redundancy == 1 for redundancy in rc.values())) + assert all(redundancy == 1 for redundancy in rc.values()) def test_redundant_nodes(): @@ -33,12 +22,12 @@ def test_redundant_nodes(): G.add_edge(*edge) redundancy = node_redundancy(G) for v in edge: - assert_equal(redundancy[v], 2 / 3) + assert redundancy[v] == 2 / 3 for v in set(G) - edge: - assert_equal(redundancy[v], 1) + assert redundancy[v] == 1 -@raises(NetworkXError) def test_not_enough_neighbors(): - G = complete_bipartite_graph(1, 2) - node_redundancy(G) + with pytest.raises(NetworkXError): + G = complete_bipartite_graph(1, 2) + node_redundancy(G) diff --git a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py index fa4203b..3c85e18 100644 --- a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py +++ b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py @@ -1,78 +1,73 @@ -# -*- coding: utf-8 -*- -from nose import SkipTest -from nose.tools import * +import pytest + import networkx as nx from networkx.algorithms.bipartite import spectral_bipartivity as sb +from networkx.testing import almost_equal # Examples from Figure 1 # E. Estrada and J. A. 
Rodríguez-Velázquez, "Spectral measures of # bipartivity in complex networks", PhysRev E 72, 046105 (2005) -class TestSpectralBipartivity(object): +class TestSpectralBipartivity: @classmethod - def setupClass(cls): + def setup_class(cls): global scipy - global assert_equal - global assert_almost_equal - try: - import scipy.linalg - except ImportError: - raise SkipTest('SciPy not available.') + scipy = pytest.importorskip("scipy") def test_star_like(self): # star-like G = nx.star_graph(2) G.add_edge(1, 2) - assert_almost_equal(sb(G), 0.843, places=3) + assert almost_equal(sb(G), 0.843, places=3) G = nx.star_graph(3) G.add_edge(1, 2) - assert_almost_equal(sb(G), 0.871, places=3) + assert almost_equal(sb(G), 0.871, places=3) G = nx.star_graph(4) G.add_edge(1, 2) - assert_almost_equal(sb(G), 0.890, places=3) + assert almost_equal(sb(G), 0.890, places=3) - def k23_like(self): + def test_k23_like(self): # K2,3-like G = nx.complete_bipartite_graph(2, 3) G.add_edge(0, 1) - assert_almost_equal(sb(G), 0.769, places=3) + assert almost_equal(sb(G), 0.769, places=3) G = nx.complete_bipartite_graph(2, 3) G.add_edge(2, 4) - assert_almost_equal(sb(G), 0.829, places=3) + assert almost_equal(sb(G), 0.829, places=3) G = nx.complete_bipartite_graph(2, 3) G.add_edge(2, 4) G.add_edge(3, 4) - assert_almost_equal(sb(G), 0.731, places=3) + assert almost_equal(sb(G), 0.731, places=3) G = nx.complete_bipartite_graph(2, 3) G.add_edge(0, 1) G.add_edge(2, 4) - assert_almost_equal(sb(G), 0.692, places=3) + assert almost_equal(sb(G), 0.692, places=3) G = nx.complete_bipartite_graph(2, 3) G.add_edge(2, 4) G.add_edge(3, 4) G.add_edge(0, 1) - assert_almost_equal(sb(G), 0.645, places=3) + assert almost_equal(sb(G), 0.645, places=3) G = nx.complete_bipartite_graph(2, 3) G.add_edge(2, 4) G.add_edge(3, 4) G.add_edge(2, 3) - assert_almost_equal(sb(G), 0.645, places=3) + assert almost_equal(sb(G), 0.645, places=3) G = nx.complete_bipartite_graph(2, 3) G.add_edge(2, 4) G.add_edge(3, 4) G.add_edge(2, 3) G.add_edge(0, 1) - assert_almost_equal(sb(G), 0.597, places=3) + assert almost_equal(sb(G), 0.597, places=3) def test_single_nodes(self): @@ -80,11 +75,11 @@ def test_single_nodes(self): G = nx.complete_bipartite_graph(2, 3) G.add_edge(2, 4) sbn = sb(G, nodes=[1, 2]) - assert_almost_equal(sbn[1], 0.85, places=2) - assert_almost_equal(sbn[2], 0.77, places=2) + assert almost_equal(sbn[1], 0.85, places=2) + assert almost_equal(sbn[2], 0.77, places=2) G = nx.complete_bipartite_graph(2, 3) G.add_edge(0, 1) sbn = sb(G, nodes=[1, 2]) - assert_almost_equal(sbn[1], 0.73, places=2) - assert_almost_equal(sbn[2], 0.82, places=2) + assert almost_equal(sbn[1], 0.73, places=2) + assert almost_equal(sbn[2], 0.82, places=2) diff --git a/networkx/algorithms/boundary.py b/networkx/algorithms/boundary.py index e245bf2..bff8804 100644 --- a/networkx/algorithms/boundary.py +++ b/networkx/algorithms/boundary.py @@ -1,10 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Copyright 2015 NetworkX developers. -# All rights reserved. -# BSD license. """Routines to find the boundary of a set of nodes. 
An edge boundary is a set of edges, each of which has exactly one @@ -17,13 +10,10 @@ """ from itertools import chain -__author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult (dschult@colgate.edu)""" +__all__ = ["edge_boundary", "node_boundary"] -__all__ = ['edge_boundary', 'node_boundary'] - -def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, - default=None): +def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): """Returns the edge boundary of `nbunch1`. The *edge boundary* of a set *S* with respect to a set *T* is the @@ -92,9 +82,11 @@ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, if nbunch2 is None: return (e for e in edges if (e[0] in nset1) ^ (e[1] in nset1)) nset2 = set(nbunch2) - return (e for e in edges - if (e[0] in nset1 and e[1] in nset2) - or (e[1] in nset1 and e[0] in nset2)) + return ( + e + for e in edges + if (e[0] in nset1 and e[1] in nset2) or (e[1] in nset1 and e[0] in nset2) + ) def node_boundary(G, nbunch1, nbunch2=None): diff --git a/networkx/algorithms/bridges.py b/networkx/algorithms/bridges.py index c890473..5788e8f 100644 --- a/networkx/algorithms/bridges.py +++ b/networkx/algorithms/bridges.py @@ -1,23 +1,14 @@ -# -*- coding: utf-8 -*- -# bridges.py - bridge-finding algorithms -# -# Copyright 2004-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Bridge-finding algorithms.""" from itertools import chain import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['bridges', 'has_bridges', 'local_bridges'] +__all__ = ["bridges", "has_bridges", "local_bridges"] -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def bridges(G, root=None): """Generate all bridges in a graph. @@ -55,8 +46,8 @@ def bridges(G, root=None): Notes ----- This is an implementation of the algorithm described in _[1]. An edge is a - bridge iff it is not contained in any chain. Chains are found using the - :func:`networkx.chain_decomposition` function. + bridge if and only if it is not contained in any chain. Chains are found + using the :func:`networkx.chain_decomposition` function. Ignoring polylogarithmic factors, the worst-case time complexity is the same as the :func:`networkx.chain_decomposition` function, @@ -74,8 +65,8 @@ def bridges(G, root=None): yield u, v -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def has_bridges(G, root=None): """Decide whether a graph has any bridges. 
@@ -131,8 +122,8 @@ def has_bridges(G, root=None): return True -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def local_bridges(G, with_span=True, weight=None): """Iterate over local bridges of `G` optionally computing the span @@ -187,4 +178,4 @@ def hide_edge(n, nbr, d): span = nx.shortest_path_length(G, u, v, weight=hide_edge) yield u, v, span except nx.NetworkXNoPath: - yield u, v, float('inf') + yield u, v, float("inf") diff --git a/networkx/algorithms/centrality/__init__.py b/networkx/algorithms/centrality/__init__.py index 39facf3..c15304c 100644 --- a/networkx/algorithms/centrality/__init__.py +++ b/networkx/algorithms/centrality/__init__.py @@ -8,7 +8,12 @@ from .degree_alg import * from .dispersion import * from .eigenvector import * +from .group import * from .harmonic import * from .katz import * from .load import * from .reaching import * +from .percolation import * +from .second_order import * +from .trophic import * +from .voterank_alg import * diff --git a/networkx/algorithms/centrality/betweenness.py b/networkx/algorithms/centrality/betweenness.py index e2b1604..0829d9b 100644 --- a/networkx/algorithms/centrality/betweenness.py +++ b/networkx/algorithms/centrality/betweenness.py @@ -1,25 +1,19 @@ -# coding=utf8 -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """Betweenness centrality measures.""" from heapq import heappush, heappop from itertools import count -import random +import warnings -import networkx as nx +from networkx.utils import py_random_state +from networkx.utils.decorators import not_implemented_for -__all__ = ['betweenness_centrality', 'edge_betweenness_centrality', - 'edge_betweenness'] +__all__ = ["betweenness_centrality", "edge_betweenness_centrality", "edge_betweenness"] -def betweenness_centrality(G, k=None, normalized=True, weight=None, - endpoints=False, seed=None): +@py_random_state(5) +@not_implemented_for("multigraph") +def betweenness_centrality( + G, k=None, normalized=True, weight=None, endpoints=False, seed=None +): r"""Compute the shortest-path betweenness centrality for nodes. Betweenness centrality of a node $v$ is the sum of the @@ -57,6 +51,11 @@ def betweenness_centrality(G, k=None, normalized=True, weight=None, endpoints : bool, optional If True include the endpoints in the shortest path counts. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Note that this is only used if k is not None. + Returns ------- nodes : dictionary @@ -81,6 +80,22 @@ def betweenness_centrality(G, k=None, normalized=True, weight=None, Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. + The total number of paths between source and target is counted + differently for directed and undirected graphs. Directed paths + are easy to count. Undirected paths are tricky: should a path + from "u" to "v" count as 1 undirected path or as 2 directed paths? + + For betweenness_centrality we report the number of undirected + paths when G is undirected. + + For betweenness_centrality_subset the reporting is different. + If the source and target subsets are the same, then we want + to count undirected paths. 
But if the source and target subsets + differ -- for example, if sources is {0} and targets is {1}, + then we are only counting the paths in one direction. They are + undirected paths but we are counting them in a directed way. + To count them as undirected paths, each should count as half a path. + References ---------- .. [1] Ulrik Brandes: @@ -105,8 +120,7 @@ def betweenness_centrality(G, k=None, normalized=True, weight=None, if k is None: nodes = G else: - random.seed(seed) - nodes = random.sample(G.nodes(), k) + nodes = seed.sample(G.nodes(), k) for s in nodes: # single source shortest paths if weight is None: # use BFS @@ -119,13 +133,19 @@ def betweenness_centrality(G, k=None, normalized=True, weight=None, else: betweenness = _accumulate_basic(betweenness, S, P, sigma, s) # rescaling - betweenness = _rescale(betweenness, len(G), normalized=normalized, - directed=G.is_directed(), k=k) + betweenness = _rescale( + betweenness, + len(G), + normalized=normalized, + directed=G.is_directed(), + k=k, + endpoints=endpoints, + ) return betweenness -def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, - seed=None): +@py_random_state(4) +def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None): r"""Compute betweenness centrality for edges. Betweenness centrality of an edge $e$ is the sum of the @@ -158,6 +178,11 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, If None, all edge weights are considered equal. Otherwise holds the name of the edge attribute used as weight. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Note that this is only used if k is not None. + Returns ------- edges : dictionary @@ -192,8 +217,7 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, if k is None: nodes = G else: - random.seed(seed) - nodes = random.sample(G.nodes(), k) + nodes = seed.sample(G.nodes(), k) for s in nodes: # single source shortest paths if weight is None: # use BFS @@ -205,30 +229,34 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, # rescaling for n in G: # remove nodes to only return edges del betweenness[n] - betweenness = _rescale_e(betweenness, len(G), normalized=normalized, - directed=G.is_directed()) + betweenness = _rescale_e( + betweenness, len(G), normalized=normalized, directed=G.is_directed() + ) return betweenness -# obsolete name - +# obsolete name def edge_betweenness(G, k=None, normalized=True, weight=None, seed=None): + warnings.warn( + "edge_betweeness is replaced by edge_betweenness_centrality", DeprecationWarning + ) return edge_betweenness_centrality(G, k, normalized, weight, seed) # helpers for betweenness centrality + def _single_source_shortest_path_basic(G, s): S = [] P = {} for v in G: P[v] = [] - sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G + sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G D = {} sigma[s] = 1.0 D[s] = 0 Q = [s] - while Q: # use BFS to find shortest paths + while Q: # use BFS to find shortest paths v = Q.pop(0) S.append(v) Dv = D[v] @@ -237,7 +265,7 @@ def _single_source_shortest_path_basic(G, s): if w not in D: Q.append(w) D[w] = Dv + 1 - if D[w] == Dv + 1: # this is a shortest path, count paths + if D[w] == Dv + 1: # this is a shortest path, count paths sigma[w] += sigmav P[w].append(v) # predecessors return S, P, sigma @@ -249,14 +277,14 @@ def _single_source_dijkstra_path_basic(G, s, weight): P = {} for v in G: P[v] = [] - sigma = 
dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G + sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G D = {} sigma[s] = 1.0 push = heappush pop = heappop seen = {s: 0} c = count() - Q = [] # use Q as heap with (distance,node id) tuples + Q = [] # use Q as heap with (distance,node id) tuples push(Q, (0, next(c), s, s)) while Q: (dist, _, pred, v) = pop(Q) @@ -282,7 +310,7 @@ def _accumulate_basic(betweenness, S, P, sigma, s): delta = dict.fromkeys(S, 0) while S: w = S.pop() - coeff = (1.0 + delta[w]) / sigma[w] + coeff = (1 + delta[w]) / sigma[w] for v in P[w]: delta[v] += sigma[v] * coeff if w != s: @@ -295,7 +323,7 @@ def _accumulate_endpoints(betweenness, S, P, sigma, s): delta = dict.fromkeys(S, 0) while S: w = S.pop() - coeff = (1.0 + delta[w]) / sigma[w] + coeff = (1 + delta[w]) / sigma[w] for v in P[w]: delta[v] += sigma[v] * coeff if w != s: @@ -307,7 +335,7 @@ def _accumulate_edges(betweenness, S, P, sigma, s): delta = dict.fromkeys(S, 0) while S: w = S.pop() - coeff = (1.0 + delta[w]) / sigma[w] + coeff = (1 + delta[w]) / sigma[w] for v in P[w]: c = sigma[v] * coeff if (v, w) not in betweenness: @@ -320,12 +348,18 @@ def _accumulate_edges(betweenness, S, P, sigma, s): return betweenness -def _rescale(betweenness, n, normalized, directed=False, k=None): +def _rescale(betweenness, n, normalized, directed=False, k=None, endpoints=False): if normalized: - if n <= 2: + if endpoints: + if n < 2: + scale = None # no normalization + else: + # Scale factor should include endpoint nodes + scale = 1 / (n * (n - 1)) + elif n <= 2: scale = None # no normalization b=0 for all nodes else: - scale = 1.0 / ((n - 1) * (n - 2)) + scale = 1 / ((n - 1) * (n - 2)) else: # rescale by 2 for undirected graphs if not directed: scale = 0.5 @@ -344,7 +378,7 @@ def _rescale_e(betweenness, n, normalized, directed=False, k=None): if n <= 1: scale = None # no normalization b=0 for all nodes else: - scale = 1.0 / (n * (n - 1)) + scale = 1 / (n * (n - 1)) else: # rescale by 2 for undirected graphs if not directed: scale = 0.5 diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py index c85e2df..70d8eec 100644 --- a/networkx/algorithms/centrality/betweenness_subset.py +++ b/networkx/algorithms/centrality/betweenness_subset.py @@ -1,25 +1,21 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """Betweenness centrality measures for subsets of nodes.""" -import networkx as nx +import warnings -from networkx.algorithms.centrality.betweenness import\ - _single_source_dijkstra_path_basic as dijkstra -from networkx.algorithms.centrality.betweenness import\ - _single_source_shortest_path_basic as shortest_path +from networkx.algorithms.centrality.betweenness import ( + _single_source_dijkstra_path_basic as dijkstra, +) +from networkx.algorithms.centrality.betweenness import ( + _single_source_shortest_path_basic as shortest_path, +) -__all__ = ['betweenness_centrality_subset', 'betweenness_centrality_source', - 'edge_betweenness_centrality_subset'] +__all__ = [ + "betweenness_centrality_subset", + "betweenness_centrality_source", + "edge_betweenness_centrality_subset", +] -def betweenness_centrality_subset(G, sources, targets, normalized=False, - weight=None): +def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None): r"""Compute betweenness centrality for a subset of nodes. .. 
math:: @@ -72,11 +68,25 @@ def betweenness_centrality_subset(G, sources, targets, normalized=False, Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. - The normalization might seem a little strange but it is the same - as in betweenness_centrality() and is designed to make - betweenness_centrality(G) be the same as + The normalization might seem a little strange but it is + designed to make betweenness_centrality(G) be the same as betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()). + The total number of paths between source and target is counted + differently for directed and undirected graphs. Directed paths + are easy to count. Undirected paths are tricky: should a path + from "u" to "v" count as 1 undirected path or as 2 directed paths? + + For betweenness_centrality we report the number of undirected + paths when G is undirected. + + For betweenness_centrality_subset the reporting is different. + If the source and target subsets are the same, then we want + to count undirected paths. But if the source and target subsets + differ -- for example, if sources is {0} and targets is {1}, + then we are only counting the paths in one direction. They are + undirected paths but we are counting them in a directed way. + To count them as undirected paths, each should count as half a path. References ---------- @@ -100,8 +110,9 @@ def betweenness_centrality_subset(G, sources, targets, normalized=False, return b -def edge_betweenness_centrality_subset(G, sources, targets, normalized=False, - weight=None): +def edge_betweenness_centrality_subset( + G, sources, targets, normalized=False, weight=None +): r"""Compute betweenness centrality for edges for a subset of nodes. .. math:: @@ -182,25 +193,26 @@ def edge_betweenness_centrality_subset(G, sources, targets, normalized=False, # obsolete name -def betweenness_centrality_source(G, normalized=True, weight=None, - sources=None): +def betweenness_centrality_source(G, normalized=True, weight=None, sources=None): + msg = "betweenness_centrality_source --> betweenness_centrality_subset" + warnings.warn(msg, DeprecationWarning) if sources is None: sources = G.nodes() targets = list(G) - return betweenness_centrality_subset(G, sources, targets, normalized, - weight) + return betweenness_centrality_subset(G, sources, targets, normalized, weight) def _accumulate_subset(betweenness, S, P, sigma, s, targets): - delta = dict.fromkeys(S, 0) - target_set = set(targets) + delta = dict.fromkeys(S, 0.0) + target_set = set(targets) - {s} while S: w = S.pop() + if w in target_set: + coeff = (delta[w] + 1.0) / sigma[w] + else: + coeff = delta[w] / sigma[w] for v in P[w]: - if w in target_set: - delta[v] += (sigma[v] / sigma[w]) * (1.0 + delta[w]) - else: - delta[v] += delta[w] / len(P[w]) + delta[v] += sigma[v] * coeff if w != s: betweenness[w] += delta[w] return betweenness diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py index 0bae120..dd84268 100644 --- a/networkx/algorithms/centrality/closeness.py +++ b/networkx/algorithms/centrality/closeness.py @@ -1,25 +1,15 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg -# Pieter Swart -# Sasha Gutfraind -# Dan Schult """ Closeness centrality measures. 
""" import functools import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils.decorators import not_implemented_for -__all__ = ['closeness_centrality'] +__all__ = ["closeness_centrality", "incremental_closeness_centrality"] -def closeness_centrality(G, u=None, distance=None, - wf_improved=True, reverse=False): +def closeness_centrality(G, u=None, distance=None, wf_improved=True): r"""Compute closeness centrality for nodes. Closeness centrality [1]_ of a node `u` is the reciprocal of the @@ -30,7 +20,9 @@ def closeness_centrality(G, u=None, distance=None, C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)}, where `d(v, u)` is the shortest-path distance between `v` and `u`, - and `n` is the number of nodes that can reach `u`. + and `n` is the number of nodes that can reach `u`. Notice that the + closeness distance function computes the incoming distance to `u` + for directed graphs. To use outward distance, act on `G.reverse()`. Notice that higher values of closeness indicate higher centrality. @@ -61,11 +53,7 @@ def closeness_centrality(G, u=None, distance=None, wf_improved : bool, optional (default=True) If True, scale by the fraction of nodes reachable. This gives the Wasserman and Faust improved formula. For single component graphs - it is the same as the original formula. - - reverse : bool, optional (default=False) - If True and G is a digraph, reverse the edges of G, using successors - instead of predecessors. + it is the same as the original formula. Returns ------- @@ -75,7 +63,7 @@ def closeness_centrality(G, u=None, distance=None, See Also -------- betweenness_centrality, load_centrality, eigenvector_centrality, - degree_centrality + degree_centrality, incremental_closeness_centrality Notes ----- @@ -89,6 +77,13 @@ def closeness_centrality(G, u=None, distance=None, shortest-path length will be computed using Dijkstra's algorithm with that edge attribute as the edge weight. + The closeness centrality uses *inward* distance to a node, not outward. + If you want to use outword distances apply the function to `G.reverse()` + + In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the + outward distance rather than the inward distance. If you use a 'distance' + keyword and a DiGraph, your results will change between v2.2 and v2.3. + References ---------- .. [1] Linton C. Freeman: Centrality in networks: I. @@ -98,33 +93,179 @@ def closeness_centrality(G, u=None, distance=None, Social Network Analysis: Methods and Applications, 1994, Cambridge University Press. 
""" + if G.is_directed(): + G = G.reverse() # create a reversed graph view + if distance is not None: # use Dijkstra's algorithm with specified attribute as edge weight - path_length = functools.partial(nx.single_source_dijkstra_path_length, - weight=distance) - else: # handle either directed or undirected - if G.is_directed() and not reverse: - path_length = nx.single_target_shortest_path_length - else: - path_length = nx.single_source_shortest_path_length + path_length = functools.partial( + nx.single_source_dijkstra_path_length, weight=distance + ) + else: + path_length = nx.single_source_shortest_path_length if u is None: - nodes = G.nodes() + nodes = G.nodes else: nodes = [u] closeness_centrality = {} for n in nodes: - sp = dict(path_length(G, n)) + sp = path_length(G, n) totsp = sum(sp.values()) - if totsp > 0.0 and len(G) > 1: - closeness_centrality[n] = (len(sp) - 1.0) / totsp + len_G = len(G) + _closeness_centrality = 0.0 + if totsp > 0.0 and len_G > 1: + _closeness_centrality = (len(sp) - 1.0) / totsp # normalize to number of nodes-1 in connected part if wf_improved: - s = (len(sp) - 1.0) / (len(G) - 1) - closeness_centrality[n] *= s - else: - closeness_centrality[n] = 0.0 + s = (len(sp) - 1.0) / (len_G - 1) + _closeness_centrality *= s + closeness_centrality[n] = _closeness_centrality if u is not None: return closeness_centrality[u] else: return closeness_centrality + + +@not_implemented_for("directed") +def incremental_closeness_centrality( + G, edge, prev_cc=None, insertion=True, wf_improved=True +): + r"""Incremental closeness centrality for nodes. + + Compute closeness centrality for nodes using level-based work filtering + as described in Incremental Algorithms for Closeness Centrality by Sariyuce et al. + + Level-based work filtering detects unnecessary updates to the closeness + centrality and filters them out. + + --- + From "Incremental Algorithms for Closeness Centrality": + + Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices in V + such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)` + Then :math:`cc[s] = cc'[s]` if and only if :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. + + Where :math:`dG(u, v)` denotes the length of the shortest path between + two vertices u, v in a graph G, cc[s] is the closeness centrality for a + vertex s in V, and cc'[s] is the closeness centrality for a + vertex s in V, with the (u, v) edge added. + --- + + We use Theorem 1 to filter out updates when adding or removing an edge. + When adding an edge (u, v), we compute the shortest path lengths from all + other nodes to u and to v before the node is added. When removing an edge, + we compute the shortest path lengths after the edge is removed. Then we + apply Theorem 1 to use previously computed closeness centrality for nodes + where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for + undirected, unweighted graphs; the distance argument is not supported. + + Closeness centrality [1]_ of a node `u` is the reciprocal of the + sum of the shortest path distances from `u` to all `n-1` other nodes. + Since the sum of distances depends on the number of nodes in the + graph, closeness is normalized by the sum of minimum possible + distances `n-1`. + + .. math:: + + C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)}, + + where `d(v, u)` is the shortest-path distance between `v` and `u`, + and `n` is the number of nodes in the graph. + + Notice that higher values of closeness indicate higher centrality. 
+ + Parameters + ---------- + G : graph + A NetworkX graph + + edge : tuple + The modified edge (u, v) in the graph. + + prev_cc : dictionary + The previous closeness centrality for all nodes in the graph. + + insertion : bool, optional + If True (default) the edge was inserted, otherwise it was deleted from the graph. + + wf_improved : bool, optional (default=True) + If True, scale by the fraction of nodes reachable. This gives the + Wasserman and Faust improved formula. For single component graphs + it is the same as the original formula. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with closeness centrality as the value. + + See Also + -------- + betweenness_centrality, load_centrality, eigenvector_centrality, + degree_centrality, closeness_centrality + + Notes + ----- + The closeness centrality is normalized to `(n-1)/(|G|-1)` where + `n` is the number of nodes in the connected part of graph + containing the node. If the graph is not completely connected, + this algorithm computes the closeness centrality for each + connected part separately. + + References + ---------- + .. [1] Freeman, L.C., 1979. Centrality in networks: I. + Conceptual clarification. Social Networks 1, 215--239. + http://www.soc.ucsb.edu/faculty/friedkin/Syllabi/Soc146/Freeman78.PDF + .. [2] Sariyuce, A.E. ; Kaya, K. ; Saule, E. ; Catalyiirek, U.V. Incremental + Algorithms for Closeness Centrality. 2013 IEEE International Conference on Big Data + http://sariyuce.com/papers/bigdata13.pdf + """ + if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()): + raise NetworkXError("prev_cc and G do not have the same nodes") + + # Unpack edge + (u, v) = edge + path_length = nx.single_source_shortest_path_length + + if insertion: + # For edge insertion, we want shortest paths before the edge is inserted + du = path_length(G, u) + dv = path_length(G, v) + + G.add_edge(u, v) + else: + G.remove_edge(u, v) + + # For edge removal, we want shortest paths after the edge is removed + du = path_length(G, u) + dv = path_length(G, v) + + if prev_cc is None: + return nx.closeness_centrality(G) + + nodes = G.nodes() + closeness_centrality = {} + for n in nodes: + if n in du and n in dv and abs(du[n] - dv[n]) <= 1: + closeness_centrality[n] = prev_cc[n] + else: + sp = path_length(G, n) + totsp = sum(sp.values()) + len_G = len(G) + _closeness_centrality = 0.0 + if totsp > 0.0 and len_G > 1: + _closeness_centrality = (len(sp) - 1.0) / totsp + # normalize to number of nodes-1 in connected part + if wf_improved: + s = (len(sp) - 1.0) / (len_G - 1) + _closeness_centrality *= s + closeness_centrality[n] = _closeness_centrality + + # Leave the graph as we found it + if insertion: + G.remove_edge(u, v) + else: + G.add_edge(u, v) + + return closeness_centrality diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py index 0429a4f..106ceb4 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness.py +++ b/networkx/algorithms/centrality/current_flow_betweenness.py @@ -1,28 +1,37 @@ -# Copyright (C) 2010-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Author: Aric Hagberg (hagberg@lanl.gov) """Current-flow betweenness centrality measures.""" -import random - import networkx as nx -from networkx.algorithms.centrality.flow_matrix import * -from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering - -__all__ = ['current_flow_betweenness_centrality', - 'approximate_current_flow_betweenness_centrality', - 'edge_current_flow_betweenness_centrality'] - - -@not_implemented_for('directed') -def approximate_current_flow_betweenness_centrality(G, normalized=True, - weight=None, - dtype=float, solver='full', - epsilon=0.5, kmax=10000): +from networkx.algorithms.centrality.flow_matrix import ( + CGInverseLaplacian, + flow_matrix_row, + FullInverseLaplacian, + laplacian_sparse_matrix, + SuperLUInverseLaplacian, +) +from networkx.utils import ( + not_implemented_for, + reverse_cuthill_mckee_ordering, + py_random_state, +) + +__all__ = [ + "current_flow_betweenness_centrality", + "approximate_current_flow_betweenness_centrality", + "edge_current_flow_betweenness_centrality", +] + + +@py_random_state(7) +@not_implemented_for("directed") +def approximate_current_flow_betweenness_centrality( + G, + normalized=True, + weight=None, + dtype=float, + solver="full", + epsilon=0.5, + kmax=10000, + seed=None, +): r"""Compute the approximate current-flow betweenness centrality for nodes. Approximates the current-flow betweenness centrality within absolute @@ -46,7 +55,7 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True, Default data type for internal matrices. Set to np.float32 for lower memory consumption. - solver : string (default='lu') + solver : string (default='full') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). @@ -57,6 +66,10 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True, kmax: int Maximum number of sample node pairs to use for approximation. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
+ Returns ------- nodes : dictionary @@ -84,39 +97,37 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True, """ try: import numpy as np - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires NumPy ', - 'http://scipy.org/') - try: - from scipy import sparse - from scipy.sparse import linalg - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires SciPy ', - 'http://scipy.org/') + except ImportError as e: + raise ImportError( + "current_flow_betweenness_centrality requires NumPy " "http://numpy.org/" + ) from e if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - solvername = {"full": FullInverseLaplacian, - "lu": SuperLUInverseLaplacian, - "cg": CGInverseLaplacian} + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) - L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight, - dtype=dtype, format='csc') + L = laplacian_sparse_matrix( + H, nodelist=range(n), weight=weight, dtype=dtype, format="csc" + ) C = solvername[solver](L, dtype=dtype) # initialize solver betweenness = dict.fromkeys(H, 0.0) nb = (n - 1.0) * (n - 2.0) # normalization factor cstar = n * (n - 1) / nb l = 1 # parameter in approximation, adjustable - k = l * int(np.ceil((cstar / epsilon)**2 * np.log(n))) + k = l * int(np.ceil((cstar / epsilon) ** 2 * np.log(n))) if k > kmax: - raise nx.NetworkXError('Number random pairs k>kmax (%d>%d) ' % (k, kmax), - 'Increase kmax or epsilon') + msg = f"Number random pairs k>kmax ({k}>{kmax}) " + raise nx.NetworkXError(msg, "Increase kmax or epsilon") cstar2k = cstar / (2 * k) for i in range(k): - s, t = random.sample(range(n), 2) + s, t = seed.sample(range(n), 2) b = np.zeros(n, dtype=dtype) b[s] = 1 b[t] = -1 @@ -132,12 +143,13 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True, else: factor = nb / 2.0 # remap to original node names and "unnormalize" if required - return dict((ordering[k], float(v * factor)) for k, v in betweenness.items()) + return {ordering[k]: float(v * factor) for k, v in betweenness.items()} -@not_implemented_for('directed') -def current_flow_betweenness_centrality(G, normalized=True, weight=None, - dtype=float, solver='full'): +@not_implemented_for("directed") +def current_flow_betweenness_centrality( + G, normalized=True, weight=None, dtype=float, solver="full" +): r"""Compute current-flow betweenness centrality for nodes. Current-flow betweenness centrality uses an electrical current @@ -164,7 +176,7 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None, Default data type for internal matrices. Set to np.float32 for lower memory consumption. - solver : string (default='lu') + solver : string (default='full') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). @@ -206,16 +218,6 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None, .. [2] A measure of betweenness centrality based on random walks, M. E. J. Newman, Social Networks 27, 39-54 (2005). 
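+
+    Examples
+    --------
+    A minimal usage sketch (SciPy is assumed to be installed; the graph
+    must be connected and undirected):
+
+    >>> G = nx.grid_2d_graph(3, 3)
+    >>> cfb = nx.current_flow_betweenness_centrality(G, solver="lu")
+    >>> spb = nx.betweenness_centrality(G)  # shortest-path counterpart, for comparison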
""" - try: - import numpy as np - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires NumPy ', - 'http://scipy.org/') - try: - import scipy - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires SciPy ', - 'http://scipy.org/') if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -224,8 +226,7 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None, # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H - for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): pos = dict(zip(row.argsort()[::-1], range(n))) for i in range(n): betweenness[s] += (i - pos[i]) * row[i] @@ -236,14 +237,14 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None, nb = 2.0 for v in H: betweenness[v] = float((betweenness[v] - v) * 2.0 / nb) - return dict((ordering[k], v) for k, v in betweenness.items()) + return {ordering[k]: v for k, v in betweenness.items()} -@not_implemented_for('directed') -def edge_current_flow_betweenness_centrality(G, normalized=True, - weight=None, - dtype=float, solver='full'): - """Compute current-flow betweenness centrality for edges. +@not_implemented_for("directed") +def edge_current_flow_betweenness_centrality( + G, normalized=True, weight=None, dtype=float, solver="full" +): + r"""Compute current-flow betweenness centrality for edges. Current-flow betweenness centrality uses an electrical current model for information spreading in contrast to betweenness @@ -269,7 +270,7 @@ def edge_current_flow_betweenness_centrality(G, normalized=True, Default data type for internal matrices. Set to np.float32 for lower memory consumption. - solver : string (default='lu') + solver : string (default='full') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). @@ -318,16 +319,7 @@ def edge_current_flow_betweenness_centrality(G, normalized=True, M. E. J. Newman, Social Networks 27, 39-54 (2005). 
""" from networkx.utils import reverse_cuthill_mckee_ordering - try: - import numpy as np - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires NumPy ', - 'http://scipy.org/') - try: - import scipy - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires SciPy ', - 'http://scipy.org/') + if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -341,22 +333,10 @@ def edge_current_flow_betweenness_centrality(G, normalized=True, nb = (n - 1.0) * (n - 2.0) # normalization factor else: nb = 2.0 - for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): pos = dict(zip(row.argsort()[::-1], range(1, n + 1))) for i in range(n): betweenness[e] += (i + 1 - pos[i]) * row[i] betweenness[e] += (n - i - pos[i]) * row[i] betweenness[e] /= nb - return dict(((ordering[s], ordering[t]), float(v)) - for (s, t), v in betweenness.items()) - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - import scipy - except: - raise SkipTest("NumPy not available") + return {(ordering[s], ordering[t]): float(v) for (s, t), v in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/networkx/algorithms/centrality/current_flow_betweenness_subset.py index 3026652..a928607 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness_subset.py +++ b/networkx/algorithms/centrality/current_flow_betweenness_subset.py @@ -1,27 +1,18 @@ -# Copyright (C) 2010-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """Current-flow betweenness centrality measures for subsets of nodes.""" -import itertools - import networkx as nx -from networkx.algorithms.centrality.flow_matrix import * +from networkx.algorithms.centrality.flow_matrix import flow_matrix_row from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering -__all__ = ['current_flow_betweenness_centrality_subset', - 'edge_current_flow_betweenness_centrality_subset'] +__all__ = [ + "current_flow_betweenness_centrality_subset", + "edge_current_flow_betweenness_centrality_subset", +] -@not_implemented_for('directed') -def current_flow_betweenness_centrality_subset(G, sources, targets, - normalized=True, - weight=None, - dtype=float, solver='lu'): +@not_implemented_for("directed") +def current_flow_betweenness_centrality_subset( + G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" +): r"""Compute current-flow betweenness centrality for subsets of nodes. Current-flow betweenness centrality uses an electrical current @@ -97,16 +88,13 @@ def current_flow_betweenness_centrality_subset(G, sources, targets, M. E. J. Newman, Social Networks 27, 39-54 (2005). 
""" from networkx.utils import reverse_cuthill_mckee_ordering + try: import numpy as np - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires NumPy ', - 'http://scipy.org/') - try: - import scipy - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires SciPy ', - 'http://scipy.org/') + except ImportError as e: + raise ImportError( + "current_flow_betweenness_centrality requires NumPy ", "http://numpy.org/" + ) from e if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -116,8 +104,7 @@ def current_flow_betweenness_centrality_subset(G, sources, targets, mapping = dict(zip(ordering, range(n))) H = nx.relabel_nodes(G, mapping) betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H - for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i = mapping[ss] for tt in targets: @@ -130,14 +117,13 @@ def current_flow_betweenness_centrality_subset(G, sources, targets, nb = 2.0 for v in H: betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n) - return dict((ordering[k], v) for k, v in betweenness.items()) + return {ordering[k]: v for k, v in betweenness.items()} -@not_implemented_for('directed') -def edge_current_flow_betweenness_centrality_subset(G, sources, targets, - normalized=True, - weight=None, - dtype=float, solver='lu'): +@not_implemented_for("directed") +def edge_current_flow_betweenness_centrality_subset( + G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" +): r"""Compute current-flow betweenness centrality for edges using subsets of nodes. @@ -214,14 +200,10 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets, """ try: import numpy as np - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires NumPy ', - 'http://scipy.org/') - try: - import scipy - except ImportError: - raise ImportError('current_flow_betweenness_centrality requires SciPy ', - 'http://scipy.org/') + except ImportError as e: + raise ImportError( + "current_flow_betweenness_centrality requires NumPy " "http://numpy.org/" + ) from e if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -236,23 +218,11 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets, nb = (n - 1.0) * (n - 2.0) # normalization factor else: nb = 2.0 - for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i = mapping[ss] for tt in targets: j = mapping[tt] betweenness[e] += 0.5 * np.abs(row[i] - row[j]) betweenness[e] /= nb - return dict(((ordering[s], ordering[t]), v) - for (s, t), v in betweenness.items()) - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - import scipy - except: - raise SkipTest("NumPy not available") + return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py index 8dde7dd..518b9f5 100644 --- a/networkx/algorithms/centrality/current_flow_closeness.py +++ b/networkx/algorithms/centrality/current_flow_closeness.py @@ -1,23 +1,19 @@ -# Copyright (C) 2010-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. 
-# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """Current-flow closeness centrality measures.""" import networkx as nx from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering -from networkx.algorithms.centrality.flow_matrix import * +from networkx.algorithms.centrality.flow_matrix import ( + CGInverseLaplacian, + FullInverseLaplacian, + laplacian_sparse_matrix, + SuperLUInverseLaplacian, +) -__all__ = ['current_flow_closeness_centrality', 'information_centrality'] +__all__ = ["current_flow_closeness_centrality", "information_centrality"] -@not_implemented_for('directed') -def current_flow_closeness_centrality(G, weight=None, - dtype=float, solver='lu'): +@not_implemented_for("directed") +def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"): """Compute current-flow closeness centrality for nodes. Current-flow closeness centrality is variant of closeness @@ -70,13 +66,13 @@ def current_flow_closeness_centrality(G, weight=None, Social Networks 11(1):1-37, 1989. https://doi.org/10.1016/0378-8733(89)90016-6 """ - import numpy as np - import scipy if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - solvername = {"full": FullInverseLaplacian, - "lu": SuperLUInverseLaplacian, - "cg": CGInverseLaplacian} + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering @@ -84,8 +80,9 @@ def current_flow_closeness_centrality(G, weight=None, H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H n = H.number_of_nodes() - L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight, - dtype=dtype, format='csc') + L = laplacian_sparse_matrix( + H, nodelist=range(n), weight=weight, dtype=dtype, format="csc" + ) C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver for v in H: col = C2.get_row(v) @@ -94,16 +91,7 @@ def current_flow_closeness_centrality(G, weight=None, betweenness[w] += col[v] for v in H: betweenness[v] = 1.0 / (betweenness[v]) - return dict((ordering[k], float(v)) for k, v in betweenness.items()) + return {ordering[k]: float(v) for k, v in betweenness.items()} information_centrality = current_flow_closeness_centrality - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") diff --git a/networkx/algorithms/centrality/degree_alg.py b/networkx/algorithms/centrality/degree_alg.py index 47d52ba..a7e7b92 100644 --- a/networkx/algorithms/centrality/degree_alg.py +++ b/networkx/algorithms/centrality/degree_alg.py @@ -1,20 +1,7 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Pieter Swart (swart@lanl.gov) -# Sasha Gutfraind (ag362@cornell.edu) """Degree centrality measures.""" -import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['degree_centrality', - 'in_degree_centrality', - 'out_degree_centrality'] +__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"] def degree_centrality(G): @@ -46,13 +33,15 @@ def degree_centrality(G): be higher than n-1 and values of degree centrality greater than 1 are possible. 
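+
+    Examples
+    --------
+    A small sketch of the multigraph caveat above; with three parallel
+    edges between two nodes, the degree (3) exceeds n - 1 (here 1):
+
+    >>> G = nx.MultiGraph([(0, 1), (0, 1), (0, 1)])
+    >>> nx.degree_centrality(G)
+    {0: 3.0, 1: 3.0}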
""" - centrality = {} + if len(G) <= 1: + return {n: 1 for n in G} + s = 1.0 / (len(G) - 1.0) centrality = {n: d * s for n, d in G.degree()} return centrality -@not_implemented_for('undirected') +@not_implemented_for("undirected") def in_degree_centrality(G): """Compute the in-degree centrality for nodes. @@ -71,7 +60,7 @@ def in_degree_centrality(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. See Also @@ -87,13 +76,15 @@ def in_degree_centrality(G): be higher than n-1 and values of degree centrality greater than 1 are possible. """ - centrality = {} + if len(G) <= 1: + return {n: 1 for n in G} + s = 1.0 / (len(G) - 1.0) centrality = {n: d * s for n, d in G.in_degree()} return centrality -@not_implemented_for('undirected') +@not_implemented_for("undirected") def out_degree_centrality(G): """Compute the out-degree centrality for nodes. @@ -112,7 +103,7 @@ def out_degree_centrality(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. See Also @@ -128,7 +119,9 @@ def out_degree_centrality(G): be higher than n-1 and values of degree centrality greater than 1 are possible. """ - centrality = {} + if len(G) <= 1: + return {n: 1 for n in G} + s = 1.0 / (len(G) - 1.0) centrality = {n: d * s for n, d in G.out_degree()} return centrality diff --git a/networkx/algorithms/centrality/dispersion.py b/networkx/algorithms/centrality/dispersion.py index 568553a..03fcd3a 100644 --- a/networkx/algorithms/centrality/dispersion.py +++ b/networkx/algorithms/centrality/dispersion.py @@ -1,10 +1,6 @@ from itertools import combinations -__author__ = "\n".join(['Ben Edwards (bedwards@cs.unm.edu)', - 'Huston Hedinger (hstn@hdngr.com)', - 'Dan Schult (dschult@colgate.edu)']) - -__all__ = ['dispersion'] +__all__ = ["dispersion"] def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): @@ -51,8 +47,8 @@ def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): def _dispersion(G_u, u, v): """dispersion for all nodes 'v' in a ego network G_u of node 'u'""" u_nbrs = set(G_u[u]) - ST = set(n for n in G_u[v] if n in u_nbrs) - set_uv = set([u, v]) + ST = {n for n in G_u[v] if n in u_nbrs} + set_uv = {u, v} # all possible ties of connections that u and b share possib = combinations(ST, 2) total = 0 @@ -70,9 +66,9 @@ def _dispersion(G_u, u, v): if normalized: if embededness + c != 0: - norm_disp = ((total + b)**alpha) / (embededness + c) + norm_disp = ((total + b) ** alpha) / (embededness + c) else: - norm_disp = (total + b)**alpha + norm_disp = (total + b) ** alpha dispersion = norm_disp else: @@ -83,7 +79,7 @@ def _dispersion(G_u, u, v): if u is None: # v and u are not specified if v is None: - results = dict((n, {}) for n in G) + results = {n: {} for n in G} for u in G: for v in G[u]: results[u][v] = _dispersion(G, u, v) diff --git a/networkx/algorithms/centrality/eigenvector.py b/networkx/algorithms/centrality/eigenvector.py index 7c42760..611cf40 100644 --- a/networkx/algorithms/centrality/eigenvector.py +++ b/networkx/algorithms/centrality/eigenvector.py @@ -1,43 +1,29 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Authors: -# Aric Hagberg -# Pieter Swart -# Sasha Gutfraind """Functions for computing eigenvector centrality.""" -from __future__ import division from math import sqrt import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['eigenvector_centrality', 'eigenvector_centrality_numpy'] +__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"] -@not_implemented_for('multigraph') -def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, - weight=None): +@not_implemented_for("multigraph") +def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None): r"""Compute the eigenvector centrality for the graph `G`. Eigenvector centrality computes the centrality for a node based on the centrality of its neighbors. The eigenvector centrality for node $i$ is + the $i$-th element of the vector $x$ defined by the equation .. math:: Ax = \lambda x where $A$ is the adjacency matrix of the graph `G` with eigenvalue - $\lambda$. By virtue of the Perron–Frobenius theorem, there is - a unique and positive solution if $\lambda$ is the largest - eigenvalue associated with the eigenvector of the adjacency matrix - $A$ ([2]_). + $\lambda$. By virtue of the Perron–Frobenius theorem, there is a unique + solution $x$, all of whose entries are positive, if $\lambda$ is the + largest eigenvalue of the adjacency matrix $A$ ([2]_). Parameters ---------- @@ -66,7 +52,7 @@ def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, -------- >>> G = nx.path_graph(4) >>> centrality = nx.eigenvector_centrality(G) - >>> sorted((v, '{:0.2f}'.format(c)) for v, c in centrality.items()) + >>> sorted((v, f"{c:0.2f}") for v, c in centrality.items()) [(0, '0.37'), (1, '0.60'), (2, '0.60'), (3, '0.37')] Raises @@ -117,16 +103,18 @@ def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, """ if len(G) == 0: - raise nx.NetworkXPointlessConcept('cannot compute centrality for the' - ' null graph') + raise nx.NetworkXPointlessConcept( + "cannot compute centrality for the null graph" + ) # If no initial vector is provided, start with the all-ones vector. if nstart is None: nstart = {v: 1 for v in G} if all(v == 0 for v in nstart.values()): - raise nx.NetworkXError('initial vector cannot have all zero values') + raise nx.NetworkXError("initial vector cannot have all zero values") # Normalize the initial vector so that each entry is in [0, 1]. This is # guaranteed to never have a divide-by-zero error by the previous line. - x = {k: v / sum(nstart.values()) for k, v in nstart.items()} + nstart_sum = sum(nstart.values()) + x = {k: v / nstart_sum for k, v in nstart.items()} nnodes = G.number_of_nodes() # make up to max_iter iterations for i in range(max_iter): @@ -135,7 +123,8 @@ def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, # do the multiplication y^T = x^T A (left eigenvector) for n in x: for nbr in G[n]: - x[nbr] += xlast[n] * G[n][nbr].get(weight, 1) + w = G[n][nbr].get(weight, 1) if weight else 1 + x[nbr] += xlast[n] * w # Normalize the vector. The normalization denominator `norm` # should never be zero by the Perron--Frobenius # theorem. 
However, in case it is due to numerical error, we @@ -188,7 +177,7 @@ def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): -------- >>> G = nx.path_graph(4) >>> centrality = nx.eigenvector_centrality_numpy(G) - >>> print(['{} {:0.2f}'.format(node, centrality[node]) for node in centrality]) + >>> print([f"{node} {centrality[node]:0.2f}" for node in centrality]) ['0 0.37', '1 0.60', '2 0.60', '3 0.37'] See Also @@ -223,24 +212,18 @@ def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): Networks: An Introduction. Oxford University Press, USA, 2010, pp. 169. """ + import numpy as np import scipy as sp from scipy.sparse import linalg + if len(G) == 0: - raise nx.NetworkXPointlessConcept('cannot compute centrality for the' - ' null graph') - M = nx.to_scipy_sparse_matrix(G, nodelist=list(G), weight=weight, - dtype=float) - eigenvalue, eigenvector = linalg.eigs(M.T, k=1, which='LR', - maxiter=max_iter, tol=tol) + raise nx.NetworkXPointlessConcept( + "cannot compute centrality for the null graph" + ) + M = nx.to_scipy_sparse_matrix(G, nodelist=list(G), weight=weight, dtype=float) + eigenvalue, eigenvector = linalg.eigs( + M.T, k=1, which="LR", maxiter=max_iter, tol=tol + ) largest = eigenvector.flatten().real - norm = sp.sign(largest.sum()) * sp.linalg.norm(largest) + norm = np.sign(largest.sum()) * sp.linalg.norm(largest) return dict(zip(G, largest / norm)) - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/centrality/flow_matrix.py b/networkx/algorithms/centrality/flow_matrix.py index 8e24e38..dbf5e28 100644 --- a/networkx/algorithms/centrality/flow_matrix.py +++ b/networkx/algorithms/centrality/flow_matrix.py @@ -3,17 +3,19 @@ import networkx as nx -def flow_matrix_row(G, weight=None, dtype=float, solver='lu'): +def flow_matrix_row(G, weight=None, dtype=float, solver="lu"): # Generate a row of the current-flow matrix import numpy as np - from scipy import sparse - from scipy.sparse import linalg - solvername = {"full": FullInverseLaplacian, - "lu": SuperLUInverseLaplacian, - "cg": CGInverseLaplacian} + + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } n = G.number_of_nodes() - L = laplacian_sparse_matrix(G, nodelist=range(n), weight=weight, - dtype=dtype, format='csc') + L = laplacian_sparse_matrix( + G, nodelist=range(n), weight=weight, dtype=dtype, format="csc" + ) C = solvername[solver](L, dtype=dtype) # initialize solver w = C.w # w is the Laplacian matrix width # row-by-row flow matrix @@ -31,10 +33,11 @@ def flow_matrix_row(G, weight=None, dtype=float, solver='lu'): # Class to compute the inverse laplacian only for specified rows # Allows computation of the current-flow matrix without storing entire # inverse laplacian matrix -class InverseLaplacian(object): +class InverseLaplacian: def __init__(self, L, width=None, dtype=None): global np import numpy as np + (n, n) = L.shape self.dtype = dtype self.n = n @@ -50,10 +53,10 @@ def init_solver(self, L): pass def solve(self, r): - raise("Implement solver") + raise nx.NetworkXError("Implement solver") def solve_inverse(self, r): - raise("Implement solver") + raise nx.NetworkXError("Implement solver") def get_rows(self, r1, r2): for r in range(r1, r2 + 1): @@ -93,6 +96,7 @@ def solve_inverse(self, r): class SuperLUInverseLaplacian(InverseLaplacian): def init_solver(self, L): from scipy.sparse import linalg + 
         self.lusolve = linalg.factorized(self.L1.tocsc())

     def solve_inverse(self, r):
@@ -110,28 +114,30 @@ class CGInverseLaplacian(InverseLaplacian):
     def init_solver(self, L):
         global linalg
         from scipy.sparse import linalg
+
         ilu = linalg.spilu(self.L1.tocsc())
         n = self.n - 1
         self.M = linalg.LinearOperator(shape=(n, n), matvec=ilu.solve)

     def solve(self, rhs):
         s = np.zeros(rhs.shape, dtype=self.dtype)
-        s[1:] = linalg.cg(self.L1, rhs[1:], M=self.M)[0]
+        s[1:] = linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
         return s

     def solve_inverse(self, r):
         rhs = np.zeros(self.n, self.dtype)
         rhs[r] = 1
-        return linalg.cg(self.L1, rhs[1:], M=self.M)[0]
+        return linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]


 # graph laplacian, sparse version, will move to linalg/laplacianmatrix.py
-def laplacian_sparse_matrix(G, nodelist=None, weight=None, dtype=None,
-                            format='csr'):
+def laplacian_sparse_matrix(G, nodelist=None, weight=None, dtype=None, format="csr"):
     import numpy as np
     import scipy.sparse
-    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight,
-                                  dtype=dtype, format=format)
+
+    A = nx.to_scipy_sparse_matrix(
+        G, nodelist=nodelist, weight=weight, dtype=dtype, format=format
+    )
     (n, n) = A.shape
     data = np.asarray(A.sum(axis=1).T)
     D = scipy.sparse.spdiags(data, 0, n, n, format=format)
diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py
new file mode 100644
index 0000000..bd1d5f9
--- /dev/null
+++ b/networkx/algorithms/centrality/group.py
@@ -0,0 +1,366 @@
+"""Group centrality measures."""
+from itertools import combinations
+
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+
+__all__ = [
+    "group_betweenness_centrality",
+    "group_closeness_centrality",
+    "group_degree_centrality",
+    "group_in_degree_centrality",
+    "group_out_degree_centrality",
+]
+
+
+def group_betweenness_centrality(G, C, normalized=True, weight=None):
+    r"""Compute the group betweenness centrality for a group of nodes.
+
+    Group betweenness centrality of a group of nodes $C$ is the sum of the
+    fraction of all-pairs shortest paths that pass through any vertex in $C$
+
+    .. math::
+
+       c_B(C) =\sum_{s,t \in V-C; s<t} \frac{\sigma(s, t|C)}{\sigma(s, t)}
diff --git a/networkx/algorithms/centrality/harmonic.py b/networkx/algorithms/centrality/harmonic.py
--- a/networkx/algorithms/centrality/harmonic.py
+++ b/networkx/algorithms/centrality/harmonic.py
-#
 """Functions for computing the harmonic centrality of a graph."""
-from __future__ import division
 from functools import partial

 import networkx as nx

-__all__ = ['harmonic_centrality']
+__all__ = ["harmonic_centrality"]


 def harmonic_centrality(G, nbunch=None, distance=None):
@@ -65,5 +57,7 @@
     if G.is_directed():
         G = G.reverse()
     spl = partial(nx.shortest_path_length, G, weight=distance)
-    return {u: sum(1 / d if d > 0 else 0 for v, d in spl(source=u).items())
-            for u in G.nbunch_iter(nbunch)}
+    return {
+        u: sum(1 / d if d > 0 else 0 for v, d in spl(source=u).items())
+        for u in G.nbunch_iter(nbunch)
+    }
diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py
index 1cc58c7..8697750 100644
--- a/networkx/algorithms/centrality/katz.py
+++ b/networkx/algorithms/centrality/katz.py
@@ -1,27 +1,23 @@
-# coding=utf8
-# Copyright (C) 2004-2018 by
-#    Aric Hagberg
-#    Dan Schult
-#    Pieter Swart
-# All rights reserved.
-# BSD license.
-# -# Authors: Aric Hagberg (aric.hagberg@gmail.com) -# Pieter Swart (swart@lanl.gov) -# Sasha Gutfraind (ag362@cornell.edu) -# Vincent Gauthier (vgauthier@luxbulb.org) """Katz centrality.""" from math import sqrt import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['katz_centrality', 'katz_centrality_numpy'] +__all__ = ["katz_centrality", "katz_centrality_numpy"] -@not_implemented_for('multigraph') -def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, - nstart=None, normalized=True, weight=None): +@not_implemented_for("multigraph") +def katz_centrality( + G, + alpha=0.1, + beta=1.0, + max_iter=1000, + tol=1.0e-6, + nstart=None, + normalized=True, + weight=None, +): r"""Compute the Katz centrality for the nodes of the graph G. Katz centrality computes the centrality for a node based on the centrality @@ -101,9 +97,9 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, >>> import math >>> G = nx.path_graph(4) >>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix - >>> centrality = nx.katz_centrality(G, 1/phi - 0.01) + >>> centrality = nx.katz_centrality(G, 1 / phi - 0.01) >>> for n, c in sorted(centrality.items()): - ... print("%d %0.2f" % (n, c)) + ... print(f"{n} {c:.2f}") 0 0.37 1 0.60 2 0.60 @@ -154,17 +150,18 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, if nstart is None: # choose starting vector with entries of 0 - x = dict([(n, 0) for n in G]) + x = {n: 0 for n in G} else: x = nstart try: b = dict.fromkeys(G, float(beta)) - except (TypeError, ValueError, AttributeError): + except (TypeError, ValueError, AttributeError) as e: b = beta if set(beta) != set(G): - raise nx.NetworkXError('beta dictionary ' - 'must have a value for every node') + raise nx.NetworkXError( + "beta dictionary " "must have a value for every node" + ) from e # make up to max_iter iterations for i in range(max_iter): @@ -183,7 +180,7 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, if normalized: # normalize vector try: - s = 1.0 / sqrt(sum(v**2 for v in x.values())) + s = 1.0 / sqrt(sum(v ** 2 for v in x.values())) # this should never be zero? except ZeroDivisionError: s = 1.0 @@ -195,9 +192,8 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, raise nx.PowerIterationFailedConvergence(max_iter) -@not_implemented_for('multigraph') -def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, - weight=None): +@not_implemented_for("multigraph") +def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): r"""Compute the Katz centrality for the graph G. Katz centrality computes the centrality for a node based on the centrality @@ -263,9 +259,9 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, >>> import math >>> G = nx.path_graph(4) >>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix - >>> centrality = nx.katz_centrality_numpy(G, 1/phi) + >>> centrality = nx.katz_centrality_numpy(G, 1 / phi) >>> for n, c in sorted(centrality.items()): - ... print("%d %0.2f" % (n, c)) + ... 
print(f"{n} {c:.2f}") 0 0.37 1 0.60 2 0.60 @@ -308,22 +304,23 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, """ try: import numpy as np - except ImportError: - raise ImportError('Requires NumPy: http://scipy.org/') + except ImportError as e: + raise ImportError("Requires NumPy: http://numpy.org/") from e if len(G) == 0: return {} try: nodelist = beta.keys() if set(nodelist) != set(G): - raise nx.NetworkXError('beta dictionary ' - 'must have a value for every node') + raise nx.NetworkXError( + "beta dictionary " "must have a value for every node" + ) b = np.array(list(beta.values()), dtype=float) except AttributeError: nodelist = list(G) try: b = np.ones((len(nodelist), 1)) * float(beta) - except (TypeError, ValueError, AttributeError): - raise nx.NetworkXError('beta must be a number') + except (TypeError, ValueError, AttributeError) as e: + raise nx.NetworkXError("beta must be a number") from e A = nx.adj_matrix(G, nodelist=nodelist, weight=weight).todense().T n = A.shape[0] @@ -334,13 +331,3 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, norm = 1.0 centrality = dict(zip(nodelist, map(float, centrality / norm))) return centrality - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/centrality/load.py b/networkx/algorithms/centrality/load.py index 5db3db4..6c50c68 100644 --- a/networkx/algorithms/centrality/load.py +++ b/networkx/algorithms/centrality/load.py @@ -1,25 +1,12 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Pieter Swart (swart@lanl.gov) -# Sasha Gutfraind (ag362@cornell.edu) """Load centrality.""" -from __future__ import division from operator import itemgetter import networkx as nx -__all__ = ['load_centrality', 'edge_load_centrality'] +__all__ = ["load_centrality", "edge_load_centrality"] -def newman_betweenness_centrality(G, v=None, cutoff=None, - normalized=True, weight=None): +def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None): """Compute load centrality for nodes. The load centrality of a node is the fraction of all shortest @@ -67,7 +54,7 @@ def newman_betweenness_centrality(G, v=None, cutoff=None, Physical Review Letters 87(27):1–4, 2001. http://phya.snu.ac.kr/~dkim/PRL87278701.pdf """ - if v is not None: # only one node + if v is not None: # only one node betweenness = 0.0 for source in G: ubetween = _node_betweenness(G, source, cutoff, False, weight) @@ -94,8 +81,7 @@ def newman_betweenness_centrality(G, v=None, cutoff=None, return betweenness # all nodes -def _node_betweenness(G, source, cutoff=False, normalized=True, - weight=None): +def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None): """Node betweenness_centrality helper: See betweenness_centrality for what you probably want. 
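A minimal usage sketch of the measure this module implements, side by side with standard betweenness (illustrative only, not part of the patched file; in NetworkX, load_centrality is the public alias for newman_betweenness_centrality):

import networkx as nx

# Load centrality distributes flow evenly across each node's predecessors,
# while betweenness counts exact shortest-path fractions; the two measures
# agree on simple graphs such as paths but can diverge in general.
G = nx.path_graph(5)
print(nx.load_centrality(G, normalized=False))
print(nx.betweenness_centrality(G, normalized=False))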
@@ -112,11 +98,9 @@ def _node_betweenness(G, source, cutoff=False, normalized=True,
     """
     # get the predecessor and path length data
     if weight is None:
-        (pred, length) = nx.predecessor(G, source, cutoff=cutoff,
-                                        return_seen=True)
+        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
     else:
-        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source,
-                                                              cutoff, weight)
+        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)

     # order the nodes by path length
     onodes = [(l, vert) for (vert, l) in length.items()]
@@ -130,9 +114,9 @@ def _node_betweenness(G, source, cutoff=False, normalized=True,
         v = onodes.pop()
         if v in pred:
             num_paths = len(pred[v])  # Discount betweenness if more than
-            for x in pred[v]:         # one shortest path.
+            for x in pred[v]:  # one shortest path.
                 if x == source:  # stop if hit source because all remaining v
-                    break        # also have pred[v]==[source]
+                    break  # also have pred[v]==[source]
                 between[x] += between[v] / float(num_paths)
     # remove source
     for v in between:
@@ -198,7 +182,7 @@ def _edge_betweenness(G, source, nodes=None, cutoff=False):
         between[(u, v)] = 1.0
         between[(v, u)] = 1.0

-    while onodes:           # work through all paths
+    while onodes:  # work through all paths
         v = onodes.pop()
         if v in pred:
             # Discount betweenness if more than one shortest path.
diff --git a/networkx/algorithms/centrality/percolation.py b/networkx/algorithms/centrality/percolation.py
new file mode 100644
index 0000000..5867b54
--- /dev/null
+++ b/networkx/algorithms/centrality/percolation.py
@@ -0,0 +1,123 @@
+"""Percolation centrality measures."""
+
+import networkx as nx
+
+from networkx.algorithms.centrality.betweenness import (
+    _single_source_dijkstra_path_basic as dijkstra,
+)
+from networkx.algorithms.centrality.betweenness import (
+    _single_source_shortest_path_basic as shortest_path,
+)
+
+__all__ = ["percolation_centrality"]
+
+
+def percolation_centrality(G, attribute="percolation", states=None, weight=None):
+    r"""Compute the percolation centrality for nodes.
+
+    Percolation centrality of a node $v$, at a given time, is defined
+    as the proportion of ‘percolated paths’ that go through that node.
+
+    This measure quantifies the relative impact of nodes based on their
+    topological connectivity, as well as their percolation states.
+
+    Percolation states of nodes are used to depict network percolation
+    scenarios (such as during infection transmission in a social network
+    of individuals, spreading of computer viruses on computer networks, or
+    transmission of disease over a network of towns) over time. In this
+    measure, the percolation state is usually expressed as a decimal
+    between 0.0 and 1.0.
+
+    When all nodes are in the same percolated state this measure is
+    equivalent to betweenness centrality.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.
+
+    attribute : None or string, optional (default='percolation')
+      Name of the node attribute to use for percolation state, used
+      if `states` is None.
+
+    states : None or dict, optional (default=None)
+      Specify percolation states for the nodes, nodes as keys, states
+      as values.
+
+    weight : None or string, optional (default=None)
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    nodes : dictionary
+        Dictionary of nodes with percolation centrality as the value.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
+    Liaquat Hossain [1]_.
+    Pair dependencies are calculated and accumulated using [2]_.
+
+    For weighted graphs the edge weights must be greater than zero.
+    Zero edge weights can produce an infinite number of equal length
+    paths between pairs of nodes.
+
+    References
+    ----------
+    .. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain
+       Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
+       during Percolation in Networks
+       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
+    .. [2] Ulrik Brandes:
+       A Faster Algorithm for Betweenness Centrality.
+       Journal of Mathematical Sociology 25(2):163-177, 2001.
+       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
+    """
+    percolation = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
+
+    nodes = G
+
+    if states is None:
+        states = nx.get_node_attributes(nodes, attribute)
+
+    # sum of all percolation states
+    p_sigma_x_t = 0.0
+    for v in states.values():
+        p_sigma_x_t += v
+
+    for s in nodes:
+        # single source shortest paths
+        if weight is None:  # use BFS
+            S, P, sigma = shortest_path(G, s)
+        else:  # use Dijkstra's algorithm
+            S, P, sigma = dijkstra(G, s, weight)
+        # accumulation
+        percolation = _accumulate_percolation(
+            percolation, G, S, P, sigma, s, states, p_sigma_x_t
+        )
+
+    n = len(G)
+
+    for v in percolation:
+        percolation[v] *= 1 / (n - 2)
+
+    return percolation
+
+
+def _accumulate_percolation(percolation, G, S, P, sigma, s, states, p_sigma_x_t):
+    delta = dict.fromkeys(S, 0)
+    while S:
+        w = S.pop()
+        coeff = (1 + delta[w]) / sigma[w]
+        for v in P[w]:
+            delta[v] += sigma[v] * coeff
+        if w != s:
+            # percolation weight
+            pw_s_w = states[s] / (p_sigma_x_t - states[w])
+            percolation[w] += delta[w] * pw_s_w
+    return percolation
diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py
index 5ba5ec1..a612918 100644
--- a/networkx/algorithms/centrality/reaching.py
+++ b/networkx/algorithms/centrality/reaching.py
@@ -1,18 +1,10 @@
-# -*- encoding: utf-8 -*-
-# Copyright (C) 2004-2018 by
-# Aric Hagberg
-# Dan Schult
-# Pieter Swart
-# All rights reserved.
-# BSD license.
 """Functions for computing reaching centrality of a node or a graph."""
-from __future__ import division

 import networkx as nx
 from networkx.utils import pairwise

-__all__ = ['global_reaching_centrality', 'local_reaching_centrality']
+__all__ = ["global_reaching_centrality", "local_reaching_centrality"]


 def _average_weight(G, path, weight=None):
@@ -72,7 +64,6 @@ def global_reaching_centrality(G, weight=None, normalized=True):

     Examples
     --------
-    >>> import networkx as nx
     >>> G = nx.DiGraph()
     >>> G.add_edge(1, 2)
     >>> G.add_edge(1, 3)
@@ -94,10 +85,10 @@ def global_reaching_centrality(G, weight=None, normalized=True):
     https://doi.org/10.1371/journal.pone.0033799
     """
     if nx.is_negatively_weighted(G, weight=weight):
-        raise nx.NetworkXError('edge weights must be positive')
+        raise nx.NetworkXError("edge weights must be positive")
     total_weight = G.size(weight=weight)
     if total_weight <= 0:
-        raise nx.NetworkXError('Size of G must be positive')
+        raise nx.NetworkXError("Size of G must be positive")

     # If provided, weights must be interpreted as connection strength
    # (so higher weights are more likely to be chosen).
However, the @@ -109,16 +100,20 @@ def global_reaching_centrality(G, weight=None, normalized=True): # If weight is None, we leave it as-is so that the shortest path # algorithm can use a faster, unweighted algorithm. if weight is not None: - def as_distance(u, v, d): return total_weight / d.get(weight, 1) + + def as_distance(u, v, d): + return total_weight / d.get(weight, 1) + shortest_paths = nx.shortest_path(G, weight=as_distance) else: shortest_paths = nx.shortest_path(G) centrality = local_reaching_centrality # TODO This can be trivially parallelized. - lrc = [centrality(G, node, paths=paths, weight=weight, - normalized=normalized) - for node, paths in shortest_paths.items()] + lrc = [ + centrality(G, node, paths=paths, weight=weight, normalized=normalized) + for node, paths in shortest_paths.items() + ] max_lrc = max(lrc) return sum(max_lrc - c for c in lrc) / (len(G) - 1) @@ -163,7 +158,6 @@ def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True): Examples -------- - >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_edges_from([(1, 2), (1, 3)]) >>> nx.local_reaching_centrality(G, 3) @@ -185,13 +179,15 @@ def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True): """ if paths is None: if nx.is_negatively_weighted(G, weight=weight): - raise nx.NetworkXError('edge weights must be positive') + raise nx.NetworkXError("edge weights must be positive") total_weight = G.size(weight=weight) if total_weight <= 0: - raise nx.NetworkXError('Size of G must be positive') + raise nx.NetworkXError("Size of G must be positive") if weight is not None: # Interpret weights as lengths. - def as_distance(u, v, d): return total_weight / d.get(weight, 1) + def as_distance(u, v, d): + return total_weight / d.get(weight, 1) + paths = nx.shortest_path(G, source=v, weight=as_distance) else: paths = nx.shortest_path(G, source=v) diff --git a/networkx/algorithms/centrality/second_order.py b/networkx/algorithms/centrality/second_order.py new file mode 100644 index 0000000..cf86cb0 --- /dev/null +++ b/networkx/algorithms/centrality/second_order.py @@ -0,0 +1,138 @@ +"""Copyright (c) 2015 – Thomson Licensing, SAS + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the +disclaimer below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of Thomson Licensing, or Technicolor, nor the names +of its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE +GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT +HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+# Authors: Erwan Le Merrer (erwan.lemerrer@technicolor.com)
+""" Second order centrality measure."""
+
+__all__ = ["second_order_centrality"]
+
+
+@not_implemented_for("directed")
+def second_order_centrality(G):
+    """Compute the second order centrality for nodes of G.
+
+    The second order centrality of a given node is the standard deviation of
+    the return times to that node of a perpetual random walk on G.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX connected and undirected graph.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary keyed by node with second order centrality as the value.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(10)
+    >>> soc = nx.second_order_centrality(G)
+    >>> print(sorted(soc.items(), key=lambda x: x[1])[0][0])  # pick first id
+    0
+
+    Raises
+    ------
+    NetworkXException
+       If the graph G is empty, not connected, or has negative weights.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    Lower values of second order centrality indicate higher centrality.
+
+    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.
+
+    This code implements the analytical version of the algorithm, i.e.,
+    there is no simulation of a random walk process involved. The random walk
+    is here unbiased (corresponding to eq 6 of the paper [1]_), thus the
+    centrality values are the standard deviations for random walk return times
+    on the transformed input graph G (equal in-degree at each node by adding
+    self-loops).
+
+    Complexity of this implementation, made to run locally on a single machine,
+    is O(n^3), with n the size of G, which makes it viable only for small
+    graphs.
+
+    References
+    ----------
+    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
+       "Second order centrality: Distributed assessment of nodes criticity in
+       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
+    """
+
+    try:
+        import numpy as np
+    except ImportError as e:
+        raise ImportError("Requires NumPy: http://numpy.org/") from e
+
+    n = len(G)
+
+    if n == 0:
+        raise nx.NetworkXException("Empty graph.")
+    if not nx.is_connected(G):
+        raise nx.NetworkXException("Non connected graph.")
+    if any(d.get("weight", 0) < 0 for u, v, d in G.edges(data=True)):
+        raise nx.NetworkXException("Graph has negative edge weights.")
+
+    # balancing G for Metropolis-Hastings random walks
+    G = nx.DiGraph(G)
+    in_deg = dict(G.in_degree(weight="weight"))
+    d_max = max(in_deg.values())
+    for i, deg in in_deg.items():
+        if deg < d_max:
+            G.add_edge(i, i, weight=d_max - deg)
+
+    P = nx.to_numpy_array(G)
+    P /= P.sum(axis=1)[:, np.newaxis]  # to transition probability matrix
+
+    def _Qj(P, j):
+        P = P.copy()
+        P[:, j] = 0
+        return P
+
+    M = np.empty([n, n])
+
+    for i in range(n):
+        M[:, i] = np.linalg.solve(
+            np.identity(n) - _Qj(P, i), np.ones([n, 1])[:, 0]
+        )  # eq 3
+
+    return dict(
+        zip(G.nodes, [np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1)) for i in range(n)])
+    )  # eq 6
diff --git a/networkx/algorithms/centrality/subgraph_alg.py b/networkx/algorithms/centrality/subgraph_alg.py
index 34697db..2c50f17 100644
--- a/networkx/algorithms/centrality/subgraph_alg.py
+++ b/networkx/algorithms/centrality/subgraph_alg.py
@@ -1,28 +1,21 @@
-# -*- coding: utf-8 -*-
 """
 Subgraph centrality and communicability betweenness.
 """
-# Copyright (C) 2011 by
-# Aric Hagberg
-# Dan Schult
-# Pieter Swart
-# All rights reserved.
-# BSD license.
 import networkx as nx
-from networkx.utils import *
-__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)',
-                        'Franck Kalala (franckkalala@yahoo.fr'])
-__all__ = ['subgraph_centrality_exp',
-           'subgraph_centrality',
-           'communicability_betweenness_centrality',
-           'estrada_index'
-           ]
-
-
-@not_implemented_for('directed')
-@not_implemented_for('multigraph')
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "subgraph_centrality_exp",
+    "subgraph_centrality",
+    "communicability_betweenness_centrality",
+    "estrada_index",
+]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
 def subgraph_centrality_exp(G):
-    r"""Return the subgraph centrality for each node of G.
+    r"""Returns the subgraph centrality for each node of G.

     Subgraph centrality of a node `n` is the sum of weighted closed
     walks of all lengths starting and ending at node `n`. The weights
@@ -69,27 +62,43 @@
     Examples
     --------
     (Example from [1]_)
-    >>> G = nx.Graph([(1,2),(1,5),(1,8),(2,3),(2,8),(3,4),(3,6),(4,5),(4,7),(5,6),(6,7),(7,8)])
+    >>> G = nx.Graph(
+    ...     [
+    ...         (1, 2),
+    ...         (1, 5),
+    ...         (1, 8),
+    ...         (2, 3),
+    ...         (2, 8),
+    ...         (3, 4),
+    ...         (3, 6),
+    ...         (4, 5),
+    ...         (4, 7),
+    ...         (5, 6),
+    ...         (6, 7),
+    ...         (7, 8),
+    ...     ]
+    ... )
     >>> sc = nx.subgraph_centrality_exp(G)
-    >>> print(['%s %0.2f'%(node,sc[node]) for node in sorted(sc)])
+    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
     ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
     """
     # alternative implementation that calculates the matrix exponential
     import scipy.linalg
+
     nodelist = list(G)  # ordering of nodes in matrix
-    A = nx.to_numpy_matrix(G, nodelist)
+    A = nx.to_numpy_array(G, nodelist)
     # convert to 0-1 matrix
     A[A != 0.0] = 1
-    expA = scipy.linalg.expm(A.A)
+    expA = scipy.linalg.expm(A)
     # convert diagonal to dictionary keyed by node
     sc = dict(zip(nodelist, map(float, expA.diagonal())))
     return sc


-@not_implemented_for('directed')
-@not_implemented_for('multigraph')
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
 def subgraph_centrality(G):
-    r"""Return subgraph centrality for each node in G.
+    r"""Returns subgraph centrality for each node in G.

     Subgraph centrality of a node `n` is the sum of weighted closed
     walks of all lengths starting and ending at node `n`. The weights
@@ -133,9 +142,24 @@
     Examples
     --------
     (Example from [1]_)
-    >>> G = nx.Graph([(1,2),(1,5),(1,8),(2,3),(2,8),(3,4),(3,6),(4,5),(4,7),(5,6),(6,7),(7,8)])
+    >>> G = nx.Graph(
+    ...     [
+    ...         (1, 2),
+    ...         (1, 5),
+    ...         (1, 8),
+    ...         (2, 3),
+    ...         (2, 8),
+    ...         (3, 4),
+    ...         (3, 6),
+    ...         (4, 5),
+    ...         (4, 7),
+    ...         (5, 6),
+    ...         (6, 7),
+    ...         (7, 8),
+    ...     ]
+    ... )
     >>> sc = nx.subgraph_centrality(G)
-    >>> print(['%s %0.2f'%(node,sc[node]) for node in sorted(sc)])
+    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
     ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']

     References
@@ -146,25 +170,26 @@
     https://arxiv.org/abs/cond-mat/0504730
     """
-    import numpy
+    import numpy as np
     import numpy.linalg
+
     nodelist = list(G)  # ordering of nodes in matrix
-    A = nx.to_numpy_matrix(G, nodelist)
+    A = nx.to_numpy_array(G, nodelist)
     # convert to 0-1 matrix
-    A[A != 0.0] = 1
-    w, v = numpy.linalg.eigh(A.A)
-    vsquare = numpy.array(v)**2
-    expw = numpy.exp(w)
-    xg = numpy.dot(vsquare, expw)
+    A[np.nonzero(A)] = 1
+    w, v = numpy.linalg.eigh(A)
+    vsquare = np.array(v) ** 2
+    expw = np.exp(w)
+    xg = np.dot(vsquare, expw)
     # convert vector to dictionary keyed by node
     sc = dict(zip(nodelist, map(float, xg)))
     return sc


-@not_implemented_for('directed')
-@not_implemented_for('multigraph')
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
 def communicability_betweenness_centrality(G, normalized=True):
-    r"""Return subgraph communicability for all pairs of nodes in G.
+    r"""Returns subgraph communicability for all pairs of nodes in G.
Communicability betweenness measure makes use of the number of walks
connecting every pair of nodes as the basis of a betweenness centrality
@@ -223,17 +248,20 @@ def communicability_betweenness_centrality(G, normalized=True):

     Examples
     --------
-    >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)])
+    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
     >>> cbc = nx.communicability_betweenness_centrality(G)
+    >>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
+    ['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
     """
-    import scipy
+    import numpy as np
     import scipy.linalg
+
     nodelist = list(G)  # ordering of nodes in matrix
     n = len(nodelist)
-    A = nx.to_numpy_matrix(G, nodelist)
+    A = nx.to_numpy_array(G, nodelist)
     # convert to 0-1 matrix
-    A[A != 0.0] = 1
-    expA = scipy.linalg.expm(A.A)
+    A[np.nonzero(A)] = 1
+    expA = scipy.linalg.expm(A)
     mapping = dict(zip(nodelist, range(n)))
     cbc = {}
     for v in G:
@@ -243,11 +271,11 @@ def communicability_betweenness_centrality(G, normalized=True):
         col = A[:, i].copy()
         A[i, :] = 0
         A[:, i] = 0
-        B = (expA - scipy.linalg.expm(A.A)) / expA
+        B = (expA - scipy.linalg.expm(A)) / expA
         # sum with row/col of node v and diag set to zero
         B[i, :] = 0
         B[:, i] = 0
-        B -= scipy.diag(scipy.diag(B))
+        B -= np.diag(np.diag(B))
         cbc[v] = float(B.sum())
         # put row and col back
         A[i, :] = row
@@ -264,7 +292,7 @@ def _rescale(cbc, normalized):
     if order <= 2:
         scale = None
     else:
-        scale = 1.0 / ((order - 1.0)**2 - (order - 1.0))
+        scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
     if scale is not None:
         for v in cbc:
             cbc[v] *= scale
@@ -272,7 +300,7 @@ def estrada_index(G):
-    r"""Return the Estrada index of a the graph G.
+    r"""Returns the Estrada index of the graph G.

     The Estrada Index is a topological index of folding or 3D "compactness" ([1]_).
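A quick cross-check of the identity the implementation relies on, EE(G) = sum_j exp(lambda_j) over the adjacency spectrum (an illustrative sketch, not part of the patch; the graph matches the doctest that follows):

import networkx as nx
import numpy as np

# estrada_index sums all subgraph centralities, which equals the trace of
# expm(A) and hence the sum of e**lambda_j over the adjacency eigenvalues.
G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
eigenvalues = np.linalg.eigvalsh(nx.to_numpy_array(G))
print(np.exp(eigenvalues).sum())  # ~20.55, matching the doctest below
print(nx.estrada_index(G))  # same value by construction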
@@ -311,21 +339,9 @@ def estrada_index(G): Examples -------- - >>> G=nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) - >>> ei=nx.estrada_index(G) + >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)]) + >>> ei = nx.estrada_index(G) + >>> print(f"{ei:0.5}") + 20.55 """ return sum(subgraph_centrality(G).values()) - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/centrality/tests/__init__.py b/networkx/algorithms/centrality/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py index b2d35cd..f827427 100644 --- a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py +++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py @@ -1,6 +1,5 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx +from networkx.testing import almost_equal def weighted_G(): @@ -15,130 +14,235 @@ def weighted_G(): G.add_edge(3, 4, weight=2) G.add_edge(3, 5, weight=1) G.add_edge(4, 5, weight=4) - return G -class TestBetweennessCentrality(object): - +class TestBetweennessCentrality: def test_K5(self): """Betweenness centrality: K5""" G = nx.complete_graph(5) - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_K5_endpoints(self): """Betweenness centrality: K5 endpoints""" G = nx.complete_graph(5) - b = nx.betweenness_centrality(G, - weight=None, - normalized=False, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) b_answer = {0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) + # normalized = True case + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) + b_answer = {0: 0.4, 1: 0.4, 2: 0.4, 3: 0.4, 4: 0.4} + for n in sorted(G): + assert almost_equal(b[n], b_answer[n]) def test_P3_normalized(self): """Betweenness centrality: P3 normalized""" G = nx.path_graph(3) - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b = nx.betweenness_centrality(G, weight=None, normalized=True) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P3(self): """Betweenness centrality: P3""" G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) + for n in sorted(G): + assert almost_equal(b[n], b_answer[n]) + + def test_sample_from_P3(self): + G = nx.path_graph(3) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + b = nx.betweenness_centrality(G, k=3, weight=None, normalized=False, seed=1) + for n in sorted(G): + assert almost_equal(b[n], b_answer[n]) + b = nx.betweenness_centrality(G, k=2, weight=None, normalized=False, seed=1) + # python versions give different results with same seed + b_approx1 = {0: 0.0, 1: 1.5, 2: 0.0} + b_approx2 = {0: 0.0, 
1: 0.75, 2: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert b[n] in (b_approx1[n], b_approx2[n]) def test_P3_endpoints(self): """Betweenness centrality: P3 endpoints""" G = nx.path_graph(3) b_answer = {0: 2.0, 1: 3.0, 2: 2.0} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) + # normalized = True case + b_answer = {0: 2 / 3, 1: 1.0, 2: 2 / 3} + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) + for n in sorted(G): + assert almost_equal(b[n], b_answer[n]) def test_krackhardt_kite_graph(self): """Betweenness centrality: Krackhardt kite graph""" G = nx.krackhardt_kite_graph() - b_answer = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000, - 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000} + b_answer = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } for b in b_answer: - b_answer[b] /= 2.0 - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) - + b_answer[b] /= 2 + b = nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_krackhardt_kite_graph_normalized(self): """Betweenness centrality: Krackhardt kite graph normalized""" G = nx.krackhardt_kite_graph() - b_answer = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000, - 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000} - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) - + b_answer = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + b = nx.betweenness_centrality(G, weight=None, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_florentine_families_graph(self): """Betweenness centrality: Florentine families graph""" G = nx.florentine_families_graph() - b_answer =\ - {'Acciaiuoli': 0.000, - 'Albizzi': 0.212, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.255, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.114, - 'Salviati': 0.143, - 'Strozzi': 0.103, - 'Tornabuoni': 0.092} - - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + b = nx.betweenness_centrality(G, weight=None, normalized=True) + for n in sorted(G): + assert almost_equal(b[n], b_answer[n], places=3) + + def test_les_miserables_graph(self): + """Betweenness centrality: Les Miserables graph""" + G = nx.les_miserables_graph() + b_answer = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.570, + "Labarre": 0.000, + "Marguerite": 0.000, + "MmeDeR": 0.000, + 
"Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.041, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.130, + "MmeThenardier": 0.029, + "Thenardier": 0.075, + "Cosette": 0.024, + "Javert": 0.054, + "Fauchelevent": 0.026, + "Bamatabois": 0.008, + "Perpetue": 0.000, + "Simplice": 0.009, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.007, + "Boulatruelle": 0.000, + "Eponine": 0.011, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.165, + "Gillenormand": 0.020, + "Magnon": 0.000, + "MlleGillenormand": 0.048, + "MmePontmercy": 0.000, + "MlleVaubois": 0.000, + "LtGillenormand": 0.000, + "Marius": 0.132, + "BaronessT": 0.000, + "Mabeuf": 0.028, + "Enjolras": 0.043, + "Combeferre": 0.001, + "Prouvaire": 0.000, + "Feuilly": 0.001, + "Courfeyrac": 0.005, + "Bahorel": 0.002, + "Bossuet": 0.031, + "Joly": 0.002, + "Grantaire": 0.000, + "MotherPlutarch": 0.000, + "Gueulemer": 0.005, + "Babet": 0.005, + "Claquesous": 0.005, + "Montparnasse": 0.004, + "Toussaint": 0.000, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.000, + "MmeHucheloup": 0.000, + } + + b = nx.betweenness_centrality(G, weight=None, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_ladder_graph(self): """Betweenness centrality: Ladder graph""" G = nx.Graph() # ladder_graph(3) - G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) - b_answer = {0: 1.667, 1: 1.667, 2: 6.667, - 3: 6.667, 4: 1.667, 5: 1.667} + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667} for b in b_answer: - b_answer[b] /= 2.0 - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b_answer[b] /= 2 + b = nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_disconnected_path(self): """Betweenness centrality: disconnected path""" @@ -146,11 +250,9 @@ def test_disconnected_path(self): nx.add_path(G, [0, 1, 2]) nx.add_path(G, [3, 4, 5, 6]) b_answer = {0: 0, 1: 1, 2: 0, 3: 0, 4: 2, 5: 2, 6: 0} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_disconnected_path_endpoints(self): """Betweenness centrality: disconnected path endpoints""" @@ -158,184 +260,276 @@ def test_disconnected_path_endpoints(self): nx.add_path(G, [0, 1, 2]) nx.add_path(G, [3, 4, 5, 6]) b_answer = {0: 2, 1: 3, 2: 2, 3: 3, 4: 5, 5: 5, 6: 3} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) + for n in sorted(G): + assert almost_equal(b[n], b_answer[n]) + # normalized = True case + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n] / 21) def test_directed_path(self): """Betweenness 
centrality: directed path""" G = nx.DiGraph() nx.add_path(G, [0, 1, 2]) - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_directed_path_normalized(self): """Betweenness centrality: directed path normalized""" G = nx.DiGraph() nx.add_path(G, [0, 1, 2]) - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b = nx.betweenness_centrality(G, weight=None, normalized=True) b_answer = {0: 0.0, 1: 0.5, 2: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) -class TestWeightedBetweennessCentrality(object): - +class TestWeightedBetweennessCentrality: def test_K5(self): """Weighted betweenness centrality: K5""" G = nx.complete_graph(5) - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P3_normalized(self): """Weighted betweenness centrality: P3 normalized""" G = nx.path_graph(3) - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b = nx.betweenness_centrality(G, weight="weight", normalized=True) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P3(self): """Weighted betweenness centrality: P3""" G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_krackhardt_kite_graph(self): """Weighted betweenness centrality: Krackhardt kite graph""" G = nx.krackhardt_kite_graph() - b_answer = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000, - 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000} + b_answer = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } for b in b_answer: - b_answer[b] /= 2.0 + b_answer[b] /= 2 - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_krackhardt_kite_graph_normalized(self): - """Weighted betweenness centrality: + """Weighted betweenness centrality: Krackhardt kite graph normalized """ G = nx.krackhardt_kite_graph() - b_answer = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000, - 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000} - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b_answer = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + b = nx.betweenness_centrality(G, weight="weight", normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_florentine_families_graph(self): - """Weighted betweenness centrality: + """Weighted betweenness centrality: Florentine families 
graph""" G = nx.florentine_families_graph() - b_answer =\ - {'Acciaiuoli': 0.000, - 'Albizzi': 0.212, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.255, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.114, - 'Salviati': 0.143, - 'Strozzi': 0.103, - 'Tornabuoni': 0.092} - - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + b = nx.betweenness_centrality(G, weight="weight", normalized=True) + for n in sorted(G): + assert almost_equal(b[n], b_answer[n], places=3) + + def test_les_miserables_graph(self): + """Weighted betweenness centrality: Les Miserables graph""" + G = nx.les_miserables_graph() + b_answer = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.454, + "Labarre": 0.000, + "Marguerite": 0.009, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.066, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.114, + "MmeThenardier": 0.046, + "Thenardier": 0.129, + "Cosette": 0.075, + "Javert": 0.193, + "Fauchelevent": 0.026, + "Bamatabois": 0.080, + "Perpetue": 0.000, + "Simplice": 0.001, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.023, + "Boulatruelle": 0.000, + "Eponine": 0.023, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.285, + "Gillenormand": 0.024, + "Magnon": 0.005, + "MlleGillenormand": 0.036, + "MmePontmercy": 0.005, + "MlleVaubois": 0.000, + "LtGillenormand": 0.015, + "Marius": 0.072, + "BaronessT": 0.004, + "Mabeuf": 0.089, + "Enjolras": 0.003, + "Combeferre": 0.000, + "Prouvaire": 0.000, + "Feuilly": 0.004, + "Courfeyrac": 0.001, + "Bahorel": 0.007, + "Bossuet": 0.028, + "Joly": 0.000, + "Grantaire": 0.036, + "MotherPlutarch": 0.000, + "Gueulemer": 0.025, + "Babet": 0.015, + "Claquesous": 0.042, + "Montparnasse": 0.050, + "Toussaint": 0.011, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.002, + "MmeHucheloup": 0.034, + } + + b = nx.betweenness_centrality(G, weight="weight", normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_ladder_graph(self): """Weighted betweenness centrality: Ladder graph""" G = nx.Graph() # ladder_graph(3) - G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) - b_answer = {0: 1.667, 1: 1.667, 2: 6.667, - 3: 6.667, 4: 1.667, 5: 1.667} + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667} for b in b_answer: - b_answer[b] /= 2.0 - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b_answer[b] /= 2 + b = nx.betweenness_centrality(G, weight="weight", 
normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_G(self): """Weighted betweenness centrality: G""" G = weighted_G() b_answer = {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0} - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_G2(self): """Weighted betweenness centrality: G2""" G = nx.DiGraph() - G.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) - - b_answer = {'y': 5.0, 'x': 5.0, 's': 4.0, 'u': 2.0, 'v': 2.0} - - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + G.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + + b_answer = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0} + + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - + assert almost_equal(b[n], b_answer[n]) -class TestEdgeBetweennessCentrality(object): +class TestEdgeBetweennessCentrality: def test_K5(self): """Edge betweenness centrality: K5""" G = nx.complete_graph(5) b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) b_answer = dict.fromkeys(G.edges(), 1) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_normalized_K5(self): """Edge betweenness centrality: K5""" G = nx.complete_graph(5) b = nx.edge_betweenness_centrality(G, weight=None, normalized=True) - b_answer = dict.fromkeys(G.edges(), 1 / 10.0) + b_answer = dict.fromkeys(G.edges(), 1 / 10) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_C4(self): """Edge betweenness centrality: C4""" @@ -343,7 +537,7 @@ def test_C4(self): b = nx.edge_betweenness_centrality(G, weight=None, normalized=True) b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2} for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n] / 6.0) + assert almost_equal(b[n], b_answer[n] / 6) def test_P4(self): """Edge betweenness centrality: P4""" @@ -351,7 +545,7 @@ def test_P4(self): b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_normalized_P4(self): """Edge betweenness centrality: P4""" @@ -359,90 +553,105 @@ def test_normalized_P4(self): b = nx.edge_betweenness_centrality(G, weight=None, normalized=True) b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n] / 6.0) + assert almost_equal(b[n], b_answer[n] / 6) def test_balanced_tree(self): """Edge betweenness centrality: balanced tree""" G = nx.balanced_tree(r=2, h=2) b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) - b_answer = {(0, 1): 12, (0, 2): 12, - (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} + b_answer = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} for n in sorted(G.edges()): - assert_almost_equal(b[n], 
b_answer[n]) + assert almost_equal(b[n], b_answer[n]) -class TestWeightedEdgeBetweennessCentrality(object): - +class TestWeightedEdgeBetweennessCentrality: def test_K5(self): """Edge betweenness centrality: K5""" G = nx.complete_graph(5) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) b_answer = dict.fromkeys(G.edges(), 1) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_C4(self): """Edge betweenness centrality: C4""" G = nx.cycle_graph(4) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2} for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P4(self): """Edge betweenness centrality: P4""" G = nx.path_graph(4) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_balanced_tree(self): """Edge betweenness centrality: balanced tree""" G = nx.balanced_tree(r=2, h=2) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) - b_answer = {(0, 1): 12, (0, 2): 12, - (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_weighted_graph(self): - eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3), - (0, 4, 2), (1, 2, 4), (1, 3, 1), - (1, 4, 3), (2, 4, 5), (3, 4, 4)] + eList = [ + (0, 1, 5), + (0, 2, 4), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 4, 3), + (2, 4, 5), + (3, 4, 4), + ] G = nx.Graph() G.add_weighted_edges_from(eList) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) - b_answer = {(0, 1): 0.0, - (0, 2): 1.0, - (0, 3): 2.0, - (0, 4): 1.0, - (1, 2): 2.0, - (1, 3): 3.5, - (1, 4): 1.5, - (2, 4): 1.0, - (3, 4): 0.5} - + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = { + (0, 1): 0.0, + (0, 2): 1.0, + (0, 3): 2.0, + (0, 4): 1.0, + (1, 2): 2.0, + (1, 3): 3.5, + (1, 4): 1.5, + (2, 4): 1.0, + (3, 4): 0.5, + } for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_normalized_weighted_graph(self): - eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3), - (0, 4, 2), (1, 2, 4), (1, 3, 1), - (1, 4, 3), (2, 4, 5), (3, 4, 4)] + eList = [ + (0, 1, 5), + (0, 2, 4), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 4, 3), + (2, 4, 5), + (3, 4, 4), + ] G = nx.Graph() G.add_weighted_edges_from(eList) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=True) - b_answer = {(0, 1): 0.0, - (0, 2): 1.0, - (0, 3): 2.0, - (0, 4): 1.0, - (1, 2): 2.0, - (1, 3): 3.5, - (1, 4): 1.5, - (2, 4): 1.0, - (3, 4): 0.5} - - norm = len(G) * (len(G) - 1) / 2.0 + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=True) + b_answer = { + (0, 1): 0.0, + (0, 2): 1.0, + (0, 3): 2.0, + (0, 4): 1.0, + (1, 2): 2.0, + (1, 3): 3.5, + (1, 4): 1.5, + (2, 4): 1.0, + (3, 
4): 0.5, + } + norm = len(G) * (len(G) - 1) / 2 for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n] / norm) + assert almost_equal(b[n], b_answer[n] / norm) diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py index aa77999..9c770fe 100644 --- a/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py +++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py @@ -1,172 +1,217 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx +from networkx.testing import almost_equal class TestSubsetBetweennessCentrality: - def test_K5(self): - """Betweenness centrality: K5""" + """Betweenness Centrality Subset: K5""" G = nx.complete_graph(5) - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[1, 3], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[1, 3], weight=None + ) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P5_directed(self): - """Betweenness centrality: P5 directed""" + """Betweenness Centrality Subset: P5 directed""" G = nx.DiGraph() nx.add_path(G, range(5)) b_answer = {0: 0, 1: 1, 2: 1, 3: 0, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P5(self): - """Betweenness centrality: P5""" + """Betweenness Centrality Subset: P5""" G = nx.Graph() nx.add_path(G, range(5)) b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P5_multiple_target(self): - """Betweenness centrality: P5 multiple target""" + """Betweenness Centrality Subset: P5 multiple target""" G = nx.Graph() nx.add_path(G, range(5)) b_answer = {0: 0, 1: 1, 2: 1, 3: 0.5, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3, 4], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_box(self): - """Betweenness centrality: box""" + """Betweenness Centrality Subset: box""" G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) b_answer = {0: 0, 1: 0.25, 2: 0.25, 3: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_box_and_path(self): - """Betweenness centrality: box and path""" + """Betweenness Centrality Subset: box and path""" G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)]) b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0.5, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3, 4], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G): - 
assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_box_and_path2(self): - """Betweenness centrality: box and path multiple target""" + """Betweenness Centrality Subset: box and path multiple target""" G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)]) b_answer = {0: 0, 1: 1.0, 2: 0.5, 20: 0.5, 3: 0.5, 4: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3, 4], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) + def test_diamond_multi_path(self): + """Betweenness Centrality Subset: Diamond Multi Path""" + G = nx.Graph() + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 10), + (10, 11), + (11, 12), + (12, 9), + (2, 6), + (3, 6), + (4, 6), + (5, 7), + (7, 8), + (6, 8), + (8, 9), + ] + ) + b = nx.betweenness_centrality_subset(G, sources=[1], targets=[9], weight=None) + + expected_b = { + 1: 0, + 2: 1.0 / 10, + 3: 1.0 / 10, + 4: 1.0 / 10, + 5: 1.0 / 10, + 6: 3.0 / 10, + 7: 1.0 / 10, + 8: 4.0 / 10, + 9: 0, + 10: 1.0 / 10, + 11: 1.0 / 10, + 12: 1.0 / 10, + } -class TestBetweennessCentralitySources: + for n in sorted(G): + assert almost_equal(b[n], expected_b[n]) + +class TestBetweennessCentralitySources: def test_K5(self): - """Betweenness centrality: K5""" + """Betweenness Centrality Sources: K5""" G = nx.complete_graph(5) b = nx.betweenness_centrality_source(G, weight=None, normalized=False) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P3(self): - """Betweenness centrality: P3""" + """Betweenness Centrality Sources: P3""" G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} b = nx.betweenness_centrality_source(G, weight=None, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) class TestEdgeSubsetBetweennessCentrality: - def test_K5(self): - """Edge betweenness centrality: K5""" + """Edge betweenness subset centrality: K5""" G = nx.complete_graph(5) - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[1, 3], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[1, 3], weight=None + ) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 3)] = b_answer[(0, 1)] = 0.5 for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P5_directed(self): - """Edge betweenness centrality: P5 directed""" + """Edge betweenness subset centrality: P5 directed""" G = nx.DiGraph() nx.add_path(G, range(5)) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 1 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P5(self): - """Edge betweenness centrality: P5""" + """Edge betweenness subset centrality: P5""" G = nx.Graph() nx.add_path(G, range(5)) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.edge_betweenness_centrality_subset( + G, 
sources=[0], targets=[3], weight=None + ) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P5_multiple_target(self): - """Edge betweenness centrality: P5 multiple target""" + """Edge betweenness subset centrality: P5 multiple target""" G = nx.Graph() nx.add_path(G, range(5)) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 1 b_answer[(3, 4)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[3, 4], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_box(self): - """Edge etweenness centrality: box""" + """Edge betweenness subset centrality: box""" G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(0, 2)] = 0.25 b_answer[(1, 3)] = b_answer[(2, 3)] = 0.25 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_box_and_path(self): - """Edge etweenness centrality: box and path""" + """Edge betweenness subset centrality: box and path""" G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)]) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(0, 2)] = 0.5 b_answer[(1, 3)] = b_answer[(2, 3)] = 0.5 b_answer[(3, 4)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[3, 4], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_box_and_path2(self): - """Edge betweenness centrality: box and path multiple target""" + """Edge betweenness subset centrality: box and path multiple target""" G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)]) b_answer = dict.fromkeys(G.edges(), 0) @@ -174,7 +219,8 @@ def test_box_and_path2(self): b_answer[(1, 20)] = b_answer[(3, 20)] = 0.5 b_answer[(1, 2)] = b_answer[(2, 3)] = 0.5 b_answer[(3, 4)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[3, 4], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G.edges()): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) diff --git a/networkx/algorithms/centrality/tests/test_closeness_centrality.py b/networkx/algorithms/centrality/tests/test_closeness_centrality.py index 94595d3..b3f883b 100644 --- a/networkx/algorithms/centrality/tests/test_closeness_centrality.py +++ b/networkx/algorithms/centrality/tests/test_closeness_centrality.py @@ -1,112 +1,306 @@ """ -Tests for degree centrality. +Tests for closeness centrality. 
""" -from nose.tools import * +import pytest import networkx as nx +from networkx.testing import almost_equal class TestClosenessCentrality: - def setUp(self): - self.K = nx.krackhardt_kite_graph() - self.P3 = nx.path_graph(3) - self.P4 = nx.path_graph(4) - self.K5 = nx.complete_graph(5) - - self.C4 = nx.cycle_graph(4) - self.T = nx.balanced_tree(r=2, h=2) - self.Gb = nx.Graph() - self.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) + @classmethod + def setup_class(cls): + cls.K = nx.krackhardt_kite_graph() + cls.P3 = nx.path_graph(3) + cls.P4 = nx.path_graph(4) + cls.K5 = nx.complete_graph(5) + + cls.C4 = nx.cycle_graph(4) + cls.T = nx.balanced_tree(r=2, h=2) + cls.Gb = nx.Graph() + cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) F = nx.florentine_families_graph() - self.F = F + cls.F = F + + cls.LM = nx.les_miserables_graph() + + # Create random undirected, unweighted graph for testing incremental version + cls.undirected_G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123) + cls.undirected_G_cc = nx.closeness_centrality(cls.undirected_G) def test_wf_improved(self): G = nx.union(self.P4, nx.path_graph([4, 5, 6])) c = nx.closeness_centrality(G) cwf = nx.closeness_centrality(G, wf_improved=False) - res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, - 4: 0.222, 5: 0.333, 6: 0.222} - wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, - 4: 0.667, 5: 1.0, 6: 0.667} + res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222} + wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667} for n in G: - assert_almost_equal(c[n], res[n], places=3) - assert_almost_equal(cwf[n], wf_res[n], places=3) + assert almost_equal(c[n], res[n], places=3) + assert almost_equal(cwf[n], wf_res[n], places=3) def test_digraph(self): G = nx.path_graph(3, create_using=nx.DiGraph()) c = nx.closeness_centrality(G) - cr = nx.closeness_centrality(G, reverse=True) + cr = nx.closeness_centrality(G.reverse()) d = {0: 0.0, 1: 0.500, 2: 0.667} dr = {0: 0.667, 1: 0.500, 2: 0.0} for n in sorted(self.P3): - assert_almost_equal(c[n], d[n], places=3) - assert_almost_equal(cr[n], dr[n], places=3) + assert almost_equal(c[n], d[n], places=3) + assert almost_equal(cr[n], dr[n], places=3) def test_k5_closeness(self): c = nx.closeness_centrality(self.K5) - d = {0: 1.000, - 1: 1.000, - 2: 1.000, - 3: 1.000, - 4: 1.000} + d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000} for n in sorted(self.K5): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_p3_closeness(self): c = nx.closeness_centrality(self.P3) - d = {0: 0.667, - 1: 1.000, - 2: 0.667} + d = {0: 0.667, 1: 1.000, 2: 0.667} for n in sorted(self.P3): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_krackhardt_closeness(self): c = nx.closeness_centrality(self.K) - d = {0: 0.529, - 1: 0.529, - 2: 0.500, - 3: 0.600, - 4: 0.500, - 5: 0.643, - 6: 0.643, - 7: 0.600, - 8: 0.429, - 9: 0.310} + d = { + 0: 0.529, + 1: 0.529, + 2: 0.500, + 3: 0.600, + 4: 0.500, + 5: 0.643, + 6: 0.643, + 7: 0.600, + 8: 0.429, + 9: 0.310, + } for n in sorted(self.K): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_florentine_families_closeness(self): c = nx.closeness_centrality(self.F) - d = {'Acciaiuoli': 0.368, - 'Albizzi': 0.483, - 'Barbadori': 0.4375, - 'Bischeri': 0.400, - 'Castellani': 0.389, - 'Ginori': 0.333, - 'Guadagni': 0.467, - 'Lamberteschi': 0.326, - 'Medici': 0.560, - 'Pazzi': 
0.286, - 'Peruzzi': 0.368, - 'Ridolfi': 0.500, - 'Salviati': 0.389, - 'Strozzi': 0.4375, - 'Tornabuoni': 0.483} + d = { + "Acciaiuoli": 0.368, + "Albizzi": 0.483, + "Barbadori": 0.4375, + "Bischeri": 0.400, + "Castellani": 0.389, + "Ginori": 0.333, + "Guadagni": 0.467, + "Lamberteschi": 0.326, + "Medici": 0.560, + "Pazzi": 0.286, + "Peruzzi": 0.368, + "Ridolfi": 0.500, + "Salviati": 0.389, + "Strozzi": 0.4375, + "Tornabuoni": 0.483, + } for n in sorted(self.F): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) + + def test_les_miserables_closeness(self): + c = nx.closeness_centrality(self.LM) + d = { + "Napoleon": 0.302, + "Myriel": 0.429, + "MlleBaptistine": 0.413, + "MmeMagloire": 0.413, + "CountessDeLo": 0.302, + "Geborand": 0.302, + "Champtercier": 0.302, + "Cravatte": 0.302, + "Count": 0.302, + "OldMan": 0.302, + "Valjean": 0.644, + "Labarre": 0.394, + "Marguerite": 0.413, + "MmeDeR": 0.394, + "Isabeau": 0.394, + "Gervais": 0.394, + "Listolier": 0.341, + "Tholomyes": 0.392, + "Fameuil": 0.341, + "Blacheville": 0.341, + "Favourite": 0.341, + "Dahlia": 0.341, + "Zephine": 0.341, + "Fantine": 0.461, + "MmeThenardier": 0.461, + "Thenardier": 0.517, + "Cosette": 0.478, + "Javert": 0.517, + "Fauchelevent": 0.402, + "Bamatabois": 0.427, + "Perpetue": 0.318, + "Simplice": 0.418, + "Scaufflaire": 0.394, + "Woman1": 0.396, + "Judge": 0.404, + "Champmathieu": 0.404, + "Brevet": 0.404, + "Chenildieu": 0.404, + "Cochepaille": 0.404, + "Pontmercy": 0.373, + "Boulatruelle": 0.342, + "Eponine": 0.396, + "Anzelma": 0.352, + "Woman2": 0.402, + "MotherInnocent": 0.398, + "Gribier": 0.288, + "MmeBurgon": 0.344, + "Jondrette": 0.257, + "Gavroche": 0.514, + "Gillenormand": 0.442, + "Magnon": 0.335, + "MlleGillenormand": 0.442, + "MmePontmercy": 0.315, + "MlleVaubois": 0.308, + "LtGillenormand": 0.365, + "Marius": 0.531, + "BaronessT": 0.352, + "Mabeuf": 0.396, + "Enjolras": 0.481, + "Combeferre": 0.392, + "Prouvaire": 0.357, + "Feuilly": 0.392, + "Courfeyrac": 0.400, + "Bahorel": 0.394, + "Bossuet": 0.475, + "Joly": 0.394, + "Grantaire": 0.358, + "MotherPlutarch": 0.285, + "Gueulemer": 0.463, + "Babet": 0.463, + "Claquesous": 0.452, + "Montparnasse": 0.458, + "Toussaint": 0.402, + "Child1": 0.342, + "Child2": 0.342, + "Brujon": 0.380, + "MmeHucheloup": 0.353, + } + for n in sorted(self.LM): + assert almost_equal(c[n], d[n], places=3) def test_weighted_closeness(self): - edges = ([('s', 'u', 10), ('s', 'x', 5), ('u', 'v', 1), - ('u', 'x', 2), ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), ('y', 's', 7), ('y', 'v', 6)]) + edges = [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] XG = nx.Graph() XG.add_weighted_edges_from(edges) - c = nx.closeness_centrality(XG, distance='weight') - d = {'y': 0.200, - 'x': 0.286, - 's': 0.138, - 'u': 0.235, - 'v': 0.200} + c = nx.closeness_centrality(XG, distance="weight") + d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200} for n in sorted(XG): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) + + # + # Tests for incremental closeness centrality. 
+ # + @staticmethod + def pick_add_edge(g): + u = nx.utils.arbitrary_element(g) + possible_nodes = set(g.nodes()) + neighbors = list(g.neighbors(u)) + [u] + possible_nodes.difference_update(neighbors) + v = nx.utils.arbitrary_element(possible_nodes) + return (u, v) + + @staticmethod + def pick_remove_edge(g): + u = nx.utils.arbitrary_element(g) + possible_nodes = list(g.neighbors(u)) + v = nx.utils.arbitrary_element(possible_nodes) + return (u, v) + + def test_directed_raises(self): + with pytest.raises(nx.NetworkXNotImplemented): + dir_G = nx.gn_graph(n=5) + prev_cc = None + edge = self.pick_add_edge(dir_G) + insert = True + nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insert) + + def test_wrong_size_prev_cc_raises(self): + with pytest.raises(nx.NetworkXError): + G = self.undirected_G.copy() + edge = self.pick_add_edge(G) + insert = True + prev_cc = self.undirected_G_cc.copy() + prev_cc.pop(0) + nx.incremental_closeness_centrality(G, edge, prev_cc, insert) + + def test_wrong_nodes_prev_cc_raises(self): + with pytest.raises(nx.NetworkXError): + G = self.undirected_G.copy() + edge = self.pick_add_edge(G) + insert = True + prev_cc = self.undirected_G_cc.copy() + num_nodes = len(prev_cc) + prev_cc.pop(0) + prev_cc[num_nodes] = 0.5 + nx.incremental_closeness_centrality(G, edge, prev_cc, insert) + + def test_zero_centrality(self): + G = nx.path_graph(3) + prev_cc = nx.closeness_centrality(G) + edge = self.pick_remove_edge(G) + test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False) + G.remove_edges_from([edge]) + real_cc = nx.closeness_centrality(G) + shared_items = set(test_cc.items()) & set(real_cc.items()) + assert len(shared_items) == len(real_cc) + assert 0 in test_cc.values() + + def test_incremental(self): + # Check that incremental and regular give same output + G = self.undirected_G.copy() + prev_cc = None + for i in range(5): + if i % 2 == 0: + # Remove an edge + insert = False + edge = self.pick_remove_edge(G) + else: + # Add an edge + insert = True + edge = self.pick_add_edge(G) + + # start = timeit.default_timer() + test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert) + # inc_elapsed = (timeit.default_timer() - start) + # print(f"incremental time: {inc_elapsed}") + + if insert: + G.add_edges_from([edge]) + else: + G.remove_edges_from([edge]) + + # start = timeit.default_timer() + real_cc = nx.closeness_centrality(G) + # reg_elapsed = (timeit.default_timer() - start) + # print(f"regular time: {reg_elapsed}") + # Example output: + # incremental time: 0.208 + # regular time: 0.276 + # incremental time: 0.00683 + # regular time: 0.260 + # incremental time: 0.0224 + # regular time: 0.278 + # incremental time: 0.00804 + # regular time: 0.208 + # incremental time: 0.00947 + # regular time: 0.188 + + assert set(test_cc.items()) == set(real_cc.items()) + + prev_cc = test_cc diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py index d4b0e83..d3dde1e 100644 --- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py +++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py @@ -1,63 +1,55 @@ -#!/usr/bin/env python +import pytest + import networkx as nx -from nose.tools import assert_almost_equal -from nose import SkipTest -from nose.plugins.attrib import attr -from networkx import edge_current_flow_betweenness_centrality \ - as edge_current_flow -from networkx 
import approximate_current_flow_betweenness_centrality \ - as approximate_cfbc - - -class TestFlowBetweennessCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('NumPy not available.') +from networkx.testing import almost_equal +from networkx import edge_current_flow_betweenness_centrality as edge_current_flow +from networkx import approximate_current_flow_betweenness_centrality as approximate_cfbc + + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") + +class TestFlowBetweennessCentrality: def test_K4_normalized(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) b = nx.current_flow_betweenness_centrality(G, normalized=True) b_answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) G.add_edge(0, 1, weight=0.5, other=0.3) b = nx.current_flow_betweenness_centrality(G, normalized=True, weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) wb_answer = {0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555} - b = nx.current_flow_betweenness_centrality(G, normalized=True, weight='weight') + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="weight") for n in sorted(G): - assert_almost_equal(b[n], wb_answer[n]) + assert almost_equal(b[n], wb_answer[n]) wb_answer = {0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358} - b = nx.current_flow_betweenness_centrality(G, normalized=True, weight='other') + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="other") for n in sorted(G): - assert_almost_equal(b[n], wb_answer[n]) + assert almost_equal(b[n], wb_answer[n]) def test_K4(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) - for solver in ['full', 'lu', 'cg']: - b = nx.current_flow_betweenness_centrality(G, normalized=False, - solver=solver) + for solver in ["full", "lu", "cg"]: + b = nx.current_flow_betweenness_centrality( + G, normalized=False, solver=solver + ) b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P4_normalized(self): """Betweenness centrality: P4 normalized""" G = nx.path_graph(4) b = nx.current_flow_betweenness_centrality(G, normalized=True) - b_answer = {0: 0, 1: 2. / 3, 2: 2. 
/ 3, 3: 0} + b_answer = {0: 0, 1: 2.0 / 3, 2: 2.0 / 3, 3: 0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P4(self): """Betweenness centrality: P4""" @@ -65,42 +57,30 @@ def test_P4(self): b = nx.current_flow_betweenness_centrality(G, normalized=False) b_answer = {0: 0, 1: 2, 2: 2, 3: 0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_star(self): """Betweenness centrality: star """ G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) + nx.add_star(G, ["a", "b", "c", "d"]) b = nx.current_flow_betweenness_centrality(G, normalized=True) - b_answer = {'a': 1.0, 'b': 0.0, 'c': 0.0, 'd': 0.0} + b_answer = {"a": 1.0, "b": 0.0, "c": 0.0, "d": 0.0} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) - def test_solers(self): + def test_solvers2(self): """Betweenness centrality: alternate solvers""" G = nx.complete_graph(4) - for solver in ['full', 'lu', 'cg']: - b = nx.current_flow_betweenness_centrality(G, normalized=False, - solver=solver) + for solver in ["full", "lu", "cg"]: + b = nx.current_flow_betweenness_centrality( + G, normalized=False, solver=solver + ) b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - - -class TestApproximateFlowBetweennessCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test + assert almost_equal(b[n], b_answer[n]) - @classmethod - def setupClass(cls): - global np - global assert_allclose - try: - import numpy as np - import scipy - from numpy.testing import assert_allclose - except ImportError: - raise SkipTest('NumPy not available.') +class TestApproximateFlowBetweennessCentrality: def test_K4_normalized(self): "Approximate current-flow betweenness centrality: K4 normalized" G = nx.complete_graph(4) @@ -108,7 +88,7 @@ def test_K4_normalized(self): epsilon = 0.1 ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon) for n in sorted(G): - assert_allclose(b[n], ba[n], atol=epsilon) + npt.assert_allclose(b[n], ba[n], atol=epsilon) def test_K4(self): "Approximate current-flow betweenness centrality: K4" @@ -117,17 +97,17 @@ def test_K4(self): epsilon = 0.1 ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon) for n in sorted(G): - assert_allclose(b[n], ba[n], atol=epsilon * len(G)**2) + npt.assert_allclose(b[n], ba[n], atol=epsilon * len(G) ** 2) def test_star(self): "Approximate current-flow betweenness centrality: star" G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) + nx.add_star(G, ["a", "b", "c", "d"]) b = nx.current_flow_betweenness_centrality(G, normalized=True) epsilon = 0.1 ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon) for n in sorted(G): - assert_allclose(b[n], ba[n], atol=epsilon) + npt.assert_allclose(b[n], ba[n], atol=epsilon) def test_grid(self): "Approximate current-flow betweenness centrality: 2d grid" @@ -136,36 +116,33 @@ def test_grid(self): epsilon = 0.1 ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon) for n in sorted(G): - assert_allclose(b[n], ba[n], atol=epsilon) + npt.assert_allclose(b[n], ba[n], atol=epsilon) + + def test_seed(self): + G = nx.complete_graph(4) + b = approximate_cfbc(G, normalized=False, epsilon=0.05, seed=1) + b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} + for n in sorted(G): + npt.assert_allclose(b[n], b_answer[n], atol=0.1) def test_solvers(self): "Approximate current-flow betweenness 
centrality: solvers" G = nx.complete_graph(4) epsilon = 0.1 - for solver in ['full', 'lu', 'cg']: - b = approximate_cfbc(G, normalized=False, solver=solver, - epsilon=0.5 * epsilon) + for solver in ["full", "lu", "cg"]: + b = approximate_cfbc( + G, normalized=False, solver=solver, epsilon=0.5 * epsilon + ) b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): - assert_allclose(b[n], b_answer[n], atol=epsilon) + npt.assert_allclose(b[n], b_answer[n], atol=epsilon) -class TestWeightedFlowBetweennessCentrality(object): +class TestWeightedFlowBetweennessCentrality: pass -class TestEdgeFlowBetweennessCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('NumPy not available.') - +class TestEdgeFlowBetweennessCentrality: def test_K4(self): """Edge flow betweenness centrality: K4""" G = nx.complete_graph(4) @@ -173,7 +150,7 @@ def test_K4(self): b_answer = dict.fromkeys(G.edges(), 0.25) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) def test_K4_normalized(self): """Edge flow betweenness centrality: K4""" @@ -182,7 +159,7 @@ def test_K4_normalized(self): b_answer = dict.fromkeys(G.edges(), 0.75) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) def test_C4(self): """Edge flow betweenness centrality: C4""" @@ -191,7 +168,7 @@ def test_C4(self): b_answer = {(0, 1): 1.25, (0, 3): 1.25, (1, 2): 1.25, (2, 3): 1.25} for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) def test_P4(self): """Edge betweenness centrality: P4""" @@ -200,4 +177,4 @@ def test_P4(self): b_answer = {(0, 1): 1.5, (1, 2): 2.0, (2, 3): 1.5} for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py index 7379bd6..1ec1b08 100644 --- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py +++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py @@ -1,125 +1,97 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest -import networkx as nx -from nose.plugins.attrib import attr +import pytest -from networkx import edge_current_flow_betweenness_centrality \ - as edge_current_flow +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") -from networkx import edge_current_flow_betweenness_centrality_subset \ - as edge_current_flow_subset +import networkx as nx +from networkx.testing import almost_equal +from networkx import edge_current_flow_betweenness_centrality as edge_current_flow -class TestFlowBetweennessCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +from networkx import ( + edge_current_flow_betweenness_centrality_subset as edge_current_flow_subset, +) - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('NumPy not available.') +class TestFlowBetweennessCentrality: def test_K4_normalized(self): """Betweenness 
centrality: K4""" G = nx.complete_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_K4(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) # test weighted network G.add_edge(0, 1, weight=0.5, other=0.3) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True, - weight=None) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True, weight=None + ) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + assert almost_equal(b[n], b_answer[n]) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True, - weight='other') - b_answer = nx.current_flow_betweenness_centrality(G, normalized=True, weight='other') + assert almost_equal(b[n], b_answer[n]) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True, weight="other" + ) + b_answer = nx.current_flow_betweenness_centrality( + G, normalized=True, weight="other" + ) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P4_normalized(self): """Betweenness centrality: P4 normalized""" G = nx.path_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P4(self): """Betweenness centrality: P4""" G = nx.path_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_star(self): """Betweenness centrality: star """ G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + nx.add_star(G, ["a", "b", "c", "d"]) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) # class 
TestWeightedFlowBetweennessCentrality(): # pass -class TestEdgeFlowBetweennessCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('NumPy not available.') - +class TestEdgeFlowBetweennessCentrality: def test_K4_normalized(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) @@ -127,7 +99,7 @@ def test_K4_normalized(self): b_answer = edge_current_flow(G, normalized=True) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) def test_K4(self): """Betweenness centrality: K4""" @@ -136,26 +108,28 @@ def test_K4(self): b_answer = edge_current_flow(G, normalized=False) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) # test weighted network G.add_edge(0, 1, weight=0.5, other=0.3) b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight=None) # weight is None => same as unweighted network for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) b = edge_current_flow_subset(G, list(G), list(G), normalized=False) b_answer = edge_current_flow(G, normalized=False) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) - b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight='other') - b_answer = edge_current_flow(G, normalized=False, weight='other') + b = edge_current_flow_subset( + G, list(G), list(G), normalized=False, weight="other" + ) + b_answer = edge_current_flow(G, normalized=False, weight="other") for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) def test_C4(self): """Edge betweenness centrality: C4""" @@ -164,7 +138,7 @@ def test_C4(self): b_answer = edge_current_flow(G, normalized=True) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) def test_P4(self): """Edge betweenness centrality: P4""" @@ -173,4 +147,4 @@ def test_P4(self): b_answer = edge_current_flow(G, normalized=True) for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert_almost_equal(v1, v2) + assert almost_equal(v1, v2) diff --git a/networkx/algorithms/centrality/tests/test_current_flow_closeness.py b/networkx/algorithms/centrality/tests/test_current_flow_closeness.py index 0ec8bfe..e6a3894 100644 --- a/networkx/algorithms/centrality/tests/test_current_flow_closeness.py +++ b/networkx/algorithms/centrality/tests/test_current_flow_closeness.py @@ -1,28 +1,20 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest -import networkx as nx +import pytest +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") -class TestFlowClosenessCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +import networkx as nx +from networkx.testing import almost_equal - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('NumPy not available.') +class TestFlowClosenessCentrality: def test_K4(self): """Closeness centrality: K4""" G = nx.complete_graph(4) b = 
nx.current_flow_closeness_centrality(G) b_answer = {0: 2.0 / 3, 1: 2.0 / 3, 2: 2.0 / 3, 3: 2.0 / 3} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P4(self): """Closeness centrality: P4""" @@ -30,17 +22,17 @@ def test_P4(self): b = nx.current_flow_closeness_centrality(G) b_answer = {0: 1.0 / 6, 1: 1.0 / 4, 2: 1.0 / 4, 3: 1.0 / 6} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_star(self): """Closeness centrality: star """ G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) + nx.add_star(G, ["a", "b", "c", "d"]) b = nx.current_flow_closeness_centrality(G) - b_answer = {'a': 1.0 / 3, 'b': 0.6 / 3, 'c': 0.6 / 3, 'd': 0.6 / 3} + b_answer = {"a": 1.0 / 3, "b": 0.6 / 3, "c": 0.6 / 3, "d": 0.6 / 3} for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) -class TestWeightedFlowClosenessCentrality(object): +class TestWeightedFlowClosenessCentrality: pass diff --git a/networkx/algorithms/centrality/tests/test_degree_centrality.py b/networkx/algorithms/centrality/tests/test_degree_centrality.py index d80b712..2b204cf 100644 --- a/networkx/algorithms/centrality/tests/test_degree_centrality.py +++ b/networkx/algorithms/centrality/tests/test_degree_centrality.py @@ -2,39 +2,39 @@ Unit tests for degree centrality. """ -from nose.tools import * import networkx as nx +from networkx.testing import almost_equal class TestDegreeCentrality: - def __init__(self): + def setup_method(self): self.K = nx.krackhardt_kite_graph() self.P3 = nx.path_graph(3) self.K5 = nx.complete_graph(5) F = nx.Graph() # Florentine families - F.add_edge('Acciaiuoli', 'Medici') - F.add_edge('Castellani', 'Peruzzi') - F.add_edge('Castellani', 'Strozzi') - F.add_edge('Castellani', 'Barbadori') - F.add_edge('Medici', 'Barbadori') - F.add_edge('Medici', 'Ridolfi') - F.add_edge('Medici', 'Tornabuoni') - F.add_edge('Medici', 'Albizzi') - F.add_edge('Medici', 'Salviati') - F.add_edge('Salviati', 'Pazzi') - F.add_edge('Peruzzi', 'Strozzi') - F.add_edge('Peruzzi', 'Bischeri') - F.add_edge('Strozzi', 'Ridolfi') - F.add_edge('Strozzi', 'Bischeri') - F.add_edge('Ridolfi', 'Tornabuoni') - F.add_edge('Tornabuoni', 'Guadagni') - F.add_edge('Albizzi', 'Ginori') - F.add_edge('Albizzi', 'Guadagni') - F.add_edge('Bischeri', 'Guadagni') - F.add_edge('Guadagni', 'Lamberteschi') + F.add_edge("Acciaiuoli", "Medici") + F.add_edge("Castellani", "Peruzzi") + F.add_edge("Castellani", "Strozzi") + F.add_edge("Castellani", "Barbadori") + F.add_edge("Medici", "Barbadori") + F.add_edge("Medici", "Ridolfi") + F.add_edge("Medici", "Tornabuoni") + F.add_edge("Medici", "Albizzi") + F.add_edge("Medici", "Salviati") + F.add_edge("Salviati", "Pazzi") + F.add_edge("Peruzzi", "Strozzi") + F.add_edge("Peruzzi", "Bischeri") + F.add_edge("Strozzi", "Ridolfi") + F.add_edge("Strozzi", "Bischeri") + F.add_edge("Ridolfi", "Tornabuoni") + F.add_edge("Tornabuoni", "Guadagni") + F.add_edge("Albizzi", "Ginori") + F.add_edge("Albizzi", "Guadagni") + F.add_edge("Bischeri", "Guadagni") + F.add_edge("Guadagni", "Lamberteschi") self.F = F G = nx.DiGraph() @@ -52,40 +52,94 @@ def test_degree_centrality_1(self): d = nx.degree_centrality(self.K5) exact = dict(zip(range(5), [1] * 5)) for n, dc in d.items(): - assert_almost_equal(exact[n], dc) + assert almost_equal(exact[n], dc) def test_degree_centrality_2(self): d = nx.degree_centrality(self.P3) exact = {0: 0.5, 1: 1, 2: 0.5} for n, dc in d.items(): - 
assert_almost_equal(exact[n], dc) + assert almost_equal(exact[n], dc) def test_degree_centrality_3(self): d = nx.degree_centrality(self.K) - exact = {0: .444, 1: .444, 2: .333, 3: .667, 4: .333, - 5: .556, 6: .556, 7: .333, 8: .222, 9: .111} + exact = { + 0: 0.444, + 1: 0.444, + 2: 0.333, + 3: 0.667, + 4: 0.333, + 5: 0.556, + 6: 0.556, + 7: 0.333, + 8: 0.222, + 9: 0.111, + } for n, dc in d.items(): - assert_almost_equal(exact[n], float("%5.3f" % dc)) + assert almost_equal(exact[n], float(f"{dc:.3f}")) def test_degree_centrality_4(self): d = nx.degree_centrality(self.F) names = sorted(self.F.nodes()) - dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286, - 0.071, 0.429, 0.071, 0.214, 0.214, 0.143, 0.286, 0.214] + dcs = [ + 0.071, + 0.214, + 0.143, + 0.214, + 0.214, + 0.071, + 0.286, + 0.071, + 0.429, + 0.071, + 0.214, + 0.214, + 0.143, + 0.286, + 0.214, + ] exact = dict(zip(names, dcs)) for n, dc in d.items(): - assert_almost_equal(exact[n], float("%5.3f" % dc)) + assert almost_equal(exact[n], float(f"{dc:.3f}")) def test_indegree_centrality(self): d = nx.in_degree_centrality(self.G) - exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.625, 6: 0.125, 7: 0.125, 8: 0.125} + exact = { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.625, + 6: 0.125, + 7: 0.125, + 8: 0.125, + } for n, dc in d.items(): - assert_almost_equal(exact[n], dc) + assert almost_equal(exact[n], dc) def test_outdegree_centrality(self): d = nx.out_degree_centrality(self.G) - exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, - 4: 0.125, 5: 0.375, 6: 0.0, 7: 0.0, 8: 0.0} + exact = { + 0: 0.125, + 1: 0.125, + 2: 0.125, + 3: 0.125, + 4: 0.125, + 5: 0.375, + 6: 0.0, + 7: 0.0, + 8: 0.0, + } for n, dc in d.items(): - assert_almost_equal(exact[n], dc) + assert almost_equal(exact[n], dc) + + def test_small_graph_centrality(self): + G = nx.empty_graph(create_using=nx.DiGraph) + assert {} == nx.degree_centrality(G) + assert {} == nx.out_degree_centrality(G) + assert {} == nx.in_degree_centrality(G) + + G = nx.empty_graph(1, create_using=nx.DiGraph) + assert {0: 1} == nx.degree_centrality(G) + assert {0: 1} == nx.out_degree_centrality(G) + assert {0: 1} == nx.in_degree_centrality(G) diff --git a/networkx/algorithms/centrality/tests/test_dispersion.py b/networkx/algorithms/centrality/tests/test_dispersion.py index f97d605..fb27efd 100644 --- a/networkx/algorithms/centrality/tests/test_dispersion.py +++ b/networkx/algorithms/centrality/tests/test_dispersion.py @@ -1,27 +1,50 @@ import networkx as nx -from nose.tools import * def small_ego_G(): """The sample network from https://arxiv.org/pdf/1310.6753v1.pdf""" - edges = [('a', 'b'), ('a', 'c'), ('b', 'c'), ('b', 'd'), - ('b', 'e'), ('b', 'f'), ('c', 'd'), ('c', 'f'), ('c', 'h'), ('d', 'f'), ('e', 'f'), - ('f', 'h'), ('h', 'j'), ('h', 'k'), ('i', 'j'), ('i', 'k'), ('j', 'k'), ('u', 'a'), - ('u', 'b'), ('u', 'c'), ('u', 'd'), ('u', 'e'), ('u', 'f'), ('u', 'g'), ('u', 'h'), - ('u', 'i'), ('u', 'j'), ('u', 'k')] + edges = [ + ("a", "b"), + ("a", "c"), + ("b", "c"), + ("b", "d"), + ("b", "e"), + ("b", "f"), + ("c", "d"), + ("c", "f"), + ("c", "h"), + ("d", "f"), + ("e", "f"), + ("f", "h"), + ("h", "j"), + ("h", "k"), + ("i", "j"), + ("i", "k"), + ("j", "k"), + ("u", "a"), + ("u", "b"), + ("u", "c"), + ("u", "d"), + ("u", "e"), + ("u", "f"), + ("u", "g"), + ("u", "h"), + ("u", "i"), + ("u", "j"), + ("u", "k"), + ] G = nx.Graph() G.add_edges_from(edges) return G -class TestDispersion(object): - +class TestDispersion: def test_article(self): """our algorithm matches article's""" 
G = small_ego_G() - disp_uh = nx.dispersion(G, 'u', 'h', normalized=False) - disp_ub = nx.dispersion(G, 'u', 'b', normalized=False) + disp_uh = nx.dispersion(G, "u", "h", normalized=False) + disp_ub = nx.dispersion(G, "u", "b", normalized=False) assert disp_uh == 4 assert disp_ub == 1 @@ -29,8 +52,8 @@ def test_results_length(self): """there is a result for every node""" G = small_ego_G() disp = nx.dispersion(G) - disp_Gu = nx.dispersion(G, 'u') - disp_uv = nx.dispersion(G, 'u', 'h') + disp_Gu = nx.dispersion(G, "u") + disp_uv = nx.dispersion(G, "u", "h") assert len(disp) == len(G) assert len(disp_Gu) == len(G) - 1 assert type(disp_uv) is float diff --git a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py index aa2c333..a2f72ec 100644 --- a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py +++ b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py @@ -1,22 +1,15 @@ -#!/usr/bin/env python import math -from nose import SkipTest -from nose.tools import * -import networkx as nx +import pytest +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") -class TestEigenvectorCentrality(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('SciPy not available.') +import networkx as nx +from networkx.testing import almost_equal + +class TestEigenvectorCentrality: def test_K5(self): """Eigenvector centrality: K5""" G = nx.complete_graph(5) @@ -24,15 +17,15 @@ def test_K5(self): v = math.sqrt(1 / 5.0) b_answer = dict.fromkeys(G, v) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - nstart = dict([(n, 1) for n in G]) + assert almost_equal(b[n], b_answer[n]) + nstart = {n: 1 for n in G} b = nx.eigenvector_centrality(G, nstart=nstart) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) b = nx.eigenvector_centrality_numpy(G) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_P3(self): """Eigenvector centrality: P3""" @@ -40,10 +33,10 @@ def test_P3(self): b_answer = {0: 0.5, 1: 0.7071, 2: 0.5} b = nx.eigenvector_centrality_numpy(G) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) b = nx.eigenvector_centrality(G) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) def test_P3_unweighted(self): """Eigenvector centrality: P3""" @@ -51,99 +44,125 @@ def test_P3_unweighted(self): b_answer = {0: 0.5, 1: 0.7071, 2: 0.5} b = nx.eigenvector_centrality_numpy(G, weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) - @raises(nx.PowerIterationFailedConvergence) def test_maxiter(self): - G = nx.path_graph(3) - b = nx.eigenvector_centrality(G, max_iter=0) + with pytest.raises(nx.PowerIterationFailedConvergence): + G = nx.path_graph(3) + b = nx.eigenvector_centrality(G, max_iter=0) -class TestEigenvectorCentralityDirected(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - +class TestEigenvectorCentralityDirected: @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - 
raise SkipTest('SciPy not available.') - - def setUp(self): - + def setup_class(cls): G = nx.DiGraph() - edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), (4, 6), - (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5), - (7, 8), (8, 6), (8, 7)] + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] G.add_edges_from(edges, weight=2.0) - self.G = G.reverse() - self.G.evc = [0.25368793, 0.19576478, 0.32817092, 0.40430835, - 0.48199885, 0.15724483, 0.51346196, 0.32475403] + cls.G = G.reverse() + cls.G.evc = [ + 0.25368793, + 0.19576478, + 0.32817092, + 0.40430835, + 0.48199885, + 0.15724483, + 0.51346196, + 0.32475403, + ] H = nx.DiGraph() - edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), (4, 6), - (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5), - (7, 8), (8, 6), (8, 7)] + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] G.add_edges_from(edges) - self.H = G.reverse() - self.H.evc = [0.25368793, 0.19576478, 0.32817092, 0.40430835, - 0.48199885, 0.15724483, 0.51346196, 0.32475403] + cls.H = G.reverse() + cls.H.evc = [ + 0.25368793, + 0.19576478, + 0.32817092, + 0.40430835, + 0.48199885, + 0.15724483, + 0.51346196, + 0.32475403, + ] def test_eigenvector_centrality_weighted(self): G = self.G p = nx.eigenvector_centrality(G) for (a, b) in zip(list(p.values()), self.G.evc): - assert_almost_equal(a, b, places=4) + assert almost_equal(a, b, places=4) def test_eigenvector_centrality_weighted_numpy(self): G = self.G p = nx.eigenvector_centrality_numpy(G) for (a, b) in zip(list(p.values()), self.G.evc): - assert_almost_equal(a, b) + assert almost_equal(a, b) def test_eigenvector_centrality_unweighted(self): G = self.H p = nx.eigenvector_centrality(G) for (a, b) in zip(list(p.values()), self.G.evc): - assert_almost_equal(a, b, places=4) + assert almost_equal(a, b, places=4) def test_eigenvector_centrality_unweighted_numpy(self): G = self.H p = nx.eigenvector_centrality_numpy(G) for (a, b) in zip(list(p.values()), self.G.evc): - assert_almost_equal(a, b) + assert almost_equal(a, b) -class TestEigenvectorCentralityExceptions(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - - @classmethod - def setupClass(cls): - global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('SciPy not available.') - - @raises(nx.NetworkXException) +class TestEigenvectorCentralityExceptions: def test_multigraph(self): - e = nx.eigenvector_centrality(nx.MultiGraph()) + with pytest.raises(nx.NetworkXException): + e = nx.eigenvector_centrality(nx.MultiGraph()) - @raises(nx.NetworkXException) def test_multigraph_numpy(self): - e = nx.eigenvector_centrality_numpy(nx.MultiGraph()) + with pytest.raises(nx.NetworkXException): + e = nx.eigenvector_centrality_numpy(nx.MultiGraph()) - @raises(nx.NetworkXException) def test_empty(self): - e = nx.eigenvector_centrality(nx.Graph()) + with pytest.raises(nx.NetworkXException): + e = nx.eigenvector_centrality(nx.Graph()) - @raises(nx.NetworkXException) def test_empty_numpy(self): - e = nx.eigenvector_centrality_numpy(nx.Graph()) + with pytest.raises(nx.NetworkXException): + e = nx.eigenvector_centrality_numpy(nx.Graph()) diff --git a/networkx/algorithms/centrality/tests/test_group.py b/networkx/algorithms/centrality/tests/test_group.py 
new file mode 100644 index 0000000..d2d333d --- /dev/null +++ b/networkx/algorithms/centrality/tests/test_group.py @@ -0,0 +1,154 @@ +""" + Tests for Group Centrality Measures +""" + + +import pytest +import networkx as nx + + +class TestGroupBetweennessCentrality: + def test_group_betweenness_single_node(self): + """ + Group betweenness centrality for single node group + """ + G = nx.path_graph(5) + C = [1] + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False) + b_answer = 3.0 + assert b == b_answer + + def test_group_betweenness_normalized(self): + """ + Group betweenness centrality for group with more than + 1 node and normalized + """ + G = nx.path_graph(5) + C = [1, 3] + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=True) + b_answer = 1.0 + assert b == b_answer + + def test_group_betweenness_value_zero(self): + """ + Group betweenness centrality value of 0 + """ + G = nx.cycle_graph(6) + C = [0, 1, 5] + b = nx.group_betweenness_centrality(G, C, weight=None) + b_answer = 0.0 + assert b == b_answer + + def test_group_betweenness_disconnected_graph(self): + """ + Group betweenness centrality in a disconnected graph + """ + G = nx.path_graph(5) + G.remove_edge(0, 1) + C = [1] + b = nx.group_betweenness_centrality(G, C, weight=None) + b_answer = 0.0 + assert b == b_answer + + def test_group_betweenness_node_not_in_graph(self): + """ + Node(s) in C not in graph, raises NodeNotFound exception + """ + with pytest.raises(nx.NodeNotFound): + b = nx.group_betweenness_centrality(nx.path_graph(5), [6, 7, 8]) + + +class TestGroupClosenessCentrality: + def test_group_closeness_single_node(self): + """ + Group closeness centrality for a single node group + """ + G = nx.path_graph(5) + c = nx.group_closeness_centrality(G, [1]) + c_answer = nx.closeness_centrality(G, 1) + assert c == c_answer + + def test_group_closeness_disconnected(self): + """ + Group closeness centrality for a disconnected graph + """ + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4]) + c = nx.group_closeness_centrality(G, [1, 2]) + c_answer = 0 + assert c == c_answer + + def test_group_closeness_multiple_node(self): + """ + Group closeness centrality for a group with more than + 1 node + """ + G = nx.path_graph(4) + c = nx.group_closeness_centrality(G, [1, 2]) + c_answer = 1 + assert c == c_answer + + def test_group_closeness_node_not_in_graph(self): + """ + Node(s) in S not in graph, raises NodeNotFound exception + """ + with pytest.raises(nx.NodeNotFound): + c = nx.group_closeness_centrality(nx.path_graph(5), [6, 7, 8]) + + +class TestGroupDegreeCentrality: + def test_group_degree_centrality_single_node(self): + """ + Group degree centrality for a single node group + """ + G = nx.path_graph(4) + d = nx.group_degree_centrality(G, [1]) + d_answer = nx.degree_centrality(G)[1] + assert d == d_answer + + def test_group_degree_centrality_multiple_node(self): + """ + Group degree centrality for group with more than + 1 node + """ + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) + d = nx.group_degree_centrality(G, [1, 2]) + d_answer = 1 + assert d == d_answer + + def test_group_in_degree_centrality(self): + """ + Group in-degree centrality in a DiGraph + """ + G = nx.DiGraph() + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) + d = nx.group_in_degree_centrality(G, [1, 2]) + d_answer = 0 + assert d == d_answer + 
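For reference, the group-centrality API exercised by the new test_group.py can be driven directly; a minimal sketch follows (illustrative only, not part of the imported source, with the expected values taken from the assertions above):

import networkx as nx

G = nx.path_graph(5)
# Node 1 lies on every shortest path from node 0 to nodes 2, 3 and 4, so the
# unnormalized group betweenness of the singleton group [1] is 3.0.
print(nx.group_betweenness_centrality(G, [1], weight=None, normalized=False))

# In P4 the group {1, 2} is at distance 1 from each of the two remaining
# nodes, giving a group closeness of 1.
print(nx.group_closeness_centrality(nx.path_graph(4), [1, 2]))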
+ def test_group_out_degree_centrality(self): + """ + Group out-degree centrality in a DiGraph + """ + G = nx.DiGraph() + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) + d = nx.group_out_degree_centrality(G, [1, 2]) + d_answer = 1 + assert d == d_answer + + def test_group_degree_centrality_node_not_in_graph(self): + """ + Node(s) in S not in graph, raises NetworkXError + """ + with pytest.raises(nx.NetworkXError): + b = nx.group_degree_centrality(nx.path_graph(5), [6, 7, 8]) diff --git a/networkx/algorithms/centrality/tests/test_harmonic_centrality.py b/networkx/algorithms/centrality/tests/test_harmonic_centrality.py index d870dd9..f9947ee 100644 --- a/networkx/algorithms/centrality/tests/test_harmonic_centrality.py +++ b/networkx/algorithms/centrality/tests/test_harmonic_centrality.py @@ -1,119 +1,94 @@ """ Tests for degree centrality. """ -from nose.tools import * import networkx as nx from networkx.algorithms.centrality import harmonic_centrality +from networkx.testing import almost_equal class TestClosenessCentrality: - def setUp(self): - self.P3 = nx.path_graph(3) - self.P4 = nx.path_graph(4) - self.K5 = nx.complete_graph(5) + @classmethod + def setup_class(cls): + cls.P3 = nx.path_graph(3) + cls.P4 = nx.path_graph(4) + cls.K5 = nx.complete_graph(5) - self.C4 = nx.cycle_graph(4) - self.C5 = nx.cycle_graph(5) + cls.C4 = nx.cycle_graph(4) + cls.C5 = nx.cycle_graph(5) - self.T = nx.balanced_tree(r=2, h=2) + cls.T = nx.balanced_tree(r=2, h=2) - self.Gb = nx.DiGraph() - self.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1), - (2, 3), (4, 3)]) + cls.Gb = nx.DiGraph() + cls.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1), (2, 3), (4, 3)]) def test_p3_harmonic(self): c = harmonic_centrality(self.P3) - d = {0: 1.5, - 1: 2, - 2: 1.5} + d = {0: 1.5, 1: 2, 2: 1.5} for n in sorted(self.P3): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_p4_harmonic(self): c = harmonic_centrality(self.P4) - d = {0: 1.8333333, - 1: 2.5, - 2: 2.5, - 3: 1.8333333} + d = {0: 1.8333333, 1: 2.5, 2: 2.5, 3: 1.8333333} for n in sorted(self.P4): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_clique_complete(self): c = harmonic_centrality(self.K5) - d = {0: 4, - 1: 4, - 2: 4, - 3: 4, - 4: 4} + d = {0: 4, 1: 4, 2: 4, 3: 4, 4: 4} for n in sorted(self.P3): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_cycle_C4(self): c = harmonic_centrality(self.C4) - d = {0: 2.5, - 1: 2.5, - 2: 2.5, - 3: 2.5, } + d = {0: 2.5, 1: 2.5, 2: 2.5, 3: 2.5} for n in sorted(self.C4): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_cycle_C5(self): c = harmonic_centrality(self.C5) - d = {0: 3, - 1: 3, - 2: 3, - 3: 3, - 4: 3, - 5: 4} + d = {0: 3, 1: 3, 2: 3, 3: 3, 4: 3, 5: 4} for n in sorted(self.C5): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_bal_tree(self): c = harmonic_centrality(self.T) - d = {0: 4.0, - 1: 4.1666, - 2: 4.1666, - 3: 2.8333, - 4: 2.8333, - 5: 2.8333, - 6: 2.8333} + d = {0: 4.0, 1: 4.1666, 2: 4.1666, 3: 2.8333, 4: 2.8333, 5: 2.8333, 6: 2.8333} for n in sorted(self.T): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_exampleGraph(self): c = harmonic_centrality(self.Gb) - d = {0: 0, - 1: 2, - 2: 1, - 3: 2.5, - 4: 1} + d = {0: 0, 1: 2, 2: 1, 3: 2.5, 4: 1} 
for n in sorted(self.Gb): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_weighted_harmonic(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('a', 'b', 10), ('d', 'c', 5), ('a', 'c', 1), - ('e', 'f', 2), ('f', 'c', 1), ('a', 'f', 3), - ]) - c = harmonic_centrality(XG, distance='weight') - d = {'a': 0, - 'b': 0.1, - 'c': 2.533, - 'd': 0, - 'e': 0, - 'f': 0.83333} + XG.add_weighted_edges_from( + [ + ("a", "b", 10), + ("d", "c", 5), + ("a", "c", 1), + ("e", "f", 2), + ("f", "c", 1), + ("a", "f", 3), + ] + ) + c = harmonic_centrality(XG, distance="weight") + d = {"a": 0, "b": 0.1, "c": 2.533, "d": 0, "e": 0, "f": 0.83333} for n in sorted(XG): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_empty(self): G = nx.DiGraph() - c = harmonic_centrality(G, distance='weight') + c = harmonic_centrality(G, distance="weight") d = {} - assert_equal(c, d) + assert c == d def test_singleton(self): G = nx.DiGraph() G.add_node(0) - c = harmonic_centrality(G, distance='weight') + c = harmonic_centrality(G, distance="weight") d = {0: 0} - assert_equal(c, d) + assert c == d diff --git a/networkx/algorithms/centrality/tests/test_katz_centrality.py b/networkx/algorithms/centrality/tests/test_katz_centrality.py index f6661f3..7810f51 100644 --- a/networkx/algorithms/centrality/tests/test_katz_centrality.py +++ b/networkx/algorithms/centrality/tests/test_katz_centrality.py @@ -1,13 +1,11 @@ -# -*- coding: utf-8 -*- import math import networkx as nx -from nose import SkipTest -from nose.tools import assert_almost_equal, assert_equal, raises +from networkx.testing import almost_equal +import pytest -class TestKatzCentrality(object): - +class TestKatzCentrality: def test_K5(self): """Katz centrality: K5""" G = nx.complete_graph(5) @@ -16,104 +14,116 @@ def test_K5(self): v = math.sqrt(1 / 5.0) b_answer = dict.fromkeys(G, v) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - nstart = dict([(n, 1) for n in G]) + assert almost_equal(b[n], b_answer[n]) + nstart = {n: 1 for n in G} b = nx.katz_centrality(G, alpha, nstart=nstart) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) + assert almost_equal(b[n], b_answer[n]) def test_P3(self): """Katz centrality: P3""" alpha = 0.1 G = nx.path_graph(3) - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = nx.katz_centrality(G, alpha) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) - @raises(nx.PowerIterationFailedConvergence) def test_maxiter(self): - alpha = 0.1 - G = nx.path_graph(3) - max_iter = 0 - try: - b = nx.katz_centrality(G, alpha, max_iter=max_iter) - except nx.NetworkXError as e: - assert str(max_iter) in e.args[0], "max_iter value not in error msg" - raise # So that the decorater sees the exception. + with pytest.raises(nx.PowerIterationFailedConvergence): + alpha = 0.1 + G = nx.path_graph(3) + max_iter = 0 + try: + b = nx.katz_centrality(G, alpha, max_iter=max_iter) + except nx.NetworkXError as e: + assert str(max_iter) in e.args[0], "max_iter value not in error msg" + raise # So that the decorator sees the exception.
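The hunk above shows the exception-testing pattern applied throughout this migration: nose's @raises decorator becomes a pytest.raises context manager wrapping the body. A minimal self-contained sketch of the same check (illustrative, not part of the imported source):

import pytest
import networkx as nx

def test_power_iteration_must_converge():
    # With max_iter=0 the power iteration cannot converge, so
    # PowerIterationFailedConvergence propagates out of katz_centrality.
    with pytest.raises(nx.PowerIterationFailedConvergence):
        nx.katz_centrality(nx.path_graph(3), alpha=0.1, max_iter=0)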
def test_beta_as_scalar(self): alpha = 0.1 beta = 0.1 - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality(G, alpha, beta) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) def test_beta_as_dict(self): alpha = 0.1 beta = {0: 1.0, 1: 1.0, 2: 1.0} - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality(G, alpha, beta) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) def test_multiple_alpha(self): alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] for alpha in alpha_list: - b_answer = {0.1: {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162}, - 0.2: {0: 0.5454545454545454, 1: 0.6363636363636365, - 2: 0.5454545454545454}, - 0.3: {0: 0.5333964609104419, 1: 0.6564879518897746, - 2: 0.5333964609104419}, - 0.4: {0: 0.5232045649263551, 1: 0.6726915834767423, - 2: 0.5232045649263551}, - 0.5: {0: 0.5144957746691622, 1: 0.6859943117075809, - 2: 0.5144957746691622}, - 0.6: {0: 0.5069794004195823, 1: 0.6970966755769258, - 2: 0.5069794004195823}} + b_answer = { + 0.1: { + 0: 0.5598852584152165, + 1: 0.6107839182711449, + 2: 0.5598852584152162, + }, + 0.2: { + 0: 0.5454545454545454, + 1: 0.6363636363636365, + 2: 0.5454545454545454, + }, + 0.3: { + 0: 0.5333964609104419, + 1: 0.6564879518897746, + 2: 0.5333964609104419, + }, + 0.4: { + 0: 0.5232045649263551, + 1: 0.6726915834767423, + 2: 0.5232045649263551, + }, + 0.5: { + 0: 0.5144957746691622, + 1: 0.6859943117075809, + 2: 0.5144957746691622, + }, + 0.6: { + 0: 0.5069794004195823, + 1: 0.6970966755769258, + 2: 0.5069794004195823, + }, + } G = nx.path_graph(3) b = nx.katz_centrality(G, alpha) for n in sorted(G): - assert_almost_equal(b[n], b_answer[alpha][n], places=4) + assert almost_equal(b[n], b_answer[alpha][n], places=4) - @raises(nx.NetworkXException) def test_multigraph(self): - e = nx.katz_centrality(nx.MultiGraph(), 0.1) + with pytest.raises(nx.NetworkXException): + e = nx.katz_centrality(nx.MultiGraph(), 0.1) def test_empty(self): e = nx.katz_centrality(nx.Graph(), 0.1) - assert_equal(e, {}) + assert e == {} - @raises(nx.NetworkXException) def test_bad_beta(self): - G = nx.Graph([(0, 1)]) - beta = {0: 77} - e = nx.katz_centrality(G, 0.1, beta=beta) + with pytest.raises(nx.NetworkXException): + G = nx.Graph([(0, 1)]) + beta = {0: 77} + e = nx.katz_centrality(G, 0.1, beta=beta) - @raises(nx.NetworkXException) def test_bad_beta_numbe(self): - G = nx.Graph([(0, 1)]) - e = nx.katz_centrality(G, 0.1, beta='foo') - + with pytest.raises(nx.NetworkXException): + G = nx.Graph([(0, 1)]) + e = nx.katz_centrality(G, 0.1, beta="foo") -class TestKatzCentralityNumpy(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +class TestKatzCentralityNumpy: @classmethod - def setupClass(cls): + def setup_class(cls): global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('SciPy not available.') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") def test_K5(self): """Katz centrality: K5""" @@ -123,80 +133,97 @@ def test_K5(self): v = math.sqrt(1 / 5.0) b_answer = dict.fromkeys(G, v) for n in sorted(G): - 
assert_almost_equal(b[n], b_answer[n]) - nstart = dict([(n, 1) for n in G]) + assert almost_equal(b[n], b_answer[n]) + nstart = {n: 1 for n in G} b = nx.eigenvector_centrality_numpy(G) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_P3(self): """Katz centrality: P3""" alpha = 0.1 G = nx.path_graph(3) - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = nx.katz_centrality_numpy(G, alpha) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) def test_beta_as_scalar(self): alpha = 0.1 beta = 0.1 - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality_numpy(G, alpha, beta) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) def test_beta_as_dict(self): alpha = 0.1 beta = {0: 1.0, 1: 1.0, 2: 1.0} - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality_numpy(G, alpha, beta) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) def test_multiple_alpha(self): alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] for alpha in alpha_list: - b_answer = {0.1: {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162}, - 0.2: {0: 0.5454545454545454, 1: 0.6363636363636365, - 2: 0.5454545454545454}, - 0.3: {0: 0.5333964609104419, 1: 0.6564879518897746, - 2: 0.5333964609104419}, - 0.4: {0: 0.5232045649263551, 1: 0.6726915834767423, - 2: 0.5232045649263551}, - 0.5: {0: 0.5144957746691622, 1: 0.6859943117075809, - 2: 0.5144957746691622}, - 0.6: {0: 0.5069794004195823, 1: 0.6970966755769258, - 2: 0.5069794004195823}} + b_answer = { + 0.1: { + 0: 0.5598852584152165, + 1: 0.6107839182711449, + 2: 0.5598852584152162, + }, + 0.2: { + 0: 0.5454545454545454, + 1: 0.6363636363636365, + 2: 0.5454545454545454, + }, + 0.3: { + 0: 0.5333964609104419, + 1: 0.6564879518897746, + 2: 0.5333964609104419, + }, + 0.4: { + 0: 0.5232045649263551, + 1: 0.6726915834767423, + 2: 0.5232045649263551, + }, + 0.5: { + 0: 0.5144957746691622, + 1: 0.6859943117075809, + 2: 0.5144957746691622, + }, + 0.6: { + 0: 0.5069794004195823, + 1: 0.6970966755769258, + 2: 0.5069794004195823, + }, + } G = nx.path_graph(3) b = nx.katz_centrality_numpy(G, alpha) for n in sorted(G): - assert_almost_equal(b[n], b_answer[alpha][n], places=4) + assert almost_equal(b[n], b_answer[alpha][n], places=4) - @raises(nx.NetworkXException) def test_multigraph(self): - e = nx.katz_centrality(nx.MultiGraph(), 0.1) + with pytest.raises(nx.NetworkXException): + e = nx.katz_centrality(nx.MultiGraph(), 0.1) def test_empty(self): e = nx.katz_centrality(nx.Graph(), 0.1) - assert_equal(e, {}) + assert e == {} - @raises(nx.NetworkXException) def test_bad_beta(self): - G = nx.Graph([(0, 1)]) - beta = {0: 77} - e = nx.katz_centrality_numpy(G, 0.1, beta=beta) + with pytest.raises(nx.NetworkXException): + G = nx.Graph([(0, 1)]) + beta = {0: 77} + e = nx.katz_centrality_numpy(G, 0.1, beta=beta) - @raises(nx.NetworkXException) def test_bad_beta_numbe(self): - G = 
nx.Graph([(0, 1)]) - e = nx.katz_centrality_numpy(G, 0.1, beta='foo') + with pytest.raises(nx.NetworkXException): + G = nx.Graph([(0, 1)]) + e = nx.katz_centrality_numpy(G, 0.1, beta="foo") def test_K5_unweighted(self): """Katz centrality: K5""" @@ -206,33 +233,49 @@ def test_K5_unweighted(self): v = math.sqrt(1 / 5.0) b_answer = dict.fromkeys(G, v) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n]) - nstart = dict([(n, 1) for n in G]) + assert almost_equal(b[n], b_answer[n]) + nstart = {n: 1 for n in G} b = nx.eigenvector_centrality_numpy(G, weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=3) + assert almost_equal(b[n], b_answer[n], places=3) def test_P3_unweighted(self): """Katz centrality: P3""" alpha = 0.1 G = nx.path_graph(3) - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = nx.katz_centrality_numpy(G, alpha, weight=None) for n in sorted(G): - assert_almost_equal(b[n], b_answer[n], places=4) + assert almost_equal(b[n], b_answer[n], places=4) -class TestKatzCentralityDirected(object): - def setUp(self): +class TestKatzCentralityDirected: + @classmethod + def setup_class(cls): G = nx.DiGraph() - edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), - (4, 6), (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5), - (7, 8), (8, 6), (8, 7)] + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] G.add_edges_from(edges, weight=2.0) - self.G = G.reverse() - self.G.alpha = 0.1 - self.G.evc = [ + cls.G = G.reverse() + cls.G.alpha = 0.1 + cls.G.evc = [ 0.3289589783189635, 0.2832077296243516, 0.3425906003685471, @@ -244,9 +287,9 @@ def setUp(self): ] H = nx.DiGraph(edges) - self.H = G.reverse() - self.H.alpha = 0.1 - self.H.evc = [ + cls.H = G.reverse() + cls.H.alpha = 0.1 + cls.H.evc = [ 0.3289589783189635, 0.2832077296243516, 0.3425906003685471, @@ -260,58 +303,48 @@ def setUp(self): def test_katz_centrality_weighted(self): G = self.G alpha = self.G.alpha - p = nx.katz_centrality(G, alpha, weight='weight') + p = nx.katz_centrality(G, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.G.evc): - assert_almost_equal(a, b) + assert almost_equal(a, b) def test_katz_centrality_unweighted(self): H = self.H alpha = self.H.alpha - p = nx.katz_centrality(H, alpha, weight='weight') + p = nx.katz_centrality(H, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.H.evc): - assert_almost_equal(a, b) + assert almost_equal(a, b) class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - @classmethod - def setupClass(cls): + def setup_class(cls): global np - try: - import numpy as np - import scipy - except ImportError: - raise SkipTest('SciPy not available.') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") def test_katz_centrality_weighted(self): G = self.G alpha = self.G.alpha - p = nx.katz_centrality_numpy(G, alpha, weight='weight') + p = nx.katz_centrality_numpy(G, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.G.evc): - assert_almost_equal(a, b) + assert almost_equal(a, b) def test_katz_centrality_unweighted(self): H = self.H alpha = self.H.alpha - p = nx.katz_centrality_numpy(H, alpha, weight='weight') + p = nx.katz_centrality_numpy(H, alpha, 
weight="weight") for (a, b) in zip(list(p.values()), self.H.evc): - assert_almost_equal(a, b) - + assert almost_equal(a, b) -class TestKatzEigenvectorVKatz(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +class TestKatzEigenvectorVKatz: @classmethod - def setupClass(cls): + def setup_class(cls): global np global eigvals - try: - import numpy as np - import scipy - from numpy.linalg import eigvals - except ImportError: - raise SkipTest('SciPy not available.') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") + from numpy.linalg import eigvals def test_eigenvector_v_katz_random(self): G = nx.gnp_random_graph(10, 0.5, seed=1234) @@ -319,4 +352,4 @@ def test_eigenvector_v_katz_random(self): e = nx.eigenvector_centrality_numpy(G) k = nx.katz_centrality_numpy(G, 1.0 / l) for n in G: - assert_almost_equal(e[n], k[n]) + assert almost_equal(e[n], k[n]) diff --git a/networkx/algorithms/centrality/tests/test_load_centrality.py b/networkx/algorithms/centrality/tests/test_load_centrality.py index ef3f4b7..66d0ea5 100644 --- a/networkx/algorithms/centrality/tests/test_load_centrality.py +++ b/networkx/algorithms/centrality/tests/test_load_centrality.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx +from networkx.testing import almost_equal class TestLoadCentrality: - - def setUp(self): + @classmethod + def setup_class(cls): G = nx.Graph() G.add_edge(0, 1, weight=3) @@ -18,165 +17,241 @@ def setUp(self): G.add_edge(3, 4, weight=2) G.add_edge(3, 5, weight=1) G.add_edge(4, 5, weight=4) - self.G = G - self.exact_weighted = {0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0} - self.K = nx.krackhardt_kite_graph() - self.P3 = nx.path_graph(3) - self.P4 = nx.path_graph(4) - self.K5 = nx.complete_graph(5) + cls.G = G + cls.exact_weighted = {0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0} + cls.K = nx.krackhardt_kite_graph() + cls.P3 = nx.path_graph(3) + cls.P4 = nx.path_graph(4) + cls.K5 = nx.complete_graph(5) - self.C4 = nx.cycle_graph(4) - self.T = nx.balanced_tree(r=2, h=2) - self.Gb = nx.Graph() - self.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) - self.F = nx.florentine_families_graph() - self.D = nx.cycle_graph(3, create_using=nx.DiGraph()) - self.D.add_edges_from([(3, 0), (4, 3)]) + cls.C4 = nx.cycle_graph(4) + cls.T = nx.balanced_tree(r=2, h=2) + cls.Gb = nx.Graph() + cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + cls.F = nx.florentine_families_graph() + cls.LM = nx.les_miserables_graph() + cls.D = nx.cycle_graph(3, create_using=nx.DiGraph()) + cls.D.add_edges_from([(3, 0), (4, 3)]) def test_not_strongly_connected(self): b = nx.load_centrality(self.D) - result = {0: 5. / 12, - 1: 1. / 4, - 2: 1. / 12, - 3: 1. 
/ 4, - 4: 0.000} + result = {0: 5.0 / 12, 1: 1.0 / 4, 2: 1.0 / 12, 3: 1.0 / 4, 4: 0.000} for n in sorted(self.D): - assert_almost_equal(result[n], b[n], places=3) - assert_almost_equal(result[n], nx.load_centrality(self.D, n), places=3) + assert almost_equal(result[n], b[n], places=3) + assert almost_equal(result[n], nx.load_centrality(self.D, n), places=3) def test_weighted_load(self): - b = nx.load_centrality(self.G, weight='weight', normalized=False) + b = nx.load_centrality(self.G, weight="weight", normalized=False) for n in sorted(self.G): - assert_equal(b[n], self.exact_weighted[n]) + assert b[n] == self.exact_weighted[n] def test_k5_load(self): G = self.K5 c = nx.load_centrality(G) - d = {0: 0.000, - 1: 0.000, - 2: 0.000, - 3: 0.000, - 4: 0.000} + d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_p3_load(self): G = self.P3 c = nx.load_centrality(G) - d = {0: 0.000, - 1: 1.000, - 2: 0.000} + d = {0: 0.000, 1: 1.000, 2: 0.000} for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) c = nx.load_centrality(G, v=1) - assert_almost_equal(c, 1.0) + assert almost_equal(c, 1.0) c = nx.load_centrality(G, v=1, normalized=True) - assert_almost_equal(c, 1.0) + assert almost_equal(c, 1.0) def test_p2_load(self): G = nx.path_graph(2) c = nx.load_centrality(G) - d = {0: 0.000, - 1: 0.000} + d = {0: 0.000, 1: 0.000} for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_krackhardt_load(self): G = self.K c = nx.load_centrality(G) - d = {0: 0.023, - 1: 0.023, - 2: 0.000, - 3: 0.102, - 4: 0.000, - 5: 0.231, - 6: 0.231, - 7: 0.389, - 8: 0.222, - 9: 0.000} + d = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_florentine_families_load(self): G = self.F c = nx.load_centrality(G) - d = {'Acciaiuoli': 0.000, - 'Albizzi': 0.211, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.251, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.117, - 'Salviati': 0.143, - 'Strozzi': 0.106, - 'Tornabuoni': 0.090} + d = { + "Acciaiuoli": 0.000, + "Albizzi": 0.211, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.251, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.117, + "Salviati": 0.143, + "Strozzi": 0.106, + "Tornabuoni": 0.090, + } + for n in sorted(G): + assert almost_equal(c[n], d[n], places=3) + + def test_les_miserables_load(self): + G = self.LM + c = nx.load_centrality(G) + d = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.567, + "Labarre": 0.000, + "Marguerite": 0.000, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.043, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.128, + "MmeThenardier": 0.029, + "Thenardier": 0.075, + "Cosette": 0.024, + "Javert": 0.054, + "Fauchelevent": 0.026, + 
"Bamatabois": 0.008, + "Perpetue": 0.000, + "Simplice": 0.009, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.007, + "Boulatruelle": 0.000, + "Eponine": 0.012, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.164, + "Gillenormand": 0.021, + "Magnon": 0.000, + "MlleGillenormand": 0.047, + "MmePontmercy": 0.000, + "MlleVaubois": 0.000, + "LtGillenormand": 0.000, + "Marius": 0.133, + "BaronessT": 0.000, + "Mabeuf": 0.028, + "Enjolras": 0.041, + "Combeferre": 0.001, + "Prouvaire": 0.000, + "Feuilly": 0.001, + "Courfeyrac": 0.006, + "Bahorel": 0.002, + "Bossuet": 0.032, + "Joly": 0.002, + "Grantaire": 0.000, + "MotherPlutarch": 0.000, + "Gueulemer": 0.005, + "Babet": 0.005, + "Claquesous": 0.005, + "Montparnasse": 0.004, + "Toussaint": 0.000, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.000, + "MmeHucheloup": 0.000, + } for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_unnormalized_k5_load(self): G = self.K5 c = nx.load_centrality(G, normalized=False) - d = {0: 0.000, - 1: 0.000, - 2: 0.000, - 3: 0.000, - 4: 0.000} + d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_unnormalized_p3_load(self): G = self.P3 c = nx.load_centrality(G, normalized=False) - d = {0: 0.000, - 1: 2.000, - 2: 0.000} + d = {0: 0.000, 1: 2.000, 2: 0.000} for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_unnormalized_krackhardt_load(self): G = self.K c = nx.load_centrality(G, normalized=False) - d = {0: 1.667, - 1: 1.667, - 2: 0.000, - 3: 7.333, - 4: 0.000, - 5: 16.667, - 6: 16.667, - 7: 28.000, - 8: 16.000, - 9: 0.000} + d = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_unnormalized_florentine_families_load(self): G = self.F c = nx.load_centrality(G, normalized=False) - d = {'Acciaiuoli': 0.000, - 'Albizzi': 38.333, - 'Barbadori': 17.000, - 'Bischeri': 19.000, - 'Castellani': 10.000, - 'Ginori': 0.000, - 'Guadagni': 45.667, - 'Lamberteschi': 0.000, - 'Medici': 95.000, - 'Pazzi': 0.000, - 'Peruzzi': 4.000, - 'Ridolfi': 21.333, - 'Salviati': 26.000, - 'Strozzi': 19.333, - 'Tornabuoni': 16.333} + d = { + "Acciaiuoli": 0.000, + "Albizzi": 38.333, + "Barbadori": 17.000, + "Bischeri": 19.000, + "Castellani": 10.000, + "Ginori": 0.000, + "Guadagni": 45.667, + "Lamberteschi": 0.000, + "Medici": 95.000, + "Pazzi": 0.000, + "Peruzzi": 4.000, + "Ridolfi": 21.333, + "Salviati": 26.000, + "Strozzi": 19.333, + "Tornabuoni": 16.333, + } for n in sorted(G): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_load_betweenness_difference(self): # Difference Between Load and Betweenness @@ -210,58 +285,52 @@ def test_load_betweenness_difference(self): B = nx.Graph() # ladder_graph(3) B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) c = nx.load_centrality(B, normalized=False) - d = {0: 1.750, - 1: 1.750, - 2: 6.500, - 3: 6.500, - 4: 1.750, - 5: 1.750} + d = {0: 1.750, 1: 1.750, 2: 6.500, 3: 6.500, 4: 
1.750, 5: 1.750} for n in sorted(B): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_c4_edge_load(self): G = self.C4 c = nx.edge_load_centrality(G) - d = {(0, 1): 6.000, - (0, 3): 6.000, - (1, 2): 6.000, - (2, 3): 6.000} + d = {(0, 1): 6.000, (0, 3): 6.000, (1, 2): 6.000, (2, 3): 6.000} for n in G.edges(): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_p4_edge_load(self): G = self.P4 c = nx.edge_load_centrality(G) - d = {(0, 1): 6.000, - (1, 2): 8.000, - (2, 3): 6.000} + d = {(0, 1): 6.000, (1, 2): 8.000, (2, 3): 6.000} for n in G.edges(): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_k5_edge_load(self): G = self.K5 c = nx.edge_load_centrality(G) - d = {(0, 1): 5.000, - (0, 2): 5.000, - (0, 3): 5.000, - (0, 4): 5.000, - (1, 2): 5.000, - (1, 3): 5.000, - (1, 4): 5.000, - (2, 3): 5.000, - (2, 4): 5.000, - (3, 4): 5.000} + d = { + (0, 1): 5.000, + (0, 2): 5.000, + (0, 3): 5.000, + (0, 4): 5.000, + (1, 2): 5.000, + (1, 3): 5.000, + (1, 4): 5.000, + (2, 3): 5.000, + (2, 4): 5.000, + (3, 4): 5.000, + } for n in G.edges(): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) def test_tree_edge_load(self): G = self.T c = nx.edge_load_centrality(G) - d = {(0, 1): 24.000, - (0, 2): 24.000, - (1, 3): 12.000, - (1, 4): 12.000, - (2, 5): 12.000, - (2, 6): 12.000} + d = { + (0, 1): 24.000, + (0, 2): 24.000, + (1, 3): 12.000, + (1, 4): 12.000, + (2, 5): 12.000, + (2, 6): 12.000, + } for n in G.edges(): - assert_almost_equal(c[n], d[n], places=3) + assert almost_equal(c[n], d[n], places=3) diff --git a/networkx/algorithms/centrality/tests/test_percolation_centrality.py b/networkx/algorithms/centrality/tests/test_percolation_centrality.py new file mode 100644 index 0000000..4311edb --- /dev/null +++ b/networkx/algorithms/centrality/tests/test_percolation_centrality.py @@ -0,0 +1,81 @@ +import networkx as nx +from networkx.testing import almost_equal + + +def example1a_G(): + G = nx.Graph() + G.add_node(1, percolation=0.1) + G.add_node(2, percolation=0.2) + G.add_node(3, percolation=0.2) + G.add_node(4, percolation=0.2) + G.add_node(5, percolation=0.3) + G.add_node(6, percolation=0.2) + G.add_node(7, percolation=0.5) + G.add_node(8, percolation=0.5) + G.add_edges_from([(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (6, 7), (6, 8)]) + return G + + +def example1b_G(): + G = nx.Graph() + G.add_node(1, percolation=0.3) + G.add_node(2, percolation=0.5) + G.add_node(3, percolation=0.5) + G.add_node(4, percolation=0.2) + G.add_node(5, percolation=0.3) + G.add_node(6, percolation=0.2) + G.add_node(7, percolation=0.1) + G.add_node(8, percolation=0.1) + G.add_edges_from([(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (6, 7), (6, 8)]) + return G + + +class TestPercolationCentrality: + def test_percolation_example1a(self): + """percolation centrality: example 1a""" + G = example1a_G() + p = nx.percolation_centrality(G) + p_answer = {4: 0.625, 6: 0.667} + for n in p_answer: + assert almost_equal(p[n], p_answer[n], places=3) + + def test_percolation_example1b(self): + """percolation centrality: example 1a""" + G = example1b_G() + p = nx.percolation_centrality(G) + p_answer = {4: 0.825, 6: 0.4} + for n in p_answer: + assert almost_equal(p[n], p_answer[n], places=3) + + def test_converge_to_betweenness(self): + """percolation centrality: should converge to betweenness + centrality when all nodes are percolated the same""" + # taken from betweenness test 
test_florentine_families_graph + G = nx.florentine_families_graph() + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + p_states = {k: 1.0 for k, v in b_answer.items()} + p_answer = nx.percolation_centrality(G, states=p_states) + for n in sorted(G): + assert almost_equal(p_answer[n], b_answer[n], places=3) + + p_states = {k: 0.3 for k, v in b_answer.items()} + p_answer = nx.percolation_centrality(G, states=p_states) + for n in sorted(G): + assert almost_equal(p_answer[n], b_answer[n], places=3) diff --git a/networkx/algorithms/centrality/tests/test_reaching.py b/networkx/algorithms/centrality/tests/test_reaching.py index 3450f34..7e9a3f8 100644 --- a/networkx/algorithms/centrality/tests/test_reaching.py +++ b/networkx/algorithms/centrality/tests/test_reaching.py @@ -1,70 +1,63 @@ -# Copyright (C) 2015-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """Unit tests for the :mod:`networkx.algorithms.centrality.reaching` module.""" -from __future__ import division -from nose.tools import assert_almost_equal, assert_equal, raises -from unittest import TestCase +import pytest from networkx import nx +from networkx.testing import almost_equal -class TestGlobalReachingCentrality(TestCase): +class TestGlobalReachingCentrality: """Unit tests for the global reaching centrality function.""" - @raises(nx.NetworkXError) def test_non_positive_weights(self): - G = nx.DiGraph() - nx.global_reaching_centrality(G, weight='weight') + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + nx.global_reaching_centrality(G, weight="weight") - @raises(nx.NetworkXError) def test_negatively_weighted(self): - G = nx.Graph() - G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) - nx.global_reaching_centrality(G, weight='weight') + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) + nx.global_reaching_centrality(G, weight="weight") def test_directed_star(self): G = nx.DiGraph() G.add_weighted_edges_from([(1, 2, 0.5), (1, 3, 0.5)]) grc = nx.global_reaching_centrality - assert_equal(grc(G, normalized=False, weight='weight'), 0.5) - assert_equal(grc(G), 1) + assert grc(G, normalized=False, weight="weight") == 0.5 + assert grc(G) == 1 def test_undirected_unweighted_star(self): G = nx.star_graph(2) grc = nx.global_reaching_centrality - assert_equal(grc(G, normalized=False, weight=None), 0.25) + assert grc(G, normalized=False, weight=None) == 0.25 def test_undirected_weighted_star(self): G = nx.Graph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) grc = nx.global_reaching_centrality - assert_equal(grc(G, normalized=False, weight='weight'), 0.375) + assert grc(G, normalized=False, weight="weight") == 0.375 def test_cycle_directed_unweighted(self): G = nx.DiGraph() G.add_edge(1, 2) G.add_edge(2, 1) - assert_equal(nx.global_reaching_centrality(G, weight=None), 0) + assert nx.global_reaching_centrality(G, weight=None) == 0 def test_cycle_undirected_unweighted(self): G = nx.Graph() G.add_edge(1, 2) - assert_equal(nx.global_reaching_centrality(G, weight=None), 0) + assert nx.global_reaching_centrality(G, weight=None) == 0 def test_cycle_directed_weighted(self): G = nx.DiGraph() G.add_weighted_edges_from([(1, 2, 1), (2, 1, 1)]) - 
assert_equal(nx.global_reaching_centrality(G), 0) + assert nx.global_reaching_centrality(G) == 0 def test_cycle_undirected_weighted(self): G = nx.Graph() G.add_edge(1, 2, weight=1) grc = nx.global_reaching_centrality - assert_equal(grc(G, normalized=False), 0) + assert grc(G, normalized=False) == 0 def test_directed_weighted(self): G = nx.DiGraph() @@ -84,32 +77,34 @@ def test_directed_weighted(self): max_local = max(local_reach_ctrs) expected = sum(max_local - lrc for lrc in local_reach_ctrs) / denom grc = nx.global_reaching_centrality - actual = grc(G, normalized=False, weight='weight') - assert_almost_equal(expected, actual, places=7) + actual = grc(G, normalized=False, weight="weight") + assert almost_equal(expected, actual, places=7) -class TestLocalReachingCentrality(TestCase): +class TestLocalReachingCentrality: """Unit tests for the local reaching centrality function.""" - @raises(nx.NetworkXError) def test_non_positive_weights(self): - G = nx.DiGraph() - G.add_weighted_edges_from([(0, 1, 0)]) - nx.local_reaching_centrality(G, 0, weight='weight') + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + G.add_weighted_edges_from([(0, 1, 0)]) + nx.local_reaching_centrality(G, 0, weight="weight") - @raises(nx.NetworkXError) def test_negatively_weighted(self): - G = nx.Graph() - G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) - nx.local_reaching_centrality(G, 0, weight='weight') + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) + nx.local_reaching_centrality(G, 0, weight="weight") def test_undirected_unweighted_star(self): G = nx.star_graph(2) grc = nx.local_reaching_centrality - assert_equal(grc(G, 1, weight=None, normalized=False), 0.75) + assert grc(G, 1, weight=None, normalized=False) == 0.75 def test_undirected_weighted_star(self): G = nx.Graph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) - centrality = nx.local_reaching_centrality(G, 1, normalized=False, weight='weight') - assert_equal(centrality, 1.5) + centrality = nx.local_reaching_centrality( + G, 1, normalized=False, weight="weight" + ) + assert centrality == 1.5 diff --git a/networkx/algorithms/centrality/tests/test_second_order_centrality.py b/networkx/algorithms/centrality/tests/test_second_order_centrality.py new file mode 100644 index 0000000..58eee18 --- /dev/null +++ b/networkx/algorithms/centrality/tests/test_second_order_centrality.py @@ -0,0 +1,68 @@ +""" +Tests for second order centrality. 
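+Second order centrality of a node is the standard deviation of the return
+times to that node of a perpetual random walk on G; smaller values indicate
+a more central node.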
+""" + +import pytest + +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") + +import networkx as nx +from networkx.testing import almost_equal + + +class TestSecondOrderCentrality: + def test_empty(self): + with pytest.raises(nx.NetworkXException): + G = nx.empty_graph() + nx.second_order_centrality(G) + + def test_non_connected(self): + with pytest.raises(nx.NetworkXException): + G = nx.Graph() + G.add_node(0) + G.add_node(1) + nx.second_order_centrality(G) + + def test_non_negative_edge_weights(self): + with pytest.raises(nx.NetworkXException): + G = nx.path_graph(2) + G.add_edge(0, 1, weight=-1) + nx.second_order_centrality(G) + + def test_one_node_graph(self): + """Second order centrality: single node""" + G = nx.Graph() + G.add_node(0) + G.add_edge(0, 0) + assert nx.second_order_centrality(G)[0] == 0 + + def test_P3(self): + """Second order centrality: line graph, as defined in paper""" + G = nx.path_graph(3) + b_answer = {0: 3.741, 1: 1.414, 2: 3.741} + + b = nx.second_order_centrality(G) + + for n in sorted(G): + assert almost_equal(b[n], b_answer[n], places=2) + + def test_K3(self): + """Second order centrality: complete graph, as defined in paper""" + G = nx.complete_graph(3) + b_answer = {0: 1.414, 1: 1.414, 2: 1.414} + + b = nx.second_order_centrality(G) + + for n in sorted(G): + assert almost_equal(b[n], b_answer[n], places=2) + + def test_ring_graph(self): + """Second order centrality: ring graph, as defined in paper""" + G = nx.cycle_graph(5) + b_answer = {0: 4.472, 1: 4.472, 2: 4.472, 3: 4.472, 4: 4.472} + + b = nx.second_order_centrality(G) + + for n in sorted(G): + assert almost_equal(b[n], b_answer[n], places=2) diff --git a/networkx/algorithms/centrality/tests/test_subgraph.py b/networkx/algorithms/centrality/tests/test_subgraph.py index edd6ba8..1a5f5c0 100644 --- a/networkx/algorithms/centrality/tests/test_subgraph.py +++ b/networkx/algorithms/centrality/tests/test_subgraph.py @@ -1,43 +1,48 @@ -from collections import defaultdict -from nose.tools import * -from nose import SkipTest +import pytest + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") + import networkx as nx -from networkx.algorithms.centrality.subgraph_alg import * +from networkx.algorithms.centrality.subgraph_alg import ( + estrada_index, + communicability_betweenness_centrality, + subgraph_centrality, + subgraph_centrality_exp, +) +from networkx.testing import almost_equal class TestSubgraph: - @classmethod - def setupClass(cls): - global numpy - global scipy - try: - import numpy - except ImportError: - raise SkipTest('NumPy not available.') - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') - def test_subgraph_centrality(self): answer = {0: 1.5430806348152433, 1: 1.5430806348152433} result = subgraph_centrality(nx.path_graph(2)) for k, v in result.items(): - assert_almost_equal(answer[k], result[k], places=7) + assert almost_equal(answer[k], result[k], places=7) - answer1 = {'1': 1.6445956054135658, - 'Albert': 2.4368257358712189, - 'Aric': 2.4368257358712193, - 'Dan': 3.1306328496328168, - 'Franck': 2.3876142275231915} - G1 = nx.Graph([('Franck', 'Aric'), ('Aric', 'Dan'), ('Dan', 'Albert'), - ('Albert', 'Franck'), ('Dan', '1'), ('Franck', 'Albert')]) + answer1 = { + "1": 1.6445956054135658, + "Albert": 2.4368257358712189, + "Aric": 2.4368257358712193, + "Dan": 3.1306328496328168, + "Franck": 2.3876142275231915, + } + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", 
"Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) result1 = subgraph_centrality(G1) for k, v in result1.items(): - assert_almost_equal(answer1[k], result1[k], places=7) + assert almost_equal(answer1[k], result1[k], places=7) result1 = subgraph_centrality_exp(G1) for k, v in result1.items(): - assert_almost_equal(answer1[k], result1[k], places=7) + assert almost_equal(answer1[k], result1[k], places=7) def test_subgraph_centrality_big_graph(self): g199 = nx.complete_graph(199) @@ -50,25 +55,38 @@ def test_subgraph_centrality_big_graph(self): comm200_exp = nx.subgraph_centrality_exp(g200) def test_communicability_betweenness_centrality(self): - answer = {0: 0.07017447951484615, 1: 0.71565598701107991, - 2: 0.71565598701107991, 3: 0.07017447951484615} + answer = { + 0: 0.07017447951484615, + 1: 0.71565598701107991, + 2: 0.71565598701107991, + 3: 0.07017447951484615, + } result = communicability_betweenness_centrality(nx.path_graph(4)) for k, v in result.items(): - assert_almost_equal(answer[k], result[k], places=7) + assert almost_equal(answer[k], result[k], places=7) - answer1 = {'1': 0.060039074193949521, - 'Albert': 0.315470761661372, - 'Aric': 0.31547076166137211, - 'Dan': 0.68297778678316201, - 'Franck': 0.21977926617449497} - G1 = nx.Graph([('Franck', 'Aric'), - ('Aric', 'Dan'), ('Dan', 'Albert'), ('Albert', 'Franck'), - ('Dan', '1'), ('Franck', 'Albert')]) + answer1 = { + "1": 0.060039074193949521, + "Albert": 0.315470761661372, + "Aric": 0.31547076166137211, + "Dan": 0.68297778678316201, + "Franck": 0.21977926617449497, + } + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) result1 = communicability_betweenness_centrality(G1) for k, v in result1.items(): - assert_almost_equal(answer1[k], result1[k], places=7) + assert almost_equal(answer1[k], result1[k], places=7) def test_estrada_index(self): answer = 1041.2470334195475 result = estrada_index(nx.karate_club_graph()) - assert_almost_equal(answer, result, places=7) + assert almost_equal(answer, result, places=7) diff --git a/networkx/algorithms/centrality/tests/test_trophic.py b/networkx/algorithms/centrality/tests/test_trophic.py new file mode 100644 index 0000000..85af02f --- /dev/null +++ b/networkx/algorithms/centrality/tests/test_trophic.py @@ -0,0 +1,304 @@ +"""Test trophic levels, trophic differences and trophic coherence +""" +import pytest + +np = pytest.importorskip("numpy") + +import networkx as nx +from networkx.testing import almost_equal + + +def test_trophic_levels(): + """Trivial example + """ + G = nx.DiGraph() + G.add_edge("a", "b") + G.add_edge("b", "c") + + d = nx.trophic_levels(G) + assert d == {"a": 1, "b": 2, "c": 3} + + +def test_trophic_levels_levine(): + """Example from Figure 5 in Stephen Levine (1980) J. theor. Biol. 
83, + 195-207 + """ + S = nx.DiGraph() + S.add_edge(1, 2, weight=1.0) + S.add_edge(1, 3, weight=0.2) + S.add_edge(1, 4, weight=0.8) + S.add_edge(2, 3, weight=0.2) + S.add_edge(2, 5, weight=0.3) + S.add_edge(4, 3, weight=0.6) + S.add_edge(4, 5, weight=0.7) + S.add_edge(5, 4, weight=0.2) + + # save copy for later, test intermediate implementation details first + S2 = S.copy() + + # drop nodes of in-degree zero + z = [nid for nid, d in S.in_degree if d == 0] + for nid in z: + S.remove_node(nid) + + # find adjacency matrix + q = nx.linalg.graphmatrix.adjacency_matrix(S).T + + # fmt: off + expected_q = np.array([ + [0, 0, 0., 0], + [0.2, 0, 0.6, 0], + [0, 0, 0, 0.2], + [0.3, 0, 0.7, 0] + ]) + # fmt: on + assert np.array_equal(q.todense(), expected_q) + + # must be square, size of number of nodes + assert len(q.shape) == 2 + assert q.shape[0] == q.shape[1] + assert q.shape[0] == len(S) + + nn = q.shape[0] + + i = np.eye(nn) + n = np.linalg.inv(i - q) + y = np.dot(np.asarray(n), np.ones(nn)) + + expected_y = np.array([1, 2.07906977, 1.46511628, 2.3255814]) + assert np.allclose(y, expected_y) + + expected_d = {1: 1, 2: 2, 3: 3.07906977, 4: 2.46511628, 5: 3.3255814} + + d = nx.trophic_levels(S2) + + for nid, level in d.items(): + expected_level = expected_d[nid] + assert almost_equal(expected_level, level) + + +def test_trophic_levels_simple(): + matrix_a = np.array([[0, 0], [1, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + d = nx.trophic_levels(G) + assert almost_equal(d[0], 2) + assert almost_equal(d[1], 1) + + +def test_trophic_levels_more_complex(): + # fmt: off + matrix = np.array([ + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + d = nx.trophic_levels(G) + expected_result = [1, 2, 3, 4] + for ind in range(4): + assert almost_equal(d[ind], expected_result[ind]) + + # fmt: off + matrix = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + d = nx.trophic_levels(G) + + expected_result = [1, 2, 2.5, 3.25] + print("Calculated result: ", d) + print("Expected Result: ", expected_result) + + for ind in range(4): + assert almost_equal(d[ind], expected_result[ind]) + + +def test_trophic_levels_even_more_complex(): + # fmt: off + # Another, bigger matrix + matrix = np.array([ + [0, 0, 0, 0, 0], + [0, 1, 0, 1, 0], + [1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 1, 0] + ]) + # Generated this linear system using pen and paper: + K = np.array([ + [1, 0, -1, 0, 0], + [0, 0.5, 0, -0.5, 0], + [0, 0, 1, 0, 0], + [0, -0.5, 0, 1, -0.5], + [0, 0, 0, 0, 1], + ]) + # fmt: on + result_1 = np.ravel(np.matmul(np.linalg.inv(K), np.ones(5))) + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + result_2 = nx.trophic_levels(G) + + for ind in range(5): + assert almost_equal(result_1[ind], result_2[ind]) + + +def test_trophic_levels_singular_matrix(): + """Should raise an error with graphs with only non-basal nodes + """ + matrix = np.identity(4) + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError) as e: + nx.trophic_levels(G) + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." 
+ ) + assert msg in str(e.value) + + +def test_trophic_levels_singular_with_basal(): + """Should fail to compute if there are any parts of the graph which are not + reachable from any basal node (with in-degree zero). + """ + G = nx.DiGraph() + # a has in-degree zero + G.add_edge("a", "b") + + # b is one level above a, c and d + G.add_edge("c", "b") + G.add_edge("d", "b") + + # c and d form a loop, neither are reachable from a + G.add_edge("c", "d") + G.add_edge("d", "c") + + with pytest.raises(nx.NetworkXError) as e: + nx.trophic_levels(G) + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." + ) + assert msg in str(e.value) + + # if self-loops are allowed, smaller example: + G = nx.DiGraph() + G.add_edge("a", "b") # a has in-degree zero + G.add_edge("c", "b") # b is one level above a and c + G.add_edge("c", "c") # c has a self-loop + with pytest.raises(nx.NetworkXError) as e: + nx.trophic_levels(G) + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." + ) + assert msg in str(e.value) + + +def test_trophic_differences(): + matrix_a = np.array([[0, 1], [0, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + diffs = nx.trophic_differences(G) + assert almost_equal(diffs[(0, 1)], 1) + + # fmt: off + matrix_b = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph) + diffs = nx.trophic_differences(G) + + assert almost_equal(diffs[(0, 1)], 1) + assert almost_equal(diffs[(0, 2)], 1.5) + assert almost_equal(diffs[(1, 2)], 0.5) + assert almost_equal(diffs[(1, 3)], 1.25) + assert almost_equal(diffs[(2, 3)], 0.75) + + +def test_trophic_incoherence_parameter_no_cannibalism(): + matrix_a = np.array([[0, 1], [0, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + assert almost_equal(q, 0) + + # fmt: off + matrix_b = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + assert almost_equal(q, np.std([1, 1.5, 0.5, 0.75, 1.25])) + + # fmt: off + matrix_c = np.array([ + [0, 1, 1, 0], + [0, 1, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 1] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + # Ignore the -link + assert almost_equal(q, np.std([1, 1.5, 0.5, 0.75, 1.25])) + + # no self-loops case + # fmt: off + matrix_d = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_d, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + # Ignore the -link + assert almost_equal(q, np.std([1, 1.5, 0.5, 0.75, 1.25])) + + +def test_trophic_incoherence_parameter_cannibalism(): + matrix_a = np.array([[0, 1], [0, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=True) + assert almost_equal(q, 0) + + # fmt: off + matrix_b = np.array([ + [0, 0, 0, 0, 0], + [0, 1, 0, 1, 0], + [1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 1, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph) + q = 
nx.trophic_incoherence_parameter(G, cannibalism=True) + assert almost_equal(q, 2) + + # fmt: off + matrix_c = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=True) + # Ignore the -link + assert almost_equal(q, np.std([1, 1.5, 0.5, 0.75, 1.25])) diff --git a/networkx/algorithms/centrality/tests/test_voterank.py b/networkx/algorithms/centrality/tests/test_voterank.py new file mode 100644 index 0000000..aa653ae --- /dev/null +++ b/networkx/algorithms/centrality/tests/test_voterank.py @@ -0,0 +1,61 @@ +""" + Unit tests for VoteRank. +""" + + +import networkx as nx + + +class TestVoteRankCentrality: + # Example Graph present in reference paper + def test_voterank_centrality_1(self): + G = nx.Graph() + G.add_edges_from( + [ + (7, 8), + (7, 5), + (7, 9), + (5, 0), + (0, 1), + (0, 2), + (0, 3), + (0, 4), + (1, 6), + (2, 6), + (3, 6), + (4, 6), + ] + ) + assert [0, 7, 6] == nx.voterank(G) + + # Graph unit test + def test_voterank_centrality_2(self): + G = nx.florentine_families_graph() + d = nx.voterank(G, 4) + exact = ["Medici", "Strozzi", "Guadagni", "Castellani"] + assert exact == d + + # DiGraph unit test + def test_voterank_centrality_3(self): + G = nx.gnc_graph(10, seed=7) + d = nx.voterank(G, 4) + exact = [3, 6, 8] + assert exact == d + + # MultiGraph unit test + def test_voterank_centrality_4(self): + G = nx.MultiGraph() + G.add_edges_from( + [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)] + ) + exact = [2, 1, 5, 4] + assert exact == nx.voterank(G) + + # MultiDiGraph unit test + def test_voterank_centrality_5(self): + G = nx.MultiDiGraph() + G.add_edges_from( + [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)] + ) + exact = [2, 0, 5, 4] + assert exact == nx.voterank(G) diff --git a/networkx/algorithms/centrality/trophic.py b/networkx/algorithms/centrality/trophic.py new file mode 100644 index 0000000..7662009 --- /dev/null +++ b/networkx/algorithms/centrality/trophic.py @@ -0,0 +1,168 @@ +"""Trophic levels""" +import networkx as nx + +from networkx.utils import not_implemented_for + +__all__ = ["trophic_levels", "trophic_differences", "trophic_incoherence_parameter"] + + +@not_implemented_for("undirected") +def trophic_levels(G, weight="weight"): + r"""Compute the trophic levels of nodes. + + The trophic level of a node $i$ is + + .. math:: + + s_i = 1 + \frac{1}{k^{in}_i} \sum_{j} a_{ij} s_j + + where $k^{in}_i$ is the in-degree of i + + .. math:: + + k^{in}_i = \sum_{j} a_{ij} + + and nodes with $k^{in}_i = 0$ have $s_i = 1$ by convention. + + These are calculated using the method outlined in Levine [1]_. + + Parameters + ---------- + G : DiGraph + A directed networkx graph + + Returns + ------- + nodes : dict + Dictionary of nodes with trophic level as the vale. + + References + ---------- + .. [1] Stephen Levine (1980) J. theor. Biol. 
83, 195-207 + """ + try: + import numpy as np + except ImportError as e: + raise ImportError("trophic_levels() requires NumPy: http://numpy.org/") from e + + # find adjacency matrix + a = nx.adjacency_matrix(G, weight=weight).T.toarray() + + # drop rows/columns where in-degree is zero + rowsum = np.sum(a, axis=1) + p = a[rowsum != 0][:, rowsum != 0] + # normalise so sum of in-degree weights is 1 along each row + p = p / rowsum[rowsum != 0][:, np.newaxis] + + # calculate trophic levels + nn = p.shape[0] + i = np.eye(nn) + try: + n = np.linalg.inv(i - p) + except np.linalg.LinAlgError as err: + # LinAlgError is raised when there is a non-basal node + msg = ( + "Trophic levels are only defined for graphs where every " + + "node has a path from a basal node (basal nodes are nodes " + + "with no incoming edges)." + ) + raise nx.NetworkXError(msg) from err + y = n.sum(axis=1) + 1 + + levels = {} + + # all nodes with in-degree zero have trophic level == 1 + zero_node_ids = (node_id for node_id, degree in G.in_degree if degree == 0) + for node_id in zero_node_ids: + levels[node_id] = 1 + + # all other nodes have levels as calculated + nonzero_node_ids = (node_id for node_id, degree in G.in_degree if degree != 0) + for i, node_id in enumerate(nonzero_node_ids): + levels[node_id] = y[i] + + return levels + + +@not_implemented_for("undirected") +def trophic_differences(G, weight="weight"): + r"""Compute the trophic differences of the edges of a directed graph. + + The trophic difference $x_{ij}$ for each edge is defined in Johnson et al. + [1]_ as: + + .. math:: + x_{ij} = s_j - s_i + + where $s_i$ is the trophic level of node $i$. + + Parameters + ---------- + G : DiGraph + A directed networkx graph + + Returns + ------- + diffs : dict + Dictionary of edges with trophic differences as the value. + + References + ---------- + .. [1] Samuel Johnson, Virginia Dominguez-Garcia, Luca Donetti, Miguel A.
+ Munoz (2014) PNAS "Trophic coherence determines food-web stability" + """ + try: + import numpy as np + except ImportError as e: + raise ImportError( + "trophic_incoherence_parameter() requires NumPy: " "http://scipy.org/" + ) from e + + if cannibalism: + diffs = trophic_differences(G, weight=weight) + else: + # If no cannibalism, remove self-edges + self_loops = list(nx.selfloop_edges(G)) + if self_loops: + # Make a copy so we do not change G's edges in memory + G_2 = G.copy() + G_2.remove_edges_from(self_loops) + else: + # Avoid copy otherwise + G_2 = G + diffs = trophic_differences(G_2, weight=weight) + return np.std(list(diffs.values())) diff --git a/networkx/algorithms/centrality/voterank_alg.py b/networkx/algorithms/centrality/voterank_alg.py new file mode 100644 index 0000000..6aab408 --- /dev/null +++ b/networkx/algorithms/centrality/voterank_alg.py @@ -0,0 +1,75 @@ +"""Algorithm to select influential nodes in a graph using VoteRank.""" + +__all__ = ["voterank"] + + +def voterank(G, number_of_nodes=None): + """Select a list of influential nodes in a graph using VoteRank algorithm + + VoteRank [1]_ computes a ranking of the nodes in a graph G based on a + voting scheme. With VoteRank, all nodes vote for each of its in-neighbours + and the node with the highest votes is elected iteratively. The voting + ability of out-neighbors of elected nodes is decreased in subsequent turns. + + Note: We treat each edge independently in case of multigraphs. + + Parameters + ---------- + G : graph + A NetworkX graph. + + number_of_nodes : integer, optional + Number of ranked nodes to extract (default all nodes). + + Returns + ------- + voterank : list + Ordered list of computed seeds. + Only nodes with positive number of votes are returned. + + References + ---------- + .. [1] Zhang, J.-X. et al. (2016). + Identifying a set of influential spreaders in complex networks. + Sci. Rep. 6, 27823; doi: 10.1038/srep27823. + """ + influential_nodes = [] + voterank = {} + if len(G) == 0: + return influential_nodes + if number_of_nodes is None or number_of_nodes > len(G): + number_of_nodes = len(G) + if G.is_directed(): + # For directed graphs compute average out-degree + avgDegree = sum(deg for _, deg in G.out_degree()) / len(G) + else: + # For undirected graphs compute average degree + avgDegree = sum(deg for _, deg in G.degree()) / len(G) + # step 1 - initiate all nodes to (0,1) (score, voting ability) + for n in G.nodes(): + voterank[n] = [0, 1] + # Repeat steps 1b to 4 until num_seeds are elected. 
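+    # Each election round below recomputes scores from scratch: only the
+    # per-node voting ability persists between rounds.  Nodes that were
+    # already elected are zeroed so they cannot win again, and after each
+    # election the winner's neighbours each lose 1 / avgDegree of voting
+    # ability (floored at zero).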
+ for _ in range(number_of_nodes): + # step 1b - reset rank + for n in G.nodes(): + voterank[n][0] = 0 + # step 2 - vote + for n, nbr in G.edges(): + # In directed graphs nodes only vote for their in-neighbors + voterank[n][0] += voterank[nbr][1] + if not G.is_directed(): + voterank[nbr][0] += voterank[n][1] + for n in influential_nodes: + voterank[n][0] = 0 + # step 3 - select top node + n = max(G.nodes, key=lambda x: voterank[x][0]) + if voterank[n][0] == 0: + return influential_nodes + influential_nodes.append(n) + # weaken the selected node + voterank[n] = [0, 0] + # step 4 - update voterank properties + for _, nbr in G.edges(n): + voterank[nbr][1] -= 1 / avgDegree + voterank[nbr][1] = max(voterank[nbr][1], 0) + return influential_nodes diff --git a/networkx/algorithms/chains.py b/networkx/algorithms/chains.py index 6149579..a76941f 100644 --- a/networkx/algorithms/chains.py +++ b/networkx/algorithms/chains.py @@ -1,22 +1,13 @@ -# -*- coding: utf-8 -*- -# chains.py - functions for finding chains in a graph -# -# Copyright 2004-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for finding chains in a graph.""" import networkx as nx from networkx.utils import not_implemented_for -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def chain_decomposition(G, root=None): - """Return the chain decomposition of a graph. + """Returns the chain decomposition of a graph. The *chain decomposition* of a graph with respect a depth-first search tree is a set of cycles or paths derived from the set of @@ -98,7 +89,7 @@ def _dfs_cycle_forest(G, root=None): H = nx.DiGraph() nodes = [] for u, v, d in nx.dfs_labeled_edges(G, source=root): - if d == 'forward': + if d == "forward": # `dfs_labeled_edges()` yields (root, root, 'forward') # if it is beginning the search on a new connected # component. @@ -112,7 +103,7 @@ def _dfs_cycle_forest(G, root=None): # `dfs_labeled_edges` considers nontree edges in both # orientations, so we need to not add the edge if it its # other orientation has been added. - elif d == 'nontree' and v not in H[u]: + elif d == "nontree" and v not in H[u]: H.add_edge(v, u, nontree=True) else: # Do nothing on 'reverse' edges; we only care about @@ -139,7 +130,7 @@ def _build_chain(G, u, v, visited): while v not in visited: yield u, v visited.add(v) - u, v = v, G.nodes[v]['parent'] + u, v = v, G.nodes[v]["parent"] yield u, v # Create a directed version of H that has the DFS edges directed @@ -160,7 +151,7 @@ def _build_chain(G, u, v, visited): for u in nodes: visited.add(u) # For each nontree edge going out of node u... - edges = ((u, v) for u, v, d in H.out_edges(u, data='nontree') if d) + edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d) for u, v in edges: # Create the cycle or cycle prefix starting with the # nontree edge. diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py index a561f67..392aafe 100644 --- a/networkx/algorithms/chordal.py +++ b/networkx/algorithms/chordal.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Algorithms for chordal graphs. @@ -6,21 +5,22 @@ (an edge joining two nodes not adjacent in the cycle). 
https://en.wikipedia.org/wiki/Chordal_graph """ -import networkx as nx -import random import sys +import warnings + +import networkx as nx +from networkx.algorithms.components import connected_components +from networkx.utils import arbitrary_element, not_implemented_for -__authors__ = "\n".join(['Jesus Cerquides ']) -# Copyright (C) 2010 by -# Jesus Cerquides -# All rights reserved. -# BSD license. -__all__ = ['is_chordal', - 'find_induced_nodes', - 'chordal_graph_cliques', - 'chordal_graph_treewidth', - 'NetworkXTreewidthBoundExceeded'] +__all__ = [ + "is_chordal", + "find_induced_nodes", + "chordal_graph_cliques", + "chordal_graph_treewidth", + "NetworkXTreewidthBoundExceeded", + "complete_to_chordal_graph", +] class NetworkXTreewidthBoundExceeded(nx.NetworkXException): @@ -53,9 +53,19 @@ def is_chordal(G): Examples -------- - >>> import networkx as nx - >>> e=[(1,2),(1,3),(2,3),(2,4),(3,4),(3,5),(3,6),(4,5),(4,6),(5,6)] - >>> G=nx.Graph(e) + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... ] + >>> G = nx.Graph(e) >>> nx.is_chordal(G) True @@ -73,9 +83,9 @@ def is_chordal(G): pp. 566–579. """ if G.is_directed(): - raise nx.NetworkXError('Directed graphs not supported') + raise nx.NetworkXError("Directed graphs not supported") if G.is_multigraph(): - raise nx.NetworkXError('Multiply connected graphs not supported.') + raise nx.NetworkXError("Multiply connected graphs not supported.") if len(_find_chordality_breaker(G)) == 0: return True else: @@ -99,7 +109,7 @@ def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): Returns ------- - I : Set of nodes + Induced_nodes : Set of nodes The set of induced nodes in the path from s to t in G Raises @@ -108,16 +118,15 @@ def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a :exc:`NetworkXError` is raised. - The algorithm can only be applied to chordal graphs. If - the input graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. Examples -------- - >>> import networkx as nx - >>> G=nx.Graph() + >>> G = nx.Graph() >>> G = nx.generators.classic.path_graph(10) - >>> I = nx.find_induced_nodes(G,1,9,2) - >>> list(I) + >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2) + >>> sorted(induced_nodes) [1, 2, 3, 4, 5, 6, 7, 8, 9] Notes @@ -141,23 +150,23 @@ def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): H = nx.Graph(G) H.add_edge(s, t) - I = set() + Induced_nodes = set() triplet = _find_chordality_breaker(H, s, treewidth_bound) while triplet: (u, v, w) = triplet - I.update(triplet) + Induced_nodes.update(triplet) for n in triplet: if n != s: H.add_edge(s, n) triplet = _find_chordality_breaker(H, s, treewidth_bound) - if I: + if Induced_nodes: # Add t and the second node in the induced path from s to t. - I.add(t) + Induced_nodes.add(t) for u in G[s]: - if len(I & set(G[u])) == 2: - I.add(u) + if len(Induced_nodes & set(G[u])) == 2: + Induced_nodes.add(u) break - return I + return Induced_nodes def chordal_graph_cliques(G): @@ -181,25 +190,31 @@ def chordal_graph_cliques(G): The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a :exc:`NetworkXError` is raised. 
- The algorithm can only be applied to chordal graphs. If the - input graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. Examples -------- - >>> import networkx as nx - >>> e= [(1,2),(1,3),(2,3),(2,4),(3,4),(3,5),(3,6),(4,5),(4,6),(5,6),(7,8)] + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... ] >>> G = nx.Graph(e) >>> G.add_node(9) >>> setlist = nx.chordal_graph_cliques(G) """ - if not is_chordal(G): - raise nx.NetworkXError("Input graph is not chordal.") - - cliques = set() - for C in nx.connected.connected_component_subgraphs(G): - cliques |= _connected_chordal_graph_cliques(C) - - return cliques + msg = "This will return a generator in 3.0." + warnings.warn(msg, DeprecationWarning) + return {c for c in _chordal_graph_cliques(G)} def chordal_graph_treewidth(G): @@ -221,13 +236,24 @@ def chordal_graph_treewidth(G): The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a :exc:`NetworkXError` is raised. - The algorithm can only be applied to chordal graphs. If - the input graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. Examples -------- - >>> import networkx as nx - >>> e = [(1,2),(1,3),(2,3),(2,4),(3,4),(3,5),(3,6),(4,5),(4,6),(5,6),(7,8)] + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... ] >>> G = nx.Graph(e) >>> G.add_node(9) >>> nx.chordal_graph_treewidth(G) @@ -254,7 +280,7 @@ def _is_complete_graph(G): if n < 2: return True e = G.number_of_edges() - max_edges = ((n * (n - 1)) / 2) + max_edges = (n * (n - 1)) / 2 return e == max_edges @@ -271,7 +297,6 @@ def _max_cardinality_node(G, choices, wanna_connect): """Returns a the node in choices that has more connections in G to nodes in wanna_connect. """ -# max_number = None max_number = -1 for x in choices: number = len([y for y in G[x] if y in wanna_connect]) @@ -283,7 +308,7 @@ def _max_cardinality_node(G, choices, wanna_connect): def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): """ Given a graph G, starts a max cardinality search - (starting from s if s is given and from a random node otherwise) + (starting from s if s is given and from an arbitrary node otherwise) trying to find a non-chordal cycle. 
If it does find one, it returns (u,v,w) where u,v,w are the three @@ -292,10 +317,9 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): unnumbered = set(G) if s is None: - s = random.choice(list(unnumbered)) + s = arbitrary_element(G) unnumbered.remove(s) - numbered = set([s]) -# current_treewidth = None + numbered = {s} current_treewidth = -1 while unnumbered: # and current_treewidth <= treewidth_bound: v = _max_cardinality_node(G, unnumbered, numbered) @@ -308,7 +332,8 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): current_treewidth = max(current_treewidth, len(clique_wanna_be)) if current_treewidth > treewidth_bound: raise nx.NetworkXTreewidthBoundExceeded( - "treewidth_bound exceeded: %s" % current_treewidth) + f"treewidth_bound exceeded: {current_treewidth}" + ) else: # sg is not a clique, # look for an edge that is not included in sg @@ -317,30 +342,149 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): return () -def _connected_chordal_graph_cliques(G): - """Return the set of maximal cliques of a connected chordal graph.""" - if G.number_of_nodes() == 1: - x = frozenset(G.nodes()) - return set([x]) - else: - cliques = set() - unnumbered = set(G.nodes()) - v = random.choice(list(unnumbered)) - unnumbered.remove(v) - numbered = set([v]) - clique_wanna_be = set([v]) - while unnumbered: - v = _max_cardinality_node(G, unnumbered, numbered) +def _chordal_graph_cliques(G): + """Returns all maximal cliques of a chordal graph. + + The algorithm breaks the graph in connected components and performs a + maximum cardinality search in each component to get the cliques. + + Parameters + ---------- + G : graph + A NetworkX graph + + Returns + ------- + iterator + An iterator over maximal cliques, each of which is a frozenset of + nodes in `G`. The order of cliques is arbitrary. + + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + If the input graph is an instance of one of these classes, a + :exc:`NetworkXError` is raised. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... 
] + >>> G = nx.Graph(e) + >>> G.add_node(9) + >>> cliques = [c for c in _chordal_graph_cliques(G)] + >>> cliques[0] + frozenset({1, 2, 3}) + """ + if not is_chordal(G): + raise nx.NetworkXError("Input graph is not chordal.") + + for C in (G.subgraph(c).copy() for c in connected_components(G)): + if C.number_of_nodes() == 1: + yield frozenset(C.nodes()) + else: + unnumbered = set(C.nodes()) + v = arbitrary_element(C) unnumbered.remove(v) - numbered.add(v) - new_clique_wanna_be = set(G.neighbors(v)) & numbered - sg = G.subgraph(clique_wanna_be) - if _is_complete_graph(sg): - new_clique_wanna_be.add(v) - if not new_clique_wanna_be >= clique_wanna_be: - cliques.add(frozenset(clique_wanna_be)) - clique_wanna_be = new_clique_wanna_be + numbered = {v} + clique_wanna_be = {v} + while unnumbered: + v = _max_cardinality_node(C, unnumbered, numbered) + unnumbered.remove(v) + numbered.add(v) + new_clique_wanna_be = set(C.neighbors(v)) & numbered + sg = C.subgraph(clique_wanna_be) + if _is_complete_graph(sg): + new_clique_wanna_be.add(v) + if not new_clique_wanna_be >= clique_wanna_be: + yield frozenset(clique_wanna_be) + clique_wanna_be = new_clique_wanna_be + else: + raise nx.NetworkXError("Input graph is not chordal.") + yield frozenset(clique_wanna_be) + + +@not_implemented_for("directed") +def complete_to_chordal_graph(G): + """Return a copy of G completed to a chordal graph + + Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is + called chordal if for each cycle with length bigger than 3, there exist + two non-adjacent nodes connected by an edge (called a chord). + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + H : NetworkX graph + The chordal enhancement of G + alpha : Dictionary + The elimination ordering of nodes of G + + Notes + ------ + There are different approaches to calculate the chordal + enhancement of a graph. The algorithm used here is called + MCS-M and gives at least minimal (local) triangulation of graph. Note + that this triangulation is not necessarily a global minimum. + + https://en.wikipedia.org/wiki/Chordal_graph + + References + ---------- + .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004) + Maximum Cardinality Search for Computing Minimal Triangulations of + Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3. 
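A small usage check for complete_to_chordal_graph, assuming the MCS-M implementation in this hunk: the returned graph H is chordal, keeps every original edge, and alpha is a bijective elimination ordering over the nodes.

import networkx as nx
from networkx.algorithms.chordal import complete_to_chordal_graph

G = nx.cycle_graph(6)                       # chordless 6-cycle, not chordal
H, alpha = complete_to_chordal_graph(G)

assert nx.is_chordal(H)
# Only chords are added; every original edge survives the copy.
assert all(H.has_edge(u, v) for u, v in G.edges())
# alpha assigns each node a distinct rank from 1 to n.
assert sorted(alpha.values()) == list(range(1, len(G) + 1))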
+ + Examples + -------- + >>> from networkx.algorithms.chordal import complete_to_chordal_graph + >>> G = nx.wheel_graph(10) + >>> H, alpha = complete_to_chordal_graph(G) + """ + H = G.copy() + alpha = {node: 0 for node in H} + if nx.is_chordal(H): + return H, alpha + chords = set() + weight = {node: 0 for node in H.nodes()} + unnumbered_nodes = list(H.nodes()) + for i in range(len(H.nodes()), 0, -1): + # get the node in unnumbered_nodes with the maximum weight + z = max(unnumbered_nodes, key=lambda node: weight[node]) + unnumbered_nodes.remove(z) + alpha[z] = i + update_nodes = [] + for y in unnumbered_nodes: + if G.has_edge(y, z): + update_nodes.append(y) else: - raise nx.NetworkXError("Input graph is not chordal.") - cliques.add(frozenset(clique_wanna_be)) - return cliques + # y_weight will be bigger than node weights between y and z + y_weight = weight[y] + lower_nodes = [ + node for node in unnumbered_nodes if weight[node] < y_weight + ] + if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z): + update_nodes.append(y) + chords.add((z, y)) + # during calculation of paths the weights should not be updated + for node in update_nodes: + weight[node] += 1 + H.add_edges_from(chords) + return H, alpha diff --git a/networkx/algorithms/clique.py b/networkx/algorithms/clique.py index 42de494..11bb145 100644 --- a/networkx/algorithms/clique.py +++ b/networkx/algorithms/clique.py @@ -1,9 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """Functions for finding and manipulating cliques. Finding the largest clique in a graph is NP-complete problem, so most of @@ -17,21 +11,26 @@ from itertools import chain from itertools import combinations from itertools import islice -try: - from itertools import ifilter as filter -except ImportError: - pass -import networkx +import networkx as nx from networkx.utils import not_implemented_for -__author__ = """Dan Schult (dschult@colgate.edu)""" -__all__ = ['find_cliques', 'find_cliques_recursive', 'make_max_clique_graph', - 'make_clique_bipartite', 'graph_clique_number', - 'graph_number_of_cliques', 'node_clique_number', - 'number_of_cliques', 'cliques_containing_node', - 'enumerate_all_cliques'] -@not_implemented_for('directed') +__all__ = [ + "find_cliques", + "find_cliques_recursive", + "make_max_clique_graph", + "make_clique_bipartite", + "graph_clique_number", + "graph_number_of_cliques", + "node_clique_number", + "number_of_cliques", + "cliques_containing_node", + "enumerate_all_cliques", + "max_weight_clique", +] + + +@not_implemented_for("directed") def enumerate_all_cliques(G): """Returns all cliques in an undirected graph. @@ -94,12 +93,15 @@ def enumerate_all_cliques(G): yield base for i, u in enumerate(cnbrs): # Use generators to reduce memory consumption. - queue.append((chain(base, [u]), - filter(nbrs[u].__contains__, - islice(cnbrs, i + 1, None)))) + queue.append( + ( + chain(base, [u]), + filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)), + ) + ) -@not_implemented_for('directed') +@not_implemented_for("directed") def find_cliques(G): """Returns all maximal cliques in an undirected graph. @@ -132,9 +134,8 @@ def find_cliques(G): To obtain a list of all maximal cliques, use `list(find_cliques(G))`. However, be aware that in the worst-case, the length of this list can be exponential in the number of nodes in - the graph (for example, when the graph is the complete graph). 
This - function avoids storing all cliques in memory by only keeping - current candidate node lists in memory during its search. + the graph. This function avoids storing all cliques in memory by + only keeping current candidate node lists in memory during its search. This implementation is based on the algorithm published by Bron and Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi @@ -239,9 +240,8 @@ def find_cliques_recursive(G): To obtain a list of all maximal cliques, use `list(find_cliques_recursive(G))`. However, be aware that in the worst-case, the length of this list can be exponential in the number - of nodes in the graph (for example, when the graph is the complete - graph). This function avoids storing all cliques in memory by only - keeping current candidate node lists in memory during its search. + of nodes in the graph. This function avoids storing all cliques in memory + by only keeping current candidate node lists in memory during its search. This implementation is based on the algorithm published by Bron and Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi @@ -292,8 +292,7 @@ def expand(subg, cand): else: cand_q = cand & adj_q if cand_q: - for clique in expand(subg_q, cand_q): - yield clique + yield from expand(subg_q, cand_q) Q.pop() return expand(set(G), set(G)) @@ -309,9 +308,8 @@ def make_max_clique_graph(G, create_using=None): ---------- G : NetworkX graph - create_using : NetworkX graph - If provided, this graph will be cleared and the nodes and edges - of the maximal clique graph will be added to this graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -333,8 +331,10 @@ def make_max_clique_graph(G, create_using=None): steps. """ - B = create_using if create_using is not None else networkx.Graph() - B.clear() + if create_using is None: + B = G.__class__() + else: + B = nx.empty_graph(0, create_using) cliques = list(enumerate(set(c) for c in find_cliques(G))) # Add a numbered node for each clique. B.add_nodes_from(i for i, c in cliques) @@ -362,9 +362,8 @@ def make_clique_bipartite(G, fpos=None, create_using=None, name=None): additional attribute, `pos`, a dictionary mapping node to position in the Euclidean plane. - create_using : NetworkX graph - If provided, this graph will be cleared and the nodes and edges - of the bipartite graph will be added to this graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -379,7 +378,7 @@ def make_clique_bipartite(G, fpos=None, create_using=None, name=None): convention for bipartite graphs in NetworkX. """ - B = create_using if create_using is not None else networkx.Graph() + B = nx.empty_graph(0, create_using) B.clear() # The "bottom" nodes in the bipartite graph are the nodes of the # original graph, G. 
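The relationship between find_cliques and make_max_clique_graph in the hunks above can be seen on a four-node example; a minimal sketch:

import networkx as nx

G = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3)])

# Maximal cliques via Bron-Kerbosch: the triangle and the pendant edge.
assert sorted(sorted(c) for c in nx.find_cliques(G)) == [[0, 1, 2], [2, 3]]

# The max clique graph has one node per maximal clique; the two cliques
# share node 2, so they are adjacent.
B = nx.make_max_clique_graph(G)
assert B.number_of_nodes() == 2 and B.number_of_edges() == 1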
@@ -423,7 +422,9 @@ def graph_clique_number(G, cliques=None): """ if cliques is None: cliques = find_cliques(G) - return max([len(c) for c in cliques]) + if len(G.nodes) < 1: + return 0 + return max([len(c) for c in cliques] or [1]) def graph_number_of_cliques(G, cliques=None): @@ -469,19 +470,19 @@ def node_clique_number(G, nodes=None, cliques=None): if isinstance(nodes, list): d = {} for n in nodes: - H = networkx.ego_graph(G, n) - d[n] = max((len(c) for c in find_cliques(H))) + H = nx.ego_graph(G, n) + d[n] = max(len(c) for c in find_cliques(H)) else: - H = networkx.ego_graph(G, nodes) - d = max((len(c) for c in find_cliques(H))) + H = nx.ego_graph(G, nodes) + d = max(len(c) for c in find_cliques(H)) return d # nodes is None--find all cliques cliques = list(find_cliques(G)) if nodes is None: - nodes = list(G.nodes()) # none, get entire graph + nodes = list(G.nodes()) # none, get entire graph - if not isinstance(nodes, list): # check for a list + if not isinstance(nodes, list): # check for a list v = nodes # assume it is a single value d = max([len(c) for c in cliques if v in c]) @@ -517,9 +518,9 @@ def number_of_cliques(G, nodes=None, cliques=None): cliques = list(find_cliques(G)) if nodes is None: - nodes = list(G.nodes()) # none, get entire graph + nodes = list(G.nodes()) # none, get entire graph - if not isinstance(nodes, list): # check for a list + if not isinstance(nodes, list): # check for a list v = nodes # assume it is a single value numcliq = len([1 for c in cliques if v in c]) @@ -540,9 +541,9 @@ def cliques_containing_node(G, nodes=None, cliques=None): cliques = list(find_cliques(G)) if nodes is None: - nodes = list(G.nodes()) # none, get entire graph + nodes = list(G.nodes()) # none, get entire graph - if not isinstance(nodes, list): # check for a list + if not isinstance(nodes, list): # check for a list v = nodes # assume it is a single value vcliques = [c for c in cliques if v in c] @@ -551,3 +552,160 @@ def cliques_containing_node(G, nodes=None, cliques=None): for v in nodes: vcliques[v] = [c for c in cliques if v in c] return vcliques + + +class MaxWeightClique(object): + """A class for the maximum weight clique algorithm. + + This class is a helper for the `max_weight_clique` function. The class + should not normally be used directly. + + Parameters + ---------- + G : NetworkX graph + The undirected graph for which a maximum weight clique is sought + weight : string or None, optional (default='weight') + The node attribute that holds the integer value used as a weight. + If None, then each node has weight 1. + + Attributes + ---------- + G : NetworkX graph + The undirected graph for which a maximum weight clique is sought + node_weights: dict + The weight of each node + incumbent_nodes : list + The nodes of the incumbent clique (the best clique found so far) + incumbent_weight: int + The weight of the incumbent clique + """ + + def __init__(self, G, weight): + self.G = G + self.incumbent_nodes = [] + self.incumbent_weight = 0 + + if weight is None: + self.node_weights = {v: 1 for v in G.nodes()} + else: + for v in G.nodes(): + if weight not in G.nodes[v]: + err = "Node {} does not have the requested weight field." + raise KeyError(err.format(v)) + if not isinstance(G.nodes[v][weight], int): + err = "The '{}' field of node {} is not an integer." + raise ValueError(err.format(weight, v)) + self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()} + + def update_incumbent_if_improved(self, C, C_weight): + """Update the incumbent if the node set C has greater weight. 
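The clique-counting helpers above all accept a precomputed clique list so that repeated queries avoid re-running find_cliques, and MaxWeightClique.__init__ validates that node weights are integers. A short sketch of both behaviours, assuming the code in this hunk:

import networkx as nx

G = nx.complete_graph(4)
cliques = list(nx.find_cliques(G))          # compute once, reuse below
assert nx.graph_clique_number(G, cliques) == 4
assert nx.node_clique_number(G, 0, cliques) == 4
assert nx.number_of_cliques(G, 0, cliques) == 1

# max_weight_clique (defined below) insists on integer node weights:
# a float weight raises ValueError, a missing attribute raises KeyError.
nx.set_node_attributes(G, 1, "w")
G.nodes[0]["w"] = 1.5
try:
    nx.max_weight_clique(G, weight="w")
except ValueError:
    pass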
+ + C is assumed to be a clique. + """ + if C_weight > self.incumbent_weight: + self.incumbent_nodes = C[:] + self.incumbent_weight = C_weight + + def greedily_find_independent_set(self, P): + """Greedily find an independent set of nodes from a set of + nodes P.""" + independent_set = [] + P = P[:] + while P: + v = P[0] + independent_set.append(v) + P = [w for w in P if v != w and not self.G.has_edge(v, w)] + return independent_set + + def find_branching_nodes(self, P, target): + """Find a set of nodes to branch on.""" + residual_wt = {v: self.node_weights[v] for v in P} + total_wt = 0 + P = P[:] + while P: + independent_set = self.greedily_find_independent_set(P) + min_wt_in_class = min(residual_wt[v] for v in independent_set) + total_wt += min_wt_in_class + if total_wt > target: + break + for v in independent_set: + residual_wt[v] -= min_wt_in_class + P = [v for v in P if residual_wt[v] != 0] + return P + + def expand(self, C, C_weight, P): + """Look for the best clique that contains all the nodes in C and zero or + more of the nodes in P, backtracking if it can be shown that no such + clique has greater weight than the incumbent. + """ + self.update_incumbent_if_improved(C, C_weight) + branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight) + while branching_nodes: + v = branching_nodes.pop() + P.remove(v) + new_C = C + [v] + new_C_weight = C_weight + self.node_weights[v] + new_P = [w for w in P if self.G.has_edge(v, w)] + self.expand(new_C, new_C_weight, new_P) + + def find_max_weight_clique(self): + """Find a maximum weight clique.""" + # Sort nodes in reverse order of degree for speed + nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v), reverse=True) + nodes = [v for v in nodes if self.node_weights[v] > 0] + self.expand([], 0, nodes) + + +@not_implemented_for("directed") +def max_weight_clique(G, weight="weight"): + """Find a maximum weight clique in G. + + A *clique* in a graph is a set of nodes such that every two distinct nodes + are adjacent. The *weight* of a clique is the sum of the weights of its + nodes. A *maximum weight clique* of graph G is a clique C in G such that + no clique in G has weight greater than the weight of C. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + weight : string or None, optional (default='weight') + The node attribute that holds the integer value used as a weight. + If None, then each node has weight 1. + + Returns + ------- + clique : list + the nodes of a maximum weight clique + weight : int + the weight of a maximum weight clique + + Notes + ----- + The implementation is recursive, and therefore it may run into recursion + depth issues if G contains a clique whose number of nodes is close to the + recursion depth limit. + + At each search node, the algorithm greedily constructs a weighted + independent set cover of part of the graph in order to find a small set of + nodes on which to branch. The algorithm is very similar to the algorithm + of Tavares et al. [1]_, other than the fact that the NetworkX version does + not use bitsets. This style of algorithm for maximum weight clique (and + maximum weight independent set, which is the same problem but on the + complement graph) has a decades-long history. See Algorithm B of Warren + and Hicks [2]_ and the references in that paper. + + References + ---------- + .. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um + algoritmo de branch and bound para o problema da clique máxima + ponderada. Proceedings of XLVII SBPO 1 (2015). 
+ + .. [2] Warrent, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound + for the Maximum Weight Independent Set Problem. Technical Report, + Texas A&M University (2016). + """ + + mwc = MaxWeightClique(G, weight) + mwc.find_max_weight_clique() + return mwc.incumbent_nodes, mwc.incumbent_weight diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 38da2ae..84a7ef8 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -1,30 +1,22 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """Algorithms to characterize the number of triangles in a graph.""" -from __future__ import division +from itertools import chain from itertools import combinations from collections import Counter -import networkx as nx from networkx.utils import not_implemented_for -__author__ = """\n""".join(['Aric Hagberg ', - 'Dan Schult (dschult@colgate.edu)', - 'Pieter Swart (swart@lanl.gov)', - 'Jordi Torrents ']) +__all__ = [ + "triangles", + "average_clustering", + "clustering", + "transitivity", + "square_clustering", + "generalized_degree", +] -__all__ = ['triangles', 'average_clustering', 'clustering', 'transitivity', - 'square_clustering', 'generalized_degree'] - -@not_implemented_for('directed') +@not_implemented_for("directed") def triangles(G, nodes=None): """Compute the number of triangles. @@ -44,12 +36,12 @@ def triangles(G, nodes=None): Examples -------- - >>> G=nx.complete_graph(5) - >>> print(nx.triangles(G,0)) + >>> G = nx.complete_graph(5) + >>> print(nx.triangles(G, 0)) 6 >>> print(nx.triangles(G)) {0: 6, 1: 6, 2: 6, 3: 6, 4: 6} - >>> print(list(nx.triangles(G,(0,1)).values())) + >>> print(list(nx.triangles(G, (0, 1)).values())) [6, 6] Notes @@ -67,7 +59,7 @@ def triangles(G, nodes=None): return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)} -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def _triangles_and_degree_iter(G, nodes=None): """ Return an iterator of (node, degree, triangles, generalized degree). @@ -88,8 +80,8 @@ def _triangles_and_degree_iter(G, nodes=None): yield (v, len(vs), ntriangles, gen_degree) -@not_implemented_for('multigraph') -def _weighted_triangles_and_degree_iter(G, nodes=None, weight='weight'): +@not_implemented_for("multigraph") +def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"): """ Return an iterator of (node, degree, weighted_triangles). Used for weighted clustering. @@ -118,11 +110,104 @@ def wt(u, v): # Only compute the edge weight once, before the inner inner # loop. wij = wt(i, j) - weighted_triangles += sum((wij * wt(j, k) * wt(k, i)) ** (1 / 3) - for k in inbrs & jnbrs) + weighted_triangles += sum( + (wij * wt(j, k) * wt(k, i)) ** (1 / 3) for k in inbrs & jnbrs + ) yield (i, len(inbrs), 2 * weighted_triangles) +@not_implemented_for("multigraph") +def _directed_triangles_and_degree_iter(G, nodes=None): + """ Return an iterator of + (node, total_degree, reciprocal_degree, directed_triangles). + + Used for directed clustering. 
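Returning to max_weight_clique above, a minimal usage sketch assuming integer "weight" node attributes as the docstring requires:

import networkx as nx

G = nx.path_graph(4)                        # edges 0-1, 1-2, 2-3
nx.set_node_attributes(G, {0: 10, 1: 1, 2: 1, 3: 10}, "weight")

clique, weight = nx.max_weight_clique(G)
# Every maximal clique of a path is an edge; {0, 1} and {2, 3} both carry
# weight 11, which beats any single node.
assert weight == 11
assert len(clique) == 2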
+ + """ + nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes)) + + for i, preds, succs in nodes_nbrs: + ipreds = set(preds) - {i} + isuccs = set(succs) - {i} + + directed_triangles = 0 + for j in chain(ipreds, isuccs): + jpreds = set(G._pred[j]) - {j} + jsuccs = set(G._succ[j]) - {j} + directed_triangles += sum( + 1 + for k in chain( + (ipreds & jpreds), + (ipreds & jsuccs), + (isuccs & jpreds), + (isuccs & jsuccs), + ) + ) + dtotal = len(ipreds) + len(isuccs) + dbidirectional = len(ipreds & isuccs) + yield (i, dtotal, dbidirectional, directed_triangles) + + +@not_implemented_for("multigraph") +def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"): + """ Return an iterator of + (node, total_degree, reciprocal_degree, directed_weighted_triangles). + + Used for directed weighted clustering. + + """ + if weight is None or G.number_of_edges() == 0: + max_weight = 1 + else: + max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True)) + + nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes)) + + def wt(u, v): + return G[u][v].get(weight, 1) / max_weight + + for i, preds, succs in nodes_nbrs: + ipreds = set(preds) - {i} + isuccs = set(succs) - {i} + + directed_triangles = 0 + for j in ipreds: + jpreds = set(G._pred[j]) - {j} + jsuccs = set(G._succ[j]) - {j} + directed_triangles += sum( + (wt(j, i) * wt(k, i) * wt(k, j)) ** (1 / 3) for k in ipreds & jpreds + ) + directed_triangles += sum( + (wt(j, i) * wt(k, i) * wt(j, k)) ** (1 / 3) for k in ipreds & jsuccs + ) + directed_triangles += sum( + (wt(j, i) * wt(i, k) * wt(k, j)) ** (1 / 3) for k in isuccs & jpreds + ) + directed_triangles += sum( + (wt(j, i) * wt(i, k) * wt(j, k)) ** (1 / 3) for k in isuccs & jsuccs + ) + + for j in isuccs: + jpreds = set(G._pred[j]) - {j} + jsuccs = set(G._succ[j]) - {j} + directed_triangles += sum( + (wt(i, j) * wt(k, i) * wt(k, j)) ** (1 / 3) for k in ipreds & jpreds + ) + directed_triangles += sum( + (wt(i, j) * wt(k, i) * wt(j, k)) ** (1 / 3) for k in ipreds & jsuccs + ) + directed_triangles += sum( + (wt(i, j) * wt(i, k) * wt(k, j)) ** (1 / 3) for k in isuccs & jpreds + ) + directed_triangles += sum( + (wt(i, j) * wt(i, k) * wt(j, k)) ** (1 / 3) for k in isuccs & jsuccs + ) + + dtotal = len(ipreds) + len(isuccs) + dbidirectional = len(ipreds & isuccs) + yield (i, dtotal, dbidirectional, directed_triangles) + + def average_clustering(G, nodes=None, weight=None, count_zeros=True): r"""Compute the average clustering coefficient for the graph G. @@ -132,7 +217,7 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True): C = \frac{1}{n}\sum_{v \in G} c_v, - where `n` is the number of nodes in `G`. + where :math:`n` is the number of nodes in `G`. Parameters ---------- @@ -155,7 +240,7 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True): Examples -------- - >>> G=nx.complete_graph(5) + >>> G = nx.complete_graph(5) >>> print(nx.average_clustering(G)) 1.0 @@ -182,32 +267,46 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True): return sum(c) / len(c) -@not_implemented_for('directed') def clustering(G, nodes=None, weight=None): r"""Compute the clustering coefficient for nodes. - For unweighted graphs, the clustering of a node `u` + For unweighted graphs, the clustering of a node :math:`u` is the fraction of possible triangles through that node that exist, .. 
math:: c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)}, - where `T(u)` is the number of triangles through node `u` and - `deg(u)` is the degree of `u`. + where :math:`T(u)` is the number of triangles through node :math:`u` and + :math:`deg(u)` is the degree of :math:`u`. - For weighted graphs, the clustering is defined - as the geometric average of the subgraph edge weights [1]_, + For weighted graphs, there are several ways to define clustering [1]_. + the one used here is defined + as the geometric average of the subgraph edge weights [2]_, .. math:: c_u = \frac{1}{deg(u)(deg(u)-1))} - \sum_{uv} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}. + \sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}. + + The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight + in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`. - The edge weights `\hat{w}_{uv}` are normalized by the maximum weight in the - network `\hat{w}_{uv} = w_{uv}/\max(w)`. + The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`. - The value of `c_u` is assigned to 0 if `deg(u) < 2`. + For directed graphs, the clustering is similarly defined as the fraction + of all possible directed triangles or geometric average of the subgraph + edge weights for unweighted and weighted directed graph respectively [3]_. + + .. math:: + + c_u = \frac{1}{deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u)} + T(u), + + where :math:`T(u)` is the number of directed triangles through node + :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of + :math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of + :math:`u`. Parameters ---------- @@ -227,8 +326,8 @@ def clustering(G, nodes=None, weight=None): Examples -------- - >>> G=nx.complete_graph(5) - >>> print(nx.clustering(G,0)) + >>> G = nx.complete_graph(5) + >>> print(nx.clustering(G, 0)) 1.0 >>> print(nx.clustering(G)) {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0} @@ -243,15 +342,32 @@ def clustering(G, nodes=None, weight=None): complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela, K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007). http://jponnela.com/web_documents/a9.pdf + .. [2] Intensity and coherence of motifs in weighted complex + networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski, + Physical Review E, 71(6), 065103 (2005). + .. [3] Clustering in complex directed networks by G. Fagiolo, + Physical Review E, 76(2), 026107 (2007). """ - if weight is not None: - td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight) - clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for - v, d, t in td_iter} + if G.is_directed(): + if weight is not None: + td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight) + clusterc = { + v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2) + for v, dt, db, t in td_iter + } + else: + td_iter = _directed_triangles_and_degree_iter(G, nodes) + clusterc = { + v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2) + for v, dt, db, t in td_iter + } else: - td_iter = _triangles_and_degree_iter(G, nodes) - clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for - v, d, t, _ in td_iter} + if weight is not None: + td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight) + clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t in td_iter} + else: + td_iter = _triangles_and_degree_iter(G, nodes) + clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t, _ in td_iter} if nodes in G: # Return the value of the sole entry in the dictionary. 
return clusterc[nodes] @@ -302,10 +418,11 @@ def square_clustering(G, nodes=None): \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v} \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]}, - where `q_v(u,w)` are the number of common neighbors of `u` and `w` - other than `v` (ie squares), and - `a_v(u,w) = (k_u - (1+q_v(u,w)+\theta_{uv}))(k_w - (1+q_v(u,w)+\theta_{uw}))`, - where `\theta_{uw} = 1` if `u` and `w` are connected and 0 otherwise. + where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and + :math:`w` other than :math:`v` (ie squares), and :math:`a_v(u,w) = (k_u - + (1+q_v(u,w)+\theta_{uv}))(k_w - (1+q_v(u,w)+\theta_{uw}))`, where + :math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0 + otherwise. Parameters ---------- @@ -321,16 +438,16 @@ def square_clustering(G, nodes=None): Examples -------- - >>> G=nx.complete_graph(5) - >>> print(nx.square_clustering(G,0)) + >>> G = nx.complete_graph(5) + >>> print(nx.square_clustering(G, 0)) 1.0 >>> print(nx.square_clustering(G)) {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0} Notes ----- - While `C_3(v)` (triangle clustering) gives the probability that - two neighbors of node v are connected with each other, `C_4(v)` is + While :math:`C_3(v)` (triangle clustering) gives the probability that + two neighbors of node v are connected with each other, :math:`C_4(v)` is the probability that two neighbors of node v share a common neighbor different from v. This algorithm can be applied to both bipartite and unipartite networks. @@ -350,7 +467,7 @@ def square_clustering(G, nodes=None): clustering[v] = 0 potential = 0 for u, w in combinations(G[v], 2): - squares = len((set(G[u]) & set(G[w])) - set([v])) + squares = len((set(G[u]) & set(G[w])) - {v}) clustering[v] += squares degm = squares + 1 if w in G[u]: @@ -364,16 +481,17 @@ def square_clustering(G, nodes=None): return clustering -@not_implemented_for('directed') +@not_implemented_for("directed") def generalized_degree(G, nodes=None): - """ Compute the generalized degree for nodes. + r""" Compute the generalized degree for nodes. For each node, the generalized degree shows how many edges of given triangle multiplicity the node is connected to. The triangle multiplicity of an edge is the number of triangles an edge participates in. The - generalized degree of node `i` can be written as a vector - `\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where `k_i^{(j)}` is the - number of edges attached to node `i` that participate in `j` triangles. + generalized degree of node :math:`i` can be written as a vector + :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where + :math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that + participate in :math:`j` triangles. 
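The directed-clustering support added in the hunk above can be exercised directly. A small sketch, assuming the Fagiolo definition implemented there (the 0.5 value follows from T(u) = 2, deg_tot = 2 and reciprocal degree 0 for each node of a directed 3-cycle):

import networkx as nx

# Undirected: a triangle is perfectly clustered.
T = nx.complete_graph(3)
assert nx.triangles(T, 0) == 1
assert nx.clustering(T, 0) == 1.0

# Directed graphs are now accepted instead of raising NetworkXNotImplemented.
D = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
assert nx.clustering(D, 0) == 0.5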
Parameters ---------- @@ -390,17 +508,16 @@ def generalized_degree(G, nodes=None): Examples -------- - >>> G=nx.complete_graph(5) - >>> print(nx.generalized_degree(G,0)) + >>> G = nx.complete_graph(5) + >>> print(nx.generalized_degree(G, 0)) Counter({3: 4}) >>> print(nx.generalized_degree(G)) - {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), \ -3: Counter({3: 4}), 4: Counter({3: 4})} + {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})} To recover the number of triangles attached to a node: - >>> k1 = nx.generalized_degree(G,0) - >>> sum([k*v for k,v in k1.items()])/2 == nx.triangles(G,0) + >>> k1 = nx.generalized_degree(G, 0) + >>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0) True Notes @@ -411,9 +528,9 @@ def generalized_degree(G, nodes=None): The return value does not include a `zero` entry if no edges of a particular triangle multiplicity are present. - The number of triangles node `i` is attached to can be recovered from - the generalized degree `\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` by - `(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`. + The number of triangles node :math:`i` is attached to can be recovered from + the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, + k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`. References ---------- diff --git a/networkx/algorithms/coloring/__init__.py b/networkx/algorithms/coloring/__init__.py index eab4a34..39381d9 100644 --- a/networkx/algorithms/coloring/__init__.py +++ b/networkx/algorithms/coloring/__init__.py @@ -1,2 +1,4 @@ from networkx.algorithms.coloring.greedy_coloring import * -__all__ = ['greedy_color'] +from networkx.algorithms.coloring.equitable_coloring import equitable_color + +__all__ = ["greedy_color", "equitable_color"] diff --git a/networkx/algorithms/coloring/equitable_coloring.py b/networkx/algorithms/coloring/equitable_coloring.py new file mode 100644 index 0000000..711f4b4 --- /dev/null +++ b/networkx/algorithms/coloring/equitable_coloring.py @@ -0,0 +1,513 @@ +""" +Equitable coloring of graphs with bounded degree. +""" + +import networkx as nx +from collections import defaultdict + +__all__ = ["equitable_color"] + + +def is_coloring(G, coloring): + """Determine if the coloring is a valid coloring for the graph G.""" + # Verify that the coloring is valid. + for (s, d) in G.edges: + if coloring[s] == coloring[d]: + return False + return True + + +def is_equitable(G, coloring, num_colors=None): + """Determines if the coloring is valid and equitable for the graph G.""" + + if not is_coloring(G, coloring): + return False + + # Verify whether it is equitable. + color_set_size = defaultdict(int) + for color in coloring.values(): + color_set_size[color] += 1 + + if num_colors is not None: + for color in range(num_colors): + if color not in color_set_size: + # These colors do not have any vertices attached to them. 
+ color_set_size[color] = 0 + + # If there are more than 2 distinct values, the coloring cannot be equitable + all_set_sizes = set(color_set_size.values()) + if len(all_set_sizes) == 0 and num_colors is None: # Was an empty graph + return True + elif len(all_set_sizes) == 1: + return True + elif len(all_set_sizes) == 2: + a, b = list(all_set_sizes) + return abs(a - b) <= 1 + else: # len(all_set_sizes) > 2: + return False + + +def make_C_from_F(F): + C = defaultdict(lambda: []) + for node, color in F.items(): + C[color].append(node) + + return C + + +def make_N_from_L_C(L, C): + nodes = L.keys() + colors = C.keys() + return { + (node, color): sum(1 for v in L[node] if v in C[color]) + for node in nodes + for color in colors + } + + +def make_H_from_C_N(C, N): + return { + (c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0) + for c1 in C.keys() + for c2 in C.keys() + } + + +def change_color(u, X, Y, N, H, F, C, L): + """Change the color of 'u' from X to Y and update N, H, F, C.""" + assert F[u] == X and X != Y + + # Change the class of 'u' from X to Y + F[u] = Y + + for k in C.keys(): + # 'u' witnesses an edge from k -> Y instead of from k -> X now. + if N[u, k] == 0: + H[(X, k)] -= 1 + H[(Y, k)] += 1 + + for v in L[u]: + # 'v' has lost a neighbor in X and gained one in Y + N[(v, X)] -= 1 + N[(v, Y)] += 1 + + if N[(v, X)] == 0: + # 'v' witnesses F[v] -> X + H[(F[v], X)] += 1 + + if N[(v, Y)] == 1: + # 'v' no longer witnesses F[v] -> Y + H[(F[v], Y)] -= 1 + + C[X].remove(u) + C[Y].append(u) + + +def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L): + """Move witness along a path from src_color to dst_color.""" + X = src_color + while X != dst_color: + Y = T_cal[X] + # Move _any_ witness from X to Y = T_cal[X] + w = [x for x in C[X] if N[(x, Y)] == 0][0] + change_color(w, X, Y, N=N, H=H, F=F, C=C, L=L) + X = Y + + +def pad_graph(G, num_colors): + """Add a disconnected complete clique K_p such that the number of nodes in + the graph becomes a multiple of `num_colors`. + + Assumes that the graph's nodes are labelled using integers. + + Returns the number of nodes with each color. + """ + + n_ = len(G) + r = num_colors - 1 + + # Ensure that the number of nodes in G is a multiple of (r + 1) + s = n_ // (r + 1) + if n_ != s * (r + 1): + p = (r + 1) - n_ % (r + 1) + s += 1 + + # Complete graph K_p between (imaginary) nodes [n_, ... , n_ + p] + K = nx.relabel_nodes(nx.complete_graph(p), {idx: idx + n_ for idx in range(p)}) + G.add_edges_from(K.edges) + + return s + + +def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): + """Procedure P as described in the paper.""" + + if excluded_colors is None: + excluded_colors = set() + + A_cal = set() + T_cal = {} + R_cal = [] + + # BFS to determine A_cal, i.e. colors reachable from V- + reachable = [V_minus] + marked = set(reachable) + idx = 0 + + while idx < len(reachable): + pop = reachable[idx] + idx += 1 + + A_cal.add(pop) + R_cal.append(pop) + + # TODO: Checking whether a color has been visited can be made faster by + # using a look-up table instead of testing for membership in a set by a + # logarithmic factor. 
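The helpers above maintain four coupled views of one coloring. A tiny standalone sketch (plain dicts mirroring make_C_from_F, make_N_from_L_C and make_H_from_C_N, not the module's own helpers) on the path 0-1-2 colored 0, 1, 0:

from collections import defaultdict

L = {0: [1], 1: [0, 2], 2: [1]}             # adjacency lists of the path
F = {0: 0, 1: 1, 2: 0}                      # node -> color

C = defaultdict(list)                       # color -> nodes
for node, color in F.items():
    C[color].append(node)

# N[(v, c)]: number of neighbors of v colored c.
N = {(v, c): sum(1 for u in L[v] if u in C[c]) for v in L for c in C}
assert N[(1, 0)] == 2 and N[(0, 1)] == 1

# H[(c1, c2)]: nodes of color c1 with no neighbor of color c2; a positive
# entry witnesses that some node could legally move from c1 to c2.
H = {(c1, c2): sum(1 for v in C[c1] if N[(v, c2)] == 0) for c1 in C for c2 in C}
assert H[(1, 0)] == 0 and H[(0, 1)] == 0    # no legal moves in this coloring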
+ next_layer = [] + for k in C.keys(): + if ( + H[(k, pop)] > 0 + and k not in A_cal + and k not in excluded_colors + and k not in marked + ): + next_layer.append(k) + + for dst in next_layer: + # Record that `dst` can reach `pop` + T_cal[dst] = pop + + marked.update(next_layer) + reachable.extend(next_layer) + + # Variables for the algorithm + b = len(C) - len(A_cal) + + if V_plus in A_cal: + # Easy case: V+ is in A_cal + # Move one node from V+ to V- using T_cal to find the parents. + move_witnesses(V_plus, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L) + else: + # If there is a solo edge, we can resolve the situation by + # moving witnesses from B to A, making G[A] equitable and then + # recursively balancing G[B - w] with a different V_minus and + # but the same V_plus. + + A_0 = set() + A_cal_0 = set() + num_terminal_sets_found = 0 + made_equitable = False + + for W_1 in R_cal[::-1]: + + for v in C[W_1]: + X = None + + for U in C.keys(): + if N[(v, U)] == 0 and U in A_cal and U != W_1: + X = U + + # v does not witness an edge in H[A_cal] + if X is None: + continue + + for U in C.keys(): + # Note: Departing from the paper here. + if N[(v, U)] >= 1 and U not in A_cal: + X_prime = U + w = v + + # Finding the solo neighbor of w in X_prime + y_candidates = [ + node + for node in L[w] + if F[node] == X_prime and N[(node, W_1)] == 1 + ] + + if len(y_candidates) > 0: + y = y_candidates[0] + W = W_1 + + # Move w from W to X, now X has one extra node. + change_color(w, W, X, N=N, H=H, F=F, C=C, L=L) + + # Move witness from X to V_minus, making the coloring + # equitable. + move_witnesses( + src_color=X, + dst_color=V_minus, + N=N, + H=H, + F=F, + C=C, + T_cal=T_cal, + L=L, + ) + + # Move y from X_prime to W, making W the correct size. + change_color(y, X_prime, W, N=N, H=H, F=F, C=C, L=L) + + # Then call the procedure on G[B - y] + procedure_P( + V_minus=X_prime, + V_plus=V_plus, + N=N, + H=H, + C=C, + F=F, + L=L, + excluded_colors=excluded_colors.union(A_cal), + ) + made_equitable = True + break + + if made_equitable: + break + else: + # No node in W_1 was found such that + # it had a solo-neighbor. + A_cal_0.add(W_1) + A_0.update(C[W_1]) + num_terminal_sets_found += 1 + + if num_terminal_sets_found == b: + # Otherwise, construct the maximal independent set and find + # a pair of z_1, z_2 as in Case II. + + # BFS to determine B_cal': the set of colors reachable from V+ + B_cal_prime = set() + T_cal_prime = {} + + reachable = [V_plus] + marked = set(reachable) + idx = 0 + while idx < len(reachable): + pop = reachable[idx] + idx += 1 + + B_cal_prime.add(pop) + + # No need to check for excluded_colors here because + # they only exclude colors from A_cal + next_layer = [ + k + for k in C.keys() + if H[(pop, k)] > 0 and k not in B_cal_prime and k not in marked + ] + + for dst in next_layer: + T_cal_prime[pop] = dst + + marked.update(next_layer) + reachable.extend(next_layer) + + # Construct the independent set of G[B'] + I_set = set() + I_covered = set() + W_covering = {} + + B_prime = [node for k in B_cal_prime for node in C[k]] + + # Add the nodes in V_plus to I first. 
+ for z in C[V_plus] + B_prime: + if z in I_covered or F[z] not in B_cal_prime: + continue + + I_set.add(z) + I_covered.add(z) + I_covered.update([nbr for nbr in L[z]]) + + for w in L[z]: + if F[w] in A_cal_0 and N[(z, F[w])] == 1: + if w not in W_covering: + W_covering[w] = z + else: + # Found z1, z2 which have the same solo + # neighbor in some W + z_1 = W_covering[w] + # z_2 = z + + Z = F[z_1] + W = F[w] + + # shift nodes along W, V- + move_witnesses( + W, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L + ) + + # shift nodes along V+ to Z + move_witnesses( + V_plus, + Z, + N=N, + H=H, + F=F, + C=C, + T_cal=T_cal_prime, + L=L, + ) + + # change color of z_1 to W + change_color(z_1, Z, W, N=N, H=H, F=F, C=C, L=L) + + # change color of w to some color in B_cal + W_plus = [ + k + for k in C.keys() + if N[(w, k)] == 0 and k not in A_cal + ][0] + change_color(w, W, W_plus, N=N, H=H, F=F, C=C, L=L) + + # recurse with G[B \cup W*] + excluded_colors.update( + [ + k + for k in C.keys() + if k != W and k not in B_cal_prime + ] + ) + procedure_P( + V_minus=W, + V_plus=W_plus, + N=N, + H=H, + C=C, + F=F, + L=L, + excluded_colors=excluded_colors, + ) + + made_equitable = True + break + + if made_equitable: + break + else: + assert False, ( + "Must find a w which is the solo neighbor " + "of two vertices in B_cal_prime." + ) + + if made_equitable: + break + + +def equitable_color(G, num_colors): + """Provides equitable (r + 1)-coloring for nodes of G in O(r * n^2) time + if deg(G) <= r. The algorithm is described in [1]_. + + Attempts to color a graph using r colors, where no neighbors of a node + can have same color as the node itself and the number of nodes with each + color differ by at most 1. + + Parameters + ---------- + G : networkX graph + The nodes of this graph will be colored. + + num_colors : number of colors to use + This number must be at least one more than the maximum degree of nodes + in the graph. + + Returns + ------- + A dictionary with keys representing nodes and values representing + corresponding coloring. + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> d = nx.coloring.equitable_color(G, num_colors=3) + >>> nx.algorithms.coloring.equitable_coloring.is_equitable(G, d) + True + + Raises + ------ + NetworkXAlgorithmError + If the maximum degree of the graph ``G`` is greater than + ``num_colors``. + + References + ---------- + .. [1] Kierstead, H. A., Kostochka, A. V., Mydlarz, M., & Szemerédi, E. + (2010). A fast algorithm for equitable coloring. Combinatorica, 30(2), + 217-224. + """ + + # Map nodes to integers for simplicity later. + nodes_to_int = {} + int_to_nodes = {} + + for idx, node in enumerate(G.nodes): + nodes_to_int[node] = idx + int_to_nodes[idx] = node + + G = nx.relabel_nodes(G, nodes_to_int, copy=True) + + # Basic graph statistics and sanity check. + if len(G.nodes) > 0: + r_ = max([G.degree(node) for node in G.nodes]) + else: + r_ = 0 + + if r_ >= num_colors: + raise nx.NetworkXAlgorithmError( + f"Graph has maximum degree {r_}, needs " + f"{r_ + 1} (> {num_colors}) colors for guaranteed coloring." + ) + + # Ensure that the number of nodes in G is a multiple of (r + 1) + pad_graph(G, num_colors) + + # Starting the algorithm. + # L = {node: list(G.neighbors(node)) for node in G.nodes} + L_ = {node: [] for node in G.nodes} + + # Arbitrary equitable allocation of colors to nodes. + F = {node: idx % num_colors for idx, node in enumerate(G.nodes)} + + C = make_C_from_F(F) + + # The neighborhood is empty initially. 
+ N = make_N_from_L_C(L_, C) + + # Currently all nodes witness all edges. + H = make_H_from_C_N(C, N) + + # Start of algorithm. + edges_seen = set() + + for u in sorted(G.nodes): + for v in sorted(G.neighbors(u)): + + # Do not double count edges if (v, u) has already been seen. + if (v, u) in edges_seen: + continue + + edges_seen.add((u, v)) + + L_[u].append(v) + L_[v].append(u) + + N[(u, F[v])] += 1 + N[(v, F[u])] += 1 + + if F[u] != F[v]: + # Were 'u' and 'v' witnesses for F[u] -> F[v] or F[v] -> F[u]? + if N[(u, F[v])] == 1: + H[F[u], F[v]] -= 1 # u cannot witness an edge between F[u], F[v] + + if N[(v, F[u])] == 1: + H[F[v], F[u]] -= 1 # v cannot witness an edge between F[v], F[u] + + if N[(u, F[u])] != 0: + # Find the first color where 'u' does not have any neighbors. + Y = [k for k in C.keys() if N[(u, k)] == 0][0] + X = F[u] + change_color(u, X, Y, N=N, H=H, F=F, C=C, L=L_) + + # Procedure P + procedure_P(V_minus=X, V_plus=Y, N=N, H=H, F=F, C=C, L=L_) + + return {int_to_nodes[x]: F[x] for x in int_to_nodes} diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py index c3eb8ce..2c40412 100644 --- a/networkx/algorithms/coloring/greedy_coloring.py +++ b/networkx/algorithms/coloring/greedy_coloring.py @@ -1,27 +1,25 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2014 by -# Christian Olsson -# Jan Aagaard Meier -# Henrik Haugbølle -# Arya McCarthy -# All rights reserved. -# BSD license. """ Greedy graph coloring using various strategies. """ from collections import defaultdict, deque import itertools -import random import networkx as nx from networkx.utils import arbitrary_element +from networkx.utils import py_random_state from . import greedy_coloring_with_interchange as _interchange -__all__ = ['greedy_color', 'strategy_connected_sequential', - 'strategy_connected_sequential_bfs', - 'strategy_connected_sequential_dfs', 'strategy_independent_set', - 'strategy_largest_first', 'strategy_random_sequential', - 'strategy_saturation_largest_first', 'strategy_smallest_last'] +__all__ = [ + "greedy_color", + "strategy_connected_sequential", + "strategy_connected_sequential_bfs", + "strategy_connected_sequential_dfs", + "strategy_independent_set", + "strategy_largest_first", + "strategy_random_sequential", + "strategy_saturation_largest_first", + "strategy_smallest_last", +] def strategy_largest_first(G, colors): @@ -34,14 +32,18 @@ def strategy_largest_first(G, colors): return sorted(G, key=G.degree, reverse=True) -def strategy_random_sequential(G, colors): +@py_random_state(2) +def strategy_random_sequential(G, colors, seed=None): """Returns a random permutation of the nodes of ``G`` as a list. ``G`` is a NetworkX graph. ``colors`` is ignored. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. """ nodes = list(G) - random.shuffle(nodes) + seed.shuffle(nodes) return nodes @@ -69,7 +71,7 @@ def strategy_smallest_last(G, colors): # Build initial degree list (i.e. the bucket queue data structure) degrees = defaultdict(set) # set(), for fast random-access removals - lbound = float('inf') + lbound = float("inf") for node, d in H.degree(): degrees[d].add(node) lbound = min(lbound, d) # Lower bound on min-degree. 
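equitable_color above can be exercised end to end; a short check, reusing the module's is_equitable helper the same way the tests below do:

import networkx as nx
from networkx.algorithms.coloring.equitable_coloring import is_equitable

G = nx.cycle_graph(4)
d = nx.coloring.equitable_color(G, num_colors=3)

# Proper coloring, and the three color classes differ in size by at most 1.
assert all(d[u] != d[v] for u, v in G.edges)
assert is_equitable(G, d, num_colors=3)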
@@ -139,8 +141,7 @@ def strategy_independent_set(G, colors): while len(remaining_nodes) > 0: nodes = _maximal_independent_set(G.subgraph(remaining_nodes)) remaining_nodes -= nodes - for v in nodes: - yield v + yield from nodes def strategy_connected_sequential_bfs(G, colors): @@ -153,7 +154,7 @@ def strategy_connected_sequential_bfs(G, colors): ``G`` is a NetworkX graph. ``colors`` is ignored. """ - return strategy_connected_sequential(G, colors, 'bfs') + return strategy_connected_sequential(G, colors, "bfs") def strategy_connected_sequential_dfs(G, colors): @@ -166,10 +167,10 @@ def strategy_connected_sequential_dfs(G, colors): ``G`` is a NetworkX graph. ``colors`` is ignored. """ - return strategy_connected_sequential(G, colors, 'dfs') + return strategy_connected_sequential(G, colors, "dfs") -def strategy_connected_sequential(G, colors, traversal='bfs'): +def strategy_connected_sequential(G, colors, traversal="bfs"): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first or depth-first traversal. @@ -183,19 +184,21 @@ def strategy_connected_sequential(G, colors, traversal='bfs'): ``G`` is a NetworkX graph. ``colors`` is ignored. """ - if traversal == 'bfs': + if traversal == "bfs": traverse = nx.bfs_edges - elif traversal == 'dfs': + elif traversal == "dfs": traverse = nx.dfs_edges else: - raise nx.NetworkXError("Please specify one of the strings 'bfs' or" - " 'dfs' for connected sequential ordering") - for component in nx.connected_component_subgraphs(G): + raise nx.NetworkXError( + "Please specify one of the strings 'bfs' or" + " 'dfs' for connected sequential ordering" + ) + for component in nx.connected_components(G): source = arbitrary_element(component) # Yield the source node, then all the nodes in the specified # traversal order. yield source - for (_, end) in traverse(component, source): + for (_, end) in traverse(G.subgraph(component), source): yield end @@ -220,8 +223,9 @@ def strategy_saturation_largest_first(G, colors): else: # Compute the maximum saturation and the set of nodes that # achieve that saturation. - saturation = {v: len(c) for v, c in distinct_colors.items() - if v not in colors} + saturation = { + v: len(c) for v, c in distinct_colors.items() if v not in colors + } # Yield the node with the highest saturation, and break ties by # degree. node = max(saturation, key=lambda v: (saturation[v], G.degree(v))) @@ -234,19 +238,19 @@ def strategy_saturation_largest_first(G, colors): #: Dictionary mapping name of a strategy as a string to the strategy function. 
STRATEGIES = { - 'largest_first': strategy_largest_first, - 'random_sequential': strategy_random_sequential, - 'smallest_last': strategy_smallest_last, - 'independent_set': strategy_independent_set, - 'connected_sequential_bfs': strategy_connected_sequential_bfs, - 'connected_sequential_dfs': strategy_connected_sequential_dfs, - 'connected_sequential': strategy_connected_sequential, - 'saturation_largest_first': strategy_saturation_largest_first, - 'DSATUR': strategy_saturation_largest_first, + "largest_first": strategy_largest_first, + "random_sequential": strategy_random_sequential, + "smallest_last": strategy_smallest_last, + "independent_set": strategy_independent_set, + "connected_sequential_bfs": strategy_connected_sequential_bfs, + "connected_sequential_dfs": strategy_connected_sequential_dfs, + "connected_sequential": strategy_connected_sequential, + "saturation_largest_first": strategy_saturation_largest_first, + "DSATUR": strategy_saturation_largest_first, } -def greedy_color(G, strategy='largest_first', interchange=False): +def greedy_color(G, strategy="largest_first", interchange=False): """Color a graph using various strategies of greedy graph coloring. Attempts to color a graph using as few colors as possible, where no @@ -282,18 +286,17 @@ def greedy_color(G, strategy='largest_first', interchange=False): * ``'connected_sequential_bfs'`` * ``'connected_sequential_dfs'`` * ``'connected_sequential'`` (alias for the previous strategy) - * ``'strategy_saturation_largest_first'`` + * ``'saturation_largest_first'`` * ``'DSATUR'`` (alias for the previous strategy) interchange: bool Will use the color interchange algorithm described by [3]_ if set to ``True``. - Note that ``strategy_saturation_largest_first`` and - ``strategy_independent_set`` do not work with - interchange. Furthermore, if you use interchange with your own - strategy function, you cannot rely on the values in the - ``colors`` argument. + Note that ``saturation_largest_first`` and ``independent_set`` + do not work with interchange. Furthermore, if you use + interchange with your own strategy function, you cannot rely + on the values in the ``colors`` argument. Returns ------- @@ -303,15 +306,15 @@ def greedy_color(G, strategy='largest_first', interchange=False): Examples -------- >>> G = nx.cycle_graph(4) - >>> d = nx.coloring.greedy_color(G, strategy='largest_first') + >>> d = nx.coloring.greedy_color(G, strategy="largest_first") >>> d in [{0: 0, 1: 1, 2: 0, 3: 1}, {0: 1, 1: 0, 2: 1, 3: 0}] True Raises ------ NetworkXPointlessConcept - If ``strategy`` is ``strategy_saturation_largest_first`` or - ``strategy_independent_set`` and ``interchange`` is ``True``. + If ``strategy`` is ``saturation_largest_first`` or + ``independent_set`` and ``interchange`` is ``True``. References ---------- @@ -331,17 +334,17 @@ def greedy_color(G, strategy='largest_first', interchange=False): # Determine the strategy provided by the caller. strategy = STRATEGIES.get(strategy, strategy) if not callable(strategy): - raise nx.NetworkXError('strategy must be callable or a valid string. ' - '{0} not valid.'.format(strategy)) + raise nx.NetworkXError( + "strategy must be callable or a valid string. " f"{strategy} not valid." + ) # Perform some validation on the arguments before executing any # strategy functions. 
if interchange: if strategy is strategy_independent_set: - msg = 'interchange cannot be used with strategy_independent_set' + msg = "interchange cannot be used with independent_set" raise nx.NetworkXPointlessConcept(msg) if strategy is strategy_saturation_largest_first: - msg = ('interchange cannot be used with' - ' strategy_saturation_largest_first') + msg = "interchange cannot be used with" " saturation_largest_first" raise nx.NetworkXPointlessConcept(msg) colors = {} nodes = strategy(G, colors) diff --git a/networkx/algorithms/coloring/greedy_coloring_with_interchange.py b/networkx/algorithms/coloring/greedy_coloring_with_interchange.py index 465590a..1ef226b 100644 --- a/networkx/algorithms/coloring/greedy_coloring_with_interchange.py +++ b/networkx/algorithms/coloring/greedy_coloring_with_interchange.py @@ -1,11 +1,11 @@ import itertools -__all__ = ['greedy_coloring_with_interchange'] +__all__ = ["greedy_coloring_with_interchange"] -class Node(object): +class Node: - __slots__ = ['node_id', 'color', 'adj_list', 'adj_color'] + __slots__ = ["node_id", "color", "adj_list", "adj_color"] def __init__(self, node_id, n): self.node_id = node_id @@ -14,9 +14,10 @@ def __init__(self, node_id, n): self.adj_color = [None for _ in range(n)] def __repr__(self): - return "Node_id: {0}, Color: {1}, Adj_list: ({2}), \ - adj_color: ({3})".format( - self.node_id, self.color, self.adj_list, self.adj_color) + return ( + f"Node_id: {self.node_id}, Color: {self.color}, " + f"Adj_list: ({self.adj_list}), adj_color: ({self.adj_color})" + ) def assign_color(self, adj_entry, color): adj_entry.col_prev = None @@ -46,9 +47,9 @@ def iter_neighbors_color(self, color): adj_color_node = adj_color_node.col_next -class AdjEntry(object): +class AdjEntry: - __slots__ = ['node_id', 'next', 'mate', 'col_next', 'col_prev'] + __slots__ = ["node_id", "next", "mate", "col_next", "col_prev"] def __init__(self, node_id): self.node_id = node_id @@ -58,13 +59,12 @@ def __init__(self, node_id): self.col_prev = None def __repr__(self): - return "Node_id: {0}, Next: ({1}), Mate: ({2}), \ - col_next: ({3}), col_prev: ({4})".format( - self.node_id, - self.next, - self.mate.node_id, - None if self.col_next is None else self.col_next.node_id, - None if self.col_prev is None else self.col_prev.node_id + col_next = None if self.col_next is None else self.col_next.node_id + col_prev = None if self.col_prev is None else self.col_prev.node_id + return ( + f"Node_id: {self.node_id}, Next: ({self.next}), " + f"Mate: ({self.mate.node_id}), " + f"col_next: ({col_next}), col_prev: ({col_prev})" ) @@ -105,8 +105,7 @@ def greedy_coloring_with_interchange(original_graph, nodes): neighbors = graph[node].iter_neighbors() col_used = {graph[adj_node.node_id].color for adj_node in neighbors} col_used.discard(-1) - k1 = next(itertools.dropwhile( - lambda x: x in col_used, itertools.count())) + k1 = next(itertools.dropwhile(lambda x: x in col_used, itertools.count())) # k1 is now the lowest available color if k1 > k: @@ -116,8 +115,7 @@ def greedy_coloring_with_interchange(original_graph, nodes): col2 = -1 while connected and col1 < k: col1 += 1 - neighbor_cols = ( - graph[node].iter_neighbors_color(col1)) + neighbor_cols = graph[node].iter_neighbors_color(col1) col1_adj = [it for it in neighbor_cols] col2 = col1 @@ -129,10 +127,8 @@ def greedy_coloring_with_interchange(original_graph, nodes): while i < len(frontier): search_node = frontier[i] i += 1 - col_opp = ( - col2 if graph[search_node].color == col1 else col1) - neighbor_cols = ( - 
graph[search_node].iter_neighbors_color(col_opp)) + col_opp = col2 if graph[search_node].color == col1 else col1 + neighbor_cols = graph[search_node].iter_neighbors_color(col_opp) for neighbor in neighbor_cols: if neighbor not in visited: @@ -140,18 +136,24 @@ def greedy_coloring_with_interchange(original_graph, nodes): frontier.append(neighbor) # Search if node is not adj to any col2 vertex - connected = len(visited.intersection( - graph[node].iter_neighbors_color(col2))) > 0 + connected = ( + len( + visited.intersection(graph[node].iter_neighbors_color(col2)) + ) + > 0 + ) # If connected is false then we can swap !!! if not connected: # Update all the nodes in the component for search_node in visited: graph[search_node].color = ( - col2 if graph[search_node].color == col1 else col1) + col2 if graph[search_node].color == col1 else col1 + ) col2_adj = graph[search_node].adj_color[col2] - graph[search_node].adj_color[col2] = ( - graph[search_node].adj_color[col1]) + graph[search_node].adj_color[col2] = graph[search_node].adj_color[ + col1 + ] graph[search_node].adj_color[col1] = col2_adj # Update all the neighboring nodes @@ -162,8 +164,7 @@ def greedy_coloring_with_interchange(original_graph, nodes): if graph[adj_node.node_id].color != col_opp: # Direct reference to entry adj_mate = adj_node.mate - graph[adj_node.node_id].clear_color( - adj_mate, col_opp) + graph[adj_node.node_id].clear_color(adj_mate, col_opp) graph[adj_node.node_id].assign_color(adj_mate, col) k1 = col1 diff --git a/networkx/algorithms/coloring/tests/__init__.py b/networkx/algorithms/coloring/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/coloring/tests/test_coloring.py b/networkx/algorithms/coloring/tests/test_coloring.py index 87be286..5d2cb82 100644 --- a/networkx/algorithms/coloring/tests/test_coloring.py +++ b/networkx/algorithms/coloring/tests/test_coloring.py @@ -1,97 +1,436 @@ -# -*- coding: utf-8 -*- """Greedy coloring test suite. 
-Run with nose: nosetests -v test_coloring.py """ -__author__ = "\n".join(["Christian Olsson ", - "Jan Aagaard Meier ", - "Henrik Haugbølle ", - "Jake VanderPlas "]) - import networkx as nx -from nose.tools import * +import pytest + + +is_coloring = nx.algorithms.coloring.equitable_coloring.is_coloring +is_equitable = nx.algorithms.coloring.equitable_coloring.is_equitable + ALL_STRATEGIES = [ - 'largest_first', - 'random_sequential', - 'smallest_last', - 'independent_set', - 'connected_sequential_bfs', - 'connected_sequential_dfs', - 'connected_sequential', - 'saturation_largest_first', - 'DSATUR', + "largest_first", + "random_sequential", + "smallest_last", + "independent_set", + "connected_sequential_bfs", + "connected_sequential_dfs", + "connected_sequential", + "saturation_largest_first", + "DSATUR", ] # List of strategies where interchange=True results in an error -INTERCHANGE_INVALID = [ - 'independent_set', - 'saturation_largest_first', - 'DSATUR' -] +INTERCHANGE_INVALID = ["independent_set", "saturation_largest_first", "DSATUR"] class TestColoring: def test_basic_cases(self): def check_basic_case(graph_func, n_nodes, strategy, interchange): graph = graph_func() - coloring = nx.coloring.greedy_color(graph, - strategy=strategy, - interchange=interchange) - assert_true(verify_length(coloring, n_nodes)) - assert_true(verify_coloring(graph, coloring)) + coloring = nx.coloring.greedy_color( + graph, strategy=strategy, interchange=interchange + ) + assert verify_length(coloring, n_nodes) + assert verify_coloring(graph, coloring) for graph_func, n_nodes in BASIC_TEST_CASES.items(): for interchange in [True, False]: for strategy in ALL_STRATEGIES: - if interchange and (strategy in INTERCHANGE_INVALID): - continue - yield (check_basic_case, graph_func, - n_nodes, strategy, interchange) + check_basic_case(graph_func, n_nodes, strategy, False) + if strategy not in INTERCHANGE_INVALID: + check_basic_case(graph_func, n_nodes, strategy, True) def test_special_cases(self): def check_special_case(strategy, graph_func, interchange, colors): graph = graph_func() - coloring = nx.coloring.greedy_color(graph, - strategy=strategy, - interchange=interchange) - if not hasattr(colors, '__len__'): + coloring = nx.coloring.greedy_color( + graph, strategy=strategy, interchange=interchange + ) + if not hasattr(colors, "__len__"): colors = [colors] - assert_true(any(verify_length(coloring, n_colors) - for n_colors in colors)) - assert_true(verify_coloring(graph, coloring)) + assert any(verify_length(coloring, n_colors) for n_colors in colors) + assert verify_coloring(graph, coloring) for strategy, arglist in SPECIAL_TEST_CASES.items(): for args in arglist: - yield (check_special_case, strategy, args[0], args[1], args[2]) + check_special_case(strategy, args[0], args[1], args[2]) def test_interchange_invalid(self): graph = one_node_graph() - - def check_raises(strategy): - assert_raises(nx.NetworkXPointlessConcept, - nx.coloring.greedy_color, - graph, strategy=strategy, interchange=True) - for strategy in INTERCHANGE_INVALID: - yield check_raises, strategy + pytest.raises( + nx.NetworkXPointlessConcept, + nx.coloring.greedy_color, + graph, + strategy=strategy, + interchange=True, + ) def test_bad_inputs(self): graph = one_node_graph() - assert_raises(nx.NetworkXError, nx.coloring.greedy_color, - graph, strategy='invalid strategy') + pytest.raises( + nx.NetworkXError, + nx.coloring.greedy_color, + graph, + strategy="invalid strategy", + ) def test_strategy_as_function(self): graph = lf_shc() - colors_1 = 
nx.coloring.greedy_color(graph, - 'largest_first') - colors_2 = nx.coloring.greedy_color(graph, - nx.coloring.strategy_largest_first) - assert_equal(colors_1, colors_2) - + colors_1 = nx.coloring.greedy_color(graph, "largest_first") + colors_2 = nx.coloring.greedy_color(graph, nx.coloring.strategy_largest_first) + assert colors_1 == colors_2 -############################## Utility functions ############################## + def test_seed_argument(self): + graph = lf_shc() + rs = nx.coloring.strategy_random_sequential + c1 = nx.coloring.greedy_color(graph, lambda g, c: rs(g, c, seed=1)) + for u, v in graph.edges: + assert c1[u] != c1[v] + + def test_is_coloring(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2)]) + coloring = {0: 0, 1: 1, 2: 0} + assert is_coloring(G, coloring) + + coloring[0] = 1 + assert not is_coloring(G, coloring) + assert not is_equitable(G, coloring) + + def test_is_equitable(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2)]) + coloring = {0: 0, 1: 1, 2: 0} + assert is_equitable(G, coloring) + + G.add_edges_from([(2, 3), (2, 4), (2, 5)]) + coloring[3] = 1 + coloring[4] = 1 + coloring[5] = 1 + assert is_coloring(G, coloring) + assert not is_equitable(G, coloring) + + def test_num_colors(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (0, 3)]) + pytest.raises(nx.NetworkXAlgorithmError, nx.coloring.equitable_color, G, 2) + + def test_equitable_color(self): + G = nx.fast_gnp_random_graph(n=10, p=0.2, seed=42) + coloring = nx.coloring.equitable_color(G, max_degree(G) + 1) + assert is_equitable(G, coloring) + + def test_equitable_color_empty(self): + G = nx.empty_graph() + coloring = nx.coloring.equitable_color(G, max_degree(G) + 1) + assert is_equitable(G, coloring) + + def test_equitable_color_large(self): + G = nx.fast_gnp_random_graph(100, 0.1, seed=42) + coloring = nx.coloring.equitable_color(G, max_degree(G) + 1) + assert is_equitable(G, coloring, num_colors=max_degree(G) + 1) + + def test_case_V_plus_not_in_A_cal(self): + # Hand crafted case to avoid the easy case. + L = { + 0: [2, 5], + 1: [3, 4], + 2: [0, 8], + 3: [1, 7], + 4: [1, 6], + 5: [0, 6], + 6: [4, 5], + 7: [3], + 8: [2], + } + + F = { + # Color 0 + 0: 0, + 1: 0, + # Color 1 + 2: 1, + 3: 1, + 4: 1, + 5: 1, + # Color 2 + 6: 2, + 7: 2, + 8: 2, + } + + C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F) + N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C) + H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L + ) + check_state(L=L, N=N, H=H, F=F, C=C) + + def test_cast_no_solo(self): + L = { + 0: [8, 9], + 1: [10, 11], + 2: [8], + 3: [9], + 4: [10, 11], + 5: [8], + 6: [9], + 7: [10, 11], + 8: [0, 2, 5], + 9: [0, 3, 6], + 10: [1, 4, 7], + 11: [1, 4, 7], + } + + F = {0: 0, 1: 0, 2: 2, 3: 2, 4: 2, 5: 3, 6: 3, 7: 3, 8: 1, 9: 1, 10: 1, 11: 1} + + C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F) + N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C) + H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L + ) + check_state(L=L, N=N, H=H, F=F, C=C) + + def test_hard_prob(self): + # Tests for two levels of recursion. 
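The equitable-coloring tests above and below lean on the Hajnal–Szemerédi theorem: an equitable coloring with ``max_degree(G) + 1`` colors always exists, which is why that count is passed throughout. A minimal sketch of the public entry point (graph and color count are illustrative)::

    from collections import Counter

    import networkx as nx

    G = nx.cycle_graph(10)  # maximum degree 2
    coloring = nx.coloring.equitable_color(G, 3)  # 3 = max degree + 1
    sizes = Counter(coloring.values()).values()
    assert max(sizes) - min(sizes) <= 1  # color classes balanced within one
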
+ num_colors, s = 5, 5 + + G = nx.Graph() + G.add_edges_from( + [ + (0, 10), + (0, 11), + (0, 12), + (0, 23), + (10, 4), + (10, 9), + (10, 20), + (11, 4), + (11, 8), + (11, 16), + (12, 9), + (12, 22), + (12, 23), + (23, 7), + (1, 17), + (1, 18), + (1, 19), + (1, 24), + (17, 5), + (17, 13), + (17, 22), + (18, 5), + (19, 5), + (19, 6), + (19, 8), + (24, 7), + (24, 16), + (2, 4), + (2, 13), + (2, 14), + (2, 15), + (4, 6), + (13, 5), + (13, 21), + (14, 6), + (14, 15), + (15, 6), + (15, 21), + (3, 16), + (3, 20), + (3, 21), + (3, 22), + (16, 8), + (20, 8), + (21, 9), + (22, 7), + ] + ) + F = {node: node // s for node in range(num_colors * s)} + F[s - 1] = num_colors - 1 + + params = make_params_from_graph(G=G, F=F) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=num_colors - 1, **params + ) + check_state(**params) + + def test_hardest_prob(self): + # Tests for two levels of recursion. + num_colors, s = 10, 4 + + G = nx.Graph() + G.add_edges_from( + [ + (0, 19), + (0, 24), + (0, 29), + (0, 30), + (0, 35), + (19, 3), + (19, 7), + (19, 9), + (19, 15), + (19, 21), + (19, 24), + (19, 30), + (19, 38), + (24, 5), + (24, 11), + (24, 13), + (24, 20), + (24, 30), + (24, 37), + (24, 38), + (29, 6), + (29, 10), + (29, 13), + (29, 15), + (29, 16), + (29, 17), + (29, 20), + (29, 26), + (30, 6), + (30, 10), + (30, 15), + (30, 22), + (30, 23), + (30, 39), + (35, 6), + (35, 9), + (35, 14), + (35, 18), + (35, 22), + (35, 23), + (35, 25), + (35, 27), + (1, 20), + (1, 26), + (1, 31), + (1, 34), + (1, 38), + (20, 4), + (20, 8), + (20, 14), + (20, 18), + (20, 28), + (20, 33), + (26, 7), + (26, 10), + (26, 14), + (26, 18), + (26, 21), + (26, 32), + (26, 39), + (31, 5), + (31, 8), + (31, 13), + (31, 16), + (31, 17), + (31, 21), + (31, 25), + (31, 27), + (34, 7), + (34, 8), + (34, 13), + (34, 18), + (34, 22), + (34, 23), + (34, 25), + (34, 27), + (38, 4), + (38, 9), + (38, 12), + (38, 14), + (38, 21), + (38, 27), + (2, 3), + (2, 18), + (2, 21), + (2, 28), + (2, 32), + (2, 33), + (2, 36), + (2, 37), + (2, 39), + (3, 5), + (3, 9), + (3, 13), + (3, 22), + (3, 23), + (3, 25), + (3, 27), + (18, 6), + (18, 11), + (18, 15), + (18, 39), + (21, 4), + (21, 10), + (21, 14), + (21, 36), + (28, 6), + (28, 10), + (28, 14), + (28, 16), + (28, 17), + (28, 25), + (28, 27), + (32, 5), + (32, 10), + (32, 12), + (32, 16), + (32, 17), + (32, 22), + (32, 23), + (33, 7), + (33, 10), + (33, 12), + (33, 16), + (33, 17), + (33, 25), + (33, 27), + (36, 5), + (36, 8), + (36, 15), + (36, 16), + (36, 17), + (36, 25), + (36, 27), + (37, 5), + (37, 11), + (37, 15), + (37, 16), + (37, 17), + (37, 22), + (37, 23), + (39, 7), + (39, 8), + (39, 15), + (39, 22), + (39, 23), + ] + ) + F = {node: node // s for node in range(num_colors * s)} + F[s - 1] = num_colors - 1 # V- = 0, V+ = num_colors - 1 + + params = make_params_from_graph(G=G, F=F) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=num_colors - 1, **params + ) + check_state(**params) + + +# ############################ Utility functions ############################ def verify_coloring(graph, coloring): for node in graph.nodes(): if node not in coloring: @@ -122,7 +461,8 @@ def dict_to_sets(colors): return sets -############################## Graph Generation ############################## + +# ############################ Graph Generation ############################ def empty_graph(): @@ -151,322 +491,303 @@ def three_node_clique(): def disconnected(): graph = nx.Graph() - graph.add_edges_from([ - (1, 2), - (2, 3), - (4, 5), - (5, 6) - 
]) + graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)]) return graph def rs_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4]) - graph.add_edges_from([ - (1, 2), - (2, 3), - (3, 4) - ]) + graph.add_edges_from([(1, 2), (2, 3), (3, 4)]) return graph def slf_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (2, 3), - (2, 7), - (3, 4), - (3, 7), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)] + ) return graph def slf_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 4), - (1, 5), - (2, 3), - (2, 4), - (2, 6), - (5, 7), - (5, 8), - (6, 7), - (6, 8), - (7, 8) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 4), + (2, 6), + (5, 7), + (5, 8), + (6, 7), + (6, 8), + (7, 8), + ] + ) return graph def lf_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (6, 1), - (1, 4), - (4, 3), - (3, 2), - (2, 5) - ]) + graph.add_edges_from([(6, 1), (1, 4), (4, 3), (3, 2), (2, 5)]) return graph def lf_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 7), - (1, 6), - (1, 3), - (1, 4), - (7, 2), - (2, 6), - (2, 3), - (2, 5), - (5, 3), - (5, 4), - (4, 3) - ]) + graph.add_edges_from( + [ + (1, 7), + (1, 6), + (1, 3), + (1, 4), + (7, 2), + (2, 6), + (2, 3), + (2, 5), + (5, 3), + (5, 4), + (4, 3), + ] + ) return graph def sl_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (2, 3), - (1, 4), - (2, 5), - (3, 6), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 3), (2, 3), (1, 4), (2, 5), (3, 6), (4, 5), (4, 6), (5, 6)] + ) return graph def sl_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 5), - (1, 7), - (2, 3), - (2, 4), - (2, 8), - (8, 4), - (8, 6), - (8, 7), - (7, 5), - (7, 6), - (3, 4), - (4, 6), - (6, 5), - (5, 3) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 5), + (1, 7), + (2, 3), + (2, 4), + (2, 8), + (8, 4), + (8, 6), + (8, 7), + (7, 5), + (7, 6), + (3, 4), + (4, 6), + (6, 5), + (5, 3), + ] + ) return graph def gis_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4]) - graph.add_edges_from([ - (1, 2), - (2, 3), - (3, 4) - ]) + graph.add_edges_from([(1, 2), (2, 3), (3, 4)]) return graph def gis_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (1, 5), - (2, 5), - (3, 6), - (4, 6), - (5, 6) - ]) + graph.add_edges_from([(1, 5), (2, 5), (3, 6), (4, 6), (5, 6)]) return graph def cs_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (2, 3), - (2, 4), - (2, 5), - (3, 4), - (4, 5) - ]) + graph.add_edges_from([(1, 2), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (4, 5)]) return graph def rsi_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (2, 3), - (3, 4), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (3, 4), (4, 5), (4, 6), (5, 6)] + ) return graph def lfi_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (2, 3), - (2, 7), - (3, 4), - (3, 7), - (4, 5), - (4, 6), - 
(5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)] + ) return graph def lfi_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (1, 7), - (2, 3), - (2, 8), - (2, 9), - (3, 4), - (3, 8), - (3, 9), - (4, 5), - (4, 6), - (4, 7), - (5, 6) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 5), + (1, 6), + (1, 7), + (2, 3), + (2, 8), + (2, 9), + (3, 4), + (3, 8), + (3, 9), + (4, 5), + (4, 6), + (4, 7), + (5, 6), + ] + ) return graph def sli_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 5), - (1, 7), - (2, 3), - (2, 6), - (3, 4), - (4, 5), - (4, 6), - (5, 7), - (6, 7) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 5), + (1, 7), + (2, 3), + (2, 6), + (3, 4), + (4, 5), + (4, 6), + (5, 7), + (6, 7), + ] + ) return graph def sli_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 4), - (1, 5), - (2, 3), - (2, 7), - (2, 8), - (2, 9), - (3, 6), - (3, 7), - (3, 9), - (4, 5), - (4, 6), - (4, 8), - (4, 9), - (5, 6), - (5, 7), - (5, 8), - (6, 7), - (6, 9), - (7, 8), - (8, 9) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 7), + (2, 8), + (2, 9), + (3, 6), + (3, 7), + (3, 9), + (4, 5), + (4, 6), + (4, 8), + (4, 9), + (5, 6), + (5, 7), + (5, 8), + (6, 7), + (6, 9), + (7, 8), + (8, 9), + ] + ) return graph -#--------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Basic tests for all strategies # For each basic graph function, specify the number of expected colors. -BASIC_TEST_CASES = {empty_graph: 0, - one_node_graph: 1, - two_node_graph: 2, - disconnected: 2, - three_node_clique: 3} +BASIC_TEST_CASES = { + empty_graph: 0, + one_node_graph: 1, + two_node_graph: 2, + disconnected: 2, + three_node_clique: 3, +} -#--------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Special test cases. 
Each strategy has a list of tuples of the form # (graph function, interchange, valid # of colors) SPECIAL_TEST_CASES = { - 'random_sequential': [ + "random_sequential": [ (rs_shc, False, (2, 3)), (rs_shc, True, 2), - (rsi_shc, True, (3, 4))], - 'saturation_largest_first': [ - (slf_shc, False, (3, 4)), - (slf_hc, False, 4)], - 'largest_first': [ + (rsi_shc, True, (3, 4)), + ], + "saturation_largest_first": [(slf_shc, False, (3, 4)), (slf_hc, False, 4)], + "largest_first": [ (lf_shc, False, (2, 3)), (lf_hc, False, 4), (lf_shc, True, 2), (lf_hc, True, 3), (lfi_shc, True, (3, 4)), - (lfi_hc, True, 4)], - 'smallest_last': [ + (lfi_hc, True, 4), + ], + "smallest_last": [ (sl_shc, False, (3, 4)), (sl_hc, False, 5), (sl_shc, True, 3), (sl_hc, True, 4), (sli_shc, True, (3, 4)), - (sli_hc, True, 5)], - 'independent_set': [ - (gis_shc, False, (2, 3)), - (gis_hc, False, 3)], - 'connected_sequential': [ - (cs_shc, False, (3, 4)), - (cs_shc, True, 3)], - 'connected_sequential_dfs': [ - (cs_shc, False, (3, 4))], + (sli_hc, True, 5), + ], + "independent_set": [(gis_shc, False, (2, 3)), (gis_hc, False, 3)], + "connected_sequential": [(cs_shc, False, (3, 4)), (cs_shc, True, 3)], + "connected_sequential_dfs": [(cs_shc, False, (3, 4))], } + + +# -------------------------------------------------------------------------- +# Helper functions to test +# (graph function, interchange, valid # of colors) + + +def check_state(L, N, H, F, C): + s = len(C[0]) + num_colors = len(C.keys()) + + assert all(u in L[v] for u in L.keys() for v in L[u]) + assert all(F[u] != F[v] for u in L.keys() for v in L[u]) + assert all(len(L[u]) < num_colors for u in L.keys()) + assert all(len(C[x]) == s for x in C) + assert all(H[(c1, c2)] >= 0 for c1 in C.keys() for c2 in C.keys()) + assert all(N[(u, F[u])] == 0 for u in F.keys()) + + +def max_degree(G): + """Get the maximum degree of any node in G.""" + return max([G.degree(node) for node in G.nodes]) if len(G.nodes) > 0 else 0 + + +def make_params_from_graph(G, F): + """Returns {N, L, H, C} from the given graph.""" + num_nodes = len(G) + L = {u: [] for u in range(num_nodes)} + for (u, v) in G.edges: + L[u].append(v) + L[v].append(u) + + C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F) + N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C) + H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) + + return {"N": N, "F": F, "C": C, "H": H, "L": L} diff --git a/networkx/algorithms/communicability_alg.py b/networkx/algorithms/communicability_alg.py index 1650d6f..c468717 100644 --- a/networkx/algorithms/communicability_alg.py +++ b/networkx/algorithms/communicability_alg.py @@ -1,30 +1,19 @@ -# -*- coding: utf-8 -*- """ Communicability. """ -# Copyright (C) 2011 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Previously coded as communicability centrality -# All rights reserved. -# BSD license. import networkx as nx -from networkx.utils import * -__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', - 'Franck Kalala (franckkalala@yahoo.fr']) -__all__ = ['communicability', - 'communicability_exp', - ] +from networkx.utils import not_implemented_for +__all__ = ["communicability", "communicability_exp"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') + +@not_implemented_for("directed") +@not_implemented_for("multigraph") def communicability(G): - r"""Return communicability between all pairs of nodes in G. + r"""Returns communicability between all pairs of nodes in G. 
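The summary line above can be made concrete: communicability is the matrix exponential of the 0-1 adjacency matrix, and the function below computes it spectrally as a sum over eigenpairs. The two routes agree; a quick cross-check on an illustrative graph::

    import networkx as nx
    import numpy as np
    import scipy.linalg

    G = nx.path_graph(4)
    A = nx.to_numpy_array(G)
    expA = scipy.linalg.expm(A)  # direct matrix exponential
    c = nx.communicability(G)    # spectral sum over eigenpairs of A
    assert np.allclose(expA[0, 2], c[0][2])
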
The communicability between pairs of nodes in G is the sum of - closed walks of different lengths starting at node u and ending at node v. + walks of different lengths starting at node u and ending at node v. Parameters ---------- @@ -73,13 +62,13 @@ def communicability(G): Examples -------- - >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) + >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)]) >>> c = nx.communicability(G) """ import numpy - import scipy.linalg + nodelist = list(G) # ordering of nodes in matrix - A = nx.to_numpy_matrix(G, nodelist) + A = nx.to_numpy_array(G, nodelist) # convert to 0-1 matrix A[A != 0.0] = 1 w, vec = numpy.linalg.eigh(A) @@ -94,18 +83,18 @@ def communicability(G): p = mapping[u] q = mapping[v] for j in range(len(nodelist)): - s += vec[:, j][p, 0] * vec[:, j][q, 0] * expw[j] + s += vec[:, j][p] * vec[:, j][q] * expw[j] c[u][v] = float(s) return c -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def communicability_exp(G): - r"""Return communicability between all pairs of nodes in G. + r"""Returns communicability between all pairs of nodes in G. Communicability between pair of node (u,v) of node in G is the sum of - closed walks of different lengths starting at node u and ending at node v. + walks of different lengths starting at node u and ending at node v. Parameters ---------- @@ -151,16 +140,17 @@ def communicability_exp(G): Examples -------- - >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) + >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)]) >>> c = nx.communicability_exp(G) """ import scipy.linalg + nodelist = list(G) # ordering of nodes in matrix - A = nx.to_numpy_matrix(G, nodelist) + A = nx.to_numpy_array(G, nodelist) # convert to 0-1 matrix A[A != 0.0] = 1 # communicability matrix - expA = scipy.linalg.expm(A.A) + expA = scipy.linalg.expm(A) mapping = dict(zip(nodelist, range(len(nodelist)))) c = {} for u in G: @@ -168,17 +158,3 @@ def communicability_exp(G): for v in G: c[u][v] = float(expA[mapping[u], mapping[v]]) return c - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/community/__init__.py b/networkx/algorithms/community/__init__.py index 8d7ed05..4edfdff 100644 --- a/networkx/algorithms/community/__init__.py +++ b/networkx/algorithms/community/__init__.py @@ -5,7 +5,6 @@ the :mod:`networkx.algorithms.community` module, then accessing the functions as attributes of ``community``. 
For example:: - >>> import networkx as nx >>> from networkx.algorithms import community >>> G = nx.barbell_graph(5, 1) >>> communities_generator = community.girvan_newman(G) @@ -15,12 +14,12 @@ [[0, 1, 2, 3, 4], [5], [6, 7, 8, 9, 10]] """ -from networkx.algorithms.community.asyn_fluidc import * +from networkx.algorithms.community.asyn_fluid import * from networkx.algorithms.community.centrality import * -from networkx.algorithms.community.community_generators import * from networkx.algorithms.community.kclique import * from networkx.algorithms.community.kernighan_lin import * from networkx.algorithms.community.label_propagation import * +from networkx.algorithms.community.lukes import * from networkx.algorithms.community.modularity_max import * from networkx.algorithms.community.quality import * from networkx.algorithms.community.community_utils import * diff --git a/networkx/algorithms/community/asyn_fluidc.py b/networkx/algorithms/community/asyn_fluid.py similarity index 73% rename from networkx/algorithms/community/asyn_fluidc.py rename to networkx/algorithms/community/asyn_fluid.py index 06c8fa1..e57078e 100644 --- a/networkx/algorithms/community/asyn_fluidc.py +++ b/networkx/algorithms/community/asyn_fluid.py @@ -1,27 +1,23 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2017-2018 -# All rights reserved. -# BSD license. -# Author: Ferran Parés """Asynchronous Fluid Communities algorithm for community detection.""" from collections import Counter -import random from networkx.exception import NetworkXError from networkx.algorithms.components import is_connected from networkx.utils import groups -from networkx.utils.decorators import not_implemented_for +from networkx.utils import not_implemented_for +from networkx.utils import py_random_state -__all__ = ['asyn_fluidc'] +__all__ = ["asyn_fluidc"] -@not_implemented_for('directed', 'multigraph') -def asyn_fluidc(G, k, max_iter=100): +@py_random_state(3) +@not_implemented_for("directed", "multigraph") +def asyn_fluidc(G, k, max_iter=100, seed=None): """Returns communities in `G` as detected by Fluid Communities algorithm. The asynchronous fluid communities algorithm is described in [1]_. The algorithm is based on the simple idea of fluids interacting - in an environment, expanding and pushing each other. It's initialization is + in an environment, expanding and pushing each other. Its initialization is random, so found communities may vary on different executions. The algorithm proceeds as follows. First each of the initial k communities @@ -47,7 +43,11 @@ def asyn_fluidc(G, k, max_iter=100): The number of communities to be found. max_iter : integer - The number of maximum iterations allowed. By default 15. + The number of maximum iterations allowed. By default 100. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
Returns ------- @@ -66,18 +66,17 @@ def asyn_fluidc(G, k, max_iter=100): """ # Initial checks if not isinstance(k, int): - raise NetworkXError("k muts be an integer.") + raise NetworkXError("k must be an integer.") if not k > 0: - raise NetworkXError("k muts be greater than 0.") + raise NetworkXError("k must be greater than 0.") if not is_connected(G): - raise NetworkXError("Fluid Communities can only be run on connected\ - Graphs.") + raise NetworkXError("Fluid Communities require connected Graphs.") if len(G) < k: - raise NetworkXError("k must be greater than graph size.") + raise NetworkXError("k cannot be bigger than the number of nodes.") # Initialization max_density = 1.0 vertices = list(G) - random.shuffle(vertices) + seed.shuffle(vertices) communities = {n: i for i, n in enumerate(vertices[:k])} density = {} com_to_numvertices = {} @@ -92,29 +91,30 @@ def asyn_fluidc(G, k, max_iter=100): iter_count += 1 # Loop over all vertices in graph in a random order vertices = list(G) - random.shuffle(vertices) + seed.shuffle(vertices) for vertex in vertices: # Updating rule com_counter = Counter() # Take into account self vertex community try: - com_counter.update({communities[vertex]: - density[communities[vertex]]}) + com_counter.update({communities[vertex]: density[communities[vertex]]}) except KeyError: pass # Gather neighbour vertex communities for v in G[vertex]: try: - com_counter.update({communities[v]: - density[communities[v]]}) + com_counter.update({communities[v]: density[communities[v]]}) except KeyError: continue # Check which is the community with highest density new_com = -1 if len(com_counter.keys()) > 0: max_freq = max(com_counter.values()) - best_communities = [com for com, freq in com_counter.items() - if (max_freq - freq) < 0.0001] + best_communities = [ + com + for com, freq in com_counter.items() + if (max_freq - freq) < 0.0001 + ] # If actual vertex com in best communities, it is preserved try: if communities[vertex] in best_communities: @@ -126,19 +126,21 @@ def asyn_fluidc(G, k, max_iter=100): # Set flag of non-convergence cont = True # Randomly chose a new community from candidates - new_com = random.choice(best_communities) + new_com = seed.choice(best_communities) # Update previous community status try: com_to_numvertices[communities[vertex]] -= 1 - density[communities[vertex]] = max_density / \ - com_to_numvertices[communities[vertex]] + density[communities[vertex]] = ( + max_density / com_to_numvertices[communities[vertex]] + ) except KeyError: pass # Update new community status communities[vertex] = new_com com_to_numvertices[communities[vertex]] += 1 - density[communities[vertex]] = max_density / \ - com_to_numvertices[communities[vertex]] + density[communities[vertex]] = ( + max_density / com_to_numvertices[communities[vertex]] + ) # If maximum iterations reached --> output actual results if iter_count > max_iter: break diff --git a/networkx/algorithms/community/centrality.py b/networkx/algorithms/community/centrality.py index 036b4a2..d8e5964 100644 --- a/networkx/algorithms/community/centrality.py +++ b/networkx/algorithms/community/centrality.py @@ -1,17 +1,8 @@ -# -*- coding: utf-8 -*- -# centrality.py - functions for computing communities using centrality notions -# -# Copyright 2015, 2016 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
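Everything in the fluid-communities rewrite above is reached through ``asyn_fluidc``; a minimal, seeded invocation (graph and ``k`` are illustrative, and results still vary between seeds)::

    import networkx as nx
    from networkx.algorithms.community import asyn_fluidc

    G = nx.karate_club_graph()  # connected and undirected, as required
    communities = list(asyn_fluidc(G, k=2, seed=7))
    assert sum(len(c) for c in communities) == len(G)  # a partition of G
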
"""Functions for computing communities based on centrality notions.""" import networkx as nx -__all__ = ['girvan_newman'] +__all__ = ["girvan_newman"] def girvan_newman(G, most_valuable_edge=None): @@ -53,7 +44,7 @@ def girvan_newman(G, most_valuable_edge=None): >>> k = 2 >>> comp = girvan_newman(G) >>> for communities in itertools.islice(comp, k): - ... print(tuple(sorted(c) for c in communities)) # doctest: +SKIP + ... print(tuple(sorted(c) for c in communities)) # doctest: +SKIP ... ([0, 1, 2, 3], [4, 5, 6, 7]) ([0, 1], [2, 3], [4, 5, 6, 7]) @@ -67,7 +58,7 @@ def girvan_newman(G, most_valuable_edge=None): >>> comp = girvan_newman(G) >>> limited = itertools.takewhile(lambda c: len(c) <= k, comp) >>> for communities in limited: - ... print(tuple(sorted(c) for c in communities)) # doctest: +SKIP + ... print(tuple(sorted(c) for c in communities)) # doctest: +SKIP ... ([0, 1, 2, 3], [4, 5, 6, 7]) ([0, 1], [2, 3], [4, 5, 6, 7]) @@ -78,9 +69,9 @@ def girvan_newman(G, most_valuable_edge=None): >>> from operator import itemgetter >>> G = nx.path_graph(10) >>> edges = G.edges() - >>> nx.set_edge_attributes(G, {(u, v): v for u, v in edges}, 'weight') + >>> nx.set_edge_attributes(G, {(u, v): v for u, v in edges}, "weight") >>> def heaviest(G): - ... u, v, w = max(G.edges(data='weight'), key=itemgetter(2)) + ... u, v, w = max(G.edges(data="weight"), key=itemgetter(2)) ... return (u, v) ... >>> comp = girvan_newman(G, most_valuable_edge=heaviest) @@ -92,7 +83,7 @@ def girvan_newman(G, most_valuable_edge=None): >>> from networkx import edge_betweenness_centrality as betweenness >>> def most_central_edge(G): - ... centrality = betweenness(G, weight='weight') + ... centrality = betweenness(G, weight="weight") ... return max(centrality, key=centrality.get) ... >>> G = nx.path_graph(10) @@ -136,6 +127,7 @@ def girvan_newman(G, most_valuable_edge=None): # If no function is provided for computing the most valuable edge, # use the edge betweenness centrality. if most_valuable_edge is None: + def most_valuable_edge(G): """Returns the edge with the highest betweenness centrality in the graph `G`. @@ -145,6 +137,7 @@ def most_valuable_edge(G): # dictionary will never be empty. betweenness = nx.edge_betweenness_centrality(G) return max(betweenness, key=betweenness.get) + # The copy of G here must include the edge weight data. g = G.copy().to_undirected() # Self-loops must be removed because their removal has no effect on diff --git a/networkx/algorithms/community/community_generators.py b/networkx/algorithms/community/community_generators.py deleted file mode 100644 index 15d8db5..0000000 --- a/networkx/algorithms/community/community_generators.py +++ /dev/null @@ -1,409 +0,0 @@ -# generators.py - functions for generating graphs with community structure -# -# Copyright 2011 Ben Edwards . -# Copyright 2011 Aric Hagberg -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -"""Functions for generating graphs with community structure.""" -from __future__ import division - -import random - -# HACK In order to accommodate both SciPy and non-SciPy implementations, -# we need to wrap the SciPy implementation of the zeta function with an -# extra parameter, `tolerance`, which will be ignored. 
-try: - from scipy.special import zeta as _zeta - - def zeta(x, q, tolerance): - return _zeta(x, q) -except ImportError: - def zeta(x, q, tolerance): - """The Hurwitz zeta function, or the Riemann zeta function of two - arguments. - - ``x`` must be greater than one and ``q`` must be positive. - - This function repeatedly computes subsequent partial sums until - convergence, as decided by ``tolerance``. - - """ - z = 0 - z_prev = -float('inf') - k = 0 - while abs(z - z_prev) > tolerance: - z_prev = z - z += 1 / ((k + q) ** x) - k += 1 - return z - -import networkx as nx - -__all__ = ['LFR_benchmark_graph'] - - -def _zipf_rv_below(gamma, xmin, threshold): - """Returns a random value chosen from the Zipf distribution, - guaranteed to be less than or equal to the value ``threshold``. - - Repeatedly draws values from the Zipf distribution until the - threshold is met, then returns that value. - - """ - result = nx.utils.zipf_rv(gamma, xmin) - while result > threshold: - result = nx.utils.zipf_rv(gamma, xmin) - return result - - -def _powerlaw_sequence(gamma, low, high, condition, length, max_iters): - """Returns a list of numbers obeying a power law distribution, with - some additional restrictions. - - ``gamma`` and ``low`` are the parameters for the Zipf distribution. - - ``high`` is the maximum allowed value for values draw from the Zipf - distribution. For more information, see :func:`_zipf_rv_below`. - - ``condition`` and ``length`` are Boolean-valued functions on - lists. While generating the list, random values are drawn and - appended to the list until ``length`` is satisfied by the created - list. Once ``condition`` is satisfied, the sequence generated in - this way is returned. - - ``max_iters`` indicates the number of times to generate a list - satisfying ``length``. If the number of iterations exceeds this - value, :exc:`~networkx.exception.ExceededMaxIterations` is raised. - - """ - for i in range(max_iters): - seq = [] - while not length(seq): - seq.append(_zipf_rv_below(gamma, low, high)) - if condition(seq): - return seq - raise nx.ExceededMaxIterations("Could not create power law sequence") - - -# TODO Needs documentation. -def _generate_min_degree(gamma, average_degree, max_degree, tolerance, - max_iters): - """Returns a minimum degree from the given average degree.""" - min_deg_top = max_degree - min_deg_bot = 1 - min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot - itrs = 0 - mid_avg_deg = 0 - while abs(mid_avg_deg - average_degree) > tolerance: - if itrs > max_iters: - raise nx.ExceededMaxIterations("Could not match average_degree") - mid_avg_deg = 0 - for x in range(int(min_deg_mid), max_degree + 1): - mid_avg_deg += (x ** (-gamma + 1)) / zeta(gamma, min_deg_mid, - tolerance) - if mid_avg_deg > average_degree: - min_deg_top = min_deg_mid - min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot - else: - min_deg_bot = min_deg_mid - min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot - itrs += 1 - # return int(min_deg_mid + 0.5) - return round(min_deg_mid) - - -def _generate_communities(degree_sequence, community_sizes, mu, max_iters): - """Returns a list of sets, each of which represents a community in - the graph. - - ``degree_sequence`` is the degree sequence that must be met by the - graph. - - ``community_sizes`` is the community size distribution that must be - met by the generated list of sets. - - ``mu`` is a float in the interval [0, 1] indicating the fraction of - intra-community edges incident to each node. 
- - ``max_iters`` is the number of times to try to add a node to a - community. This must be greater than the length of - ``degree_sequence``, otherwise this function will always fail. If - the number of iterations exceeds this value, - :exc:`~networkx.exception.ExceededMaxIterations` is raised. - - The communities returned by this are sets of integers in the set {0, - ..., *n* - 1}, where *n* is the length of ``degree_sequence``. - - """ - # This assumes the nodes in the graph will be natural numbers. - result = [set() for _ in community_sizes] - n = len(degree_sequence) - free = list(range(n)) - for i in range(max_iters): - v = free.pop() - c = random.choice(range(len(community_sizes))) - # s = int(degree_sequence[v] * (1 - mu) + 0.5) - s = round(degree_sequence[v] * (1 - mu)) - # If the community is large enough, add the node to the chosen - # community. Otherwise, return it to the list of unaffiliated - # nodes. - if s < community_sizes[c]: - result[c].add(v) - else: - free.append(v) - # If the community is too big, remove a node from it. - if len(result[c]) > community_sizes[c]: - free.append(result[c].pop()) - if not free: - return result - msg = 'Could not assign communities; try increasing min_community' - raise nx.ExceededMaxIterations(msg) - - -def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None, - min_degree=None, max_degree=None, min_community=None, - max_community=None, tol=1.0e-7, max_iters=500, - seed=None): - r"""Returns the LFR benchmark graph for testing community-finding - algorithms. - - This algorithm proceeds as follows: - - 1) Find a degree sequence with a power law distribution, and minimum - value ``min_degree``, which has approximate average degree - ``average_degree``. This is accomplished by either - - a) specifying ``min_degree`` and not ``average_degree``, - b) specifying ``average_degree`` and not ``min_degree``, in which - case a suitable minimum degree will be found. - - ``max_degree`` can also be specified, otherwise it will be set to - ``n``. Each node *u* will have `\mu \mathrm{deg}(u)` edges - joining it to nodes in communities other than its own and `(1 - - \mu) \mathrm{deg}(u)` edges joining it to nodes in its own - community. - 2) Generate community sizes according to a power law distribution - with exponent ``tau2``. If ``min_community`` and - ``max_community`` are not specified they will be selected to be - ``min_degree`` and ``max_degree``, respectively. Community sizes - are generated until the sum of their sizes equals ``n``. - 3) Each node will be randomly assigned a community with the - condition that the community is large enough for the node's - intra-community degree, `(1 - \mu) \mathrm{deg}(u)` as - described in step 2. If a community grows too large, a random node - will be selected for reassignment to a new community, until all - nodes have been assigned a community. - 4) Each node *u* then adds `(1 - \mu) \mathrm{deg}(u)` - intra-community edges and `\mu \mathrm{deg}(u)` inter-community - edges. - - Parameters - ---------- - n : int - Number of nodes in the created graph. - - tau1 : float - Power law exponent for the degree distribution of the created - graph. This value must be strictly greater than one. - - tau2 : float - Power law exponent for the community size distribution in the - created graph. This value must be strictly greater than one. - - mu : float - Fraction of intra-community edges incident to each node. This - value must be in the interval [0, 1]. 
- - average_degree : float - Desired average degree of nodes in the created graph. This value - must be in the interval [0, *n*]. Exactly one of this and - ``min_degree`` must be specified, otherwise a - :exc:`NetworkXError` is raised. - - min_degree : int - Minimum degree of nodes in the created graph. This value must be - in the interval [0, *n*]. Exactly one of this and - ``average_degree`` must be specified, otherwise a - :exc:`NetworkXError` is raised. - - max_degree : int - Maximum degree of nodes in the created graph. If not specified, - this is set to ``n``, the total number of nodes in the graph. - - min_community : int - Minimum size of communities in the graph. If not specified, this - is set to ``min_degree``. - - max_community : int - Maximum size of communities in the graph. If not specified, this - is set to ``n``, the total number of nodes in the graph. - - tol : float - Tolerance when comparing floats, specifically when comparing - average degree values. - - max_iters : int - Maximum number of iterations to try to create the community sizes, - degree distribution, and community affiliations. - - seed : int - A seed for the random number generator. - - Returns - ------- - G : NetworkX graph - The LFR benchmark graph generated according to the specified - parameters. - - Each node in the graph has a node attribute ``'community'`` that - stores the community (that is, the set of nodes) that includes - it. - - Raises - ------ - NetworkXError - If any of the parameters do not meet their upper and lower bounds: - - - ``tau1`` and ``tau2`` must be less than or equal to one. - - ``mu`` must be in [0, 1]. - - ``max_degree`` must be in {1, ..., *n*}. - - ``min_community`` and ``max_community`` must be in {0, ..., - *n*}. - - If not exactly one of ``average_degree`` and ``min_degree`` is - specified. - - If ``min_degree`` is not specified and a suitable ``min_degree`` - cannot be found. - - ExceededMaxIterations - If a valid degree sequence cannot be created within - ``max_iters`` number of iterations. - - If a valid set of community sizes cannot be created within - ``max_iters`` number of iterations. - - If a valid community assignment cannot be created within ``10 * - n * max_iters`` number of iterations. - - Examples - -------- - Basic usage:: - - >>> from networkx.algorithms.community import LFR_benchmark_graph - >>> n = 250 - >>> tau1 = 3 - >>> tau2 = 1.5 - >>> mu = 0.1 - >>> G = LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=5, - ... min_community=20, seed=10) - - Continuing the example above, you can get the communities from the - node attributes of the graph:: - - >>> communities = {frozenset(G.nodes[v]['community']) for v in G} - - Notes - ----- - This algorithm differs slightly from the original way it was - presented in [1]. - - 1) Rather than connecting the graph via a configuration model then - rewiring to match the intra-community and inter-community - degrees, we do this wiring explicitly at the end, which should be - equivalent. - 2) The code posted on the author's website [2] calculates the random - power law distributed variables and their average using - continuous approximations, whereas we use the discrete - distributions here as both degree and community size are - discrete. - - Though the authors describe the algorithm as quite robust, testing - during development indicates that a somewhat narrower parameter set - is likely to successfully produce a graph. Some suggestions have - been provided in the event of exceptions. - - References - ---------- - .. 
[1] "Benchmark graphs for testing community detection algorithms", - Andrea Lancichinetti, Santo Fortunato, and Filippo Radicchi, - Phys. Rev. E 78, 046110 2008 - .. [2] http://santo.fortunato.googlepages.com/inthepress2 - - """ - # Perform some basic parameter validation. - if seed is not None: - random.seed(seed) - if not tau1 > 1: - raise nx.NetworkXError("tau1 must be greater than one") - if not tau2 > 1: - raise nx.NetworkXError("tau2 must be greater than one") - if not 0 <= mu <= 1: - raise nx.NetworkXError("mu must be in the interval [0, 1]") - - # Validate parameters for generating the degree sequence. - if max_degree is None: - max_degree = n - elif not 0 < max_degree <= n: - raise nx.NetworkXError("max_degree must be in the interval (0, n]") - if not ((min_degree is None) ^ (average_degree is None)): - raise nx.NetworkXError("Must assign exactly one of min_degree and" - " average_degree") - if min_degree is None: - min_degree = _generate_min_degree(tau1, average_degree, max_degree, - tol, max_iters) - - # Generate a degree sequence with a power law distribution. - low, high = min_degree, max_degree - - def condition(seq): return sum(seq) % 2 == 0 - - def length(seq): return len(seq) >= n - deg_seq = _powerlaw_sequence(tau1, low, high, condition, length, max_iters) - - # Validate parameters for generating the community size sequence. - if min_community is None: - min_community = min(deg_seq) - if max_community is None: - max_community = max(deg_seq) - - # Generate a community size sequence with a power law distribution. - # - # TODO The original code incremented the number of iterations each - # time a new Zipf random value was drawn from the distribution. This - # differed from the way the number of iterations was incremented in - # `_powerlaw_degree_sequence`, so this code was changed to match - # that one. As a result, this code is allowed many more chances to - # generate a valid community size sequence. - low, high = min_community, max_community - - def condition(seq): return sum(seq) == n - - def length(seq): return sum(seq) >= n - comms = _powerlaw_sequence(tau2, low, high, condition, length, max_iters) - - # Generate the communities based on the given degree sequence and - # community sizes. - max_iters *= 10 * n - communities = _generate_communities(deg_seq, comms, mu, max_iters) - - # Finally, generate the benchmark graph based on the given - # communities, joining nodes according to the intra- and - # inter-community degrees. - G = nx.Graph() - G.add_nodes_from(range(n)) - for c in communities: - for u in c: - while G.degree(u) < round(deg_seq[u] * (1 - mu)): - v = random.choice(list(c)) - G.add_edge(u, v) - while G.degree(u) < deg_seq[u]: - v = random.choice(range(n)) - if v not in c: - G.add_edge(u, v) - G.nodes[u]['community'] = c - return G diff --git a/networkx/algorithms/community/community_utils.py b/networkx/algorithms/community/community_utils.py index 21cc8b7..f06fcf4 100644 --- a/networkx/algorithms/community/community_utils.py +++ b/networkx/algorithms/community/community_utils.py @@ -1,37 +1,27 @@ -# -*- coding: utf-8 -*- -# -# utils.py - helper functions for community-finding algorithms -# -# Copyright 2011 Ben Edwards . -# Copyright 2011 Aric Hagberg . -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
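The deletion above drops ``LFR_benchmark_graph`` from the community package (its import was also removed from ``__init__.py`` earlier in this patch); in this release the generator lives under ``networkx.generators.community`` instead. Assuming that top-level export, the call mirrors the example from the deleted docstring::

    import networkx as nx

    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.1, average_degree=5, min_community=20, seed=10
    )
    communities = {frozenset(G.nodes[v]["community"]) for v in G}
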
"""Helper functions for community-finding algorithms.""" -__all__ = ['is_partition'] +__all__ = ["is_partition"] def is_partition(G, communities): - """Return True if and only if `communities` is a partition of - the nodes of `G`. + """Returns *True* if `communities` is a partition of the nodes of `G`. A partition of a universe set is a family of pairwise disjoint sets whose union is the entire universe set. - `G` is a NetworkX graph. + Parameters + ---------- + G : NetworkX graph. - `communities` is an iterable of sets of nodes of `G`. This - iterable will be consumed multiple times during the execution of - this function. + communities : list or iterable of sets of nodes + If not a list, the iterable is converted internally to a list. + If it is an iterator it is exhausted. """ # Alternate implementation: - # - # return (len(G) == sum(len(c) for c in community) and - # set(G) == set.union(*community)) - # - return all(sum(1 if v in c else 0 for c in communities) == 1 for v in G) + # return all(sum(1 if v in c else 0 for c in communities) == 1 for v in G) + if not isinstance(communities, list): + communities = list(communities) + nodes = {n for c in communities for n in c if n in G} + + return len(G) == len(nodes) == sum(len(c) for c in communities) diff --git a/networkx/algorithms/community/kclique.py b/networkx/algorithms/community/kclique.py index 34ade57..c40835d 100644 --- a/networkx/algorithms/community/kclique.py +++ b/networkx/algorithms/community/kclique.py @@ -1,14 +1,7 @@ -#-*- coding: utf-8 -*- -# Copyright (C) 2011 by -# Conrad Lee -# Aric Hagberg -# All rights reserved. -# BSD license. from collections import defaultdict import networkx as nx -__author__ = """\n""".join(['Conrad Lee ', - 'Aric Hagberg ']) -__all__ = ['k_clique_communities'] + +__all__ = ["k_clique_communities"] def k_clique_communities(G, k, cliques=None): @@ -24,7 +17,7 @@ def k_clique_communities(G, k, cliques=None): k : int Size of smallest clique - cliques: list or generator + cliques: list or generator Precomputed cliques (use networkx.find_cliques(G)) Returns @@ -35,10 +28,10 @@ def k_clique_communities(G, k, cliques=None): -------- >>> from networkx.algorithms.community import k_clique_communities >>> G = nx.complete_graph(5) - >>> K5 = nx.convert_node_labels_to_integers(G,first_label=2) + >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2) >>> G.add_edges_from(K5.edges()) >>> c = list(k_clique_communities(G, 4)) - >>> list(c[0]) + >>> sorted(list(c[0])) [0, 1, 2, 3, 4, 5, 6] >>> list(k_clique_communities(G, 6)) [] @@ -46,12 +39,12 @@ def k_clique_communities(G, k, cliques=None): References ---------- .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek, - Uncovering the overlapping community structure of complex networks + Uncovering the overlapping community structure of complex networks in nature and society Nature 435, 814-818, 2005, doi:10.1038/nature03607 """ if k < 2: - raise nx.NetworkXError("k=%d, k must be greater than 1." 
% k) + raise nx.NetworkXError(f"k={k}, k must be greater than 1.") if cliques is None: cliques = nx.find_cliques(G) cliques = [frozenset(c) for c in cliques if len(c) >= k] @@ -73,7 +66,7 @@ def k_clique_communities(G, k, cliques=None): # Connected components of clique graph with perc edges # are the percolated cliques for component in nx.connected_components(perc_graph): - yield(frozenset.union(*component)) + yield (frozenset.union(*component)) def _get_adjacent_cliques(clique, membership_dict): diff --git a/networkx/algorithms/community/kernighan_lin.py b/networkx/algorithms/community/kernighan_lin.py index e35ff00..2323371 100644 --- a/networkx/algorithms/community/kernighan_lin.py +++ b/networkx/algorithms/community/kernighan_lin.py @@ -1,104 +1,53 @@ -# -*- coding: utf-8 -*- -# -# kernighan_lin.py - Kernighan–Lin bipartition algorithm -# -# Copyright 2011 Ben Edwards . -# Copyright 2011 Aric Hagberg . -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for computing the Kernighan–Lin bipartition algorithm.""" -from __future__ import division - -from collections import defaultdict -from itertools import islice -from operator import itemgetter -import random import networkx as nx -from networkx.utils import not_implemented_for +from itertools import count +from networkx.utils import not_implemented_for, py_random_state, BinaryHeap from networkx.algorithms.community.community_utils import is_partition -__all__ = ['kernighan_lin_bisection'] - - -def _compute_delta(G, A, B, weight): - # helper to compute initial swap deltas for a pass - delta = defaultdict(float) - for u, v, d in G.edges(data=True): - w = d.get(weight, 1) - if u in A: - if v in A: - delta[u] -= w - delta[v] -= w - elif v in B: - delta[u] += w - delta[v] += w - elif u in B: - if v in A: - delta[u] += w - delta[v] += w - elif v in B: - delta[u] -= w - delta[v] -= w - return delta - - -def _update_delta(delta, G, A, B, u, v, weight): - # helper to update swap deltas during single pass - for _, nbr, d in G.edges(u, data=True): - w = d.get(weight, 1) - if nbr in A: - delta[nbr] += 2 * w - if nbr in B: - delta[nbr] -= 2 * w - for _, nbr, d in G.edges(v, data=True): - w = d.get(weight, 1) - if nbr in A: - delta[nbr] -= 2 * w - if nbr in B: - delta[nbr] += 2 * w - return delta - - -def _kernighan_lin_pass(G, A, B, weight): - # do a single iteration of Kernighan–Lin algorithm - # returns list of (g_i,u_i,v_i) for i node pairs u_i,v_i - multigraph = G.is_multigraph() - delta = _compute_delta(G, A, B, weight) - swapped = set() - gains = [] - while len(swapped) < len(G): - gain = [] - for u in A - swapped: - for v in B - swapped: - try: - if multigraph: - w = sum(d.get(weight, 1) for d in G[u][v].values()) - else: - w = G[u][v].get(weight, 1) - except KeyError: - w = 0 - gain.append((delta[u] + delta[v] - 2 * w, u, v)) - if len(gain) == 0: - break - maxg, u, v = max(gain, key=itemgetter(0)) - swapped |= {u, v} - gains.append((maxg, u, v)) - delta = _update_delta(delta, G, A - swapped, B - swapped, u, v, weight) - return gains +__all__ = ["kernighan_lin_bisection"] -@not_implemented_for('directed') -def kernighan_lin_bisection(G, partition=None, max_iter=10, weight='weight'): +def _kernighan_lin_sweep(edges, side): + """ + This is a modified form of Kernighan-Lin, which moves single nodes at a + time, alternating between sides to keep the bisection balanced. 
We keep
+    two min-heaps of swap costs to make optimal-next-move selection fast.
+    """
+    costs0, costs1 = costs = BinaryHeap(), BinaryHeap()
+    for u, side_u, edges_u in zip(count(), side, edges):
+        cost_u = sum(w if side[v] else -w for v, w in edges_u)
+        costs[side_u].insert(u, cost_u if side_u else -cost_u)
+
+    def _update_costs(costs_x, x):
+        for y, w in edges[x]:
+            costs_y = costs[side[y]]
+            cost_y = costs_y.get(y)
+            if cost_y is not None:
+                cost_y += 2 * (-w if costs_x is costs_y else w)
+                costs_y.insert(y, cost_y, True)
+
+    i = totcost = 0
+    while costs0 and costs1:
+        u, cost_u = costs0.pop()
+        _update_costs(costs0, u)
+        v, cost_v = costs1.pop()
+        _update_costs(costs1, v)
+        totcost += cost_u + cost_v
+        yield totcost, i, (u, v)
+        i += 1
+
+
+@py_random_state(4)
+@not_implemented_for("directed")
+def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None):
     """Partition a graph into two blocks using the Kernighan–Lin algorithm.
 
-    This algorithm paritions a network into two sets by iteratively
-    swapping pairs of nodes to reduce the edge cut between the two sets.
+    This algorithm partitions a network into two sets by iteratively
+    swapping pairs of nodes to reduce the edge cut between the two sets. The
+    pairs are chosen according to a modified form of Kernighan-Lin, which
+    moves nodes individually, alternating between sides to keep the bisection
+    balanced.
 
     Parameters
     ----------
@@ -116,6 +65,11 @@ def kernighan_lin_bisection(G, partition=None, max_iter=10, weight='weight'):
         Edge data key to use as weight. If None, the weights are all
         set to one.
 
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+        Only used if partition is None
+
     Returns
     -------
     partition : tuple
@@ -134,36 +88,47 @@ def kernighan_lin_bisection(G, partition=None, max_iter=10, weight='weight'):
        Oxford University Press 2011.
 
     """
-    # If no partition is provided, split the nodes randomly into a
-    # balanced partition.
+    n = len(G)
+    labels = list(G)
+    seed.shuffle(labels)
+    index = {v: i for i, v in enumerate(labels)}
+
     if partition is None:
-        nodes = list(G)
-        random.shuffle(nodes)
-        h = len(nodes) // 2
-        partition = (nodes[:h], nodes[h:])
-    # Make a copy of the partition as a pair of sets.
-    try:
-        A, B = set(partition[0]), set(partition[1])
-    except:
-        raise ValueError('partition must be two sets')
-    if not is_partition(G, (A, B)):
-        raise nx.NetworkXError('partition invalid')
+        side = [0] * (n // 2) + [1] * ((n + 1) // 2)
+    else:
+        try:
+            A, B = partition
+        except (TypeError, ValueError) as e:
+            raise nx.NetworkXError("partition must be two sets") from e
+        if not is_partition(G, (A, B)):
+            raise nx.NetworkXError("partition invalid")
+        side = [0] * n
+        for a in A:
+            side[a] = 1
+
+    if G.is_multigraph():
+        edges = [
+            [
+                (index[u], sum(e.get(weight, 1) for e in d.values()))
+                for u, d in G[v].items()
+            ]
+            for v in labels
+        ]
+    else:
+        edges = [
+            [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels
+        ]
+
     for i in range(max_iter):
-        # `gains` is a list of triples of the form (g, u, v) for each
-        # node pair (u, v), where `g` is the gain of that node pair.
- gains = _kernighan_lin_pass(G, A, B, weight) - csum = list(nx.utils.accumulate(g for g, u, v in gains)) - max_cgain = max(csum) - if max_cgain <= 0: + costs = list(_kernighan_lin_sweep(edges, side)) + min_cost, min_i, _ = min(costs) + if min_cost >= 0: break - # Get the node pairs up to the index of the maximum cumulative - # gain, and collect each `u` into `anodes` and each `v` into - # `bnodes`, for each pair `(u, v)`. - index = csum.index(max_cgain) - nodesets = islice(zip(*gains[:index + 1]), 1, 3) - anodes, bnodes = (set(s) for s in nodesets) - A |= bnodes - A -= anodes - B |= anodes - B -= bnodes + + for _, _, (u, v) in costs[: min_i + 1]: + side[u] = 1 + side[v] = 0 + + A = {u for u, s in zip(labels, side) if s == 0} + B = {u for u, s in zip(labels, side) if s == 1} return A, B diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py index 01a908a..5afd43f 100644 --- a/networkx/algorithms/community/label_propagation.py +++ b/networkx/algorithms/community/label_propagation.py @@ -1,23 +1,18 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2015-2018 Aitor Almeida -# All rights reserved. -# BSD license. -# -# Author: Aitor Almeida """ Label propagation community detection algorithms. """ from collections import Counter -import random import networkx as nx from networkx.utils import groups -from networkx.utils.decorators import not_implemented_for +from networkx.utils import not_implemented_for +from networkx.utils import py_random_state -__all__ = ['label_propagation_communities', 'asyn_lpa_communities'] +__all__ = ["label_propagation_communities", "asyn_lpa_communities"] -def asyn_lpa_communities(G, weight=None): +@py_random_state(2) +def asyn_lpa_communities(G, weight=None, seed=None): """Returns communities in `G` as detected by asynchronous label propagation. @@ -47,6 +42,10 @@ def asyn_lpa_communities(G, weight=None): frequency with which a label appears among the neighbors of a node: a higher weight means the label appears more often. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- communities : iterable @@ -68,7 +67,7 @@ def asyn_lpa_communities(G, weight=None): while cont: cont = False nodes = list(G) - random.shuffle(nodes) + seed.shuffle(nodes) # Calculate the label for each node for node in nodes: if len(G[node]) < 1: @@ -79,24 +78,25 @@ def asyn_lpa_communities(G, weight=None): # algorithm asynchronous. label_freq = Counter() for v in G[node]: - label_freq.update({labels[v]: G.edges[v, node][weight] - if weight else 1}) + label_freq.update( + {labels[v]: G.edges[node, v][weight] if weight else 1} + ) # Choose the label with the highest frecuency. If more than 1 label # has the highest frecuency choose one randomly. max_freq = max(label_freq.values()) - best_labels = [label for label, freq in label_freq.items() - if freq == max_freq] - new_label = random.choice(best_labels) - labels[node] = new_label - # Continue until all nodes have a label that is better than other - # neighbour labels (only one label has max_freq for each node). - cont = cont or len(best_labels) > 1 + best_labels = [ + label for label, freq in label_freq.items() if freq == max_freq + ] + + # Continue until all nodes have a majority label + if labels[node] not in best_labels: + labels[node] = seed.choice(best_labels) + cont = True - # TODO In Python 3.3 or later, this should be `yield from ...`. 
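Returning to the Kernighan–Lin rewrite above: the public behaviour is unchanged, only the sweep machinery is new. A seeded sketch on a graph whose optimal bisection is obvious::

    import networkx as nx
    from networkx.algorithms.community import kernighan_lin_bisection

    G = nx.barbell_graph(5, 0)  # two K5 bells joined by a single edge
    A, B = kernighan_lin_bisection(G, seed=1)
    # on this graph the optimal bisection cuts only the bridge edge,
    # which the algorithm typically recovers
    print(nx.cut_size(G, A, B))
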
-    return iter(groups(labels).values())
+    yield from groups(labels).values()
 
 
-@not_implemented_for('directed')
+@not_implemented_for("directed")
 def label_propagation_communities(G):
     """Generates community sets determined by label propagation
 
@@ -136,7 +136,7 @@ def label_propagation_communities(G):
         _update_label(n, labeling, G)
 
     for label in set(labeling.values()):
-        yield set((x for x in labeling if labeling[x] == label))
+        yield {x for x in labeling if labeling[x] == label}
 
 
 def _color_network(G):
@@ -150,7 +150,7 @@ def _color_network(G):
         if color in coloring:
             coloring[color].add(node)
         else:
-            coloring[color] = set([node])
+            coloring[color] = {node}
     return coloring
 
 
@@ -162,8 +162,9 @@ def _labeling_complete(labeling, G):
 
     Nodes with no neighbors are considered complete.
     """
-    return all(labeling[v] in _most_frequent_labels(v, labeling, G)
-               for v in G if len(G[v]) > 0)
+    return all(
+        labeling[v] in _most_frequent_labels(v, labeling, G) for v in G if len(G[v]) > 0
+    )
 
 
 def _most_frequent_labels(node, labeling, G):
diff --git a/networkx/algorithms/community/lukes.py b/networkx/algorithms/community/lukes.py
new file mode 100644
index 0000000..ea4c12f
--- /dev/null
+++ b/networkx/algorithms/community/lukes.py
@@ -0,0 +1,227 @@
+"""Lukes Algorithm for exact optimal weighted tree partitioning."""
+
+from copy import deepcopy
+from functools import lru_cache
+from random import choice
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["lukes_partitioning"]
+
+D_EDGE_W = "weight"
+D_EDGE_VALUE = 1.0
+D_NODE_W = "weight"
+D_NODE_VALUE = 1
+PKEY = "partitions"
+CLUSTER_EVAL_CACHE_SIZE = 2048
+
+
+def _split_n_from(n: int, min_size_of_first_part: int):
+    # splits n in two parts of which the first is at least
+    # the second argument
+    assert n >= min_size_of_first_part
+    for p1 in range(min_size_of_first_part, n + 1):
+        yield p1, n - p1
+
+
+def lukes_partitioning(G, max_size: int, node_weight=None, edge_weight=None) -> list:
+
+    """Optimal partitioning of a weighted tree using the Lukes algorithm.
+
+    This algorithm partitions a connected, acyclic graph featuring integer
+    node weights and float edge weights. The resulting clusters are such
+    that the total weight of the nodes in each cluster does not exceed
+    max_size and that the weight of the edges that are cut by the partition
+    is minimum. The algorithm is based on Lukes [1].
+
+    Parameters
+    ----------
+    G : graph
+
+    max_size : int
+        Maximum weight a partition can have in terms of sum of
+        node_weight for all nodes in the partition
+
+    edge_weight : key
+        Edge data key to use as weight. If None, the weights are all
+        set to one.
+
+    node_weight : key
+        Node data key to use as weight. If None, the weights are all
+        set to one. The data must be int.
+
+    Returns
+    -------
+    partition : list
+        A list of sets of nodes representing the clusters of the
+        partition.
+
+    Raises
+    ------
+    NotATree
+        If G is not a tree.
+    TypeError
+        If any of the values of node_weight is not int.
+
+    References
+    ----------
+    .. [1] Lukes, J. A. (1974).
+       "Efficient Algorithm for the Partitioning of Trees."
+       IBM Journal of Research and Development, 18(3), 217–224.
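
A usage sketch for the docstring above, reproducing the first example from Lukes' paper (the same case appears in test_lukes.py below; node weights default to 1 when node_weight is None, and since the optimal partition here is unique, the random choice of root does not affect the result):

    >>> import networkx as nx
    >>> from networkx.algorithms.community import lukes_partitioning
    >>> G = nx.Graph()
    >>> G.add_weighted_edges_from([(1, 2, 3), (1, 4, 2), (2, 3, 4), (2, 5, 6)])
    >>> parts = lukes_partitioning(G, 3, edge_weight="weight")
    >>> {frozenset(p) for p in parts} == {frozenset([1, 4]), frozenset([2, 3, 5])}
    True
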
+ + """ + # First sanity check and tree preparation + if not nx.is_tree(G): + raise nx.NotATree("lukes_partitioning works only on trees") + else: + if nx.is_directed(G): + root = [n for n, d in G.in_degree() if d == 0] + assert len(root) == 1 + root = root[0] + t_G = deepcopy(G) + else: + root = choice(list(G.nodes)) + # this has the desirable side effect of not inheriting attributes + t_G = nx.dfs_tree(G, root) + + # Since we do not want to screw up the original graph, + # if we have a blank attribute, we make a deepcopy + if edge_weight is None or node_weight is None: + safe_G = deepcopy(G) + if edge_weight is None: + nx.set_edge_attributes(safe_G, D_EDGE_VALUE, D_EDGE_W) + edge_weight = D_EDGE_W + if node_weight is None: + nx.set_node_attributes(safe_G, D_NODE_VALUE, D_NODE_W) + node_weight = D_NODE_W + else: + safe_G = G + + # Second sanity check + # The values of node_weight MUST BE int. + # I cannot see any room for duck typing without incurring serious + # danger of subtle bugs. + all_n_attr = nx.get_node_attributes(safe_G, node_weight).values() + for x in all_n_attr: + if not isinstance(x, int): + raise TypeError( + "lukes_partitioning needs integer " + f"values for node_weight ({node_weight})" + ) + + # SUBROUTINES ----------------------- + # these functions are defined here for two reasons: + # - brevity: we can leverage global "safe_G" + # - caching: signatures are hashable + + @not_implemented_for("undirected") + # this is intended to be called only on t_G + def _leaves(gr): + for x in gr.nodes: + if not nx.descendants(gr, x): + yield x + + @not_implemented_for("undirected") + def _a_parent_of_leaves_only(gr): + tleaves = set(_leaves(gr)) + for n in set(gr.nodes) - tleaves: + if all([x in tleaves for x in nx.descendants(gr, n)]): + return n + + @lru_cache(CLUSTER_EVAL_CACHE_SIZE) + def _value_of_cluster(cluster: frozenset): + valid_edges = [e for e in safe_G.edges if e[0] in cluster and e[1] in cluster] + return sum([safe_G.edges[e][edge_weight] for e in valid_edges]) + + def _value_of_partition(partition: list): + return sum([_value_of_cluster(frozenset(c)) for c in partition]) + + @lru_cache(CLUSTER_EVAL_CACHE_SIZE) + def _weight_of_cluster(cluster: frozenset): + return sum([safe_G.nodes[n][node_weight] for n in cluster]) + + def _pivot(partition: list, node): + ccx = [c for c in partition if node in c] + assert len(ccx) == 1 + return ccx[0] + + def _concatenate_or_merge(partition_1: list, partition_2: list, x, i, ref_weigth): + + ccx = _pivot(partition_1, x) + cci = _pivot(partition_2, i) + merged_xi = ccx.union(cci) + + # We first check if we can do the merge. 
+ # If so, we do the actual calculations, otherwise we concatenate + if _weight_of_cluster(frozenset(merged_xi)) <= ref_weigth: + cp1 = list(filter(lambda x: x != ccx, partition_1)) + cp2 = list(filter(lambda x: x != cci, partition_2)) + + option_2 = [merged_xi] + cp1 + cp2 + return option_2, _value_of_partition(option_2) + else: + option_1 = partition_1 + partition_2 + return option_1, _value_of_partition(option_1) + + # INITIALIZATION ----------------------- + leaves = set(_leaves(t_G)) + for lv in leaves: + t_G.nodes[lv][PKEY] = dict() + slot = safe_G.nodes[lv][node_weight] + t_G.nodes[lv][PKEY][slot] = [{lv}] + t_G.nodes[lv][PKEY][0] = [{lv}] + + for inner in [x for x in t_G.nodes if x not in leaves]: + t_G.nodes[inner][PKEY] = dict() + slot = safe_G.nodes[inner][node_weight] + t_G.nodes[inner][PKEY][slot] = [{inner}] + + # CORE ALGORITHM ----------------------- + while True: + x_node = _a_parent_of_leaves_only(t_G) + weight_of_x = safe_G.nodes[x_node][node_weight] + best_value = 0 + best_partition = None + bp_buffer = dict() + x_descendants = nx.descendants(t_G, x_node) + for i_node in x_descendants: + for j in range(weight_of_x, max_size + 1): + for a, b in _split_n_from(j, weight_of_x): + if ( + a not in t_G.nodes[x_node][PKEY].keys() + or b not in t_G.nodes[i_node][PKEY].keys() + ): + # it's not possible to form this particular weight sum + continue + + part1 = t_G.nodes[x_node][PKEY][a] + part2 = t_G.nodes[i_node][PKEY][b] + part, value = _concatenate_or_merge(part1, part2, x_node, i_node, j) + + if j not in bp_buffer.keys() or bp_buffer[j][1] < value: + # we annotate in the buffer the best partition for j + bp_buffer[j] = part, value + + # we also keep track of the overall best partition + if best_value <= value: + best_value = value + best_partition = part + + # as illustrated in Lukes, once we finished a child, we can + # discharge the partitions we found into the graph + # (the key phrase is make all x == x') + # so that they are used by the subsequent children + for w, (best_part_for_vl, vl) in bp_buffer.items(): + t_G.nodes[x_node][PKEY][w] = best_part_for_vl + bp_buffer.clear() + + # the absolute best partition for this node + # across all weights has to be stored at 0 + t_G.nodes[x_node][PKEY][0] = best_partition + t_G.remove_nodes_from(x_descendants) + + if x_node == root: + # the 0-labeled partition of root + # is the optimal one for the whole tree + return t_G.nodes[root][PKEY][0] diff --git a/networkx/algorithms/community/modularity_max.py b/networkx/algorithms/community/modularity_max.py index 3dd0a12..8822536 100644 --- a/networkx/algorithms/community/modularity_max.py +++ b/networkx/algorithms/community/modularity_max.py @@ -1,30 +1,18 @@ -# modularity_max.py - functions for finding communities based on modularity -# -# Copyright 2018 Edward L. Platt -# -# This file is part of NetworkX -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Authors: -# Edward L. Platt -# # TODO: # - Alter equations for weighted case # - Write tests for weighted case """Functions for detecting communities based on modularity. 
""" -from __future__ import division -import networkx as nx from networkx.algorithms.community.quality import modularity from networkx.utils.mapped_queue import MappedQueue __all__ = [ - 'greedy_modularity_communities', - '_naive_greedy_modularity_communities'] + "greedy_modularity_communities", + "naive_greedy_modularity_communities", + "_naive_greedy_modularity_communities", +] def greedy_modularity_communities(G, weight=None): @@ -52,7 +40,7 @@ def greedy_modularity_communities(G, weight=None): >>> sorted(c[0]) [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] - References + References ---------- .. [1] M. E. J Newman 'Networks: An Introduction', page 224 Oxford University Press 2011. @@ -63,19 +51,19 @@ def greedy_modularity_communities(G, weight=None): # Count nodes and edges N = len(G.nodes()) - m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)]) - q0 = 1.0 / (2.0*m) + m = sum([d.get("weight", 1) for u, v, d in G.edges(data=True)]) + q0 = 1.0 / (2.0 * m) # Map node labels to contiguous integers - label_for_node = dict((i, v) for i, v in enumerate(G.nodes())) - node_for_label = dict((label_for_node[i], i) for i in range(N)) + label_for_node = {i: v for i, v in enumerate(G.nodes())} + node_for_label = {label_for_node[i]: i for i in range(N)} # Calculate degrees k_for_label = G.degree(G.nodes(), weight=weight) k = [k_for_label[label_for_node[i]] for i in range(N)] # Initialize community and merge lists - communities = dict((i, frozenset([i])) for i in range(N)) + communities = {i: frozenset([i]) for i in range(N)} merges = [] # Initial modularity @@ -88,24 +76,19 @@ def greedy_modularity_communities(G, weight=None): # dq_dict[i][j]: dQ for merging community i, j # dq_heap[i][n] : (-dq, i, j) for communitiy i nth largest dQ # H[n]: (-dq, i, j) for community with nth largest max_j(dQ_ij) - a = [k[i]*q0 for i in range(N)] - dq_dict = dict( - (i, dict( - (j, 2*q0 - 2*k[i]*k[j]*q0*q0) - for j in [ - node_for_label[u] - for u in G.neighbors(label_for_node[i])] - if j != i)) - for i in range(N)) - dq_heap = [ - MappedQueue([ - (-dq, i, j) - for j, dq in dq_dict[i].items()]) - for i in range(N)] - H = MappedQueue([ - dq_heap[i].h[0] + a = [k[i] * q0 for i in range(N)] + dq_dict = { + i: { + j: 2 * q0 - 2 * k[i] * k[j] * q0 * q0 + for j in [node_for_label[u] for u in G.neighbors(label_for_node[i])] + if j != i + } for i in range(N) - if len(dq_heap[i]) > 0]) + } + dq_heap = [ + MappedQueue([(-dq, i, j) for j, dq in dq_dict[i].items()]) for i in range(N) + ] + H = MappedQueue([dq_heap[i].h[0] for i in range(N) if len(dq_heap[i]) > 0]) # Merge communities until we can't improve modularity while len(H) > 1: @@ -123,7 +106,7 @@ def greedy_modularity_communities(G, weight=None): if len(dq_heap[i]) > 0: H.push(dq_heap[i].h[0]) # If this element was also at the root of row j, we need to remove the - # dupliate entry from H + # duplicate entry from H if dq_heap[j].h[0] == (-dq, j, i): H.remove((-dq, j, i)) # Remove best merge from row j heap @@ -147,7 +130,7 @@ def greedy_modularity_communities(G, weight=None): # Get list of communities connected to merged communities i_set = set(dq_dict[i].keys()) j_set = set(dq_dict[j].keys()) - all_set = (i_set | j_set) - set([i, j]) + all_set = (i_set | j_set) - {i, j} both_set = i_set & j_set # Merge i into j and update dQ for k in all_set: @@ -155,10 +138,10 @@ def greedy_modularity_communities(G, weight=None): if k in both_set: dq_jk = dq_dict[j][k] + dq_dict[i][k] elif k in j_set: - dq_jk = dq_dict[j][k] - 2.0*a[i]*a[k] + dq_jk = 
dq_dict[j][k] - 2.0 * a[i] * a[k] else: # k in i_set - dq_jk = dq_dict[i][k] - 2.0*a[j]*a[k] + dq_jk = dq_dict[i][k] - 2.0 * a[j] * a[k] # Update rows j and k for row, col in [(j, k), (k, j)]: # Save old value for finding heap index @@ -221,12 +204,12 @@ def greedy_modularity_communities(G, weight=None): a[i] = 0 communities = [ - frozenset([label_for_node[i] for i in c]) - for c in communities.values()] + frozenset([label_for_node[i] for i in c]) for c in communities.values() + ] return sorted(communities, key=len, reverse=True) -def _naive_greedy_modularity_communities(G): +def naive_greedy_modularity_communities(G): """Find communities in graph using the greedy modularity maximization. This implementation is O(n^4), much slower than alternatives, but it is provided as an easy-to-understand reference implementation. @@ -259,11 +242,8 @@ def _naive_greedy_modularity_communities(G): # Found new best, save modularity and group indexes new_modularity = trial_modularity to_merge = (i, j, new_modularity - old_modularity) - elif ( - to_merge and - min(i, j) < min(to_merge[0], to_merge[1]) - ): - # Break ties by chosing pair with lowest min id + elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]): + # Break ties by choosing pair with lowest min id new_modularity = trial_modularity to_merge = (i, j, new_modularity - old_modularity) # Un-merge @@ -278,5 +258,8 @@ def _naive_greedy_modularity_communities(G): communities[i] = frozenset([]) # Remove empty communities and sort communities = [c for c in communities if len(c) > 0] - for com in sorted(communities, key=lambda x: len(x), reverse=True): - yield com + yield from sorted(communities, key=lambda x: len(x), reverse=True) + + +# old name +_naive_greedy_modularity_communities = naive_greedy_modularity_communities diff --git a/networkx/algorithms/community/quality.py b/networkx/algorithms/community/quality.py index 56963d3..2ffe407 100644 --- a/networkx/algorithms/community/quality.py +++ b/networkx/algorithms/community/quality.py @@ -1,16 +1,7 @@ -# quality.py - functions for measuring partitions of a graph -# -# Copyright 2015-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for measuring the quality of a partition (into communities). """ -from __future__ import division from functools import wraps from itertools import product @@ -20,7 +11,7 @@ from networkx.utils import not_implemented_for from networkx.algorithms.community.community_utils import is_partition -__all__ = ['coverage', 'modularity', 'performance'] +__all__ = ["coverage", "modularity", "performance"] class NotAPartition(NetworkXError): @@ -29,14 +20,12 @@ class NotAPartition(NetworkXError): """ def __init__(self, G, collection): - msg = '{} is not a valid partition of the graph {}' - msg = msg.format(G, collection) - super(NotAPartition, self).__init__(msg) + msg = f"{G} is not a valid partition of the graph {collection}" + super().__init__(msg) def require_partition(func): - """Decorator that raises an exception if a partition is not a valid - partition of the nodes of a graph. + """Decorator to check that a valid partition is input to a function Raises :exc:`networkx.NetworkXError` if the partition is not valid. @@ -46,22 +35,22 @@ def require_partition(func): >>> @require_partition ... def foo(G, partition): - ... print('partition is valid!') + ... print("partition is valid!") ... 
    >>> G = nx.complete_graph(5)
    >>> partition = [{0, 1}, {2, 3}, {4}]
    >>> foo(G, partition)
    partition is valid!
    >>> partition = [{0}, {2, 3}, {4}]
-    >>> foo(G, partition)  # doctest: +IGNORE_EXCEPTION_DETAIL
+    >>> foo(G, partition)
     Traceback (most recent call last):
     ...
-    NetworkXError: `partition` is not a valid partition of the nodes of G
+    networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
    >>> partition = [{0, 1}, {1, 2, 3}, {4}]
-    >>> foo(G, partition)  # doctest: +IGNORE_EXCEPTION_DETAIL
+    >>> foo(G, partition)
     Traceback (most recent call last):
     ...
-    NetworkXError: `partition` is not a valid partition of the nodes of G
+    networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
 
     """
 
@@ -69,19 +58,23 @@ def new_func(*args, **kw):
         # Here we assume that the first two arguments are (G, partition).
         if not is_partition(*args[:2]):
-            raise nx.NetworkXError('`partition` is not a valid partition of'
-                                   ' the nodes of G')
+            raise nx.NetworkXError(
+                "`partition` is not a valid partition of" " the nodes of G"
+            )
         return func(*args, **kw)
+
     return new_func
 
 
 def intra_community_edges(G, partition):
-    """Returns the number of intra-community edges according to the given
-    partition of the nodes of `G`.
+    """Returns the number of intra-community edges for a partition of `G`.
 
-    `G` must be a NetworkX graph.
+    Parameters
+    ----------
+    G : NetworkX graph.
 
-    `partition` must be a partition of the nodes of `G`.
+    partition : iterable of sets of nodes
+        This must be a partition of the nodes of `G`.
 
     The "intra-community edges" are those edges joining a pair of nodes
     in the same block of the partition.
@@ -91,19 +84,22 @@ def inter_community_edges(G, partition):
-    """Returns the number of inter-community edges according to the given
+    """Returns the number of inter-community edges for a partition of `G`,
+    i.e. according to the given
     partition of the nodes of `G`.
 
-    `G` must be a NetworkX graph.
+    Parameters
+    ----------
+    G : NetworkX graph.
 
-    `partition` must be a partition of the nodes of `G`.
+    partition : iterable of sets of nodes
+        This must be a partition of the nodes of `G`.
 
     The *inter-community edges* are those edges joining a pair of nodes in
     different blocks of the partition.
 
     Implementation note: this function creates an intermediate graph
-    that may require the same amount of memory as required to store
-    `G`.
+    that may require the same amount of memory as that of `G`.
 
     """
     # Alternate implementation that does not require constructing a new
     # graph object.
     #
     # aff = dict(chain.from_iterable(((v, block) for v in block)
     #                                for block in partition))
     # return sum(1 for u, v in G.edges() if aff[u] != aff[v])
     #
-    if G.is_directed():
-        return nx.quotient_graph(G, partition, create_using=nx.MultiDiGraph()).size()
-    else:
-        return nx.quotient_graph(G, partition, create_using=nx.MultiGraph()).size()
+    MG = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
+    return nx.quotient_graph(G, partition, create_using=MG).size()
 
 
 def inter_community_non_edges(G, partition):
@@ -149,7 +143,7 @@
     return inter_community_edges(nx.complement(G), partition)
 
 
-@not_implemented_for('multigraph')
+@not_implemented_for("multigraph")
 @require_partition
 def performance(G, partition):
     """Returns the performance of a partition.
 
@@ -164,7 +158,6 @@
         A simple graph (directed or undirected).
partition : sequence - Partition of the nodes of `G`, represented as a sequence of sets of nodes. Each block of the partition represents a community. @@ -247,7 +240,7 @@ def coverage(G, partition): return intra_edges / total_edges -def modularity(G, communities, weight='weight'): +def modularity(G, communities, weight="weight"): r"""Returns the modularity of the given partition of the graph. Modularity is defined in [1]_ as @@ -261,13 +254,29 @@ def modularity(G, communities, weight='weight'): `G`, $k_i$ is the degree of $i$ and $\delta(c_i, c_j)$ is 1 if $i$ and $j$ are in the same community and 0 otherwise. + According to [2]_ (and verified by some algebra) this can be reduced to + + .. math:: + Q = \sum_{c=1}^{n} + \left[ \frac{L_c}{m} - \left( \frac{k_c}{2m} \right) ^2 \right] + + where the sum iterates over all communities $c$, $m$ is the number of edges, + $L_c$ is the number of intra-community links for community $c$, + $k_c$ is the sum of degrees of the nodes in community $c$. + + The second formula is the one actually used in calculation of the modularity. + Parameters ---------- G : NetworkX Graph - communities : list - List of sets of nodes of `G` representing a partition of the - nodes. + communities : list or iterable of set of nodes + These node sets must represent a partition of G's nodes. + + weight : string or None, optional (default="weight") + The edge attribute that holds the numerical value used + as a weight. If None or an edge does not have that attribute, + then that edge has weight 1. Returns ------- @@ -281,43 +290,45 @@ def modularity(G, communities, weight='weight'): Examples -------- + >>> import networkx.algorithms.community as nx_comm >>> G = nx.barbell_graph(3, 0) - >>> nx.algorithms.community.modularity(G, [{0, 1, 2}, {3, 4, 5}]) - 0.35714285714285704 + >>> nx_comm.modularity(G, [{0, 1, 2}, {3, 4, 5}]) + 0.35714285714285715 + >>> nx_comm.modularity(G, nx_comm.label_propagation_communities(G)) + 0.35714285714285715 References ---------- .. [1] M. E. J. Newman *Networks: An Introduction*, page 224. Oxford University Press, 2011. - + .. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore. + "Finding community structure in very large networks." + Physical review E 70.6 (2004). """ + if not isinstance(communities, list): + communities = list(communities) if not is_partition(G, communities): raise NotAPartition(G, communities) - multigraph = G.is_multigraph() directed = G.is_directed() - m = G.size(weight=weight) if directed: out_degree = dict(G.out_degree(weight=weight)) in_degree = dict(G.in_degree(weight=weight)) - norm = 1 / m + m = sum(out_degree.values()) + norm = 1 / m ** 2 else: - out_degree = dict(G.degree(weight=weight)) - in_degree = out_degree - norm = 1 / (2 * m) - - def val(u, v): - try: - if multigraph: - w = sum(d.get(weight, 1) for k, d in G[u][v].items()) - else: - w = G[u][v].get(weight, 1) - except KeyError: - w = 0 - # Double count self-loops if the graph is undirected. 
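
The reduced formula can be sanity-checked against the barbell doctest above: with m = 7, each block of the partition has L_c = 3 intra-community edges and degree sum k_c = 7, so

    >>> from fractions import Fraction
    >>> 2 * (Fraction(3, 7) - Fraction(7, 14) ** 2)
    Fraction(5, 14)

which is exactly the 0.35714285714285715 shown in the Examples section.
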
- if u == v and not directed: - w *= 2 - return w - in_degree[u] * out_degree[v] * norm - - Q = sum(val(u, v) for c in communities for u, v in product(c, repeat=2)) - return Q * norm + out_degree = in_degree = dict(G.degree(weight=weight)) + deg_sum = sum(out_degree.values()) + m = deg_sum / 2 + norm = 1 / deg_sum ** 2 + + def community_contribution(community): + comm = set(community) + L_c = sum(wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm) + + out_degree_sum = sum(out_degree[u] for u in comm) + in_degree_sum = sum(in_degree[u] for u in comm) if directed else out_degree_sum + + return L_c / m - out_degree_sum * in_degree_sum * norm + + return sum(map(community_contribution, communities)) diff --git a/networkx/algorithms/community/tests/__init__.py b/networkx/algorithms/community/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/community/tests/test_asyn_fluid.py b/networkx/algorithms/community/tests/test_asyn_fluid.py new file mode 100644 index 0000000..258991a --- /dev/null +++ b/networkx/algorithms/community/tests/test_asyn_fluid.py @@ -0,0 +1,127 @@ +import pytest +from networkx import Graph, NetworkXError +from networkx.algorithms.community.asyn_fluid import asyn_fluidc + + +def test_exceptions(): + test = Graph() + test.add_node("a") + pytest.raises(NetworkXError, asyn_fluidc, test, "hi") + pytest.raises(NetworkXError, asyn_fluidc, test, -1) + pytest.raises(NetworkXError, asyn_fluidc, test, 3) + test.add_node("b") + pytest.raises(NetworkXError, asyn_fluidc, test, 1) + + +def test_single_node(): + test = Graph() + + test.add_node("a") + + # ground truth + ground_truth = {frozenset(["a"])} + + communities = asyn_fluidc(test, 1) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_two_nodes(): + test = Graph() + + test.add_edge("a", "b") + + # ground truth + ground_truth = {frozenset(["a"]), frozenset(["b"])} + + communities = asyn_fluidc(test, 2) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_two_clique_communities(): + test = Graph() + + # c1 + test.add_edge("a", "b") + test.add_edge("a", "c") + test.add_edge("b", "c") + + # connection + test.add_edge("c", "d") + + # c2 + test.add_edge("d", "e") + test.add_edge("d", "f") + test.add_edge("f", "e") + + # ground truth + ground_truth = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])} + + communities = asyn_fluidc(test, 2, seed=7) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_five_clique_ring(): + test = Graph() + + # c1 + test.add_edge("1a", "1b") + test.add_edge("1a", "1c") + test.add_edge("1a", "1d") + test.add_edge("1b", "1c") + test.add_edge("1b", "1d") + test.add_edge("1c", "1d") + + # c2 + test.add_edge("2a", "2b") + test.add_edge("2a", "2c") + test.add_edge("2a", "2d") + test.add_edge("2b", "2c") + test.add_edge("2b", "2d") + test.add_edge("2c", "2d") + + # c3 + test.add_edge("3a", "3b") + test.add_edge("3a", "3c") + test.add_edge("3a", "3d") + test.add_edge("3b", "3c") + test.add_edge("3b", "3d") + test.add_edge("3c", "3d") + + # c4 + test.add_edge("4a", "4b") + test.add_edge("4a", "4c") + test.add_edge("4a", "4d") + test.add_edge("4b", "4c") + test.add_edge("4b", "4d") + test.add_edge("4c", "4d") + + # c5 + test.add_edge("5a", "5b") + test.add_edge("5a", "5c") + test.add_edge("5a", "5d") + test.add_edge("5b", "5c") + test.add_edge("5b", "5d") + test.add_edge("5c", "5d") + + # connections + test.add_edge("1a", "2c") + 
test.add_edge("2a", "3c") + test.add_edge("3a", "4c") + test.add_edge("4a", "5c") + test.add_edge("5a", "1c") + + # ground truth + ground_truth = { + frozenset(["1a", "1b", "1c", "1d"]), + frozenset(["2a", "2b", "2c", "2d"]), + frozenset(["3a", "3b", "3c", "3d"]), + frozenset(["4a", "4b", "4c", "4d"]), + frozenset(["5a", "5b", "5c", "5d"]), + } + + communities = asyn_fluidc(test, 5, seed=9) + result = {frozenset(c) for c in communities} + assert result == ground_truth diff --git a/networkx/algorithms/community/tests/test_asyn_fluidc.py b/networkx/algorithms/community/tests/test_asyn_fluidc.py deleted file mode 100644 index 417da12..0000000 --- a/networkx/algorithms/community/tests/test_asyn_fluidc.py +++ /dev/null @@ -1,120 +0,0 @@ -from nose.tools import assert_equal -from networkx import Graph -from networkx.algorithms.community.asyn_fluidc import * -import random - - -def test_single_node(): - test = Graph() - - test.add_node('a') - - # ground truth - ground_truth = set([frozenset(['a'])]) - - communities = asyn_fluidc(test, 1) - result = {frozenset(c) for c in communities} - assert_equal(result, ground_truth) - - -def test_two_nodes(): - test = Graph() - - test.add_edge('a', 'b') - - # ground truth - ground_truth = set([frozenset(['a']), frozenset(['b'])]) - - communities = asyn_fluidc(test, 2) - result = {frozenset(c) for c in communities} - assert_equal(result, ground_truth) - - -def test_two_clique_communities(): - random.seed(7) - test = Graph() - - # c1 - test.add_edge('a', 'b') - test.add_edge('a', 'c') - test.add_edge('b', 'c') - - # connection - test.add_edge('c', 'd') - - # c2 - test.add_edge('d', 'e') - test.add_edge('d', 'f') - test.add_edge('f', 'e') - - # ground truth - ground_truth = set([frozenset(['a', 'c', 'b']), - frozenset(['e', 'd', 'f'])]) - - communities = asyn_fluidc(test, 2) - result = {frozenset(c) for c in communities} - assert_equal(result, ground_truth) - - -def five_clique_ring(): - """Not auto-tested (not named test_...) 
due to cross-version seed issues""" - random.seed(9) - test = Graph() - - # c1 - test.add_edge('1a', '1b') - test.add_edge('1a', '1c') - test.add_edge('1a', '1d') - test.add_edge('1b', '1c') - test.add_edge('1b', '1d') - test.add_edge('1c', '1d') - - # c2 - test.add_edge('2a', '2b') - test.add_edge('2a', '2c') - test.add_edge('2a', '2d') - test.add_edge('2b', '2c') - test.add_edge('2b', '2d') - test.add_edge('2c', '2d') - - # c3 - test.add_edge('3a', '3b') - test.add_edge('3a', '3c') - test.add_edge('3a', '3d') - test.add_edge('3b', '3c') - test.add_edge('3b', '3d') - test.add_edge('3c', '3d') - - # c4 - test.add_edge('4a', '4b') - test.add_edge('4a', '4c') - test.add_edge('4a', '4d') - test.add_edge('4b', '4c') - test.add_edge('4b', '4d') - test.add_edge('4c', '4d') - - # c5 - test.add_edge('5a', '5b') - test.add_edge('5a', '5c') - test.add_edge('5a', '5d') - test.add_edge('5b', '5c') - test.add_edge('5b', '5d') - test.add_edge('5c', '5d') - - # connections - test.add_edge('1a', '2c') - test.add_edge('2a', '3c') - test.add_edge('3a', '4c') - test.add_edge('4a', '5c') - test.add_edge('5a', '1c') - - # ground truth - ground_truth = set([frozenset(['1a', '1b', '1c', '1d']), - frozenset(['2a', '2b', '2c', '2d']), - frozenset(['3a', '3b', '3c', '3d']), - frozenset(['4a', '4b', '4c', '4d']), - frozenset(['5a', '5b', '5c', '5d'])]) - - communities = asyn_fluidc(test, 5) - result = {frozenset(c) for c in communities} - assert_equal(result, ground_truth) diff --git a/networkx/algorithms/community/tests/test_centrality.py b/networkx/algorithms/community/tests/test_centrality.py index 9be8140..5e71088 100644 --- a/networkx/algorithms/community/tests/test_centrality.py +++ b/networkx/algorithms/community/tests/test_centrality.py @@ -1,20 +1,9 @@ -# -*- coding: utf-8 -*- -# test_centrality.py - unit tests for algorithms.community.centrality -# -# Copyright 2015, 2016 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.community.centrality` module. """ from operator import itemgetter -from nose.tools import assert_equal -from nose.tools import assert_true import networkx as nx from networkx.algorithms.community import girvan_newman @@ -25,14 +14,14 @@ def set_of_sets(iterable): def validate_communities(result, expected): - assert_equal(set_of_sets(result), set_of_sets(expected)) + assert set_of_sets(result) == set_of_sets(expected) def validate_possible_communities(result, *expected): - assert_true(any(set_of_sets(result) == set_of_sets(p) for p in expected)) + assert any(set_of_sets(result) == set_of_sets(p) for p in expected) -class TestGirvanNewman(object): +class TestGirvanNewman: """Unit tests for the :func:`networkx.algorithms.community.centrality.girvan_newman` function. @@ -42,30 +31,32 @@ class TestGirvanNewman(object): def test_no_edges(self): G = nx.empty_graph(3) communities = list(girvan_newman(G)) - assert_equal(len(communities), 1) + assert len(communities) == 1 validate_communities(communities[0], [{0}, {1}, {2}]) def test_undirected(self): # Start with the graph .-.-.-. G = nx.path_graph(4) communities = list(girvan_newman(G)) - assert_equal(len(communities), 3) + assert len(communities) == 3 # After one removal, we get the graph .-. .-. validate_communities(communities[0], [{0, 1}, {2, 3}]) # After the next, we get the graph .-. . ., but there are two # symmetric possible versions. 
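
The first split these tests expect can be reproduced directly (a short sketch; girvan_newman yields successively finer partitions, one per round of edge removals):

    >>> import networkx as nx
    >>> from networkx.algorithms.community import girvan_newman
    >>> G = nx.path_graph(4)
    >>> tuple(sorted(c) for c in next(girvan_newman(G)))
    ([0, 1], [2, 3])
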
- validate_possible_communities(communities[1], [{0}, {1}, {2, 3}], - [{0, 1}, {2}, {3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) # After the last removal, we always get the empty graph. validate_communities(communities[2], [{0}, {1}, {2}, {3}]) def test_directed(self): G = nx.DiGraph(nx.path_graph(4)) communities = list(girvan_newman(G)) - assert_equal(len(communities), 3) + assert len(communities) == 3 validate_communities(communities[0], [{0, 1}, {2, 3}]) - validate_possible_communities(communities[1], [{0}, {1}, {2, 3}], - [{0, 1}, {2}, {3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) validate_communities(communities[2], [{0}, {1}, {2}, {3}]) def test_selfloops(self): @@ -73,10 +64,11 @@ def test_selfloops(self): G.add_edge(0, 0) G.add_edge(2, 2) communities = list(girvan_newman(G)) - assert_equal(len(communities), 3) + assert len(communities) == 3 validate_communities(communities[0], [{0, 1}, {2, 3}]) - validate_possible_communities(communities[1], [{0}, {1}, {2, 3}], - [{0, 1}, {2}, {3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) validate_communities(communities[2], [{0}, {1}, {2}, {3}]) def test_most_valuable_edge(self): @@ -84,9 +76,11 @@ def test_most_valuable_edge(self): G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)]) # Let the most valuable edge be the one with the highest weight. - def heaviest(G): return max(G.edges(data='weight'), key=itemgetter(2))[:2] + def heaviest(G): + return max(G.edges(data="weight"), key=itemgetter(2))[:2] + communities = list(girvan_newman(G, heaviest)) - assert_equal(len(communities), 3) + assert len(communities) == 3 validate_communities(communities[0], [{0}, {1, 2, 3}]) validate_communities(communities[1], [{0}, {1}, {2, 3}]) validate_communities(communities[2], [{0}, {1}, {2}, {3}]) diff --git a/networkx/algorithms/community/tests/test_generators.py b/networkx/algorithms/community/tests/test_generators.py deleted file mode 100644 index efc5870..0000000 --- a/networkx/algorithms/community/tests/test_generators.py +++ /dev/null @@ -1,89 +0,0 @@ -# test_generators.py - unit tests for the community.generators module -# -# Copyright 2011 Ben Edwards . -# Copyright 2011 Aric Hagberg -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -"""Unit tests for the :mod:`networkx.algorithms.community.generators` -module. 
- -""" -from nose.tools import assert_equal -from nose.tools import assert_true -from nose.tools import raises - -import networkx as nx -from networkx.algorithms.community import LFR_benchmark_graph -from networkx.algorithms.community.community_utils import is_partition - - -def test_generator(): - n = 250 - tau1 = 3 - tau2 = 1.5 - mu = 0.1 - G = LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=5, - min_community=20, seed=10) - assert_equal(len(G), 250) - C = {frozenset(G.nodes[v]['community']) for v in G} - assert_true(is_partition(G.nodes(), C)) - # assert_equal([len(c) for c in C], [53, 12, 10, 15, 10]) - # assert_equal(len(G.edges()), 157) - - -@raises(nx.NetworkXError) -def test_invalid_tau1(): - n = 100 - tau1 = 2 - tau2 = 1 - mu = 0.1 - LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) - - -@raises(nx.NetworkXError) -def test_invalid_tau2(): - n = 100 - tau1 = 1 - tau2 = 2 - mu = 0.1 - LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) - - -@raises(nx.NetworkXError) -def test_mu_too_large(): - n = 100 - tau1 = 2 - tau2 = 2 - mu = 1.1 - LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) - - -@raises(nx.NetworkXError) -def test_mu_too_small(): - n = 100 - tau1 = 2 - tau2 = 2 - mu = -1 - LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) - - -@raises(nx.NetworkXError) -def test_both_degrees_none(): - n = 100 - tau1 = 2 - tau2 = 2 - mu = -1 - LFR_benchmark_graph(n, tau1, tau2, mu) - - -@raises(nx.NetworkXError) -def test_neither_degrees_none(): - n = 100 - tau1 = 2 - tau2 = 2 - mu = -1 - LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, average_degree=5) diff --git a/networkx/algorithms/community/tests/test_kclique.py b/networkx/algorithms/community/tests/test_kclique.py index 056ce08..ffac175 100644 --- a/networkx/algorithms/community/tests/test_kclique.py +++ b/networkx/algorithms/community/tests/test_kclique.py @@ -1,7 +1,6 @@ from itertools import combinations -from nose.tools import assert_equal -from nose.tools import raises +import pytest import networkx as nx from networkx.algorithms.community import k_clique_communities @@ -12,9 +11,9 @@ def test_overlapping_K5(): G.add_edges_from(combinations(range(5), 2)) # Add a five clique G.add_edges_from(combinations(range(2, 7), 2)) # Add another five clique c = list(k_clique_communities(G, 4)) - assert_equal(c, [frozenset(range(7))]) + assert c == [frozenset(range(7))] c = set(k_clique_communities(G, 5)) - assert_equal(c, {frozenset(range(5)), frozenset(range(2, 7))}) + assert c == {frozenset(range(5)), frozenset(range(2, 7))} def test_isolated_K5(): @@ -22,17 +21,16 @@ def test_isolated_K5(): G.add_edges_from(combinations(range(0, 5), 2)) # Add a five clique G.add_edges_from(combinations(range(5, 10), 2)) # Add another five clique c = set(k_clique_communities(G, 5)) - assert_equal(c, {frozenset(range(5)), frozenset(range(5, 10))}) + assert c == {frozenset(range(5)), frozenset(range(5, 10))} -class TestZacharyKarateClub(object): - +class TestZacharyKarateClub: def setup(self): self.G = nx.karate_club_graph() def _check_communities(self, k, expected): communities = set(k_clique_communities(self.G, k)) - assert_equal(communities, expected) + assert communities == expected def test_k2(self): # clique percolation with k=2 is just connected components @@ -40,16 +38,44 @@ def test_k2(self): self._check_communities(2, expected) def test_k3(self): - comm1 = [0, 1, 2, 3, 7, 8, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, - 26, 27, 28, 29, 30, 31, 32, 33] + comm1 = [ + 0, + 1, + 2, + 3, + 7, + 8, + 12, + 13, + 14, + 15, + 17, + 
18, + 19, + 20, + 21, + 22, + 23, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + ] comm2 = [0, 4, 5, 6, 10, 16] comm3 = [24, 25, 31] expected = {frozenset(comm1), frozenset(comm2), frozenset(comm3)} self._check_communities(3, expected) def test_k4(self): - expected = {frozenset([0, 1, 2, 3, 7, 13]), frozenset([8, 32, 30, 33]), - frozenset([32, 33, 29, 23])} + expected = { + frozenset([0, 1, 2, 3, 7, 13]), + frozenset([8, 32, 30, 33]), + frozenset([32, 33, 29, 23]), + } self._check_communities(4, expected) def test_k5(self): @@ -61,6 +87,6 @@ def test_k6(self): self._check_communities(6, expected) -@raises(nx.NetworkXError) def test_bad_k(): - list(k_clique_communities(nx.Graph(), 1)) + with pytest.raises(nx.NetworkXError): + list(k_clique_communities(nx.Graph(), 1)) diff --git a/networkx/algorithms/community/tests/test_kernighan_lin.py b/networkx/algorithms/community/tests/test_kernighan_lin.py index ab0fb8a..3cd0f0e 100644 --- a/networkx/algorithms/community/tests/test_kernighan_lin.py +++ b/networkx/algorithms/community/tests/test_kernighan_lin.py @@ -1,27 +1,16 @@ -# -*- encoding: utf-8 -*- -# test_kernighan_lin.py - unit tests for Kernighan–Lin bipartition algorithm -# -# Copyright 2011 Ben Edwards . -# Copyright 2011 Aric Hagberg . -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.community.kernighan_lin` module. """ -from nose.tools import assert_equal -from nose.tools import raises +import pytest import networkx as nx from networkx.algorithms.community import kernighan_lin_bisection +from itertools import permutations def assert_partition_equal(x, y): - assert_equal(set(map(frozenset, x)), set(map(frozenset, y))) + assert set(map(frozenset, x)) == set(map(frozenset, y)) def test_partition(): @@ -30,18 +19,31 @@ def test_partition(): assert_partition_equal(C, [{0, 1, 2}, {3, 4, 5}]) -@raises(nx.NetworkXError) -def test_non_disjoint_partition(): +def test_partition_argument(): G = nx.barbell_graph(3, 0) - partition = ({0, 1, 2}, {2, 3, 4, 5}) - kernighan_lin_bisection(G, partition) + partition = [{0, 1, 2}, {3, 4, 5}] + C = kernighan_lin_bisection(G, partition) + assert_partition_equal(C, partition) -@raises(nx.NetworkXError) -def test_too_many_blocks(): +def test_seed_argument(): G = nx.barbell_graph(3, 0) - partition = ({0, 1}, {2}, {3, 4, 5}) - kernighan_lin_bisection(G, partition) + C = kernighan_lin_bisection(G, seed=1) + assert_partition_equal(C, [{0, 1, 2}, {3, 4, 5}]) + + +def test_non_disjoint_partition(): + with pytest.raises(nx.NetworkXError): + G = nx.barbell_graph(3, 0) + partition = ({0, 1, 2}, {2, 3, 4, 5}) + kernighan_lin_bisection(G, partition) + + +def test_too_many_blocks(): + with pytest.raises(nx.NetworkXError): + G = nx.barbell_graph(3, 0) + partition = ({0, 1}, {2}, {3, 4, 5}) + kernighan_lin_bisection(G, partition) def test_multigraph(): @@ -49,5 +51,9 @@ def test_multigraph(): M = nx.MultiGraph(G.edges()) M.add_edges_from(G.edges()) M.remove_edge(1, 2) - A, B = kernighan_lin_bisection(M) - assert_partition_equal([A, B], [{0, 1}, {2, 3}]) + for labels in permutations(range(4)): + mapping = dict(zip(M, labels)) + A, B = kernighan_lin_bisection(nx.relabel_nodes(M, mapping), seed=0) + assert_partition_equal( + [A, B], [{mapping[0], mapping[1]}, {mapping[2], mapping[3]}] + ) diff --git a/networkx/algorithms/community/tests/test_label_propagation.py 
b/networkx/algorithms/community/tests/test_label_propagation.py index 84321c5..9e49688 100644 --- a/networkx/algorithms/community/tests/test_label_propagation.py +++ b/networkx/algorithms/community/tests/test_label_propagation.py @@ -1,104 +1,115 @@ from itertools import chain from itertools import combinations -from nose.tools import * -from nose.tools import assert_equal, assert_in +import pytest import networkx as nx from networkx.algorithms.community import label_propagation_communities from networkx.algorithms.community import asyn_lpa_communities -@raises(nx.NetworkXNotImplemented) def test_directed_not_supported(): - # not supported for directed graphs - test = nx.DiGraph() - test.add_edge('a', 'b') - test.add_edge('a', 'c') - test.add_edge('b', 'd') - result = label_propagation_communities(test) + with pytest.raises(nx.NetworkXNotImplemented): + # not supported for directed graphs + test = nx.DiGraph() + test.add_edge("a", "b") + test.add_edge("a", "c") + test.add_edge("b", "d") + result = label_propagation_communities(test) def test_one_node(): test = nx.Graph() - test.add_node('a') + test.add_node("a") # The expected communities are: - ground_truth = set([frozenset(['a'])]) + ground_truth = {frozenset(["a"])} communities = label_propagation_communities(test) result = {frozenset(c) for c in communities} - assert_equal(result, ground_truth) + assert result == ground_truth def test_unconnected_communities(): test = nx.Graph() # community 1 - test.add_edge('a', 'c') - test.add_edge('a', 'd') - test.add_edge('d', 'c') + test.add_edge("a", "c") + test.add_edge("a", "d") + test.add_edge("d", "c") # community 2 - test.add_edge('b', 'e') - test.add_edge('e', 'f') - test.add_edge('f', 'b') + test.add_edge("b", "e") + test.add_edge("e", "f") + test.add_edge("f", "b") # The expected communities are: - ground_truth = set([frozenset(['a', 'c', 'd']), - frozenset(['b', 'e', 'f'])]) + ground_truth = {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])} communities = label_propagation_communities(test) result = {frozenset(c) for c in communities} - assert_equal(result, ground_truth) + assert result == ground_truth def test_connected_communities(): test = nx.Graph() # community 1 - test.add_edge('a', 'b') - test.add_edge('c', 'a') - test.add_edge('c', 'b') - test.add_edge('d', 'a') - test.add_edge('d', 'b') - test.add_edge('d', 'c') - test.add_edge('e', 'a') - test.add_edge('e', 'b') - test.add_edge('e', 'c') - test.add_edge('e', 'd') + test.add_edge("a", "b") + test.add_edge("c", "a") + test.add_edge("c", "b") + test.add_edge("d", "a") + test.add_edge("d", "b") + test.add_edge("d", "c") + test.add_edge("e", "a") + test.add_edge("e", "b") + test.add_edge("e", "c") + test.add_edge("e", "d") # community 2 - test.add_edge('1', '2') - test.add_edge('3', '1') - test.add_edge('3', '2') - test.add_edge('4', '1') - test.add_edge('4', '2') - test.add_edge('4', '3') - test.add_edge('5', '1') - test.add_edge('5', '2') - test.add_edge('5', '3') - test.add_edge('5', '4') + test.add_edge("1", "2") + test.add_edge("3", "1") + test.add_edge("3", "2") + test.add_edge("4", "1") + test.add_edge("4", "2") + test.add_edge("4", "3") + test.add_edge("5", "1") + test.add_edge("5", "2") + test.add_edge("5", "3") + test.add_edge("5", "4") # edge between community 1 and 2 - test.add_edge('a', '1') + test.add_edge("a", "1") # community 3 - test.add_edge('x', 'y') + test.add_edge("x", "y") # community 4 with only a single node - test.add_node('z') + test.add_node("z") # The expected communities are: - ground_truth1 = 
set([frozenset(['a', 'b', 'c', 'd', 'e']), - frozenset(['1', '2', '3', '4', '5']), - frozenset(['x', 'y']), - frozenset(['z'])]) - ground_truth2 = set([frozenset(['a', 'b', 'c', 'd', 'e', - '1', '2', '3', '4', '5']), - frozenset(['x', 'y']), - frozenset(['z'])]) + ground_truth1 = { + frozenset(["a", "b", "c", "d", "e"]), + frozenset(["1", "2", "3", "4", "5"]), + frozenset(["x", "y"]), + frozenset(["z"]), + } + ground_truth2 = { + frozenset(["a", "b", "c", "d", "e", "1", "2", "3", "4", "5"]), + frozenset(["x", "y"]), + frozenset(["z"]), + } ground_truth = (ground_truth1, ground_truth2) communities = label_propagation_communities(test) result = {frozenset(c) for c in communities} - assert_in(result, ground_truth) + assert result in ground_truth -class TestAsynLpaCommunities(object): +def test_termination(): + # ensure termination of asyn_lpa_communities in two cases + # that led to an endless loop in a previous version + test1 = nx.karate_club_graph() + test2 = nx.caveman_graph(2, 10) + test2.add_edges_from([(0, 20), (20, 10)]) + asyn_lpa_communities(test1) + asyn_lpa_communities(test2) + + +class TestAsynLpaCommunities: def _check_communities(self, G, expected): """Checks that the communities computed from the given graph ``G`` using the :func:`~networkx.asyn_lpa_communities` function match @@ -110,7 +121,7 @@ def _check_communities(self, G, expected): """ communities = asyn_lpa_communities(G) result = {frozenset(c) for c in communities} - assert_equal(result, expected) + assert result == expected def test_null_graph(self): G = nx.null_graph() @@ -124,10 +135,17 @@ def test_single_node(self): def test_simple_communities(self): # This graph is the disjoint union of two triangles. - G = nx.Graph(['ab', 'ac', 'bc', 'de', 'df', 'fe']) - ground_truth = {frozenset('abc'), frozenset('def')} + G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"]) + ground_truth = {frozenset("abc"), frozenset("def")} self._check_communities(G, ground_truth) + def test_seed_argument(self): + G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"]) + ground_truth = {frozenset("abc"), frozenset("def")} + communities = asyn_lpa_communities(G, seed=1) + result = {frozenset(c) for c in communities} + assert result == ground_truth + def test_several_communities(self): # This graph is the disjoint union of five triangles. 
ground_truth = {frozenset(range(3 * i, 3 * (i + 1))) for i in range(5)} diff --git a/networkx/algorithms/community/tests/test_lukes.py b/networkx/algorithms/community/tests/test_lukes.py new file mode 100644 index 0000000..80e2de3 --- /dev/null +++ b/networkx/algorithms/community/tests/test_lukes.py @@ -0,0 +1,154 @@ +from itertools import product + +import pytest + +import networkx as nx +from networkx.algorithms.community import lukes_partitioning + +EWL = "e_weight" +NWL = "n_weight" + + +# first test from the Lukes original paper +def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False): + + # problem-specific constants + limit = 3 + + # configuration + if float_edge_wt: + shift = 0.001 + else: + shift = 0 + + if directed: + example_1 = nx.DiGraph() + else: + example_1 = nx.Graph() + + # graph creation + example_1.add_edge(1, 2, **{EWL: 3 + shift}) + example_1.add_edge(1, 4, **{EWL: 2 + shift}) + example_1.add_edge(2, 3, **{EWL: 4 + shift}) + example_1.add_edge(2, 5, **{EWL: 6 + shift}) + + # node weights + if explicit_node_wt: + nx.set_node_attributes(example_1, 1, NWL) + wtu = NWL + else: + wtu = None + + # partitioning + clusters_1 = { + frozenset(x) + for x in lukes_partitioning(example_1, limit, node_weight=wtu, edge_weight=EWL) + } + + return clusters_1 + + +# second test from the Lukes original paper +def paper_2_case(explicit_edge_wt=True, directed=False): + + # problem specific constants + byte_block_size = 32 + + # configuration + if directed: + example_2 = nx.DiGraph() + else: + example_2 = nx.Graph() + + if explicit_edge_wt: + edic = {EWL: 1} + wtu = EWL + else: + edic = {} + wtu = None + + # graph creation + example_2.add_edge("name", "home_address", **edic) + example_2.add_edge("name", "education", **edic) + example_2.add_edge("education", "bs", **edic) + example_2.add_edge("education", "ms", **edic) + example_2.add_edge("education", "phd", **edic) + example_2.add_edge("name", "telephone", **edic) + example_2.add_edge("telephone", "home", **edic) + example_2.add_edge("telephone", "office", **edic) + example_2.add_edge("office", "no1", **edic) + example_2.add_edge("office", "no2", **edic) + + example_2.nodes["name"][NWL] = 20 + example_2.nodes["education"][NWL] = 10 + example_2.nodes["bs"][NWL] = 1 + example_2.nodes["ms"][NWL] = 1 + example_2.nodes["phd"][NWL] = 1 + example_2.nodes["home_address"][NWL] = 8 + example_2.nodes["telephone"][NWL] = 8 + example_2.nodes["home"][NWL] = 8 + example_2.nodes["office"][NWL] = 4 + example_2.nodes["no1"][NWL] = 1 + example_2.nodes["no2"][NWL] = 1 + + # partitioning + clusters_2 = { + frozenset(x) + for x in lukes_partitioning( + example_2, byte_block_size, node_weight=NWL, edge_weight=wtu + ) + } + + return clusters_2 + + +def test_paper_1_case(): + ground_truth = {frozenset([1, 4]), frozenset([2, 3, 5])} + + tf = (True, False) + for flt, nwt, drc in product(tf, tf, tf): + part = paper_1_case(flt, nwt, drc) + assert part == ground_truth + + +def test_paper_2_case(): + ground_truth = { + frozenset(["education", "bs", "ms", "phd"]), + frozenset(["name", "home_address"]), + frozenset(["telephone", "home", "office", "no1", "no2"]), + } + + tf = (True, False) + for ewt, drc in product(tf, tf): + part = paper_2_case(ewt, drc) + assert part == ground_truth + + +def test_mandatory_tree(): + not_a_tree = nx.complete_graph(4) + + with pytest.raises(nx.NotATree): + lukes_partitioning(not_a_tree, 5) + + +def test_mandatory_integrality(): + + byte_block_size = 32 + + ex_1_broken = nx.DiGraph() + + ex_1_broken.add_edge(1, 2, 
**{EWL: 3.2}) + ex_1_broken.add_edge(1, 4, **{EWL: 2.4}) + ex_1_broken.add_edge(2, 3, **{EWL: 4.0}) + ex_1_broken.add_edge(2, 5, **{EWL: 6.3}) + + ex_1_broken.nodes[1][NWL] = 1.2 # ! + ex_1_broken.nodes[2][NWL] = 1 + ex_1_broken.nodes[3][NWL] = 1 + ex_1_broken.nodes[4][NWL] = 1 + ex_1_broken.nodes[5][NWL] = 2 + + with pytest.raises(TypeError): + lukes_partitioning( + ex_1_broken, byte_block_size, node_weight=NWL, edge_weight=EWL + ) diff --git a/networkx/algorithms/community/tests/test_modularity_max.py b/networkx/algorithms/community/tests/test_modularity_max.py index 16bc735..ba80ef9 100644 --- a/networkx/algorithms/community/tests/test_modularity_max.py +++ b/networkx/algorithms/community/tests/test_modularity_max.py @@ -1,43 +1,39 @@ -from itertools import combinations - -from nose.tools import assert_equal -from nose.tools import raises - import networkx as nx from networkx.algorithms.community import ( greedy_modularity_communities, - _naive_greedy_modularity_communities) - + naive_greedy_modularity_communities, +) -class TestCNM(object): +class TestCNM: def setup(self): self.G = nx.karate_club_graph() def _check_communities(self, expected): communities = set(greedy_modularity_communities(self.G)) - assert_equal(communities, expected) + assert communities == expected def test_karate_club(self): - john_a = frozenset([ - 8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]) + john_a = frozenset( + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + ) mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19]) overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21]) self._check_communities({john_a, overlap, mr_hi}) -class TestNaive(object): - +class TestNaive: def setup(self): self.G = nx.karate_club_graph() def _check_communities(self, expected): - communities = set(_naive_greedy_modularity_communities(self.G)) - assert_equal(communities, expected) + communities = set(naive_greedy_modularity_communities(self.G)) + assert communities == expected def test_karate_club(self): - john_a = frozenset([ - 8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]) + john_a = frozenset( + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + ) mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19]) overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21]) - self._check_communities({john_a, overlap, mr_hi}) + self._check_communities({john_a, overlap, mr_hi}) diff --git a/networkx/algorithms/community/tests/test_quality.py b/networkx/algorithms/community/tests/test_quality.py index a941311..b3e8851 100644 --- a/networkx/algorithms/community/tests/test_quality.py +++ b/networkx/algorithms/community/tests/test_quality.py @@ -1,19 +1,7 @@ -# test_quality.py - unit tests for the quality module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.community.quality` module. 
""" -from __future__ import division - -from nose.tools import assert_equal -from nose.tools import assert_almost_equal import networkx as nx from networkx import barbell_graph @@ -21,16 +9,17 @@ from networkx.algorithms.community import modularity from networkx.algorithms.community import performance from networkx.algorithms.community.quality import inter_community_edges +from networkx.testing import almost_equal -class TestPerformance(object): +class TestPerformance: """Unit tests for the :func:`performance` function.""" def test_bad_partition(self): """Tests that a poor partition has a low performance measure.""" G = barbell_graph(3, 0) partition = [{0, 1, 4}, {2, 3, 5}] - assert_almost_equal(8 / 15, performance(G, partition)) + assert almost_equal(8 / 15, performance(G, partition)) def test_good_partition(self): """Tests that a good partition has a high performance measure. @@ -38,42 +27,58 @@ def test_good_partition(self): """ G = barbell_graph(3, 0) partition = [{0, 1, 2}, {3, 4, 5}] - assert_almost_equal(14 / 15, performance(G, partition)) + assert almost_equal(14 / 15, performance(G, partition)) -class TestCoverage(object): +class TestCoverage: """Unit tests for the :func:`coverage` function.""" def test_bad_partition(self): """Tests that a poor partition has a low coverage measure.""" G = barbell_graph(3, 0) partition = [{0, 1, 4}, {2, 3, 5}] - assert_almost_equal(3 / 7, coverage(G, partition)) + assert almost_equal(3 / 7, coverage(G, partition)) def test_good_partition(self): """Tests that a good partition has a high coverage measure.""" G = barbell_graph(3, 0) partition = [{0, 1, 2}, {3, 4, 5}] - assert_almost_equal(6 / 7, coverage(G, partition)) + assert almost_equal(6 / 7, coverage(G, partition)) def test_modularity(): G = nx.barbell_graph(3, 0) C = [{0, 1, 4}, {2, 3, 5}] - assert_almost_equal(-16 / (14 ** 2), modularity(G, C)) + assert almost_equal(-16 / (14 ** 2), modularity(G, C)) C = [{0, 1, 2}, {3, 4, 5}] - assert_almost_equal((35 * 2) / (14 ** 2), modularity(G, C)) + assert almost_equal((35 * 2) / (14 ** 2), modularity(G, C)) + + n = 1000 + G = nx.erdos_renyi_graph(n, 0.09, seed=42, directed=True) + C = [set(range(n // 2)), set(range(n // 2, n))] + assert almost_equal(0.00017154251389292754, modularity(G, C)) + + G = nx.margulis_gabber_galil_graph(10) + mid_value = G.number_of_nodes() // 2 + nodes = list(G.nodes) + C = [set(nodes[:mid_value]), set(nodes[mid_value:])] + assert almost_equal(0.13, modularity(G, C)) + + G = nx.DiGraph() + G.add_edges_from([(2, 1), (2, 3), (3, 4)]) + C = [{1, 2}, {3, 4}] + assert almost_equal(2 / 9, modularity(G, C)) def test_inter_community_edges_with_digraphs(): G = nx.complete_graph(2, create_using=nx.DiGraph()) partition = [{0}, {1}] - assert_equal(inter_community_edges(G, partition), 2) + assert inter_community_edges(G, partition) == 2 G = nx.complete_graph(10, create_using=nx.DiGraph()) partition = [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}] - assert_equal(inter_community_edges(G, partition), 70) + assert inter_community_edges(G, partition) == 70 G = nx.cycle_graph(4, create_using=nx.DiGraph()) partition = [{0, 1}, {2, 3}] - assert_equal(inter_community_edges(G, partition), 2) + assert inter_community_edges(G, partition) == 2 diff --git a/networkx/algorithms/community/tests/test_utils.py b/networkx/algorithms/community/tests/test_utils.py index adc7496..a031782 100644 --- a/networkx/algorithms/community/tests/test_utils.py +++ b/networkx/algorithms/community/tests/test_utils.py @@ -1,16 +1,6 @@ -# test_utils.py - unit tests for the community 
utils module -# -# Copyright 2016 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.community.utils` module. """ -from nose.tools import assert_false -from nose.tools import assert_true import networkx as nx from networkx.algorithms.community import is_partition @@ -18,14 +8,22 @@ def test_is_partition(): G = nx.empty_graph(3) - assert_true(is_partition(G, [{0, 1}, {2}])) + assert is_partition(G, [{0, 1}, {2}]) + assert is_partition(G, ({0, 1}, {2})) + assert is_partition(G, ([0, 1], [2])) + assert is_partition(G, [[0, 1], [2]]) def test_not_covering(): G = nx.empty_graph(3) - assert_false(is_partition(G, [{0}, {1}])) + assert not is_partition(G, [{0}, {1}]) def test_not_disjoint(): G = nx.empty_graph(3) - assert_false(is_partition(G, [{0, 1}, {1, 2}])) + assert not is_partition(G, [{0, 1}, {1, 2}]) + + +def test_not_node(): + G = nx.empty_graph(3) + assert not is_partition(G, [{0, 1}, {3}]) diff --git a/networkx/algorithms/components/attracting.py b/networkx/algorithms/components/attracting.py index 6245e4b..8d2cd8b 100644 --- a/networkx/algorithms/components/attracting.py +++ b/networkx/algorithms/components/attracting.py @@ -1,25 +1,15 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Christopher Ellison """Attracting components.""" -import warnings as _warnings import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['number_attracting_components', - 'attracting_components', - 'is_attracting_component', - 'attracting_component_subgraphs', - ] +__all__ = [ + "number_attracting_components", + "attracting_components", + "is_attracting_component", +] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def attracting_components(G): """Generates the attracting components in `G`. @@ -31,6 +21,9 @@ def attracting_components(G): nodes. If a random walker enters the attractor containing the node, then the node will be visited infinitely often. + To obtain induced subgraphs on each component use: + ``(G.subgraph(c).copy() for c in attracting_components(G))`` + Parameters ---------- G : DiGraph, MultiDiGraph @@ -43,7 +36,7 @@ def attracting_components(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is undirected. See Also @@ -59,7 +52,7 @@ def attracting_components(G): yield scc[n] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def number_attracting_components(G): """Returns the number of attracting components in `G`. @@ -75,7 +68,7 @@ def number_attracting_components(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is undirected. See Also @@ -87,7 +80,7 @@ def number_attracting_components(G): return sum(1 for ac in attracting_components(G)) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_attracting_component(G): """Returns True if `G` consists of a single attracting component. @@ -103,7 +96,7 @@ def is_attracting_component(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is undirected. 
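A quick picture of what counts as an attractor (illustrative, not taken from the imported source): a sink node and a terminal cycle both qualify, while an SCC with outgoing edges does not.

import networkx as nx

# {1, 2} is a terminal 2-cycle and 3 is a sink: both trap a random walker.
# The SCC {0} has outgoing edges, so it is not attracting.
G = nx.DiGraph([(0, 1), (1, 2), (2, 1), (0, 3)])
print(sorted(map(sorted, nx.attracting_components(G))))  # [[1, 2], [3]]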
See Also @@ -116,19 +109,3 @@ def is_attracting_component(G): if len(ac) == 1: return len(ac[0]) == len(G) return False - - -@not_implemented_for('undirected') -def attracting_component_subgraphs(G, copy=True): - """DEPRECATED: Use ``(G.subgraph(c) for c in attracting_components(G))`` - - Or ``(G.subgraph(c).copy() for c in attracting_components(G))`` - """ - msg = "attracting_component_subgraphs is deprecated and will be removed" \ - "in 2.2. Use (G.subgraph(c).copy() for c in attracting_components(G))" - _warnings.warn(msg, DeprecationWarning) - for c in attracting_components(G): - if copy: - yield G.subgraph(c).copy() - else: - yield G.subgraph(c) diff --git a/networkx/algorithms/components/biconnected.py b/networkx/algorithms/components/biconnected.py index c30d0d5..bbd085c 100644 --- a/networkx/algorithms/components/biconnected.py +++ b/networkx/algorithms/components/biconnected.py @@ -1,32 +1,18 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2011-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Jordi Torrents (jtorrents@milnou.net) -# Dan Schult (dschult@colgate.edu) -# Aric Hagberg (aric.hagberg@gmail.com) """Biconnected components and articulation points.""" -import warnings as _warnings from itertools import chain -import networkx as nx from networkx.utils.decorators import not_implemented_for __all__ = [ - 'biconnected_components', - 'biconnected_component_edges', - 'biconnected_component_subgraphs', - 'is_biconnected', - 'articulation_points', + "biconnected_components", + "biconnected_component_edges", + "is_biconnected", + "articulation_points", ] -@not_implemented_for('directed') +@not_implemented_for("directed") def is_biconnected(G): - """Return True if the graph is biconnected, False otherwise. + """Returns True if the graph is biconnected, False otherwise. A graph is biconnected if, and only if, it cannot be disconnected by removing only one node (and all edges incident on that node). If @@ -46,7 +32,7 @@ def is_biconnected(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is not undirected. Examples @@ -92,14 +78,16 @@ def is_biconnected(G): if len(bcc) == 1: return len(bcc[0]) == len(G) return False # Multiple bicomponents or No bicomponents (empty graph?) + + # if len(bcc) == 0: # No bicomponents (it could be an empty graph) # return False # return len(bcc[0]) == len(G) -@not_implemented_for('directed') +@not_implemented_for("directed") def biconnected_component_edges(G): - """Return a generator of lists of edges, one list for each biconnected + """Returns a generator of lists of edges, one list for each biconnected component of the input graph. Biconnected components are maximal subgraphs such that the removal of a @@ -122,7 +110,7 @@ def biconnected_component_edges(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is not undirected. Examples @@ -166,13 +154,12 @@ def biconnected_component_edges(G): Communications of the ACM 16: 372–378. 
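A small worked case for the two generators above (illustrative, not from the imported source): two triangles glued at a single node have exactly one articulation point and two biconnected components.

import networkx as nx

# Removing node 2 disconnects the two triangles.
G = nx.Graph([(0, 1), (1, 2), (2, 0), (2, 3), (3, 4), (4, 2)])
print(list(nx.articulation_points(G)))                    # [2]
print(sorted(map(sorted, nx.biconnected_components(G))))  # [[0, 1, 2], [2, 3, 4]]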
doi:10.1145/362248.362272 """ - for comp in _biconnected_dfs(G, components=True): - yield comp + yield from _biconnected_dfs(G, components=True) -@not_implemented_for('directed') +@not_implemented_for("directed") def biconnected_components(G): - """Return a generator of sets of nodes, one set for each biconnected + """Returns a generator of sets of nodes, one set for each biconnected component of the graph Biconnected components are maximal subgraphs such that the removal of a @@ -196,7 +183,7 @@ def biconnected_components(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is not undirected. See Also @@ -233,6 +220,9 @@ def biconnected_components(G): >>> Gc = max(nx.biconnected_components(G), key=len) + To create the components as subgraphs use: + ``(G.subgraph(c).copy() for c in biconnected_components(G))`` + See Also -------- is_biconnected @@ -263,23 +253,7 @@ def biconnected_components(G): yield set(chain.from_iterable(comp)) -@not_implemented_for('directed') -def biconnected_component_subgraphs(G, copy=True): - """DEPRECATED: Use ``(G.subgraph(c) for c in biconnected_components(G))`` - - Or ``(G.subgraph(c).copy() for c in biconnected_components(G))`` - """ - msg = "connected_component_subgraphs is deprecated and will be removed" \ - "in 2.2. Use (G.subgraph(c).copy() for c in biconnected_components(G))" - _warnings.warn(msg, DeprecationWarning) - for c in biconnected_components(G): - if copy: - yield G.subgraph(c).copy() - else: - yield G.subgraph(c) - - -@not_implemented_for('directed') +@not_implemented_for("directed") def articulation_points(G): """Yield the articulation points, or cut vertices, of a graph. @@ -303,7 +277,7 @@ def articulation_points(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is not undirected. Examples @@ -353,7 +327,7 @@ def articulation_points(G): yield articulation -@not_implemented_for('directed') +@not_implemented_for("directed") def _biconnected_dfs(G, components=True): # depth-first search algorithm to generate articulation points # and biconnected components diff --git a/networkx/algorithms/components/connected.py b/networkx/algorithms/components/connected.py index c94b3db..95b169c 100644 --- a/networkx/algorithms/components/connected.py +++ b/networkx/algorithms/components/connected.py @@ -1,30 +1,17 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Eben Kenah -# Aric Hagberg (hagberg@lanl.gov) -# Christopher Ellison """Connected components.""" -import warnings as _warnings import networkx as nx from networkx.utils.decorators import not_implemented_for from ...utils import arbitrary_element __all__ = [ - 'number_connected_components', - 'connected_components', - 'connected_component_subgraphs', - 'is_connected', - 'node_connected_component', + "number_connected_components", + "connected_components", + "is_connected", + "node_connected_component", ] -@not_implemented_for('directed') +@not_implemented_for("directed") def connected_components(G): """Generate connected components. @@ -40,7 +27,7 @@ def connected_components(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is directed. 
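number_connected_components below simply counts the generator's output without materializing any component; the same idiom is useful on its own (sketch, not part of the imported source):

import networkx as nx

G = nx.Graph([(0, 1), (2, 3), (4, 5)])             # three 2-node components
print(sum(1 for _ in nx.connected_components(G)))  # 3, no lists built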
Examples @@ -57,6 +44,10 @@ def connected_components(G): >>> largest_cc = max(nx.connected_components(G), key=len) + To create the induced subgraph of each component use: + + >>> S = [G.subgraph(c).copy() for c in nx.connected_components(G)] + See Also -------- strongly_connected_components @@ -70,29 +61,13 @@ def connected_components(G): seen = set() for v in G: if v not in seen: - c = set(_plain_bfs(G, v)) - yield c + c = _plain_bfs(G, v) seen.update(c) - - -@not_implemented_for('directed') -def connected_component_subgraphs(G, copy=True): - """DEPRECATED: Use ``(G.subgraph(c) for c in connected_components(G))`` - - Or ``(G.subgraph(c).copy() for c in connected_components(G))`` - """ - msg = "connected_component_subgraphs is deprecated and will be removed" \ - "in 2.2. Use (G.subgraph(c).copy() for c in connected_components(G))" - _warnings.warn(msg, DeprecationWarning) - for c in connected_components(G): - if copy: - yield G.subgraph(c).copy() - else: - yield G.subgraph(c) + yield c def number_connected_components(G): - """Return the number of connected components. + """Returns the number of connected components. Parameters ---------- @@ -118,9 +93,9 @@ def number_connected_components(G): return sum(1 for cc in connected_components(G)) -@not_implemented_for('directed') +@not_implemented_for("directed") def is_connected(G): - """Return True if the graph is connected, False otherwise. + """Returns True if the graph is connected, False otherwise. Parameters ---------- @@ -134,7 +109,7 @@ def is_connected(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is directed. Examples @@ -157,14 +132,15 @@ def is_connected(G): """ if len(G) == 0: - raise nx.NetworkXPointlessConcept('Connectivity is undefined ', - 'for the null graph.') + raise nx.NetworkXPointlessConcept( + "Connectivity is undefined ", "for the null graph." + ) return sum(1 for node in _plain_bfs(G, arbitrary_element(G))) == len(G) -@not_implemented_for('directed') +@not_implemented_for("directed") def node_connected_component(G, n): - """Return the set of nodes in the component of graph containing node n. + """Returns the set of nodes in the component of graph containing node n. Parameters ---------- @@ -181,7 +157,7 @@ def node_connected_component(G, n): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is directed. See Also @@ -193,7 +169,7 @@ def node_connected_component(G, n): For undirected graphs only. """ - return set(_plain_bfs(G, n)) + return _plain_bfs(G, n) def _plain_bfs(G, source): @@ -206,6 +182,6 @@ def _plain_bfs(G, source): nextlevel = set() for v in thislevel: if v not in seen: - yield v seen.add(v) nextlevel.update(G_adj[v]) + return seen diff --git a/networkx/algorithms/components/semiconnected.py b/networkx/algorithms/components/semiconnected.py index 2c4f191..9603f9d 100644 --- a/networkx/algorithms/components/semiconnected.py +++ b/networkx/algorithms/components/semiconnected.py @@ -1,22 +1,13 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: ysitu (ysitu@users.noreply.github.com) """Semiconnectedness.""" import networkx as nx from networkx.utils import not_implemented_for, pairwise -__all__ = ['is_semiconnected'] +__all__ = ["is_semiconnected"] -@not_implemented_for('undirected') -def is_semiconnected(G): - """Return True if the graph is semiconnected, False otherwise. 
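The _plain_bfs rewrite just above returns the whole reachable set instead of yielding nodes one at a time; a self-contained sketch of that pattern (function and variable names here are illustrative):

def plain_bfs(adj, source):
    # Breadth-first search over a plain adjacency mapping; returns the set
    # of nodes reachable from `source`.
    seen = set()
    nextlevel = {source}
    while nextlevel:
        thislevel = nextlevel
        nextlevel = set()
        for v in thislevel:
            if v not in seen:
                seen.add(v)
                nextlevel.update(adj[v])
    return seen

adj = {0: {1}, 1: {0, 2}, 2: {1}, 3: set()}
print(plain_bfs(adj, 0))  # {0, 1, 2}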
+@not_implemented_for("undirected") +def is_semiconnected(G, topo_order=None): + """Returns True if the graph is semiconnected, False otherwise. A graph is semiconnected if, and only if, for any pair of nodes, either one is reachable from the other, or they are mutually reachable. @@ -26,6 +17,9 @@ def is_semiconnected(G): G : NetworkX graph A directed graph. + topo_order: list or tuple, optional + A topological order for G (if None, the function will compute one) + Returns ------- semiconnected : bool @@ -33,18 +27,18 @@ def is_semiconnected(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If the input graph is undirected. - NetworkXPointlessConcept : + NetworkXPointlessConcept If the graph is empty. Examples -------- - >>> G=nx.path_graph(4,create_using=nx.DiGraph()) + >>> G = nx.path_graph(4, create_using=nx.DiGraph()) >>> print(nx.is_semiconnected(G)) True - >>> G=nx.DiGraph([(1, 2), (3, 2)]) + >>> G = nx.DiGraph([(1, 2), (3, 2)]) >>> print(nx.is_semiconnected(G)) False @@ -57,11 +51,14 @@ def is_semiconnected(G): """ if len(G) == 0: raise nx.NetworkXPointlessConcept( - 'Connectivity is undefined for the null graph.') + "Connectivity is undefined for the null graph." + ) if not nx.is_weakly_connected(G): return False G = nx.condensation(G) - path = nx.topological_sort(G) - return all(G.has_edge(u, v) for u, v in pairwise(path)) + if topo_order is None: + topo_order = nx.topological_sort(G) + + return all(G.has_edge(u, v) for u, v in pairwise(topo_order)) diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py index 6c38437..8df56c5 100644 --- a/networkx/algorithms/components/strongly_connected.py +++ b/networkx/algorithms/components/strongly_connected.py @@ -1,30 +1,18 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Eben Kenah -# Aric Hagberg (hagberg@lanl.gov) -# Christopher Ellison -# Ben Edwards (bedwards@cs.unm.edu) """Strongly connected components.""" -import warnings as _warnings import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['number_strongly_connected_components', - 'strongly_connected_components', - 'strongly_connected_component_subgraphs', - 'is_strongly_connected', - 'strongly_connected_components_recursive', - 'kosaraju_strongly_connected_components', - 'condensation'] +__all__ = [ + "number_strongly_connected_components", + "strongly_connected_components", + "is_strongly_connected", + "strongly_connected_components_recursive", + "kosaraju_strongly_connected_components", + "condensation", +] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def strongly_connected_components(G): """Generate nodes in strongly connected components of graph. @@ -41,7 +29,7 @@ def strongly_connected_components(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If G is undirected. Examples @@ -50,8 +38,10 @@ def strongly_connected_components(G): >>> G = nx.cycle_graph(4, create_using=nx.DiGraph()) >>> nx.add_cycle(G, [10, 11, 12]) - >>> [len(c) for c in sorted(nx.strongly_connected_components(G), - ... key=len, reverse=True)] + >>> [ + ... len(c) + ... for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True) + ... 
] [4, 3] If you only want the largest component, it's more efficient to @@ -82,9 +72,9 @@ def strongly_connected_components(G): """ preorder = {} lowlink = {} - scc_found = {} + scc_found = set() scc_queue = [] - i = 0 # Preorder counter + i = 0 # Preorder counter for source in G: if source not in scc_found: queue = [source] @@ -93,16 +83,15 @@ def strongly_connected_components(G): if v not in preorder: i = i + 1 preorder[v] = i - done = 1 - v_nbrs = G[v] - for w in v_nbrs: + done = True + for w in G[v]: if w not in preorder: queue.append(w) - done = 0 + done = False break - if done == 1: + if done: lowlink[v] = preorder[v] - for w in v_nbrs: + for w in G[v]: if w not in scc_found: if preorder[w] > preorder[v]: lowlink[v] = min([lowlink[v], lowlink[w]]) @@ -110,18 +99,17 @@ def strongly_connected_components(G): lowlink[v] = min([lowlink[v], preorder[w]]) queue.pop() if lowlink[v] == preorder[v]: - scc_found[v] = True scc = {v} while scc_queue and preorder[scc_queue[-1]] > preorder[v]: k = scc_queue.pop() - scc_found[k] = True scc.add(k) + scc_found.update(scc) yield scc else: scc_queue.append(v) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def kosaraju_strongly_connected_components(G, source=None): """Generate nodes in strongly connected components of graph. @@ -138,7 +126,7 @@ def kosaraju_strongly_connected_components(G, source=None): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. Examples @@ -147,8 +135,12 @@ def kosaraju_strongly_connected_components(G, source=None): >>> G = nx.cycle_graph(4, create_using=nx.DiGraph()) >>> nx.add_cycle(G, [10, 11, 12]) - >>> [len(c) for c in sorted(nx.kosaraju_strongly_connected_components(G), - ... key=len, reverse=True)] + >>> [ + ... len(c) + ... for c in sorted( + ... nx.kosaraju_strongly_connected_components(G), key=len, reverse=True + ... ) + ... ] [4, 3] If you only want the largest component, it's more efficient to @@ -165,8 +157,7 @@ def kosaraju_strongly_connected_components(G, source=None): Uses Kosaraju's algorithm. """ - with nx.utils.reversed(G): - post = list(nx.dfs_postorder_nodes(G, source=source)) + post = list(nx.dfs_postorder_nodes(G.reverse(copy=False), source=source)) seen = set() while post: @@ -179,7 +170,7 @@ def kosaraju_strongly_connected_components(G, source=None): seen.update(new) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def strongly_connected_components_recursive(G): """Generate nodes in strongly connected components of graph. @@ -198,7 +189,7 @@ def strongly_connected_components_recursive(G): Raises ------ - NetworkXNotImplemented : + NetworkXNotImplemented If G is undirected. Examples @@ -207,8 +198,12 @@ def strongly_connected_components_recursive(G): >>> G = nx.cycle_graph(4, create_using=nx.DiGraph()) >>> nx.add_cycle(G, [10, 11, 12]) - >>> [len(c) for c in sorted(nx.strongly_connected_components_recursive(G), - ... key=len, reverse=True)] + >>> [ + ... len(c) + ... for c in sorted( + ... nx.strongly_connected_components_recursive(G), key=len, reverse=True + ... ) + ... 
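The Kosaraju rewrite above replaces the old in-place reversal context manager with a reverse view; copy=False shares edge data with G instead of copying it (sketch, not part of the imported source):

import networkx as nx

G = nx.DiGraph([(0, 1)])
R = G.reverse(copy=False)  # a view: no edges are duplicated
print(list(R.edges()))     # [(1, 0)]
G.add_edge(1, 2)
print(list(R.edges()))     # [(1, 0), (2, 1)] -- the view tracks G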
] [4, 3] If you only want the largest component, it's more efficient to @@ -216,6 +211,9 @@ def strongly_connected_components_recursive(G): >>> largest = max(nx.strongly_connected_components_recursive(G), key=len) + To create the induced subgraph of the components use: + >>> S = [G.subgraph(c).copy() for c in nx.weakly_connected_components(G)] + See Also -------- connected_components @@ -234,6 +232,7 @@ def strongly_connected_components_recursive(G): Information Processing Letters 49(1): 9-14, (1994).. """ + def visit(v, cnt): root[v] = cnt visited[v] = cnt @@ -241,8 +240,7 @@ def visit(v, cnt): stack.append(v) for w in G[v]: if w not in visited: - for c in visit(w, cnt): - yield c + yield from visit(w, cnt) if w not in component: root[v] = min(root[v], root[w]) if root[v] == visited[v]: @@ -262,29 +260,12 @@ def visit(v, cnt): stack = [] for source in G: if source not in visited: - for c in visit(source, cnt): - yield c - - -@not_implemented_for('undirected') -def strongly_connected_component_subgraphs(G, copy=True): - """DEPRECATED: Use ``(G.subgraph(c) for c in strongly_connected_components(G))`` - - Or ``(G.subgraph(c).copy() for c in strongly_connected_components(G))`` - """ - msg = "strongly_connected_component_subgraphs is deprecated and will be removed in 2.2" \ - "use (G.subgraph(c).copy() for c in strongly_connected_components(G))" - _warnings.warn(msg, DeprecationWarning) - for c in strongly_connected_components(G): - if copy: - yield G.subgraph(c).copy() - else: - yield G.subgraph(c) + yield from visit(source, cnt) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def number_strongly_connected_components(G): - """Return number of strongly connected components in graph. + """Returns number of strongly connected components in graph. Parameters ---------- @@ -298,7 +279,7 @@ def number_strongly_connected_components(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. See Also @@ -314,7 +295,7 @@ def number_strongly_connected_components(G): return sum(1 for scc in strongly_connected_components(G)) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_strongly_connected(G): """Test directed graph for strong connectivity. @@ -333,7 +314,7 @@ def is_strongly_connected(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. See Also @@ -350,12 +331,13 @@ def is_strongly_connected(G): """ if len(G) == 0: raise nx.NetworkXPointlessConcept( - """Connectivity is undefined for the null graph.""") + """Connectivity is undefined for the null graph.""" + ) return len(list(strongly_connected_components(G))[0]) == len(G) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def condensation(G, scc=None): """Returns the condensation of G. @@ -385,7 +367,7 @@ def condensation(G, scc=None): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. 
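A compact condensation example (illustrative; SCC labels depend on discovery order, so only label-independent facts are checked):

import networkx as nx

G = nx.DiGraph([(1, 2), (2, 1), (2, 3)])         # {1, 2} is a 2-cycle, 3 a sink
C = nx.condensation(G)
print(C.number_of_nodes(), C.number_of_edges())  # 2 1
m = C.graph["mapping"]
print(m[1] == m[2] != m[3])                      # True: 1 and 2 share an SCC
print(C.nodes[m[1]]["members"])                  # {1, 2}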
Notes @@ -400,7 +382,7 @@ def condensation(G, scc=None): members = {} C = nx.DiGraph() # Add mapping dict as graph attribute - C.graph['mapping'] = mapping + C.graph["mapping"] = mapping if len(G) == 0: return C for i, component in enumerate(scc): @@ -408,8 +390,9 @@ def condensation(G, scc=None): mapping.update((n, i) for n in component) number_of_components = i + 1 C.add_nodes_from(range(number_of_components)) - C.add_edges_from((mapping[u], mapping[v]) for u, v in G.edges() - if mapping[u] != mapping[v]) + C.add_edges_from( + (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v] + ) # Add a list of members (ie original nodes) to each node (ie scc) in C. - nx.set_node_attributes(C, members, 'members') + nx.set_node_attributes(C, members, "members") return C diff --git a/networkx/algorithms/components/tests/__init__.py b/networkx/algorithms/components/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/components/tests/test_attracting.py b/networkx/algorithms/components/tests/test_attracting.py index bfeae1a..aee49e0 100644 --- a/networkx/algorithms/components/tests/test_attracting.py +++ b/networkx/algorithms/components/tests/test_attracting.py @@ -1,59 +1,68 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx from networkx import NetworkXNotImplemented -class TestAttractingComponents(object): - def setUp(self): - self.G1 = nx.DiGraph() - self.G1.add_edges_from([(5, 11), (11, 2), (11, 9), (11, 10), - (7, 11), (7, 8), (8, 9), (3, 8), (3, 10)]) - self.G2 = nx.DiGraph() - self.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)]) +class TestAttractingComponents: + @classmethod + def setup_class(cls): + cls.G1 = nx.DiGraph() + cls.G1.add_edges_from( + [ + (5, 11), + (11, 2), + (11, 9), + (11, 10), + (7, 11), + (7, 8), + (8, 9), + (3, 8), + (3, 10), + ] + ) + cls.G2 = nx.DiGraph() + cls.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)]) - self.G3 = nx.DiGraph() - self.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)]) + cls.G3 = nx.DiGraph() + cls.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)]) - self.G4 = nx.DiGraph() + cls.G4 = nx.DiGraph() def test_attracting_components(self): ac = list(nx.attracting_components(self.G1)) - assert_true({2} in ac) - assert_true({9} in ac) - assert_true({10} in ac) + assert {2} in ac + assert {9} in ac + assert {10} in ac ac = list(nx.attracting_components(self.G2)) ac = [tuple(sorted(x)) for x in ac] - assert_true(ac == [(1, 2)]) + assert ac == [(1, 2)] ac = list(nx.attracting_components(self.G3)) ac = [tuple(sorted(x)) for x in ac] - assert_true((1, 2) in ac) - assert_true((3, 4) in ac) - assert_equal(len(ac), 2) + assert (1, 2) in ac + assert (3, 4) in ac + assert len(ac) == 2 ac = list(nx.attracting_components(self.G4)) - assert_equal(ac, []) + assert ac == [] def test_number_attacting_components(self): - assert_equal(nx.number_attracting_components(self.G1), 3) - assert_equal(nx.number_attracting_components(self.G2), 1) - assert_equal(nx.number_attracting_components(self.G3), 2) - assert_equal(nx.number_attracting_components(self.G4), 0) + assert nx.number_attracting_components(self.G1) == 3 + assert nx.number_attracting_components(self.G2) == 1 + assert nx.number_attracting_components(self.G3) == 2 + assert nx.number_attracting_components(self.G4) == 0 def test_is_attracting_component(self): - assert_false(nx.is_attracting_component(self.G1)) - assert_false(nx.is_attracting_component(self.G2)) - 
assert_false(nx.is_attracting_component(self.G3)) + assert not nx.is_attracting_component(self.G1) + assert not nx.is_attracting_component(self.G2) + assert not nx.is_attracting_component(self.G3) g2 = self.G3.subgraph([1, 2]) - assert_true(nx.is_attracting_component(g2)) - assert_false(nx.is_attracting_component(self.G4)) + assert nx.is_attracting_component(g2) + assert not nx.is_attracting_component(self.G4) def test_connected_raise(self): G = nx.Graph() - assert_raises(NetworkXNotImplemented, nx.attracting_components, G) - assert_raises(NetworkXNotImplemented, nx.number_attracting_components, G) - assert_raises(NetworkXNotImplemented, nx.is_attracting_component, G) - # deprecated - assert_raises(NetworkXNotImplemented, nx.attracting_component_subgraphs, G) + pytest.raises(NetworkXNotImplemented, nx.attracting_components, G) + pytest.raises(NetworkXNotImplemented, nx.number_attracting_components, G) + pytest.raises(NetworkXNotImplemented, nx.is_attracting_component, G) diff --git a/networkx/algorithms/components/tests/test_biconnected.py b/networkx/algorithms/components/tests/test_biconnected.py index ae9e103..c21c0a8 100644 --- a/networkx/algorithms/components/tests/test_biconnected.py +++ b/networkx/algorithms/components/tests/test_biconnected.py @@ -1,20 +1,18 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx -from networkx.algorithms.components import biconnected from networkx import NetworkXNotImplemented def assert_components_edges_equal(x, y): sx = {frozenset([frozenset(e) for e in c]) for c in x} sy = {frozenset([frozenset(e) for e in c]) for c in y} - assert_equal(sx, sy) + assert sx == sy def assert_components_equal(x, y): sx = {frozenset(c) for c in x} sy = {frozenset(c) for c in y} - assert_equal(sx, sy) + assert sx == sy def test_barbell(): @@ -22,7 +20,7 @@ def test_barbell(): nx.add_path(G, [7, 20, 21, 22]) nx.add_cycle(G, [22, 23, 24, 25]) pts = set(nx.articulation_points(G)) - assert_equal(pts, {7, 8, 9, 10, 11, 12, 20, 21, 22}) + assert pts == {7, 8, 9, 10, 11, 12, 20, 21, 22} answer = [ {12, 13, 14, 15, 16, 17, 18, 19}, @@ -41,34 +39,34 @@ def test_barbell(): G.add_edge(2, 17) pts = set(nx.articulation_points(G)) - assert_equal(pts, {7, 20, 21, 22}) + assert pts == {7, 20, 21, 22} def test_articulation_points_repetitions(): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3)]) - assert_equal(list(nx.articulation_points(G)), [1]) + assert list(nx.articulation_points(G)) == [1] def test_articulation_points_cycle(): G = nx.cycle_graph(3) nx.add_cycle(G, [1, 3, 4]) pts = set(nx.articulation_points(G)) - assert_equal(pts, {1}) + assert pts == {1} def test_is_biconnected(): G = nx.cycle_graph(3) - assert_true(nx.is_biconnected(G)) + assert nx.is_biconnected(G) nx.add_cycle(G, [1, 3, 4]) - assert_false(nx.is_biconnected(G)) + assert not nx.is_biconnected(G) def test_empty_is_biconnected(): G = nx.empty_graph(5) - assert_false(nx.is_biconnected(G)) + assert not nx.is_biconnected(G) G.add_edge(0, 1) - assert_false(nx.is_biconnected(G)) + assert not nx.is_biconnected(G) def test_biconnected_components_cycle(): @@ -77,34 +75,39 @@ def test_biconnected_components_cycle(): answer = [{0, 1, 2}, {1, 3, 4}] assert_components_equal(list(nx.biconnected_components(G)), answer) -# deprecated - - -def test_biconnected_component_subgraphs_cycle(): - G = nx.cycle_graph(3) - nx.add_cycle(G, [1, 3, 4, 5]) - Gc = set(nx.biconnected_component_subgraphs(G)) - assert_equal(len(Gc), 2) - g1, g2 = Gc - if 0 in g1: - assert_true(nx.is_isomorphic(g1, 
nx.Graph([(0, 1), (0, 2), (1, 2)]))) - assert_true(nx.is_isomorphic(g2, nx.Graph([(1, 3), (1, 5), (3, 4), (4, 5)]))) - else: - assert_true(nx.is_isomorphic(g1, nx.Graph([(1, 3), (1, 5), (3, 4), (4, 5)]))) - assert_true(nx.is_isomorphic(g2, nx.Graph([(0, 1), (0, 2), (1, 2)]))) - def test_biconnected_components1(): # graph example from # http://www.ibluemojo.com/school/articul_algorithm.html edges = [ - (0, 1), (0, 5), (0, 6), (0, 14), (1, 5), (1, 6), (1, 14), (2, 4), - (2, 10), (3, 4), (3, 15), (4, 6), (4, 7), (4, 10), (5, 14), (6, 14), - (7, 9), (8, 9), (8, 12), (8, 13), (10, 15), (11, 12), (11, 13), (12, 13) + (0, 1), + (0, 5), + (0, 6), + (0, 14), + (1, 5), + (1, 6), + (1, 14), + (2, 4), + (2, 10), + (3, 4), + (3, 15), + (4, 6), + (4, 7), + (4, 10), + (5, 14), + (6, 14), + (7, 9), + (8, 9), + (8, 12), + (8, 13), + (10, 15), + (11, 12), + (11, 13), + (12, 13), ] G = nx.Graph(edges) pts = set(nx.articulation_points(G)) - assert_equal(pts, {4, 6, 7, 8, 9}) + assert pts == {4, 6, 7, 8, 9} comps = list(nx.biconnected_component_edges(G)) answer = [ [(3, 4), (15, 3), (10, 15), (10, 4), (2, 10), (4, 2)], @@ -120,18 +123,25 @@ def test_biconnected_components1(): def test_biconnected_components2(): G = nx.Graph() - nx.add_cycle(G, 'ABC') - nx.add_cycle(G, 'CDE') - nx.add_cycle(G, 'FIJHG') - nx.add_cycle(G, 'GIJ') - G.add_edge('E', 'G') + nx.add_cycle(G, "ABC") + nx.add_cycle(G, "CDE") + nx.add_cycle(G, "FIJHG") + nx.add_cycle(G, "GIJ") + G.add_edge("E", "G") comps = list(nx.biconnected_component_edges(G)) answer = [ - [tuple('GF'), tuple('FI'), tuple('IG'), tuple('IJ'), - tuple('JG'), tuple('JH'), tuple('HG')], - [tuple('EG')], - [tuple('CD'), tuple('DE'), tuple('CE')], - [tuple('AB'), tuple('BC'), tuple('AC')] + [ + tuple("GF"), + tuple("FI"), + tuple("IG"), + tuple("IJ"), + tuple("JG"), + tuple("JH"), + tuple("HG"), + ], + [tuple("EG")], + [tuple("CD"), tuple("DE"), tuple("CE")], + [tuple("AB"), tuple("BC"), tuple("AC")], ] assert_components_edges_equal(comps, answer) @@ -139,46 +149,80 @@ def test_biconnected_components2(): def test_biconnected_davis(): D = nx.davis_southern_women_graph() bcc = list(nx.biconnected_components(D))[0] - assert_true(set(D) == bcc) # All nodes in a giant bicomponent + assert set(D) == bcc # All nodes in a giant bicomponent # So no articulation points - assert_equal(len(list(nx.articulation_points(D))), 0) + assert len(list(nx.articulation_points(D))) == 0 def test_biconnected_karate(): K = nx.karate_club_graph() - answer = [{0, 1, 2, 3, 7, 8, 9, 12, 13, 14, 15, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33}, - {0, 4, 5, 6, 10, 16}, - {0, 11}] + answer = [ + { + 0, + 1, + 2, + 3, + 7, + 8, + 9, + 12, + 13, + 14, + 15, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + }, + {0, 4, 5, 6, 10, 16}, + {0, 11}, + ] bcc = list(nx.biconnected_components(K)) assert_components_equal(bcc, answer) - assert_equal(set(nx.articulation_points(K)), {0}) + assert set(nx.articulation_points(K)) == {0} def test_biconnected_eppstein(): # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py - G1 = nx.Graph({ - 0: [1, 2, 5], - 1: [0, 5], - 2: [0, 3, 4], - 3: [2, 4, 5, 6], - 4: [2, 3, 5, 6], - 5: [0, 1, 3, 4], - 6: [3, 4], - }) - G2 = nx.Graph({ - 0: [2, 5], - 1: [3, 8], - 2: [0, 3, 5], - 3: [1, 2, 6, 8], - 4: [7], - 5: [0, 2], - 6: [3, 8], - 7: [4], - 8: [1, 3, 6], - }) - assert_true(nx.is_biconnected(G1)) - assert_false(nx.is_biconnected(G2)) + G1 = nx.Graph( + { + 0: [1, 2, 5], + 1: [0, 5], + 2: [0, 
3, 4], + 3: [2, 4, 5, 6], + 4: [2, 3, 5, 6], + 5: [0, 1, 3, 4], + 6: [3, 4], + } + ) + G2 = nx.Graph( + { + 0: [2, 5], + 1: [3, 8], + 2: [0, 3, 5], + 3: [1, 2, 6, 8], + 4: [7], + 5: [0, 2], + 6: [3, 8], + 7: [4], + 8: [1, 3, 6], + } + ) + assert nx.is_biconnected(G1) + assert not nx.is_biconnected(G2) answer_G2 = [{1, 3, 6, 8}, {0, 2, 5}, {2, 3}, {4, 7}] bcc = list(nx.biconnected_components(G2)) assert_components_equal(bcc, answer_G2) @@ -186,17 +230,15 @@ def test_biconnected_eppstein(): def test_null_graph(): G = nx.Graph() - assert_false(nx.is_biconnected(G)) - assert_equal(list(nx.biconnected_components(G)), []) - assert_equal(list(nx.biconnected_component_edges(G)), []) - assert_equal(list(nx.articulation_points(G)), []) + assert not nx.is_biconnected(G) + assert list(nx.biconnected_components(G)) == [] + assert list(nx.biconnected_component_edges(G)) == [] + assert list(nx.articulation_points(G)) == [] def test_connected_raise(): DG = nx.DiGraph() - assert_raises(NetworkXNotImplemented, nx.biconnected_components, DG) - assert_raises(NetworkXNotImplemented, nx.biconnected_component_edges, DG) - assert_raises(NetworkXNotImplemented, nx.articulation_points, DG) - assert_raises(NetworkXNotImplemented, nx.is_biconnected, DG) - # deprecated - assert_raises(NetworkXNotImplemented, nx.biconnected_component_subgraphs, DG) + pytest.raises(NetworkXNotImplemented, nx.biconnected_components, DG) + pytest.raises(NetworkXNotImplemented, nx.biconnected_component_edges, DG) + pytest.raises(NetworkXNotImplemented, nx.articulation_points, DG) + pytest.raises(NetworkXNotImplemented, nx.is_biconnected, DG) diff --git a/networkx/algorithms/components/tests/test_connected.py b/networkx/algorithms/components/tests/test_connected.py index 594f5d0..ebe30ac 100644 --- a/networkx/algorithms/components/tests/test_connected.py +++ b/networkx/algorithms/components/tests/test_connected.py @@ -1,50 +1,63 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx from networkx import convert_node_labels_to_integers as cnlti from networkx import NetworkXNotImplemented class TestConnected: - - def setUp(self): + @classmethod + def setup_class(cls): G1 = cnlti(nx.grid_2d_graph(2, 2), first_label=0, ordering="sorted") G2 = cnlti(nx.lollipop_graph(3, 3), first_label=4, ordering="sorted") G3 = cnlti(nx.house_graph(), first_label=10, ordering="sorted") - self.G = nx.union(G1, G2) - self.G = nx.union(self.G, G3) - self.DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)]) - self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1) + cls.G = nx.union(G1, G2) + cls.G = nx.union(cls.G, G3) + cls.DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)]) + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1) - self.gc = [] + cls.gc = [] G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5), - (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) C = [[3, 4, 5, 7], [1, 2, 8], [6]] - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)]) C = [[2, 3, 4], [1]] - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)]) C = [[1, 2, 3]] - self.gc.append((G, C)) + cls.gc.append((G, C)) # Eppstein's tests G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []}) C = [[0], [1], [2], [3], [4], [5], [6]] - 
self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]}) C = [[0, 1, 2], [3, 4]] - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() C = [] - self.gc.append((G, C)) + cls.gc.append((G, C)) def test_connected_components(self): cc = nx.connected_components @@ -52,51 +65,47 @@ def test_connected_components(self): C = { frozenset([0, 1, 2, 3]), frozenset([4, 5, 6, 7, 8, 9]), - frozenset([10, 11, 12, 13, 14]) + frozenset([10, 11, 12, 13, 14]), } - assert_equal({frozenset(g) for g in cc(G)}, C) + assert {frozenset(g) for g in cc(G)} == C def test_number_connected_components(self): ncc = nx.number_connected_components - assert_equal(ncc(self.G), 3) + assert ncc(self.G) == 3 def test_number_connected_components2(self): ncc = nx.number_connected_components - assert_equal(ncc(self.grid), 1) + assert ncc(self.grid) == 1 def test_connected_components2(self): cc = nx.connected_components G = self.grid C = {frozenset([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16])} - assert_equal({frozenset(g) for g in cc(G)}, C) + assert {frozenset(g) for g in cc(G)} == C def test_node_connected_components(self): ncc = nx.node_connected_component G = self.grid C = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16} - assert_equal(ncc(G, 1), C) - - # deprecated - def test_connected_component_subgraphs(self): - wcc = nx.weakly_connected_component_subgraphs - cc = nx.connected_component_subgraphs - for G, C in self.gc: - U = G.to_undirected() - w = {frozenset(g) for g in wcc(G)} - c = {frozenset(g) for g in cc(U)} - assert_equal(w, c) + assert ncc(G, 1) == C def test_is_connected(self): - assert_true(nx.is_connected(self.grid)) + assert nx.is_connected(self.grid) G = nx.Graph() G.add_nodes_from([1, 2]) - assert_false(nx.is_connected(G)) + assert not nx.is_connected(G) def test_connected_raise(self): - assert_raises(NetworkXNotImplemented, nx.connected_components, self.DG) - assert_raises(NetworkXNotImplemented, nx.number_connected_components, self.DG) - assert_raises(NetworkXNotImplemented, nx.node_connected_component, self.DG, 1) - assert_raises(NetworkXNotImplemented, nx.is_connected, self.DG) - assert_raises(nx.NetworkXPointlessConcept, nx.is_connected, nx.Graph()) - # deprecated - assert_raises(NetworkXNotImplemented, nx.connected_component_subgraphs, self.DG) + pytest.raises(NetworkXNotImplemented, nx.connected_components, self.DG) + pytest.raises(NetworkXNotImplemented, nx.number_connected_components, self.DG) + pytest.raises(NetworkXNotImplemented, nx.node_connected_component, self.DG, 1) + pytest.raises(NetworkXNotImplemented, nx.is_connected, self.DG) + pytest.raises(nx.NetworkXPointlessConcept, nx.is_connected, nx.Graph()) + + def test_connected_mutability(self): + G = self.grid + seen = set() + for component in nx.connected_components(G): + assert len(seen & component) == 0 + seen.update(component) + component.clear() diff --git a/networkx/algorithms/components/tests/test_semiconnected.py b/networkx/algorithms/components/tests/test_semiconnected.py index e92a648..d9a8c7c 100644 --- a/networkx/algorithms/components/tests/test_semiconnected.py +++ b/networkx/algorithms/components/tests/test_semiconnected.py @@ -1,54 +1,53 @@ from itertools import chain import networkx as nx -from nose.tools import * +import pytest -class TestIsSemiconnected(object): - +class TestIsSemiconnected: def test_undirected(self): - assert_raises(nx.NetworkXNotImplemented, nx.is_semiconnected, - nx.Graph()) - assert_raises(nx.NetworkXNotImplemented, 
nx.is_semiconnected, - nx.MultiGraph()) + pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.MultiGraph()) def test_empty(self): - assert_raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, - nx.DiGraph()) - assert_raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, - nx.MultiDiGraph()) + pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.DiGraph()) + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.MultiDiGraph() + ) def test_single_node_graph(self): G = nx.DiGraph() G.add_node(0) - ok_(nx.is_semiconnected(G)) + assert nx.is_semiconnected(G) def test_path(self): G = nx.path_graph(100, create_using=nx.DiGraph()) - ok_(nx.is_semiconnected(G)) + assert nx.is_semiconnected(G) G.add_edge(100, 99) - ok_(not nx.is_semiconnected(G)) + assert not nx.is_semiconnected(G) def test_cycle(self): G = nx.cycle_graph(100, create_using=nx.DiGraph()) - ok_(nx.is_semiconnected(G)) + assert nx.is_semiconnected(G) G = nx.path_graph(100, create_using=nx.DiGraph()) G.add_edge(0, 99) - ok_(nx.is_semiconnected(G)) + assert nx.is_semiconnected(G) def test_tree(self): G = nx.DiGraph() - G.add_edges_from(chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] - for i in range(100))) - ok_(not nx.is_semiconnected(G)) + G.add_edges_from( + chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] for i in range(100)) + ) + assert not nx.is_semiconnected(G) def test_dumbbell(self): G = nx.cycle_graph(100, create_using=nx.DiGraph()) G.add_edges_from((i + 100, (i + 1) % 100 + 100) for i in range(100)) - ok_(not nx.is_semiconnected(G)) # G is disconnected. + assert not nx.is_semiconnected(G) # G is disconnected. G.add_edge(100, 99) - ok_(nx.is_semiconnected(G)) + assert nx.is_semiconnected(G) def test_alternating_path(self): - G = nx.DiGraph(chain.from_iterable([(i, i - 1), (i, i + 1)] - for i in range(0, 100, 2))) - ok_(not nx.is_semiconnected(G)) + G = nx.DiGraph( + chain.from_iterable([(i, i - 1), (i, i + 1)] for i in range(0, 100, 2)) + ) + assert not nx.is_semiconnected(G) diff --git a/networkx/algorithms/components/tests/test_strongly_connected.py b/networkx/algorithms/components/tests/test_strongly_connected.py index d0aecb2..959b333 100644 --- a/networkx/algorithms/components/tests/test_strongly_connected.py +++ b/networkx/algorithms/components/tests/test_strongly_connected.py @@ -1,28 +1,41 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx from networkx import NetworkXNotImplemented class TestStronglyConnected: - - def setUp(self): - self.gc = [] + @classmethod + def setup_class(cls): + cls.gc = [] G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5), - (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) C = {frozenset([3, 4, 5, 7]), frozenset([1, 2, 8]), frozenset([6])} - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)]) C = {frozenset([2, 3, 4]), frozenset([1])} - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)]) C = {frozenset([1, 2, 3])} - self.gc.append((G, C)) + cls.gc.append((G, C)) # Eppstein's tests G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []}) @@ -35,69 +48,83 @@ def setUp(self): 
frozenset([5]), frozenset([6]), } - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]}) C = {frozenset([0, 1, 2]), frozenset([3, 4])} - self.gc.append((G, C)) + cls.gc.append((G, C)) def test_tarjan(self): scc = nx.strongly_connected_components for G, C in self.gc: - assert_equal({frozenset(g) for g in scc(G)}, C) + assert {frozenset(g) for g in scc(G)} == C def test_tarjan_recursive(self): scc = nx.strongly_connected_components_recursive for G, C in self.gc: - assert_equal({frozenset(g) for g in scc(G)}, C) + assert {frozenset(g) for g in scc(G)} == C def test_kosaraju(self): scc = nx.kosaraju_strongly_connected_components for G, C in self.gc: - assert_equal({frozenset(g) for g in scc(G)}, C) + assert {frozenset(g) for g in scc(G)} == C def test_number_strongly_connected_components(self): ncc = nx.number_strongly_connected_components for G, C in self.gc: - assert_equal(ncc(G), len(C)) + assert ncc(G) == len(C) def test_is_strongly_connected(self): for G, C in self.gc: if len(C) == 1: - assert_true(nx.is_strongly_connected(G)) + assert nx.is_strongly_connected(G) else: - assert_false(nx.is_strongly_connected(G)) - - # deprecated - def test_strongly_connected_component_subgraphs(self): - scc = nx.strongly_connected_component_subgraphs - for G, C in self.gc: - assert_equal({frozenset(g) for g in scc(G)}, C) + assert not nx.is_strongly_connected(G) def test_contract_scc1(self): G = nx.DiGraph() - G.add_edges_from([ - (1, 2), (2, 3), (2, 11), (2, 12), (3, 4), (4, 3), (4, 5), (5, 6), - (6, 5), (6, 7), (7, 8), (7, 9), (7, 10), (8, 9), (9, 7), (10, 6), - (11, 2), (11, 4), (11, 6), (12, 6), (12, 11), - ]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 11), + (2, 12), + (3, 4), + (4, 3), + (4, 5), + (5, 6), + (6, 5), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 9), + (9, 7), + (10, 6), + (11, 2), + (11, 4), + (11, 6), + (12, 6), + (12, 11), + ] + ) scc = list(nx.strongly_connected_components(G)) cG = nx.condensation(G, scc) # DAG - assert_true(nx.is_directed_acyclic_graph(cG)) + assert nx.is_directed_acyclic_graph(cG) # nodes - assert_equal(sorted(cG.nodes()), [0, 1, 2, 3]) + assert sorted(cG.nodes()) == [0, 1, 2, 3] # edges mapping = {} for i, component in enumerate(scc): for n in component: mapping[n] = i edge = (mapping[2], mapping[3]) - assert_true(cG.has_edge(*edge)) + assert cG.has_edge(*edge) edge = (mapping[2], mapping[5]) - assert_true(cG.has_edge(*edge)) + assert cG.has_edge(*edge) edge = (mapping[3], mapping[5]) - assert_true(cG.has_edge(*edge)) + assert cG.has_edge(*edge) def test_contract_scc_isolate(self): # Bug found and fixed in [1687]. 
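The recurring conversion pattern in these test modules, distilled once (a sketch with illustrative names, not code from the patch): nose's per-instance setUp and assert_* helpers become a pytest-style classmethod plus bare asserts.

import pytest

class TestExample:
    @classmethod
    def setup_class(cls):      # replaces nose's setUp(self)
        cls.data = [1, 2, 3]

    def test_membership(self):
        assert 2 in self.data  # replaces assert_true / assert_in

    def test_raises(self):
        with pytest.raises(ZeroDivisionError):  # replaces assert_raises
            1 / 0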
@@ -106,8 +133,8 @@ def test_contract_scc_isolate(self): G.add_edge(2, 1) scc = list(nx.strongly_connected_components(G)) cG = nx.condensation(G, scc) - assert_equal(list(cG.nodes()), [0]) - assert_equal(list(cG.edges()), []) + assert list(cG.nodes()) == [0] + assert list(cG.edges()) == [] def test_contract_scc_edge(self): G = nx.DiGraph() @@ -118,39 +145,72 @@ def test_contract_scc_edge(self): G.add_edge(4, 3) scc = list(nx.strongly_connected_components(G)) cG = nx.condensation(G, scc) - assert_equal(sorted(cG.nodes()), [0, 1]) + assert sorted(cG.nodes()) == [0, 1] if 1 in scc[0]: edge = (0, 1) else: edge = (1, 0) - assert_equal(list(cG.edges()), [edge]) + assert list(cG.edges()) == [edge] def test_condensation_mapping_and_members(self): G, C = self.gc[1] C = sorted(C, key=len, reverse=True) cG = nx.condensation(G) - mapping = cG.graph['mapping'] - assert_true(all(n in G for n in mapping)) - assert_true(all(0 == cN for n, cN in mapping.items() if n in C[0])) - assert_true(all(1 == cN for n, cN in mapping.items() if n in C[1])) + mapping = cG.graph["mapping"] + assert all(n in G for n in mapping) + assert all(0 == cN for n, cN in mapping.items() if n in C[0]) + assert all(1 == cN for n, cN in mapping.items() if n in C[1]) for n, d in cG.nodes(data=True): - assert_equal(set(C[n]), cG.nodes[n]['members']) + assert set(C[n]) == cG.nodes[n]["members"] def test_null_graph(self): G = nx.DiGraph() - assert_equal(list(nx.strongly_connected_components(G)), []) - assert_equal(list(nx.kosaraju_strongly_connected_components(G)), []) - assert_equal(list(nx.strongly_connected_components_recursive(G)), []) - assert_equal(len(nx.condensation(G)), 0) - assert_raises(nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()) + assert list(nx.strongly_connected_components(G)) == [] + assert list(nx.kosaraju_strongly_connected_components(G)) == [] + assert list(nx.strongly_connected_components_recursive(G)) == [] + assert len(nx.condensation(G)) == 0 + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() + ) def test_connected_raise(self): G = nx.Graph() - assert_raises(NetworkXNotImplemented, nx.strongly_connected_components, G) - assert_raises(NetworkXNotImplemented, nx.kosaraju_strongly_connected_components, G) - assert_raises(NetworkXNotImplemented, nx.strongly_connected_components_recursive, G) - assert_raises(NetworkXNotImplemented, nx.is_strongly_connected, G) - assert_raises(nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()) - assert_raises(NetworkXNotImplemented, nx.condensation, G) - # deprecated - assert_raises(NetworkXNotImplemented, nx.strongly_connected_component_subgraphs, G) + pytest.raises(NetworkXNotImplemented, nx.strongly_connected_components, G) + pytest.raises( + NetworkXNotImplemented, nx.kosaraju_strongly_connected_components, G + ) + pytest.raises( + NetworkXNotImplemented, nx.strongly_connected_components_recursive, G + ) + pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G) + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() + ) + pytest.raises(NetworkXNotImplemented, nx.condensation, G) + + +# Commented out due to variability on Travis-CI hardware/operating systems +# def test_linear_time(self): +# # See Issue #2831 +# count = 100 # base case +# dg = nx.DiGraph() +# dg.add_nodes_from([0, 1]) +# for i in range(2, count): +# dg.add_node(i) +# dg.add_edge(i, 1) +# dg.add_edge(0, i) +# t = time.time() +# ret = tuple(nx.strongly_connected_components(dg)) +# dt = time.time() - t 
+# +# count = 200 +# dg = nx.DiGraph() +# dg.add_nodes_from([0, 1]) +# for i in range(2, count): +# dg.add_node(i) +# dg.add_edge(i, 1) +# dg.add_edge(0, i) +# t = time.time() +# ret = tuple(nx.strongly_connected_components(dg)) +# dt2 = time.time() - t +# assert_less(dt2, dt * 2.3) # should be 2 times longer for this graph diff --git a/networkx/algorithms/components/tests/test_subgraph_copies.py b/networkx/algorithms/components/tests/test_subgraph_copies.py deleted file mode 100644 index fceeaa6..0000000 --- a/networkx/algorithms/components/tests/test_subgraph_copies.py +++ /dev/null @@ -1,87 +0,0 @@ -""" Tests for subgraphs attributes -""" -from copy import deepcopy -from nose.tools import assert_equal -import networkx as nx - -# deprecated in 2.1 for removal in 2.2 - - -class TestSubgraphAttributesDicts: - - def setUp(self): - self.undirected = [ - nx.connected_component_subgraphs, - nx.biconnected_component_subgraphs, - ] - self.directed = [ - nx.weakly_connected_component_subgraphs, - nx.strongly_connected_component_subgraphs, - nx.attracting_component_subgraphs, - ] - self.subgraph_funcs = self.undirected + self.directed - - self.D = nx.DiGraph() - self.D.add_edge(1, 2, eattr='red') - self.D.add_edge(2, 1, eattr='red') - self.D.nodes[1]['nattr'] = 'blue' - self.D.graph['gattr'] = 'green' - - self.G = nx.Graph() - self.G.add_edge(1, 2, eattr='red') - self.G.nodes[1]['nattr'] = 'blue' - self.G.graph['gattr'] = 'green' - - def test_subgraphs_default_copy_behavior(self): - # Test the default behavior of subgraph functions - # For the moment (1.10) the default is to copy - for subgraph_func in self.subgraph_funcs: - G = deepcopy(self.G if subgraph_func in self.undirected else self.D) - SG = list(subgraph_func(G))[0] - assert_equal(SG[1][2]['eattr'], 'red') - assert_equal(SG.nodes[1]['nattr'], 'blue') - assert_equal(SG.graph['gattr'], 'green') - SG[1][2]['eattr'] = 'foo' - assert_equal(G[1][2]['eattr'], 'red') - assert_equal(SG[1][2]['eattr'], 'foo') - SG.nodes[1]['nattr'] = 'bar' - assert_equal(G.nodes[1]['nattr'], 'blue') - assert_equal(SG.nodes[1]['nattr'], 'bar') - SG.graph['gattr'] = 'baz' - assert_equal(G.graph['gattr'], 'green') - assert_equal(SG.graph['gattr'], 'baz') - - def test_subgraphs_copy(self): - for subgraph_func in self.subgraph_funcs: - test_graph = self.G if subgraph_func in self.undirected else self.D - G = deepcopy(test_graph) - SG = list(subgraph_func(G, copy=True))[0] - assert_equal(SG[1][2]['eattr'], 'red') - assert_equal(SG.nodes[1]['nattr'], 'blue') - assert_equal(SG.graph['gattr'], 'green') - SG[1][2]['eattr'] = 'foo' - assert_equal(G[1][2]['eattr'], 'red') - assert_equal(SG[1][2]['eattr'], 'foo') - SG.nodes[1]['nattr'] = 'bar' - assert_equal(G.nodes[1]['nattr'], 'blue') - assert_equal(SG.nodes[1]['nattr'], 'bar') - SG.graph['gattr'] = 'baz' - assert_equal(G.graph['gattr'], 'green') - assert_equal(SG.graph['gattr'], 'baz') - - def test_subgraphs_no_copy(self): - for subgraph_func in self.subgraph_funcs: - G = deepcopy(self.G if subgraph_func in self.undirected else self.D) - SG = list(subgraph_func(G, copy=False))[0] - assert_equal(SG[1][2]['eattr'], 'red') - assert_equal(SG.nodes[1]['nattr'], 'blue') - assert_equal(SG.graph['gattr'], 'green') - SG[1][2]['eattr'] = 'foo' - assert_equal(G[1][2]['eattr'], 'foo') - assert_equal(SG[1][2]['eattr'], 'foo') - SG.nodes[1]['nattr'] = 'bar' - assert_equal(G.nodes[1]['nattr'], 'bar') - assert_equal(SG.nodes[1]['nattr'], 'bar') - SG.graph['gattr'] = 'baz' - assert_equal(G.graph['gattr'], 'baz') - 
assert_equal(SG.graph['gattr'], 'baz') diff --git a/networkx/algorithms/components/tests/test_weakly_connected.py b/networkx/algorithms/components/tests/test_weakly_connected.py index 764da59..393f688 100644 --- a/networkx/algorithms/components/tests/test_weakly_connected.py +++ b/networkx/algorithms/components/tests/test_weakly_connected.py @@ -1,77 +1,78 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx from networkx import NetworkXNotImplemented class TestWeaklyConnected: - - def setUp(self): - self.gc = [] + @classmethod + def setup_class(cls): + cls.gc = [] G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5), - (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) C = [[3, 4, 5, 7], [1, 2, 8], [6]] - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)]) C = [[2, 3, 4], [1]] - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph() G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)]) C = [[1, 2, 3]] - self.gc.append((G, C)) + cls.gc.append((G, C)) # Eppstein's tests G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []}) C = [[0], [1], [2], [3], [4], [5], [6]] - self.gc.append((G, C)) + cls.gc.append((G, C)) G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]}) C = [[0, 1, 2], [3, 4]] - self.gc.append((G, C)) + cls.gc.append((G, C)) def test_weakly_connected_components(self): for G, C in self.gc: U = G.to_undirected() w = {frozenset(g) for g in nx.weakly_connected_components(G)} c = {frozenset(g) for g in nx.connected_components(U)} - assert_equal(w, c) + assert w == c def test_number_weakly_connected_components(self): for G, C in self.gc: U = G.to_undirected() w = nx.number_weakly_connected_components(G) c = nx.number_connected_components(U) - assert_equal(w, c) - - # deprecated - def test_weakly_connected_component_subgraphs(self): - wcc = nx.weakly_connected_component_subgraphs - cc = nx.connected_component_subgraphs - for G, C in self.gc: - U = G.to_undirected() - w = {frozenset(g) for g in wcc(G)} - c = {frozenset(g) for g in cc(U)} - assert_equal(w, c) + assert w == c def test_is_weakly_connected(self): for G, C in self.gc: U = G.to_undirected() - assert_equal(nx.is_weakly_connected(G), nx.is_connected(U)) + assert nx.is_weakly_connected(G) == nx.is_connected(U) def test_null_graph(self): G = nx.DiGraph() - assert_equal(list(nx.weakly_connected_components(G)), []) - assert_equal(nx.number_weakly_connected_components(G), 0) - assert_raises(nx.NetworkXPointlessConcept, nx.is_weakly_connected, G) + assert list(nx.weakly_connected_components(G)) == [] + assert nx.number_weakly_connected_components(G) == 0 + pytest.raises(nx.NetworkXPointlessConcept, nx.is_weakly_connected, G) def test_connected_raise(self): G = nx.Graph() - assert_raises(NetworkXNotImplemented, nx.weakly_connected_components, G) - assert_raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G) - assert_raises(NetworkXNotImplemented, nx.is_weakly_connected, G) - # deprecated - assert_raises(NetworkXNotImplemented, nx.weakly_connected_component_subgraphs, G) + pytest.raises(NetworkXNotImplemented, nx.weakly_connected_components, G) + pytest.raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G) + pytest.raises(NetworkXNotImplemented, 
nx.is_weakly_connected, G) diff --git a/networkx/algorithms/components/weakly_connected.py b/networkx/algorithms/components/weakly_connected.py index 4a041a9..ae38d10 100644 --- a/networkx/algorithms/components/weakly_connected.py +++ b/networkx/algorithms/components/weakly_connected.py @@ -1,27 +1,15 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Christopher Ellison """Weakly connected components.""" -import warnings as _warnings import networkx as nx from networkx.utils.decorators import not_implemented_for __all__ = [ - 'number_weakly_connected_components', - 'weakly_connected_components', - 'weakly_connected_component_subgraphs', - 'is_weakly_connected', + "number_weakly_connected_components", + "weakly_connected_components", + "is_weakly_connected", ] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def weakly_connected_components(G): """Generate weakly connected components of G. @@ -38,7 +26,7 @@ def weakly_connected_components(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. Examples @@ -47,8 +35,10 @@ def weakly_connected_components(G): >>> G = nx.path_graph(4, create_using=nx.DiGraph()) >>> nx.add_path(G, [10, 11, 12]) - >>> [len(c) for c in sorted(nx.weakly_connected_components(G), - ... key=len, reverse=True)] + >>> [ + ... len(c) + ... for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True) + ... ] [4, 3] If you only want the largest component, it's more efficient to @@ -74,9 +64,9 @@ def weakly_connected_components(G): seen.update(c) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def number_weakly_connected_components(G): - """Return the number of weakly connected components in G. + """Returns the number of weakly connected components in G. Parameters ---------- @@ -90,7 +80,7 @@ def number_weakly_connected_components(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. See Also @@ -107,23 +97,7 @@ def number_weakly_connected_components(G): return sum(1 for wcc in weakly_connected_components(G)) -@not_implemented_for('undirected') -def weakly_connected_component_subgraphs(G, copy=True): - """DEPRECATED: Use ``(G.subgraph(c) for c in weakly_connected_components(G))`` - - Or ``(G.subgraph(c).copy() for c in weakly_connected_components(G))`` - """ - msg = "weakly_connected_component_subgraphs is deprecated and will be removed in 2.2" \ - "use (G.subgraph(c).copy() for c in weakly_connected_components(G))" - _warnings.warn(msg, DeprecationWarning) - for c in weakly_connected_components(G): - if copy: - yield G.subgraph(c).copy() - else: - yield G.subgraph(c) - - -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_weakly_connected(G): """Test directed graph for weak connectivity. @@ -146,7 +120,7 @@ def is_weakly_connected(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If G is undirected. 
See Also @@ -164,7 +138,8 @@ def is_weakly_connected(G): """ if len(G) == 0: raise nx.NetworkXPointlessConcept( - """Connectivity is undefined for the null graph.""") + """Connectivity is undefined for the null graph.""" + ) return len(list(weakly_connected_components(G))[0]) == len(G) diff --git a/networkx/algorithms/connectivity/__init__.py b/networkx/algorithms/connectivity/__init__.py index fd77d4a..65490c0 100644 --- a/networkx/algorithms/connectivity/__init__.py +++ b/networkx/algorithms/connectivity/__init__.py @@ -10,13 +10,17 @@ from .stoerwagner import * from .utils import * -__all__ = sum([connectivity.__all__, - cuts.__all__, - edge_augmentation.__all__, - edge_kcomponents.__all__, - disjoint_paths.__all__, - kcomponents.__all__, - kcutsets.__all__, - stoerwagner.__all__, - utils.__all__, - ], []) +__all__ = sum( + [ + connectivity.__all__, + cuts.__all__, + edge_augmentation.__all__, + edge_kcomponents.__all__, + disjoint_paths.__all__, + kcomponents.__all__, + kcutsets.__all__, + stoerwagner.__all__, + utils.__all__, + ], + [], +) diff --git a/networkx/algorithms/connectivity/connectivity.py b/networkx/algorithms/connectivity/connectivity.py index 6cee8dc..aedadf7 100644 --- a/networkx/algorithms/connectivity/connectivity.py +++ b/networkx/algorithms/connectivity/connectivity.py @@ -1,13 +1,12 @@ -# -*- coding: utf-8 -*- """ Flow based connectivity algorithms """ -from __future__ import division import itertools from operator import itemgetter import networkx as nx + # Define the default maximum flow function to use in all flow based # connectivity algorithms. from networkx.algorithms.flow import boykov_kolmogorov @@ -15,23 +14,24 @@ from networkx.algorithms.flow import edmonds_karp from networkx.algorithms.flow import shortest_augmenting_path from networkx.algorithms.flow import build_residual_network -default_flow_func = edmonds_karp -from .utils import (build_auxiliary_node_connectivity, - build_auxiliary_edge_connectivity) +default_flow_func = edmonds_karp -__author__ = '\n'.join(['Jordi Torrents ']) +from .utils import build_auxiliary_node_connectivity, build_auxiliary_edge_connectivity -__all__ = ['average_node_connectivity', - 'local_node_connectivity', - 'node_connectivity', - 'local_edge_connectivity', - 'edge_connectivity', - 'all_pairs_node_connectivity'] +__all__ = [ + "average_node_connectivity", + "local_node_connectivity", + "node_connectivity", + "local_edge_connectivity", + "edge_connectivity", + "all_pairs_node_connectivity", +] -def local_node_connectivity(G, s, t, flow_func=None, auxiliary=None, - residual=None, cutoff=None): +def local_node_connectivity( + G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None +): r"""Computes local node connectivity for nodes s and t. Local node connectivity for two non adjacent nodes s and t is the @@ -112,15 +112,14 @@ def local_node_connectivity(G, s, t, flow_func=None, auxiliary=None, >>> import itertools >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package - >>> from networkx.algorithms.connectivity import ( - ... build_auxiliary_node_connectivity) + >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity ... 
>>> H = build_auxiliary_node_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package >>> from networkx.algorithms.flow import build_residual_network >>> # Note that the auxiliary digraph has an edge attribute named capacity - >>> R = build_residual_network(H, 'capacity') + >>> R = build_residual_network(H, "capacity") >>> result = dict.fromkeys(G, dict()) >>> # Reuse the auxiliary digraph and the residual network by passing them >>> # as parameters @@ -192,26 +191,26 @@ def local_node_connectivity(G, s, t, flow_func=None, auxiliary=None, else: H = auxiliary - mapping = H.graph.get('mapping', None) + mapping = H.graph.get("mapping", None) if mapping is None: - raise nx.NetworkXError('Invalid auxiliary digraph.') + raise nx.NetworkXError("Invalid auxiliary digraph.") kwargs = dict(flow_func=flow_func, residual=residual) if flow_func is shortest_augmenting_path: - kwargs['cutoff'] = cutoff - kwargs['two_phase'] = True + kwargs["cutoff"] = cutoff + kwargs["two_phase"] = True elif flow_func is edmonds_karp: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff elif flow_func is dinitz: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff elif flow_func is boykov_kolmogorov: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff - return nx.maximum_flow_value(H, '%sB' % mapping[s], '%sA' % mapping[t], **kwargs) + return nx.maximum_flow_value(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) def node_connectivity(G, s=None, t=None, flow_func=None): - """Returns node connectivity for a graph or digraph G. + r"""Returns node connectivity for a graph or digraph G. Node connectivity is equal to the minimum number of nodes that must be removed to disconnect G or render it trivial. If source @@ -301,14 +300,14 @@ def node_connectivity(G, s=None, t=None, flow_func=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # Local node connectivity if s is not None and t is not None: if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) + raise nx.NetworkXError(f"node {t} not in graph") return local_node_connectivity(G, s, t, flow_func=flow_func) # Global node connectivity @@ -320,8 +319,8 @@ def node_connectivity(G, s=None, t=None, flow_func=None): # and successors for directed graphs def neighbors(v): - return itertools.chain.from_iterable([G.predecessors(v), - G.successors(v)]) + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + else: if not nx.is_connected(G): return 0 @@ -330,21 +329,21 @@ def neighbors(v): # Reuse the auxiliary digraph and the residual network H = build_auxiliary_node_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) # Pick a node with minimum degree # Node connectivity is bounded by degree. 
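# (By Whitney's inequality, node connectivity is at most the minimum degree, # so deg(v) below is a valid initial upper bound K, also used as a flow cutoff.)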
v, K = min(G.degree(), key=itemgetter(1)) # compute local node connectivity with all its non-neighbors nodes - for w in set(G) - set(neighbors(v)) - set([v]): - kwargs['cutoff'] = K + for w in set(G) - set(neighbors(v)) - {v}: + kwargs["cutoff"] = K K = min(K, local_node_connectivity(G, v, w, **kwargs)) # Also for non adjacent pairs of neighbors of v for x, y in iter_func(neighbors(v), 2): if y in G[x]: continue - kwargs['cutoff'] = K + kwargs["cutoff"] = K K = min(K, local_node_connectivity(G, x, y, **kwargs)) return K @@ -405,7 +404,7 @@ def average_node_connectivity(G, flow_func=None): # Reuse the auxiliary digraph and the residual network H = build_auxiliary_node_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) num, den = 0, 0 @@ -472,8 +471,8 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): # Reuse auxiliary digraph and residual network H = build_auxiliary_node_connectivity(G) - mapping = H.graph['mapping'] - R = build_residual_network(H, 'capacity') + mapping = H.graph["mapping"] + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) for u, v in iter_func(nbunch, 2): @@ -485,8 +484,9 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): return all_pairs -def local_edge_connectivity(G, s, t, flow_func=None, auxiliary=None, - residual=None, cutoff=None): +def local_edge_connectivity( + G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None +): r"""Returns local edge connectivity for nodes s and t in G. Local edge connectivity for two nodes s and t is the minimum number @@ -566,14 +566,13 @@ def local_edge_connectivity(G, s, t, flow_func=None, auxiliary=None, >>> import itertools >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package - >>> from networkx.algorithms.connectivity import ( - ... build_auxiliary_edge_connectivity) + >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity >>> H = build_auxiliary_edge_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package >>> from networkx.algorithms.flow import build_residual_network >>> # Note that the auxiliary digraph has an edge attribute named capacity - >>> R = build_residual_network(H, 'capacity') + >>> R = build_residual_network(H, "capacity") >>> result = dict.fromkeys(G, dict()) >>> # Reuse the auxiliary digraph and the residual network by passing them >>> # as parameters @@ -636,14 +635,14 @@ def local_edge_connectivity(G, s, t, flow_func=None, auxiliary=None, kwargs = dict(flow_func=flow_func, residual=residual) if flow_func is shortest_augmenting_path: - kwargs['cutoff'] = cutoff - kwargs['two_phase'] = True + kwargs["cutoff"] = cutoff + kwargs["two_phase"] = True elif flow_func is edmonds_karp: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff elif flow_func is dinitz: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff elif flow_func is boykov_kolmogorov: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff return nx.maximum_flow_value(H, s, t, **kwargs) @@ -681,7 +680,7 @@ def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): cutoff : integer, float If specified, the maximum flow algorithm will terminate when the flow value reaches or exceeds the cutoff. 
This is only for the - algorithms that support the cutoff parameter: :meth:`edmonds_karp` + algorithms that support the cutoff parameter: e.g., :meth:`edmonds_karp` and :meth:`shortest_augmenting_path`. Other algorithms will ignore this parameter. Default value: None. @@ -751,21 +750,20 @@ def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # Local edge connectivity if s is not None and t is not None: if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) - return local_edge_connectivity(G, s, t, flow_func=flow_func, - cutoff=cutoff) + raise nx.NetworkXError(f"node {t} not in graph") + return local_edge_connectivity(G, s, t, flow_func=flow_func, cutoff=cutoff) # Global edge connectivity # reuse auxiliary digraph and residual network H = build_auxiliary_edge_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) if G.is_directed(): @@ -782,13 +780,11 @@ def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): L = min(cutoff, L) for i in range(n): - kwargs['cutoff'] = L + kwargs["cutoff"] = L try: - L = min(L, local_edge_connectivity(G, nodes[i], nodes[i + 1], - **kwargs)) + L = min(L, local_edge_connectivity(G, nodes[i], nodes[i + 1], **kwargs)) except IndexError: # last node! - L = min(L, local_edge_connectivity(G, nodes[i], nodes[0], - **kwargs)) + L = min(L, local_edge_connectivity(G, nodes[i], nodes[0], **kwargs)) return L else: # undirected # Algorithm 6 in [1] @@ -814,7 +810,7 @@ def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): return L for w in D: - kwargs['cutoff'] = L + kwargs["cutoff"] = L L = min(L, local_edge_connectivity(G, v, w, **kwargs)) return L diff --git a/networkx/algorithms/connectivity/cuts.py b/networkx/algorithms/connectivity/cuts.py index 7e78b9b..b1de99e 100644 --- a/networkx/algorithms/connectivity/cuts.py +++ b/networkx/algorithms/connectivity/cuts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Flow based cut algorithms """ @@ -9,21 +8,20 @@ # cut algorithms. from networkx.algorithms.flow import edmonds_karp from networkx.algorithms.flow import build_residual_network -default_flow_func = edmonds_karp -from .utils import (build_auxiliary_node_connectivity, - build_auxiliary_edge_connectivity) +default_flow_func = edmonds_karp -__author__ = '\n'.join(['Jordi Torrents ']) +from .utils import build_auxiliary_node_connectivity, build_auxiliary_edge_connectivity -__all__ = ['minimum_st_node_cut', - 'minimum_node_cut', - 'minimum_st_edge_cut', - 'minimum_edge_cut'] +__all__ = [ + "minimum_st_node_cut", + "minimum_node_cut", + "minimum_st_edge_cut", + "minimum_edge_cut", +] -def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, - residual=None): +def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): """Returns the edges of the cut-set of a minimum (s, t)-cut. This function returns the set of edges of minimum cardinality that, @@ -49,10 +47,10 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, flow_func : function A function for computing the maximum flow among a pair of nodes. 
- The function has to accept at least three parameters: a Digraph, - a source node, and a target node. And return a residual network - that follows NetworkX conventions (see :meth:`maximum_flow` for - details). If flow_func is None, the default maximum flow function + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function (:meth:`edmonds_karp`) is used. See :meth:`node_connectivity` for details. The choice of the default function may change from version to version and should not be relied on. Default value: None. @@ -95,25 +93,24 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, If you need to compute local edge cuts on several pairs of nodes in the same graph, it is recommended that you reuse the - data structures that NetworkX uses in the computation: the + data structures that NetworkX uses in the computation: the auxiliary digraph for edge connectivity, and the residual network for the underlying maximum flow computation. Example of how to compute local edge cuts among all pairs of - nodes of the platonic icosahedral graph reusing the data + nodes of the platonic icosahedral graph reusing the data structures. >>> import itertools - >>> # You also have to explicitly import the function for + >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package - >>> from networkx.algorithms.connectivity import ( - ... build_auxiliary_edge_connectivity) + >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity >>> H = build_auxiliary_edge_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package >>> from networkx.algorithms.flow import build_residual_network >>> # Note that the auxiliary digraph has an edge attribute named capacity - >>> R = build_residual_network(H, 'capacity') + >>> R = build_residual_network(H, "capacity") >>> result = dict.fromkeys(G, dict()) >>> # Reuse the auxiliary digraph and the residual network by passing them >>> # as parameters @@ -143,7 +140,7 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, else: H = auxiliary - kwargs = dict(capacity='capacity', flow_func=flow_func, residual=residual) + kwargs = dict(capacity="capacity", flow_func=flow_func, residual=residual) cut_value, partition = nx.minimum_cut(H, s, t, **kwargs) reachable, non_reachable = partition @@ -175,12 +172,12 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): flow_func : function A function for computing the maximum flow among a pair of nodes. - The function has to accept at least three parameters: a Digraph, - a source node, and a target node. And return a residual network - that follows NetworkX conventions (see :meth:`maximum_flow` for - details). If flow_func is None, the default maximum flow function + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function (:meth:`edmonds_karp`) is used. See below for details. The choice - of the default function may change from version to version and + of the default function may change from version to version and should not be relied on. 
Default value: None. auxiliary : NetworkX DiGraph @@ -222,16 +219,15 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): Example of how to compute local st node cuts reusing the data structures: - >>> # You also have to explicitly import the function for + >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package - >>> from networkx.algorithms.connectivity import ( - ... build_auxiliary_node_connectivity) + >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity >>> H = build_auxiliary_node_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package >>> from networkx.algorithms.flow import build_residual_network >>> # Note that the auxiliary digraph has an edge attribute named capacity - >>> R = build_residual_network(H, 'capacity') + >>> R = build_residual_network(H, "capacity") >>> # Reuse the auxiliary digraph and the residual network by passing them >>> # as parameters >>> len(minimum_st_node_cut(G, 0, 6, auxiliary=H, residual=R)) @@ -254,7 +250,7 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): is based in solving a number of maximum flow computations to determine the capacity of the minimum cut on an auxiliary directed network that corresponds to the minimum node cut of G. It handles both directed - and undirected graphs. This implementation is based on algorithm 11 + and undirected graphs. This implementation is based on algorithm 11 in [1]_. See also @@ -280,20 +276,19 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): else: H = auxiliary - mapping = H.graph.get('mapping', None) + mapping = H.graph.get("mapping", None) if mapping is None: - raise nx.NetworkXError('Invalid auxiliary digraph.') + raise nx.NetworkXError("Invalid auxiliary digraph.") if G.has_edge(s, t) or G.has_edge(t, s): - return [] + return {} kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H) # The edge cut in the auxiliary digraph corresponds to the node cut in the # original graph. - edge_cut = minimum_st_edge_cut(H, '%sB' % mapping[s], '%sA' % mapping[t], - **kwargs) + edge_cut = minimum_st_edge_cut(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) # Each node in the original graph maps to two nodes of the auxiliary graph - node_cut = set(H.nodes[node]['id'] for edge in edge_cut for node in edge) - return node_cut - set([s, t]) + node_cut = {H.nodes[node]["id"] for edge in edge_cut for node in edge} + return node_cut - {s, t} def minimum_node_cut(G, s=None, t=None, flow_func=None): @@ -316,10 +311,10 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None): flow_func : function A function for computing the maximum flow among a pair of nodes. - The function has to accept at least three parameters: a Digraph, - a source node, and a target node. And return a residual network - that follows NetworkX conventions (see :meth:`maximum_flow` for - details). If flow_func is None, the default maximum flow function + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function (:meth:`edmonds_karp`) is used. See below for details. The choice of the default function may change from version to version and should not be relied on. Default value: None. 
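A minimal usage sketch of the node-cut API above, assuming only the public networkx namespace (the icosahedral graph is illustrative: like the doctests here, it relies on that graph being 5-connected):

import networkx as nx

G = nx.icosahedral_graph()
cut = nx.minimum_node_cut(G)  # global minimum node cut; 5 nodes for this graph
H = G.copy()
H.remove_nodes_from(cut)
assert not nx.is_connected(H)  # removing a minimum node cut disconnects G
st_cut = nx.minimum_node_cut(G, s=0, t=6)  # local variant for a fixed pair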
@@ -358,7 +353,7 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None): If you need to perform several local st cuts among different pairs of nodes on the same graph, it is recommended that you reuse - the data structures used in the maximum flow computations. See + the data structures used in the maximum flow computations. See :meth:`minimum_st_node_cut` for details. Notes @@ -367,7 +362,7 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None): is based in solving a number of maximum flow computations to determine the capacity of the minimum cut on an auxiliary directed network that corresponds to the minimum node cut of G. It handles both directed - and undirected graphs. This implementation is based on algorithm 11 + and undirected graphs. This implementation is based on algorithm 11 in [1]_. See also @@ -390,35 +385,35 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # Local minimum node cut. if s is not None and t is not None: if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) + raise nx.NetworkXError(f"node {t} not in graph") return minimum_st_node_cut(G, s, t, flow_func=flow_func) # Global minimum node cut. # Analog to the algorithm 11 for global node connectivity in [1]. if G.is_directed(): if not nx.is_weakly_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") iter_func = itertools.permutations def neighbors(v): - return itertools.chain.from_iterable([G.predecessors(v), - G.successors(v)]) + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + else: if not nx.is_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") iter_func = itertools.combinations neighbors = G.neighbors # Reuse the auxiliary digraph and the residual network. H = build_auxiliary_node_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) # Choose a node with minimum degree. @@ -426,7 +421,7 @@ def neighbors(v): # Initial node cutset is all neighbors of the node with minimum degree. min_cut = set(G[v]) # Compute st node cuts between v and all its non-neighbors nodes in G. - for w in set(G) - set(neighbors(v)) - set([v]): + for w in set(G) - set(neighbors(v)) - {v}: this_cut = minimum_st_node_cut(G, v, w, **kwargs) if len(min_cut) >= len(this_cut): min_cut = this_cut @@ -461,10 +456,10 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): flow_func : function A function for computing the maximum flow among a pair of nodes. - The function has to accept at least three parameters: a Digraph, - a source node, and a target node. And return a residual network - that follows NetworkX conventions (see :meth:`maximum_flow` for - details). If flow_func is None, the default maximum flow function + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). 
If flow_func is None, the default maximum flow function (:meth:`edmonds_karp`) is used. See below for details. The choice of the default function may change from version to version and should not be relied on. Default value: None. @@ -483,10 +478,10 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): >>> len(nx.minimum_edge_cut(G)) 5 - You can use alternative flow algorithms for the underlying - maximum flow computation. In dense networks the algorithm - :meth:`shortest_augmenting_path` will usually perform better - than the default :meth:`edmonds_karp`, which is faster for + You can use alternative flow algorithms for the underlying + maximum flow computation. In dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better + than the default :meth:`edmonds_karp`, which is faster for sparse networks with highly skewed degree distributions. Alternative flow functions have to be explicitly imported from the flow package. @@ -503,7 +498,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): If you need to perform several local computations among different pairs of nodes on the same graph, it is recommended that you reuse - the data structures used in the maximum flow computations. See + the data structures used in the maximum flow computations. See :meth:`local_edge_connectivity` for details. Notes @@ -512,7 +507,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): undirected graphs the algorithm works by finding a 'small' dominating set of nodes of G (see algorithm 7 in [1]_) and computing the maximum flow between an arbitrary node in the dominating set and the rest of - nodes in it. This is an implementation of algorithm 6 in [1]_. For + nodes in it. This is an implementation of algorithm 6 in [1]_. For directed graphs, the algorithm does n calls to the max flow function. The function raises an error if the directed graph is not weakly connected and returns an empty set if it is weakly connected. 
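A matching sketch for minimum_edge_cut, under the same assumptions; by definition, removing a minimum edge cut disconnects the graph:

import networkx as nx

G = nx.icosahedral_graph()
cutset = nx.minimum_edge_cut(G)  # 5 edges: the icosahedron is 5-edge-connected
H = G.copy()
H.remove_edges_from(cutset)
assert not nx.is_connected(H)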
@@ -537,19 +532,19 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # reuse auxiliary digraph and residual network H = build_auxiliary_edge_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, residual=R, auxiliary=H) # Local minimum edge cut if s and t are not None if s is not None and t is not None: if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) + raise nx.NetworkXError(f"node {t} not in graph") return minimum_st_edge_cut(H, s, t, **kwargs) # Global minimum edge cut @@ -557,7 +552,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): if G.is_directed(): # Based on algorithm 8 in [1] if not nx.is_weakly_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") # Initial cutset is all edges of a node with minimum degree node = min(G, key=G.degree) @@ -579,7 +574,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): else: # undirected # Based on algorithm 6 in [1] if not nx.is_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") # Initial cutset is all edges of a node with minimum degree node = min(G, key=G.degree) diff --git a/networkx/algorithms/connectivity/disjoint_paths.py b/networkx/algorithms/connectivity/disjoint_paths.py index d78316f..12a7c4a 100644 --- a/networkx/algorithms/connectivity/disjoint_paths.py +++ b/networkx/algorithms/connectivity/disjoint_paths.py @@ -1,47 +1,26 @@ -# disjoint_paths.py - Flow based node and edge disjoint paths. -# -# Copyright 2017-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Author: Jordi Torrents """Flow based node and edge disjoint paths.""" import networkx as nx from networkx.exception import NetworkXNoPath + # Define the default maximum flow function to use for the underlying # maximum flow computations from networkx.algorithms.flow import edmonds_karp from networkx.algorithms.flow import preflow_push from networkx.algorithms.flow import shortest_augmenting_path + default_flow_func = edmonds_karp # Functions to build auxiliary data structures.
-from networkx.algorithms.flow import build_residual_network from .utils import build_auxiliary_node_connectivity from .utils import build_auxiliary_edge_connectivity -try: - from itertools import filterfalse as _filterfalse -except ImportError: # Python 2 - def _filterfalse(predicate, iterable): - # https://docs.python.org/3/library/itertools.html - # filterfalse(lambda x: x%2, range(10)) --> 0 2 4 6 8 - if predicate is None: - predicate = bool - for x in iterable: - if not predicate(x): - yield x - -__all__ = [ - 'edge_disjoint_paths', - 'node_disjoint_paths', -] - - -def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, - residual=None): +from itertools import filterfalse as _filterfalse + +__all__ = ["edge_disjoint_paths", "node_disjoint_paths"] + + +def edge_disjoint_paths( + G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None +): """Returns the edges disjoint paths between source and target. Edge disjoint paths are paths that do not share any edge. The @@ -60,17 +39,17 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, flow_func : function A function for computing the maximum flow among a pair of nodes. - The function has to accept at least three parameters: a Digraph, - a source node, and a target node. And return a residual network - that follows NetworkX conventions (see :meth:`maximum_flow` for - details). If flow_func is None, the default maximum flow function + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function (:meth:`edmonds_karp`) is used. The choice of the default function may change from version to version and should not be relied on. Default value: None. cutoff : int Maximum number of paths to yield. Some of the maximum flow - algorithms, such as :meth:`edmonds_karp` (the default) and + algorithms, such as :meth:`edmonds_karp` (the default) and :meth:`shortest_augmenting_path` support the cutoff parameter, and will terminate when the flow value reaches or exceeds the cutoff. Other algorithms will ignore this parameter. @@ -93,10 +72,10 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, Raises ------ - NetworkXNoPath : exception + NetworkXNoPath If there is no path between source and target. - NetworkXError : exception + NetworkXError If source or target are not in the graph G. See also @@ -121,25 +100,24 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, If you need to compute edge disjoint paths on several pairs of nodes in the same graph, it is recommended that you reuse the - data structures that NetworkX uses in the computation: the + data structures that NetworkX uses in the computation: the auxiliary digraph for edge connectivity, and the residual network for the underlying maximum flow computation. Example of how to compute edge disjoint paths among all pairs of - nodes of the platonic icosahedral graph reusing the data + nodes of the platonic icosahedral graph reusing the data structures. >>> import itertools - >>> # You also have to explicitly import the function for + >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package - >>> from networkx.algorithms.connectivity import ( - ... 
build_auxiliary_edge_connectivity) + >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity >>> H = build_auxiliary_edge_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package >>> from networkx.algorithms.flow import build_residual_network >>> # Note that the auxiliary digraph has an edge attribute named capacity - >>> R = build_residual_network(H, 'capacity') + >>> R = build_residual_network(H, "capacity") >>> result = {n: {} for n in G} >>> # Reuse the auxiliary digraph and the residual network by passing them >>> # as arguments @@ -172,9 +150,9 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, """ if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) + raise nx.NetworkXError(f"node {t} not in graph") if flow_func is None: flow_func = default_flow_func @@ -196,21 +174,25 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, # Compute maximum flow between source and target. Flow functions in # NetworkX return a residual network. - kwargs = dict(capacity='capacity', residual=residual, cutoff=cutoff, - value_only=True) + kwargs = dict( + capacity="capacity", residual=residual, cutoff=cutoff, value_only=True + ) if flow_func is preflow_push: - del kwargs['cutoff'] + del kwargs["cutoff"] if flow_func is shortest_augmenting_path: - kwargs['two_phase'] = True + kwargs["two_phase"] = True R = flow_func(H, s, t, **kwargs) - if R.graph['flow_value'] == 0: + if R.graph["flow_value"] == 0: raise NetworkXNoPath # Saturated edges in the residual network form the edge disjoint paths # between source and target - cutset = [(u, v) for u, v, d in R.edges(data=True) - if d['capacity'] == d['flow'] and d['flow'] > 0] + cutset = [ + (u, v) + for u, v, d in R.edges(data=True) + if d["capacity"] == d["flow"] and d["flow"] > 0 + ] # This is equivalent of what flow.utils.build_flow_dict returns, but # only for the nodes with saturated edges and without reporting 0 flows. flow_dict = {n: {} for edge in cutset for n in edge} @@ -242,11 +224,12 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, paths_found += 1 -def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, - residual=None): +def node_disjoint_paths( + G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None +): r"""Computes node disjoint paths between source and target. - Node dijoint paths are paths that only share their first and last + Node disjoint paths are paths that only share their first and last nodes. The number of node independent paths between two nodes is equal to their local node connectivity. @@ -295,10 +278,10 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, Raises ------ - NetworkXNoPath : exception + NetworkXNoPath If there is no path between source and target. - NetworkXError : exception + NetworkXError If source or target are not in the graph G. Examples @@ -320,16 +303,15 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, Example of how to compute node disjoint paths reusing the data structures: - >>> # You also have to explicitly import the function for + >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package - >>> from networkx.algorithms.connectivity import ( - ... 
build_auxiliary_node_connectivity) + >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity >>> H = build_auxiliary_node_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package >>> from networkx.algorithms.flow import build_residual_network >>> # Note that the auxiliary digraph has an edge attribute named capacity - >>> R = build_residual_network(H, 'capacity') + >>> R = build_residual_network(H, "capacity") >>> # Reuse the auxiliary digraph and the residual network by passing them >>> # as arguments >>> len(list(nx.node_disjoint_paths(G, 0, 6, auxiliary=H, residual=R))) @@ -367,22 +349,21 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, """ if s not in G: - raise nx.NetworkXError('node %s not in graph' % s) + raise nx.NetworkXError(f"node {s} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % t) + raise nx.NetworkXError(f"node {t} not in graph") if auxiliary is None: H = build_auxiliary_node_connectivity(G) else: H = auxiliary - mapping = H.graph.get('mapping', None) + mapping = H.graph.get("mapping", None) if mapping is None: - raise nx.NetworkXError('Invalid auxiliary digraph.') + raise nx.NetworkXError("Invalid auxiliary digraph.") # Maximum possible edge disjoint paths - possible = min(H.out_degree('%sB' % mapping[s]), - H.in_degree('%sA' % mapping[t])) + possible = min(H.out_degree(f"{mapping[s]}B"), H.in_degree(f"{mapping[t]}A")) if not possible: raise NetworkXNoPath @@ -391,16 +372,14 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, else: cutoff = min(cutoff, possible) - kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H, - cutoff=cutoff) + kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H, cutoff=cutoff) # The edge disjoint paths in the auxiliary digraph correspond to the node # disjoint paths in the original graph. - paths_edges = edge_disjoint_paths(H, '%sB' % mapping[s], '%sA' % mapping[t], - **kwargs) + paths_edges = edge_disjoint_paths(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) for path in paths_edges: # Each node in the original graph maps to two nodes in auxiliary graph - yield list(_unique_everseen(H.node[node]['id'] for node in path)) + yield list(_unique_everseen(H.nodes[node]["id"] for node in path)) def _unique_everseen(iterable): diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py index 326478e..0a564d9 100644 --- a/networkx/algorithms/connectivity/edge_augmentation.py +++ b/networkx/algorithms/connectivity/edge_augmentation.py @@ -1,12 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Jon Crall (erotemic@gmail.com) """ Algorithms for finding k-edge-augmentations @@ -21,23 +12,17 @@ :mod:`edge_kcomponents` : algorithms for finding k-edge-connected components :mod:`connectivity` : algorithms for determining edge connectivity.
""" -import random import math -import sys import itertools as it import networkx as nx -from networkx.utils import not_implemented_for +from networkx.utils import not_implemented_for, py_random_state from collections import defaultdict, namedtuple -__all__ = [ - 'k_edge_augmentation', - 'is_k_edge_connected', - 'is_locally_k_edge_connected', -] +__all__ = ["k_edge_augmentation", "is_k_edge_connected", "is_locally_k_edge_connected"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def is_k_edge_connected(G, k): """Tests to see if a graph is k-edge-connected. @@ -70,7 +55,7 @@ def is_k_edge_connected(G, k): False """ if k < 1: - raise ValueError('k must be positive, not {}'.format(k)) + raise ValueError(f"k must be positive, not {k}") # First try to quickly determine if G is not k-edge-connected if G.number_of_nodes() < k + 1: return False @@ -86,8 +71,8 @@ def is_k_edge_connected(G, k): return nx.edge_connectivity(G, cutoff=k) >= k -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def is_locally_k_edge_connected(G, s, t, k): """Tests to see if an edge in a graph is locally k-edge-connected. @@ -129,7 +114,7 @@ def is_locally_k_edge_connected(G, s, t, k): True """ if k < 1: - raise ValueError('k must be positive, not {}'.format(k)) + raise ValueError(f"k must be positive, not {k}") # First try to quickly determine s, t is not k-locally-edge-connected in G if G.degree(s) < k or G.degree(t) < k: @@ -143,8 +128,8 @@ def is_locally_k_edge_connected(G, s, t, k): return localk >= k -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): """Finds set of edges to k-edge-connect G. @@ -199,10 +184,10 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): Raises ------ - NetworkXUnfeasible: + NetworkXUnfeasible If partial is False and no k-edge-augmentation exists. - NetworkXNotImplemented: + NetworkXNotImplemented If the input graph is directed or a multigraph. 
ValueError: @@ -241,8 +226,8 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): >>> G = nx.path_graph((1, 2, 3, 4)) >>> G.add_node(5) >>> # avail can be a tuple with a dict - >>> avail = [(1, 5, {'weight': 11}), (2, 5, {'weight': 10})] - >>> sorted(nx.k_edge_augmentation(G, k=1, avail=avail, weight='weight')) + >>> avail = [(1, 5, {"weight": 11}), (2, 5, {"weight": 10})] + >>> sorted(nx.k_edge_augmentation(G, k=1, avail=avail, weight="weight")) [(2, 5)] >>> # or avail can be a 3-tuple with a real number >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)] @@ -259,28 +244,28 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): """ try: if k <= 0: - raise ValueError('k must be a positive integer, not {}'.format(k)) + raise ValueError(f"k must be a positive integer, not {k}") elif G.number_of_nodes() < k + 1: - msg = 'impossible to {} connect in graph with less than {} nodes' - raise nx.NetworkXUnfeasible(msg.format(k, k + 1)) + msg = f"impossible to {k} connect in graph with less than {k + 1} nodes" + raise nx.NetworkXUnfeasible(msg) elif avail is not None and len(avail) == 0: if not nx.is_k_edge_connected(G, k): - raise nx.NetworkXUnfeasible('no available edges') + raise nx.NetworkXUnfeasible("no available edges") aug_edges = [] elif k == 1: - aug_edges = one_edge_augmentation(G, avail=avail, weight=weight, - partial=partial) + aug_edges = one_edge_augmentation( + G, avail=avail, weight=weight, partial=partial + ) elif k == 2: aug_edges = bridge_augmentation(G, avail=avail, weight=weight) else: - # raise NotImplementedError( - # 'not implemented for k>2. k={}'.format(k)) + # raise NotImplementedError(f'not implemented for k>2. k={k}') aug_edges = greedy_k_edge_augmentation( - G, k=k, avail=avail, weight=weight, seed=0) + G, k=k, avail=avail, weight=weight, seed=0 + ) # Do eager evaluation so we can catch any exceptions # before executing partial code.
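# (Calling list() here materializes the generator inside the try block, so a # NetworkXUnfeasible raised while computing the augmentation is caught below # and the partial fallback can run.)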
- for edge in list(aug_edges): - yield edge + yield from list(aug_edges) except nx.NetworkXUnfeasible: if partial: # Return all available edges @@ -289,10 +274,10 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): else: # If we can't k-edge-connect the entire graph, try to # k-edge-connect as much as possible - aug_edges = partial_k_edge_augmentation(G, k=k, avail=avail, - weight=weight) - for edge in aug_edges: - yield edge + aug_edges = partial_k_edge_augmentation( + G, k=k, avail=avail, weight=weight + ) + yield from aug_edges else: raise @@ -349,6 +334,7 @@ def partial_k_edge_augmentation(G, k, avail, weight=None): >>> sorted(partial_k_edge_augmentation(G, k=2, avail=avail)) [(1, 5), (1, 8)] """ + def _edges_between_disjoint(H, only1, only2): """ finds edges between disjoint nodes """ only1_adj = {u: set(H.adj[u]) for u in only1} @@ -363,8 +349,11 @@ def _edges_between_disjoint(H, only1, only2): # Find which parts of the graph can be k-edge-connected H = G.copy() H.add_edges_from( - ((u, v, {'weight': w, 'generator': (u, v)}) - for (u, v), w in zip(avail, avail_w))) + ( + (u, v, {"weight": w, "generator": (u, v)}) + for (u, v), w in zip(avail, avail_w) + ) + ) k_edge_subgraphs = list(nx.k_edge_subgraphs(H, k=k)) # Generate edges to k-edge-connect internal subgraphs @@ -374,28 +363,27 @@ def _edges_between_disjoint(H, only1, only2): C = H.subgraph(nodes).copy() # Find the internal edges that were available sub_avail = { - d['generator']: d['weight'] + d["generator"]: d["weight"] for (u, v, d) in C.edges(data=True) - if 'generator' in d + if "generator" in d } # Remove potential augmenting edges C.remove_edges_from(sub_avail.keys()) # Find a subset of these edges that makes the component # k-edge-connected and ignore the rest - for edge in nx.k_edge_augmentation(C, k=k, avail=sub_avail): - yield edge + yield from nx.k_edge_augmentation(C, k=k, avail=sub_avail) # Generate all edges between CCs that could not be k-edge-connected for cc1, cc2 in it.combinations(k_edge_subgraphs, 2): for (u, v) in _edges_between_disjoint(H, cc1, cc2): d = H.get_edge_data(u, v) - edge = d.get('generator', None) + edge = d.get("generator", None) if edge is not None: yield edge -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def one_edge_augmentation(G, avail=None, weight=None, partial=False): """Finds minimum weight set of edges to connect G. @@ -426,7 +414,7 @@ def one_edge_augmentation(G, avail=None, weight=None, partial=False): Raises ------ - NetworkXUnfeasible: + NetworkXUnfeasible If partial is False and no one-edge-augmentation exists. Notes @@ -443,12 +431,13 @@ def one_edge_augmentation(G, avail=None, weight=None, partial=False): if avail is None: return unconstrained_one_edge_augmentation(G) else: - return weighted_one_edge_augmentation(G, avail=avail, weight=weight, - partial=partial) + return weighted_one_edge_augmentation( + G, avail=avail, weight=weight, partial=partial + ) -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def bridge_augmentation(G, avail=None, weight=None): """Finds a set of edges that bridge connects G. @@ -476,7 +465,7 @@ def bridge_augmentation(G, avail=None, weight=None): Raises ------ - NetworkXUnfeasible: + NetworkXUnfeasible If no bridge-augmentation exists.
Notes @@ -491,8 +480,7 @@ def bridge_augmentation(G, avail=None, weight=None): :func:`k_edge_augmentation` """ if G.number_of_nodes() < 3: - raise nx.NetworkXUnfeasible( - 'impossible to bridge connect less than 3 nodes') + raise nx.NetworkXUnfeasible("impossible to bridge connect less than 3 nodes") if avail is None: return unconstrained_bridge_augmentation(G) else: @@ -501,6 +489,7 @@ def bridge_augmentation(G, avail=None, weight=None): # --- Algorithms and Helpers --- + def _ordered(u, v): """Returns the nodes in an undirected edge in lower-triangular order""" return (u, v) if u < v else (v, u) @@ -509,19 +498,20 @@ def _ordered(u, v): def _unpack_available_edges(avail, weight=None, G=None): """Helper to separate avail into edges and corresponding weights""" if weight is None: - weight = 'weight' + weight = "weight" if isinstance(avail, dict): avail_uv = list(avail.keys()) avail_w = list(avail.values()) else: + def _try_getitem(d): try: return d[weight] except TypeError: return d + avail_uv = [tup[0:2] for tup in avail] - avail_w = [1 if len(tup) == 2 else _try_getitem(tup[-1]) - for tup in avail] + avail_w = [1 if len(tup) == 2 else _try_getitem(tup[-1]) for tup in avail] if G is not None: # Edges already in the graph are filtered @@ -531,7 +521,7 @@ def _try_getitem(d): return avail_uv, avail_w -MetaEdge = namedtuple('MetaEdge', ('meta_uv', 'uv', 'w')) +MetaEdge = namedtuple("MetaEdge", ("meta_uv", "uv", "w")) def _lightest_meta_edges(mapping, avail_uv, avail_w): @@ -563,7 +553,7 @@ def _lightest_meta_edges(mapping, avail_uv, avail_w): >>> groups = ([1, 2, 3], [4, 5], [6]) >>> mapping = {n: meta_n for meta_n, ns in enumerate(groups) for n in ns} >>> avail_uv = [(1, 2), (3, 6), (1, 4), (5, 2), (6, 1), (2, 6), (3, 1)] - >>> avail_w = [ 20, 99, 20, 15, 50, 99, 20] + >>> avail_w = [20, 99, 20, 15, 50, 99, 20] >>> sorted(_lightest_meta_edges(mapping, avail_uv, avail_w)) [MetaEdge(meta_uv=(0, 1), uv=(5, 2), w=15), MetaEdge(meta_uv=(0, 2), uv=(6, 1), w=50)] """ @@ -619,7 +609,7 @@ def unconstrained_one_edge_augmentation(G): meta_aug = list(zip(meta_nodes, meta_nodes[1:])) # map that path to the original graph inverse = defaultdict(list) - for k, v in C.graph['mapping'].items(): + for k, v in C.graph["mapping"].items(): inverse[v].append(k) for mu, mv in meta_aug: yield (inverse[mu][0], inverse[mv][0]) @@ -674,23 +664,22 @@ def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): # Collapse CCs in the original graph into nodes in a metagraph # Then find an MST of the metagraph instead of the original graph C = collapse(G, nx.connected_components(G)) - mapping = C.graph['mapping'] + mapping = C.graph["mapping"] # Assign each available edge to an edge in the metagraph candidate_mapping = _lightest_meta_edges(mapping, avail_uv, avail_w) # nx.set_edge_attributes(C, name='weight', values=0) C.add_edges_from( - (mu, mv, {'weight': w, 'generator': uv}) + (mu, mv, {"weight": w, "generator": uv}) for (mu, mv), uv, w in candidate_mapping ) # Find MST of the meta graph meta_mst = nx.minimum_spanning_tree(C) if not partial and not nx.is_connected(meta_mst): - raise nx.NetworkXUnfeasible( - 'Not possible to connect G with available edges') + raise nx.NetworkXUnfeasible("Not possible to connect G with available edges") # Yield the edge that generated the meta-edge for mu, mv, d in meta_mst.edges(data=True): - if 'generator' in d: - edge = d['generator'] + if "generator" in d: + edge = d["generator"] yield edge @@ -789,9 +778,9 @@ def unconstrained_bridge_augmentation(G): # Choose pairs of 
distinct leaf nodes in each tree. If this is not # possible then make a pair using the single isolated node in the tree. vset1 = [ - tuple(cc) * 2 # case1: an isolated node - if len(cc) == 1 else - sorted(cc, key=C.degree)[0:2] # case2: pair of leaf nodes + tuple(cc) * 2 # case1: an isolated node + if len(cc) == 1 + else sorted(cc, key=C.degree)[0:2] # case2: pair of leaf nodes for cc in nx.connected_components(C) ] if len(vset1) > 1: @@ -813,7 +802,10 @@ def unconstrained_bridge_augmentation(G): A2 = [tuple(leafs)] else: # Choose an arbitrary non-leaf root - root = next(n for n, d in T.degree() if d > 1) + try: + root = next(n for n, d in T.degree() if d > 1) + except StopIteration: # no nodes found with degree > 1 + return # order the leaves of C by (induced directed) preorder v2 = [n for n in nx.dfs_preorder_nodes(T, root) if T.degree(n) == 1] # connecting first half of the leafs in pre-order to the second @@ -826,11 +818,13 @@ def unconstrained_bridge_augmentation(G): # Construct the mapping (beta) from meta-nodes to regular nodes inverse = defaultdict(list) - for k, v in C.graph['mapping'].items(): + for k, v in C.graph["mapping"].items(): inverse[v].append(k) # sort so we choose minimum degree nodes first - inverse = {mu: sorted(mapped, key=lambda u: (G.degree(u), u)) - for mu, mapped in inverse.items()} + inverse = { + mu: sorted(mapped, key=lambda u: (G.degree(u), u)) + for mu, mapped in inverse.items() + } # For each meta-edge, map back to an arbitrary pair in the original graph G2 = G.copy() @@ -897,7 +891,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): >>> avail = [(1, 4, 1000), (1, 3, 1), (2, 4, 1)] >>> sorted(weighted_bridge_augmentation(G, avail)) [(1, 3), (2, 4)] - >>> #------ + >>> # ------ >>> G = nx.path_graph((1, 2, 3, 4)) >>> G.add_node(5) >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 1)] @@ -909,7 +903,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): """ if weight is None: - weight = 'weight' + weight = "weight" # If input G is not connected the approximation factor increases to 3 if not nx.is_connected(G): @@ -917,15 +911,14 @@ def weighted_bridge_augmentation(G, avail, weight=None): connectors = list(one_edge_augmentation(H, avail=avail, weight=weight)) H.add_edges_from(connectors) - for edge in connectors: - yield edge + yield from connectors else: connectors = [] H = G if len(avail) == 0: if nx.has_bridges(H): - raise nx.NetworkXUnfeasible('no augmentation possible') + raise nx.NetworkXUnfeasible("no augmentation possible") avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=H) @@ -934,7 +927,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): C = collapse(H, bridge_ccs) # Use the meta graph to shrink avail to a small feasible subset - mapping = C.graph['mapping'] + mapping = C.graph["mapping"] # Choose the minimum weight feasible edge in each group meta_to_wuv = { (mu, mv): (w, uv) @@ -954,7 +947,10 @@ def weighted_bridge_augmentation(G, avail, weight=None): # nx.least_common_ancestor on the reversed Tree. 
# Pick an arbitrary leaf from C as the root - root = next(n for n in C.nodes() if C.degree(n) == 1) + try: + root = next(n for n, d in C.degree() if d == 1) + except StopIteration: # no nodes found with degree == 1 + return # Root C into a tree TR by directing all edges away from the root # Note in their paper T directs edges towards the root TR = nx.dfs_tree(C, root) @@ -963,12 +959,13 @@ def weighted_bridge_augmentation(G, avail, weight=None): # This indicates that it costs nothing to use edges that were given. D = nx.reverse(TR).copy() - nx.set_edge_attributes(D, name='weight', values=0) + nx.set_edge_attributes(D, name="weight", values=0) # The LCA of mu and mv in T is the shared ancestor of mu and mv that is # located farthest from the root. lca_gen = nx.tree_all_pairs_lowest_common_ancestor( - TR, root=root, pairs=meta_to_wuv.keys()) + TR, root=root, pairs=meta_to_wuv.keys() + ) for (mu, mv), lca in lca_gen: w, uv = meta_to_wuv[(mu, mv)] @@ -990,9 +987,9 @@ def weighted_bridge_augmentation(G, avail, weight=None): # Note the original edges must be directed towards to root for the # branching to give us a bridge-augmentation. A = _minimum_rooted_branching(D, root) - except nx.NetworkXException: + except nx.NetworkXException as e: # If there is no branching then augmentation is not possible - raise nx.NetworkXUnfeasible('no 2-edge-augmentation possible') + raise nx.NetworkXUnfeasible("no 2-edge-augmentation possible") from e # For each edge e, in the branching that did not belong to the directed # tree T, add the corresponding edge that **GENERATED** it (this is not @@ -1002,13 +999,12 @@ def weighted_bridge_augmentation(G, avail, weight=None): bridge_connectors = set() for mu, mv in A.edges(): data = D.get_edge_data(mu, mv) - if 'generator' in data: + if "generator" in data: # Add the avail edge that generated the branching edge. 
- edge = data['generator'] + edge = data["generator"] bridge_connectors.add(edge) - for edge in bridge_connectors: - yield edge + yield from bridge_connectors def _minimum_rooted_branching(D, root): @@ -1064,17 +1060,17 @@ def collapse(G, grouped_nodes): -------- >>> # Collapses a graph using disjoint groups, but not necesarilly connected >>> G = nx.Graph([(1, 0), (2, 3), (3, 1), (3, 4), (4, 5), (5, 6), (5, 7)]) - >>> G.add_node('A') + >>> G.add_node("A") >>> grouped_nodes = [{0, 1, 2, 3}, {5, 6, 7}] >>> C = collapse(G, grouped_nodes) - >>> members = nx.get_node_attributes(C, 'members') + >>> members = nx.get_node_attributes(C, "members") >>> sorted(members.keys()) [0, 1, 2, 3] >>> member_values = set(map(frozenset, members.values())) >>> assert {0, 1, 2, 3} in member_values >>> assert {4} in member_values >>> assert {5, 6, 7} in member_values - >>> assert {'A'} in member_values + >>> assert {"A"} in member_values """ mapping = {} members = {} @@ -1083,24 +1079,26 @@ def collapse(G, grouped_nodes): remaining = set(G.nodes()) for i, group in enumerate(grouped_nodes): group = set(group) - assert remaining.issuperset(group), ( - 'grouped nodes must exist in G and be disjoint') + assert remaining.issuperset( + group + ), "grouped nodes must exist in G and be disjoint" remaining.difference_update(group) members[i] = group mapping.update((n, i) for n in group) # remaining nodes are in their own group for i, node in enumerate(remaining, start=i + 1): - group = set([node]) + group = {node} members[i] = group mapping.update((n, i) for n in group) number_of_groups = i + 1 C.add_nodes_from(range(number_of_groups)) - C.add_edges_from((mapping[u], mapping[v]) for u, v in G.edges() - if mapping[u] != mapping[v]) + C.add_edges_from( + (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v] + ) # Add a list of members (ie original nodes) to each node (ie scc) in C. - nx.set_node_attributes(C, name='members', values=members) + nx.set_node_attributes(C, name="members", values=members) # Add mapping dict as graph attribute - C.graph['mapping'] = mapping + C.graph["mapping"] = mapping return C @@ -1140,36 +1138,14 @@ def complement_edges(G): yield (u, v) -if sys.version_info[0] == 2: - def _compat_shuffle(rng, input): - """ - python2 workaround so shuffle works the same as python3 - - References - ---------- - https://stackoverflow.com/questions/38943038/diff-shuffle-py2-py3 - """ - def _randbelow(n): - "Return a random int in the range [0,n). Raises ValueError if n==0." 
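The Python 2 shim being deleted here existed only because seeded shuffles differed between Python 2 and 3; on Python 3 alone, rng.shuffle is already deterministic for a fixed seed, which is all the wrapper needs. A quick illustration:

    import random

    a, b = list(range(10)), list(range(10))
    random.Random(0).shuffle(a)
    random.Random(0).shuffle(b)
    assert a == b  # same seed, same permutation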
- getrandbits = rng.getrandbits - k = n.bit_length() # don't use (n-1) here because n can be 1 - r = getrandbits(k) # 0 <= r < 2**k - while r >= n: - r = getrandbits(k) - return r - - for i in range(len(input) - 1, 0, -1): - # pick an element in input[:i+1] with which to exchange input[i] - j = _randbelow(i + 1) - input[i], input[j] = input[j], input[i] -else: - def _compat_shuffle(rng, input): - """wrapper around rng.shuffle for python 2 compatibility reasons""" - rng.shuffle(input) - - -@not_implemented_for('multigraph') -@not_implemented_for('directed') +def _compat_shuffle(rng, input): + """wrapper around rng.shuffle for python 2 compatibility reasons""" + rng.shuffle(input) + + +@py_random_state(4) +@not_implemented_for("multigraph") +@not_implemented_for("directed") def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): """Greedy algorithm for finding a k-edge-augmentation @@ -1188,8 +1164,9 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): key to use to find weights if ``avail`` is a set of 3-tuples. For more details, see :func:`k_edge_augmentation`. - seed : integer or None - seed for the random number generator used in this algorithm + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Yields ------ @@ -1230,7 +1207,7 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): done = is_k_edge_connected(G, k) if done: - raise StopIteration() + return if avail is None: # all edges are available avail_uv = list(complement_edges(G)) @@ -1260,12 +1237,10 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): # Check for feasibility if not done: - raise nx.NetworkXUnfeasible( - 'not able to k-edge-connect with available edges') + raise nx.NetworkXUnfeasible("not able to k-edge-connect with available edges") # Randomized attempt to reduce the size of the solution - rng = random.Random(seed) - _compat_shuffle(rng, aug_edges) + _compat_shuffle(seed, aug_edges) for (u, v) in list(aug_edges): # Don't remove if we know it would break connectivity if H.degree(u) <= k or H.degree(v) <= k: @@ -1278,5 +1253,4 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): aug_edges.append((u, v)) # Generate results - for edge in aug_edges: - yield edge + yield from aug_edges diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py index f54d325..b46c721 100644 --- a/networkx/algorithms/connectivity/edge_kcomponents.py +++ b/networkx/algorithms/connectivity/edge_kcomponents.py @@ -1,12 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Jon Crall (erotemic@gmail.com) """ Algorithms for finding k-edge-connected components and subgraphs. @@ -25,14 +16,14 @@ import itertools as it __all__ = [ - 'k_edge_components', - 'k_edge_subgraphs', - 'bridge_components', - 'EdgeComponentAuxGraph', + "k_edge_components", + "k_edge_subgraphs", + "bridge_components", + "EdgeComponentAuxGraph", ] -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def k_edge_components(G, k): """Generates nodes in each maximal k-edge-connected component in G. @@ -58,7 +49,7 @@ def k_edge_components(G, k): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If the input graph is a multigraph. 
ValueError: @@ -97,7 +88,7 @@ def k_edge_components(G, k): """ # Compute k-edge-ccs using the most efficient algorithms available. if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") if G.is_directed(): if k == 1: return nx.strongly_connected_components(G) @@ -115,7 +106,7 @@ def k_edge_components(G, k): return aux_graph.k_edge_components(k) -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def k_edge_subgraphs(G, k): """Generates nodes in each maximal k-edge-connected subgraph in G. @@ -141,7 +132,7 @@ def k_edge_subgraphs(G, k): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If the input graph is a multigraph. ValueError: @@ -176,7 +167,7 @@ def k_edge_subgraphs(G, k): https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") if G.is_directed(): if k <= 1: # For directed graphs , @@ -202,8 +193,8 @@ def _k_edge_subgraphs_nodes(G, k): yield set(C.nodes()) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def bridge_components(G): """Finds all bridge-connected components G. @@ -225,7 +216,7 @@ def bridge_components(G): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If the input graph is directed or a multigraph. Notes @@ -242,11 +233,10 @@ def bridge_components(G): """ H = G.copy() H.remove_edges_from(bridges(G)) - for cc in nx.connected_components(H): - yield cc + yield from nx.connected_components(H) -class EdgeComponentAuxGraph(object): +class EdgeComponentAuxGraph: r"""A simple algorithm to find all k-edge-connected components in a graph. Constructing the AuxillaryGraph (which may take some time) allows for the @@ -342,7 +332,7 @@ def construct(EdgeComponentAuxGraph, G): G : NetworkX graph """ # workaround for classmethod decorator - not_implemented_for('multigraph')(lambda G: G)(G) + not_implemented_for("multigraph")(lambda G: G)(G) def _recursive_build(H, A, source, avail): # Terminate once the flow has been compute to every node. @@ -364,7 +354,7 @@ def _recursive_build(H, A, source, avail): _recursive_build(H, A, sink, avail.intersection(T)) # Copy input to ensure all edges have unit capacity - H = G.fresh_copy() + H = G.__class__() H.add_nodes_from(G.nodes()) H.add_edges_from(G.edges(), capacity=1) @@ -408,19 +398,18 @@ def k_edge_components(self, k): k-edge-ccs in the original graph. """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") A = self.A # "traverse the auxiliary graph A and delete all edges with weights less # than k" - aux_weights = nx.get_edge_attributes(A, 'weight') + aux_weights = nx.get_edge_attributes(A, "weight") # Create a relevant graph with the auxiliary edges with weights >= k R = nx.Graph() R.add_nodes_from(A.nodes()) R.add_edges_from(e for e, w in aux_weights.items() if w >= k) # Return the nodes that are k-edge-connected in the original graph - for cc in nx.connected_components(R): - yield cc + yield from nx.connected_components(R) def k_edge_subgraphs(self, k): """Queries the auxiliary graph for k-edge-connected subgraphs. @@ -444,12 +433,12 @@ def k_edge_subgraphs(self, k): then use this method. 
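Since construction is the expensive part of EdgeComponentAuxGraph, the intended pattern is one construct() followed by many queries. A usage sketch that also cross-checks the top-level function:

    import networkx as nx
    from networkx.algorithms.connectivity import EdgeComponentAuxGraph

    G = nx.karate_club_graph()
    aux = EdgeComponentAuxGraph.construct(G)  # pay the construction cost once
    for k in (1, 2, 3):  # ...then each query is cheap
        from_aux = sorted(map(sorted, aux.k_edge_components(k)))
        direct = sorted(map(sorted, nx.k_edge_components(G, k)))
        assert from_aux == direct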
""" if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") H = self.H A = self.A # "traverse the auxiliary graph A and delete all edges with weights less # than k" - aux_weights = nx.get_edge_attributes(A, 'weight') + aux_weights = nx.get_edge_attributes(A, "weight") # Create a relevant graph with the auxiliary edges with weights >= k R = nx.Graph() R.add_nodes_from(A.nodes()) @@ -464,8 +453,7 @@ def k_edge_subgraphs(self, k): else: # Call subgraph solution to refine the results C = H.subgraph(cc) - for sub_cc in k_edge_subgraphs(C, k): - yield sub_cc + yield from k_edge_subgraphs(C, k) def _low_degree_nodes(G, k, nbunch=None): @@ -509,11 +497,9 @@ def _high_degree_components(G, k): # Note: remaining connected components may not be k-edge-connected if G.is_directed(): - for cc in nx.strongly_connected_components(H): - yield cc + yield from nx.strongly_connected_components(H) else: - for cc in nx.connected_components(H): - yield cc + yield from nx.connected_components(H) def general_k_edge_subgraphs(G, k): @@ -563,7 +549,7 @@ def general_k_edge_subgraphs(G, k): [1, 1, 1, 4, 4] """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") # Node pruning optimization (incorporates early return) # find_ccs is either connected_components/strongly_connected_components @@ -573,7 +559,7 @@ def general_k_edge_subgraphs(G, k): if G.number_of_nodes() < k: for node in G.nodes(): yield G.subgraph([node]).copy() - raise StopIteration() + return # Intermediate results R0 = {G.subgraph(cc).copy() for cc in find_ccs(G)} diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py index 1aef980..a9fe783 100644 --- a/networkx/algorithms/connectivity/kcomponents.py +++ b/networkx/algorithms/connectivity/kcomponents.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Moody and White algorithm for k-components """ @@ -8,16 +7,16 @@ import networkx as nx from networkx.utils import not_implemented_for + # Define the default maximum flow function. from networkx.algorithms.flow import edmonds_karp -default_flow_func = edmonds_karp -__author__ = '\n'.join(['Jordi Torrents ']) +default_flow_func = edmonds_karp -__all__ = ['k_components'] +__all__ = ["k_components"] -@not_implemented_for('directed') +@not_implemented_for("directed") def k_components(G, flow_func=None): r"""Returns the k-component structure of a graph G. @@ -47,7 +46,7 @@ def k_components(G, flow_func=None): Raises ------ - NetworkXNotImplemented: + NetworkXNotImplemented If the input graph is directed. Examples @@ -116,7 +115,7 @@ def k_components(G, flow_func=None): comp = set(component) if len(comp) > 1: k_components[1].append(comp) - bicomponents = list(nx.biconnected_component_subgraphs(G)) + bicomponents = [G.subgraph(c) for c in nx.biconnected_components(G)] for bicomponent in bicomponents: bicomp = set(bicomponent) # avoid considering dyads as bicomponents @@ -127,7 +126,7 @@ def k_components(G, flow_func=None): continue k = nx.node_connectivity(B, flow_func=flow_func) if k > 2: - k_components[k].append(set(B.nodes())) + k_components[k].append(set(B)) # Perform cuts in a DFS like order. 
cuts = list(nx.all_node_cuts(B, k=k, flow_func=flow_func)) stack = [(k, _generate_partition(B, cuts, k))] @@ -138,7 +137,7 @@ def k_components(G, flow_func=None): C = B.subgraph(nodes) this_k = nx.node_connectivity(C, flow_func=flow_func) if this_k > parent_k and this_k > 2: - k_components[this_k].append(set(C.nodes())) + k_components[this_k].append(set(C)) cuts = list(nx.all_node_cuts(C, k=this_k, flow_func=flow_func)) if cuts: stack.append((this_k, _generate_partition(C, cuts, this_k))) @@ -170,8 +169,9 @@ def _consolidate(sets, k): G = nx.Graph() nodes = {i: s for i, s in enumerate(sets)} G.add_nodes_from(nodes) - G.add_edges_from((u, v) for u, v in combinations(nodes, 2) - if len(nodes[u] & nodes[v]) >= k) + G.add_edges_from( + (u, v) for u, v in combinations(nodes, 2) if len(nodes[u] & nodes[v]) >= k + ) for component in nx.connected_components(G): yield set.union(*[nodes[n] for n in component]) @@ -182,9 +182,9 @@ def has_nbrs_in_partition(G, node, partition): if n in partition: return True return False + components = [] - nodes = ({n for n, d in G.degree() if d > k} - - {n for cut in cuts for n in cut}) + nodes = {n for n, d in G.degree() if d > k} - {n for cut in cuts for n in cut} H = G.subgraph(nodes) for cc in nx.connected_components(H): component = set(cc) @@ -194,8 +194,7 @@ def has_nbrs_in_partition(G, node, partition): component.add(node) if len(component) < G.order(): components.append(component) - for component in _consolidate(components, k + 1): - yield component + yield from _consolidate(components, k + 1) def _reconstruct_k_components(k_comps): diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py index 1edd2d8..0d806b7 100644 --- a/networkx/algorithms/connectivity/kcutsets.py +++ b/networkx/algorithms/connectivity/kcutsets.py @@ -1,9 +1,10 @@ -# -*- coding: utf-8 -*- """ Kanevsky all minimum node k cutsets algorithm. """ -from operator import itemgetter +import copy +from collections import defaultdict from itertools import combinations +from operator import itemgetter import networkx as nx from .utils import build_auxiliary_node_connectivity @@ -12,20 +13,19 @@ edmonds_karp, shortest_augmenting_path, ) -default_flow_func = edmonds_karp +default_flow_func = edmonds_karp -__author__ = '\n'.join(['Jordi Torrents ']) -__all__ = ['all_node_cuts'] +__all__ = ["all_node_cuts"] def all_node_cuts(G, k=None, flow_func=None): - r"""Returns all minimum k cutsets of an undirected graph G. + r"""Returns all minimum k cutsets of an undirected graph G. This implementation is based on Kanevsky's algorithm [1]_ for finding all - minimum-size node cut-sets of an undirected graph G; ie the set (or sets) - of nodes of cardinality equal to the node connectivity of G. Thus if + minimum-size node cut-sets of an undirected graph G; ie the set (or sets) + of nodes of cardinality equal to the node connectivity of G. Thus if removed, would break G into two or more connected components. Parameters @@ -34,7 +34,7 @@ def all_node_cuts(G, k=None, flow_func=None): Undirected graph k : Integer - Node connectivity of the input graph. If k is None, then it is + Node connectivity of the input graph. If k is None, then it is computed. Default value: None. flow_func : function @@ -66,10 +66,10 @@ def all_node_cuts(G, k=None, flow_func=None): ----- This implementation is based on the sequential algorithm for finding all minimum-size separating vertex sets in a graph [1]_. 
The main idea is to - compute minimum cuts using local maximum flow computations among a set + compute minimum cuts using local maximum flow computations among a set of nodes of highest degree and all other non-adjacent nodes in the Graph. Once we find a minimum cut, we add an edge between the high degree - node and the target node of the local maximum flow computation to make + node and the target node of the local maximum flow computation to make sure that we will not find that minimum cut again. See also @@ -80,25 +80,15 @@ def all_node_cuts(G, k=None, flow_func=None): References ---------- - .. [1] Kanevsky, A. (1993). Finding all minimum-size separating vertex + .. [1] Kanevsky, A. (1993). Finding all minimum-size separating vertex sets in a graph. Networks 23(6), 533--541. http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract """ if not nx.is_connected(G): - raise nx.NetworkXError('Input graph is disconnected.') + raise nx.NetworkXError("Input graph is disconnected.") # Address some corner cases first. - # For cycle graphs - if G.order() == G.size(): - if all(2 == d for n, d in G.degree()): - seen = set() - for u in G: - for v in nx.non_neighbors(G, u): - if (u, v) not in seen and (v, u) not in seen: - yield {v, u} - seen.add((v, u)) - return # For complete Graphs if nx.density(G) == 1: for cut_set in combinations(G, len(G) - 1): @@ -110,14 +100,18 @@ def all_node_cuts(G, k=None, flow_func=None): # Even-Tarjan reduction is what we call auxiliary digraph # for node connectivity. H = build_auxiliary_node_connectivity(G) - mapping = H.graph['mapping'] - R = build_residual_network(H, 'capacity') - kwargs = dict(capacity='capacity', residual=R) + H_nodes = H.nodes # for speed + mapping = H.graph["mapping"] + # Keep a copy of original predecessors, H will be modified later. + # Shallow copy is enough. + original_H_pred = copy.copy(H._pred) + R = build_residual_network(H, "capacity") + kwargs = dict(capacity="capacity", residual=R) # Define default flow function if flow_func is None: flow_func = default_flow_func if flow_func is shortest_augmenting_path: - kwargs['two_phase'] = True + kwargs["two_phase"] = True # Begin the actual algorithm # step 1: Find node connectivity k of G if k is None: @@ -136,54 +130,91 @@ def all_node_cuts(G, k=None, flow_func=None): non_adjacent = set(G) - X - set(G[x]) for v in non_adjacent: # step 4: compute maximum flow in an Even-Tarjan reduction H of G - # and step:5 build the associated residual network R - R = flow_func(H, '%sB' % mapping[x], '%sA' % mapping[v], **kwargs) - flow_value = R.graph['flow_value'] + # and step 5: build the associated residual network R + R = flow_func(H, f"{mapping[x]}B", f"{mapping[v]}A", **kwargs) + flow_value = R.graph["flow_value"] if flow_value == k: - # Remove saturated edges form the residual network - saturated_edges = [(u, w, d) for (u, w, d) in - R.edges(data=True) - if d['capacity'] == d['flow']] + # Find the nodes incident to the flow. + E1 = flowed_edges = [ + (u, w) for (u, w, d) in R.edges(data=True) if d["flow"] != 0 + ] + VE1 = incident_nodes = {n for edge in E1 for n in edge} + # Remove saturated edges form the residual network. + # Note that reversed edges are introduced with capacity 0 + # in the residual graph and they need to be removed too. 
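The A/B node-splitting used by the Even-Tarjan reduction can be inspected directly; a sketch relying only on the documented "mapping" graph attribute:

    import networkx as nx
    from networkx.algorithms.connectivity.utils import build_auxiliary_node_connectivity

    G = nx.cycle_graph(4)
    H = build_auxiliary_node_connectivity(G)
    mapping = H.graph["mapping"]
    # Every original node u becomes an internal arc uA -> uB of capacity 1,
    # so a node cut in G shows up as a set of saturated internal arcs in H.
    u = mapping[0]
    assert H.has_edge(f"{u}A", f"{u}B")
    assert H[f"{u}A"][f"{u}B"]["capacity"] == 1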
+ saturated_edges = [ + (u, w, d) + for (u, w, d) in R.edges(data=True) + if d["capacity"] == d["flow"] or d["capacity"] == 0 + ] R.remove_edges_from(saturated_edges) + R_closure = nx.transitive_closure(R) # step 6: shrink the strongly connected components of - # residual flow network R and call it L + # residual flow network R and call it L. L = nx.condensation(R) - cmap = L.graph['mapping'] - # step 7: Compute antichains of L; they map to closed sets in H - # Any edge in H that links a closed set is part of a cutset + cmap = L.graph["mapping"] + inv_cmap = defaultdict(list) + for n, scc in cmap.items(): + inv_cmap[scc].append(n) + # Find the incident nodes in the condensed graph. + VE1 = {cmap[n] for n in VE1} + # step 7: Compute all antichains of L; + # they map to closed sets in H. + # Any edge in H that links a closed set is part of a cutset. for antichain in nx.antichains(L): + # Only antichains that are subsets of incident nodes counts. + # Lemma 8 in reference. + if not set(antichain).issubset(VE1): + continue # Nodes in an antichain of the condensation graph of # the residual network map to a closed set of nodes that - # define a node partition of the auxiliary digraph H. - S = {n for n, scc in cmap.items() if scc in antichain} + # define a node partition of the auxiliary digraph H + # through taking all of antichain's predecessors in the + # transitive closure. + S = set() + for scc in antichain: + S.update(inv_cmap[scc]) + S_ancestors = set() + for n in S: + S_ancestors.update(R_closure._pred[n]) + S.update(S_ancestors) + if f"{mapping[x]}B" not in S or f"{mapping[v]}A" in S: + continue # Find the cutset that links the node partition (S,~S) in H cutset = set() for u in S: - cutset.update((u, w) for w in H[u] if w not in S) + cutset.update((u, w) for w in original_H_pred[u] if w not in S) # The edges in H that form the cutset are internal edges # (ie edges that represent a node of the original graph G) - node_cut = {H.nodes[n]['id'] for edge in cutset for n in edge} + if any([H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset]): + continue + node_cut = {H_nodes[u]["id"] for u, _ in cutset} if len(node_cut) == k: + # The cut is invalid if it includes internal edges of + # end nodes. The other half of Lemma 8 in ref. + if x in node_cut or v in node_cut: + continue if node_cut not in seen: yield node_cut seen.append(node_cut) - # Add an edge (x, v) to make sure that we do not - # find this cutset again. This is equivalent - # of adding the edge in the input graph - # G.add_edge(x, v) and then regenerate H and R: - # Add edges to the auxiliary digraph. - H.add_edge('%sB' % mapping[x], '%sA' % mapping[v], - capacity=1) - H.add_edge('%sB' % mapping[v], '%sA' % mapping[x], - capacity=1) - # Add edges to the residual network. - R.add_edge('%sB' % mapping[x], '%sA' % mapping[v], - capacity=1) - R.add_edge('%sA' % mapping[v], '%sB' % mapping[x], - capacity=1) - break + + # Add an edge (x, v) to make sure that we do not + # find this cutset again. This is equivalent + # of adding the edge in the input graph + # G.add_edge(x, v) and then regenerate H and R: + # Add edges to the auxiliary digraph. + # See build_residual_network for convention we used + # in residual graphs. + H.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1) + H.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1) + # Add edges to the residual network. 
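Steps 6 and 7 lean on two generic tools, nx.condensation and nx.antichains, which can be tried in isolation (the toy digraph is mine):

    import networkx as nx

    D = nx.DiGraph([(0, 1), (1, 0), (1, 2), (2, 3), (3, 2)])
    L = nx.condensation(D)  # SCCs {0, 1} and {2, 3} shrink to two nodes
    cmap = L.graph["mapping"]  # original node -> SCC index
    assert cmap[0] == cmap[1] and cmap[2] == cmap[3]
    # The condensation is a two-node chain, so its antichains are the
    # empty set and the two singletons; {a, b} is not an antichain.
    assert len(list(nx.antichains(L))) == 3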
+ R.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1) + R.add_edge(f"{mapping[v]}A", f"{mapping[x]}B", capacity=0) + R.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1) + R.add_edge(f"{mapping[x]}A", f"{mapping[v]}B", capacity=0) + # Add again the saturated edges to reuse the residual network R.add_edges_from(saturated_edges) diff --git a/networkx/algorithms/connectivity/stoerwagner.py b/networkx/algorithms/connectivity/stoerwagner.py index 6e017b1..b1d7f67 100644 --- a/networkx/algorithms/connectivity/stoerwagner.py +++ b/networkx/algorithms/connectivity/stoerwagner.py @@ -1,9 +1,3 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2014 -# ysitu -# All rights reserved. -# BSD license. """ Stoer-Wagner minimum cut algorithm. """ @@ -14,15 +8,13 @@ from ...utils import not_implemented_for from ...utils import arbitrary_element -__author__ = 'ysitu ' +__all__ = ["stoer_wagner"] -__all__ = ['stoer_wagner'] - -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def stoer_wagner(G, weight='weight', heap=BinaryHeap): - """Returns the weighted minimum edge cut using the Stoer-Wagner algorithm. +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def stoer_wagner(G, weight="weight", heap=BinaryHeap): + r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm. Determine the minimum edge cut of a connected graph using the Stoer-Wagner algorithm. In weighted cases, all weights must be @@ -80,33 +72,34 @@ def stoer_wagner(G, weight='weight', heap=BinaryHeap): Examples -------- >>> G = nx.Graph() - >>> G.add_edge('x', 'a', weight=3) - >>> G.add_edge('x', 'b', weight=1) - >>> G.add_edge('a', 'c', weight=3) - >>> G.add_edge('b', 'c', weight=5) - >>> G.add_edge('b', 'd', weight=4) - >>> G.add_edge('d', 'e', weight=2) - >>> G.add_edge('c', 'y', weight=2) - >>> G.add_edge('e', 'y', weight=3) + >>> G.add_edge("x", "a", weight=3) + >>> G.add_edge("x", "b", weight=1) + >>> G.add_edge("a", "c", weight=3) + >>> G.add_edge("b", "c", weight=5) + >>> G.add_edge("b", "d", weight=4) + >>> G.add_edge("d", "e", weight=2) + >>> G.add_edge("c", "y", weight=2) + >>> G.add_edge("e", "y", weight=3) >>> cut_value, partition = nx.stoer_wagner(G) >>> cut_value 4 """ n = len(G) if n < 2: - raise nx.NetworkXError('graph has less than two nodes.') + raise nx.NetworkXError("graph has less than two nodes.") if not nx.is_connected(G): - raise nx.NetworkXError('graph is not connected.') + raise nx.NetworkXError("graph is not connected.") # Make a copy of the graph for internal use. - G = nx.Graph((u, v, {'weight': e.get(weight, 1)}) - for u, v, e in G.edges(data=True) if u != v) + G = nx.Graph( + (u, v, {"weight": e.get(weight, 1)}) for u, v, e in G.edges(data=True) if u != v + ) - for u, v, e, in G.edges(data=True): - if e['weight'] < 0: - raise nx.NetworkXError('graph has a negative-weighted edge.') + for u, v, e in G.edges(data=True): + if e["weight"] < 0: + raise nx.NetworkXError("graph has a negative-weighted edge.") - cut_value = float('inf') + cut_value = float("inf") nodes = set(G) contractions = [] # contracted node pairs @@ -114,20 +107,20 @@ def stoer_wagner(G, weight='weight', heap=BinaryHeap): for i in range(n - 1): # Pick an arbitrary node u and create a set A = {u}. u = arbitrary_element(G) - A = set([u]) + A = {u} # Repeatedly pick the node "most tightly connected" to A and add it to # A. The tightness of connectivity of a node not in A is defined by the # of edges connecting it to nodes in A. 
h = heap() # min-heap emulating a max-heap for v, e in G[u].items(): - h.insert(v, -e['weight']) + h.insert(v, -e["weight"]) # Repeat until all but one node has been added to A. for j in range(n - i - 2): u = h.pop()[0] A.add(u) - for v, e, in G[u].items(): + for v, e in G[u].items(): if v not in A: - h.insert(v, h.get(v, 0) - e['weight']) + h.insert(v, h.get(v, 0) - e["weight"]) # A and the remaining node v define a "cut of the phase". There is a # minimum cut of the original graph that is also a cut of the phase. # Due to contractions in earlier phases, v may in fact represent @@ -142,9 +135,9 @@ def stoer_wagner(G, weight='weight', heap=BinaryHeap): for w, e in G[v].items(): if w != u: if w not in G[u]: - G.add_edge(u, w, weight=e['weight']) + G.add_edge(u, w, weight=e["weight"]) else: - G[u][w]['weight'] += e['weight'] + G[u][w]["weight"] += e["weight"] G.remove_node(v) # Recover the optimal partitioning from the contractions. diff --git a/networkx/algorithms/connectivity/tests/__init__.py b/networkx/algorithms/connectivity/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/connectivity/tests/test_connectivity.py b/networkx/algorithms/connectivity/tests/test_connectivity.py index b752cd2..00a8951 100644 --- a/networkx/algorithms/connectivity/tests/test_connectivity.py +++ b/networkx/algorithms/connectivity/tests/test_connectivity.py @@ -1,5 +1,5 @@ import itertools -from nose.tools import assert_equal, assert_true, assert_raises +import pytest import networkx as nx from networkx.algorithms import flow @@ -15,22 +15,20 @@ ] -msg = "Assertion failed in function: {0}" - # helper functions for tests def _generate_no_biconnected(max_attempts=50): attempts = 0 while True: - G = nx.fast_gnp_random_graph(100, 0.0575) + G = nx.fast_gnp_random_graph(100, 0.0575, seed=42) if nx.is_connected(G) and not nx.is_biconnected(G): attempts = 0 yield G else: if attempts >= max_attempts: - msg = "Tried %d times: no suitable Graph." - raise Exception(msg % max_attempts) + msg = f"Tried {max_attempts} times: no suitable Graph." 
+ raise Exception(msg) else: attempts += 1 @@ -47,19 +45,17 @@ def test_average_connectivity(): G3 = nx.Graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) - assert_equal(nx.average_node_connectivity(G1, **kwargs), 1, - msg=msg.format(flow_func.__name__)) - assert_equal(nx.average_node_connectivity(G2, **kwargs), 2.2, - msg=msg.format(flow_func.__name__)) - assert_equal(nx.average_node_connectivity(G3, **kwargs), 0, - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.average_node_connectivity(G1, **kwargs) == 1, errmsg + assert nx.average_node_connectivity(G2, **kwargs) == 2.2, errmsg + assert nx.average_node_connectivity(G3, **kwargs) == 0, errmsg def test_average_connectivity_directed(): G = nx.DiGraph([(1, 3), (1, 4), (1, 5)]) for flow_func in flow_funcs: - assert_equal(nx.average_node_connectivity(G), 0.25, - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.average_node_connectivity(G) == 0.25, errmsg def test_articulation_points(): @@ -67,31 +63,50 @@ def test_articulation_points(): for flow_func in flow_funcs: for i in range(3): G = next(Ggen) - assert_equal(nx.node_connectivity(G, flow_func=flow_func), 1, - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.node_connectivity(G, flow_func=flow_func) == 1, errmsg def test_brandes_erlebach(): # Figure 1 chapter 7: Connectivity # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf G = nx.Graph() - G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4), - (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8), - (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)]) + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) - assert_equal(3, local_edge_connectivity(G, 1, 11, **kwargs), - msg=msg.format(flow_func.__name__)) - assert_equal(3, nx.edge_connectivity(G, 1, 11, **kwargs), - msg=msg.format(flow_func.__name__)) - assert_equal(2, local_node_connectivity(G, 1, 11, **kwargs), - msg=msg.format(flow_func.__name__)) - assert_equal(2, nx.node_connectivity(G, 1, 11, **kwargs), - msg=msg.format(flow_func.__name__)) - assert_equal(2, nx.edge_connectivity(G, **kwargs), # node 5 has degree 2 - msg=msg.format(flow_func.__name__)) - assert_equal(2, nx.node_connectivity(G, **kwargs), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == local_edge_connectivity(G, 1, 11, **kwargs), errmsg + assert 3 == nx.edge_connectivity(G, 1, 11, **kwargs), errmsg + assert 2 == local_node_connectivity(G, 1, 11, **kwargs), errmsg + assert 2 == nx.node_connectivity(G, 1, 11, **kwargs), errmsg + assert 2 == nx.edge_connectivity(G, **kwargs), errmsg + assert 2 == nx.node_connectivity(G, **kwargs), errmsg + if flow_func is flow.preflow_push: + assert 3 == nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs), errmsg + else: + assert 2 == nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs), errmsg def test_white_harary_1(): @@ -108,10 +123,9 @@ def test_white_harary_1(): for i in range(7, 10): G.add_edge(0, i) for flow_func in flow_funcs: - assert_equal(1, nx.node_connectivity(G, flow_func=flow_func), - 
msg=msg.format(flow_func.__name__)) - assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_white_harary_2(): @@ -120,111 +134,107 @@ def test_white_harary_2(): G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) G.add_edge(0, 4) # kappa <= lambda <= delta - assert_equal(3, min(nx.core_number(G).values())) + assert 3 == min(nx.core_number(G).values()) for flow_func in flow_funcs: - assert_equal(1, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(1, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_complete_graphs(): for n in range(5, 20, 5): for flow_func in flow_funcs: G = nx.complete_graph(n) - assert_equal(n - 1, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(n - 1, nx.node_connectivity(G.to_directed(), - flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(n - 1, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(n - 1, nx.edge_connectivity(G.to_directed(), - flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert n - 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert n - 1 == nx.node_connectivity( + G.to_directed(), flow_func=flow_func + ), errmsg + assert n - 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + assert n - 1 == nx.edge_connectivity( + G.to_directed(), flow_func=flow_func + ), errmsg def test_empty_graphs(): for k in range(5, 25, 5): G = nx.empty_graph(k) for flow_func in flow_funcs: - assert_equal(0, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(0, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 0 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 0 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_petersen(): G = nx.petersen_graph() for flow_func in flow_funcs: - assert_equal(3, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_tutte(): G = nx.tutte_graph() for flow_func in flow_funcs: - assert_equal(3, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_dodecahedral(): G = nx.dodecahedral_graph() for flow_func in flow_funcs: - 
assert_equal(3, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_octahedral(): G = nx.octahedral_graph() for flow_func in flow_funcs: - assert_equal(4, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(4, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 4 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 4 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_icosahedral(): G = nx.icosahedral_graph() for flow_func in flow_funcs: - assert_equal(5, nx.node_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(5, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 5 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 5 == nx.edge_connectivity(G, flow_func=flow_func), errmsg def test_missing_source(): G = nx.path_graph(4) for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, nx.node_connectivity, G, 10, 1, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.node_connectivity, G, 10, 1, flow_func=flow_func + ) def test_missing_target(): G = nx.path_graph(4) for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, nx.node_connectivity, G, 1, 10, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.node_connectivity, G, 1, 10, flow_func=flow_func + ) def test_edge_missing_source(): G = nx.path_graph(4) for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, nx.edge_connectivity, G, 10, 1, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.edge_connectivity, G, 10, 1, flow_func=flow_func + ) def test_edge_missing_target(): G = nx.path_graph(4) for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, nx.edge_connectivity, G, 1, 10, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.edge_connectivity, G, 1, 10, flow_func=flow_func + ) def test_not_weakly_connected(): @@ -232,10 +242,9 @@ def test_not_weakly_connected(): nx.add_path(G, [1, 2, 3]) nx.add_path(G, [4, 5]) for flow_func in flow_funcs: - assert_equal(nx.node_connectivity(G), 0, - msg=msg.format(flow_func.__name__)) - assert_equal(nx.edge_connectivity(G), 0, - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.node_connectivity(G) == 0, errmsg + assert nx.edge_connectivity(G) == 0, errmsg def test_not_connected(): @@ -243,28 +252,22 @@ def test_not_connected(): nx.add_path(G, [1, 2, 3]) nx.add_path(G, [4, 5]) for flow_func in flow_funcs: - assert_equal(nx.node_connectivity(G), 0, - msg=msg.format(flow_func.__name__)) - assert_equal(nx.edge_connectivity(G), 0, - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.node_connectivity(G) == 0, errmsg + assert nx.edge_connectivity(G) == 0, errmsg def test_directed_edge_connectivity(): G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges for flow_func in 
flow_funcs: - assert_equal(1, nx.edge_connectivity(G, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(1, local_edge_connectivity(G, 1, 4, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(1, nx.edge_connectivity(G, 1, 4, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(2, nx.edge_connectivity(D, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(2, local_edge_connectivity(D, 1, 4, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) - assert_equal(2, nx.edge_connectivity(D, 1, 4, flow_func=flow_func), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + assert 1 == local_edge_connectivity(G, 1, 4, flow_func=flow_func), errmsg + assert 1 == nx.edge_connectivity(G, 1, 4, flow_func=flow_func), errmsg + assert 2 == nx.edge_connectivity(D, flow_func=flow_func), errmsg + assert 2 == local_edge_connectivity(D, 1, 4, flow_func=flow_func), errmsg + assert 2 == nx.edge_connectivity(D, 1, 4, flow_func=flow_func), errmsg def test_cutoff(): @@ -276,95 +279,97 @@ def test_cutoff(): continue for cutoff in [3, 2, 1]: result = local_func(G, 0, 4, flow_func=flow_func, cutoff=cutoff) - assert_equal(cutoff, result, - msg="cutoff error in {0}".format(flow_func.__name__)) + assert cutoff == result, f"cutoff error in {flow_func.__name__}" def test_invalid_auxiliary(): G = nx.complete_graph(5) - assert_raises(nx.NetworkXError, local_node_connectivity, G, 0, 3, - auxiliary=G) + pytest.raises(nx.NetworkXError, local_node_connectivity, G, 0, 3, auxiliary=G) def test_interface_only_source(): G = nx.complete_graph(5) for interface_func in [nx.node_connectivity, nx.edge_connectivity]: - assert_raises(nx.NetworkXError, interface_func, G, s=0) + pytest.raises(nx.NetworkXError, interface_func, G, s=0) def test_interface_only_target(): G = nx.complete_graph(5) for interface_func in [nx.node_connectivity, nx.edge_connectivity]: - assert_raises(nx.NetworkXError, interface_func, G, t=3) + pytest.raises(nx.NetworkXError, interface_func, G, t=3) def test_edge_connectivity_flow_vs_stoer_wagner(): - graph_funcs = [ - nx.icosahedral_graph, - nx.octahedral_graph, - nx.dodecahedral_graph, - ] + graph_funcs = [nx.icosahedral_graph, nx.octahedral_graph, nx.dodecahedral_graph] for graph_func in graph_funcs: G = graph_func() - assert_equal(nx.stoer_wagner(G)[0], nx.edge_connectivity(G)) + assert nx.stoer_wagner(G)[0] == nx.edge_connectivity(G) class TestAllPairsNodeConnectivity: - - def setUp(self): - self.path = nx.path_graph(7) - self.directed_path = nx.path_graph(7, create_using=nx.DiGraph()) - self.cycle = nx.cycle_graph(7) - self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) - self.gnp = nx.gnp_random_graph(30, 0.1) - self.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True) - self.K20 = nx.complete_graph(20) - self.K10 = nx.complete_graph(10) - self.K5 = nx.complete_graph(5) - self.G_list = [self.path, self.directed_path, self.cycle, - self.directed_cycle, self.gnp, self.directed_gnp, self.K10, - self.K5, self.K20] + @classmethod + def setup_class(cls): + cls.path = nx.path_graph(7) + cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph()) + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + cls.gnp = nx.gnp_random_graph(30, 0.1, seed=42) + cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True, seed=42) + cls.K20 = 
nx.complete_graph(20) + cls.K10 = nx.complete_graph(10) + cls.K5 = nx.complete_graph(5) + cls.G_list = [ + cls.path, + cls.directed_path, + cls.cycle, + cls.directed_cycle, + cls.gnp, + cls.directed_gnp, + cls.K10, + cls.K5, + cls.K20, + ] def test_cycles(self): K_undir = nx.all_pairs_node_connectivity(self.cycle) for source in K_undir: for target, k in K_undir[source].items(): - assert_true(k == 2) + assert k == 2 K_dir = nx.all_pairs_node_connectivity(self.directed_cycle) for source in K_dir: for target, k in K_dir[source].items(): - assert_true(k == 1) + assert k == 1 def test_complete(self): for G in [self.K10, self.K5, self.K20]: K = nx.all_pairs_node_connectivity(G) for source in K: for target, k in K[source].items(): - assert_true(k == len(G) - 1) + assert k == len(G) - 1 def test_paths(self): K_undir = nx.all_pairs_node_connectivity(self.path) for source in K_undir: for target, k in K_undir[source].items(): - assert_true(k == 1) + assert k == 1 K_dir = nx.all_pairs_node_connectivity(self.directed_path) for source in K_dir: for target, k in K_dir[source].items(): if source < target: - assert_true(k == 1) + assert k == 1 else: - assert_true(k == 0) + assert k == 0 def test_all_pairs_connectivity_nbunch(self): G = nx.complete_graph(5) nbunch = [0, 2, 3] C = nx.all_pairs_node_connectivity(G, nbunch=nbunch) - assert_equal(len(C), len(nbunch)) + assert len(C) == len(nbunch) def test_all_pairs_connectivity_icosahedral(self): G = nx.icosahedral_graph() C = nx.all_pairs_node_connectivity(G) - assert_true(all(5 == C[u][v] for u, v in itertools.combinations(G, 2))) + assert all(5 == C[u][v] for u, v in itertools.combinations(G, 2)) def test_all_pairs_connectivity(self): G = nx.Graph() @@ -374,8 +379,9 @@ def test_all_pairs_connectivity(self): for u, v in itertools.combinations(nodes, 2): A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G) - assert_equal(sorted((k, sorted(v)) for k, v in A.items()), - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) def test_all_pairs_connectivity_directed(self): G = nx.DiGraph() @@ -385,8 +391,9 @@ def test_all_pairs_connectivity_directed(self): for u, v in itertools.permutations(nodes, 2): A[u][v] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G) - assert_equal(sorted((k, sorted(v)) for k, v in A.items()), - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) def test_all_pairs_connectivity_nbunch_combinations(self): G = nx.complete_graph(5) @@ -395,8 +402,9 @@ def test_all_pairs_connectivity_nbunch_combinations(self): for u, v in itertools.combinations(nbunch, 2): A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G, nbunch=nbunch) - assert_equal(sorted((k, sorted(v)) for k, v in A.items()), - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) def test_all_pairs_connectivity_nbunch_iter(self): G = nx.complete_graph(5) @@ -405,5 +413,6 @@ def test_all_pairs_connectivity_nbunch_iter(self): for u, v in itertools.combinations(nbunch, 2): A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G, nbunch=iter(nbunch)) - assert_equal(sorted((k, sorted(v)) for k, v in A.items()), - sorted((k, sorted(v)) for k, v in C.items())) + assert 
sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) diff --git a/networkx/algorithms/connectivity/tests/test_cuts.py b/networkx/algorithms/connectivity/tests/test_cuts.py index 8eeb294..becad52 100644 --- a/networkx/algorithms/connectivity/tests/test_cuts.py +++ b/networkx/algorithms/connectivity/tests/test_cuts.py @@ -1,4 +1,4 @@ -from nose.tools import assert_equal, assert_true, assert_false, assert_raises +import pytest import networkx as nx from networkx.algorithms import flow @@ -14,22 +14,20 @@ flow.shortest_augmenting_path, ] -msg = "Assertion failed in function: {0}" - # Tests for node and edge cutsets def _generate_no_biconnected(max_attempts=50): attempts = 0 while True: - G = nx.fast_gnp_random_graph(100, 0.0575) + G = nx.fast_gnp_random_graph(100, 0.0575, seed=42) if nx.is_connected(G) and not nx.is_biconnected(G): attempts = 0 yield G else: if attempts >= max_attempts: - msg = "Tried %d times: no suitable Graph." % attempts - raise Exception(msg % max_attempts) + msg = f"Tried {attempts} times: no suitable Graph." + raise Exception(msg) else: attempts += 1 @@ -37,42 +35,60 @@ def _generate_no_biconnected(max_attempts=50): def test_articulation_points(): Ggen = _generate_no_biconnected() for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" for i in range(1): # change 1 to 3 or more for more realizations. G = next(Ggen) cut = nx.minimum_node_cut(G, flow_func=flow_func) - assert_true(len(cut) == 1, msg=msg.format(flow_func.__name__)) - assert_true(cut.pop() in set(nx.articulation_points(G)), - msg=msg.format(flow_func.__name__)) + assert len(cut) == 1, errmsg + assert cut.pop() in set(nx.articulation_points(G)), errmsg def test_brandes_erlebach_book(): # Figure 1 chapter 7: Connectivity # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf G = nx.Graph() - G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4), - (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8), - (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)]) + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge cutsets - assert_equal(3, len(nx.minimum_edge_cut(G, 1, 11, **kwargs)), - msg=msg.format(flow_func.__name__)) + assert 3 == len(nx.minimum_edge_cut(G, 1, 11, **kwargs)), errmsg edge_cut = nx.minimum_edge_cut(G, **kwargs) # Node 5 has only two edges - assert_equal(2, len(edge_cut), msg=msg.format(flow_func.__name__)) + assert 2 == len(edge_cut), errmsg H = G.copy() H.remove_edges_from(edge_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg # node cuts - assert_equal(set([6, 7]), minimum_st_node_cut(G, 1, 11, **kwargs), - msg=msg.format(flow_func.__name__)) - assert_equal(set([6, 7]), nx.minimum_node_cut(G, 1, 11, **kwargs), - msg=msg.format(flow_func.__name__)) + assert {6, 7} == minimum_st_node_cut(G, 1, 11, **kwargs), errmsg + assert {6, 7} == nx.minimum_node_cut(G, 1, 11, **kwargs), errmsg node_cut = nx.minimum_node_cut(G, **kwargs) - assert_equal(2, len(node_cut), msg=msg.format(flow_func.__name__)) + assert 2 == len(node_cut), errmsg H = G.copy() H.remove_nodes_from(node_cut) - assert_false(nx.is_connected(H), 
msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg def test_white_harary_paper(): @@ -90,109 +106,113 @@ def test_white_harary_paper(): G.add_edge(0, i) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge cuts edge_cut = nx.minimum_edge_cut(G, **kwargs) - assert_equal(3, len(edge_cut), msg=msg.format(flow_func.__name__)) + assert 3 == len(edge_cut), errmsg H = G.copy() H.remove_edges_from(edge_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg # node cuts node_cut = nx.minimum_node_cut(G, **kwargs) - assert_equal(set([0]), node_cut, msg=msg.format(flow_func.__name__)) + assert {0} == node_cut, errmsg H = G.copy() H.remove_nodes_from(node_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg def test_petersen_cutset(): G = nx.petersen_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge cuts edge_cut = nx.minimum_edge_cut(G, **kwargs) - assert_equal(3, len(edge_cut), msg=msg.format(flow_func.__name__)) + assert 3 == len(edge_cut), errmsg H = G.copy() H.remove_edges_from(edge_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg # node cuts node_cut = nx.minimum_node_cut(G, **kwargs) - assert_equal(3, len(node_cut), msg=msg.format(flow_func.__name__)) + assert 3 == len(node_cut), errmsg H = G.copy() H.remove_nodes_from(node_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg def test_octahedral_cutset(): G = nx.octahedral_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge cuts edge_cut = nx.minimum_edge_cut(G, **kwargs) - assert_equal(4, len(edge_cut), msg=msg.format(flow_func.__name__)) + assert 4 == len(edge_cut), errmsg H = G.copy() H.remove_edges_from(edge_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg # node cuts node_cut = nx.minimum_node_cut(G, **kwargs) - assert_equal(4, len(node_cut), msg=msg.format(flow_func.__name__)) + assert 4 == len(node_cut), errmsg H = G.copy() H.remove_nodes_from(node_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg def test_icosahedral_cutset(): G = nx.icosahedral_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge cuts edge_cut = nx.minimum_edge_cut(G, **kwargs) - assert_equal(5, len(edge_cut), msg=msg.format(flow_func.__name__)) + assert 5 == len(edge_cut), errmsg H = G.copy() H.remove_edges_from(edge_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg # node cuts node_cut = nx.minimum_node_cut(G, **kwargs) - assert_equal(5, len(node_cut), msg=msg.format(flow_func.__name__)) + assert 5 == len(node_cut), errmsg H = G.copy() H.remove_nodes_from(node_cut) - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg def test_node_cutset_exception(): G = nx.Graph() G.add_edges_from([(1, 2), (3, 4)]) for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, nx.minimum_node_cut, G, 
flow_func=flow_func) + pytest.raises(nx.NetworkXError, nx.minimum_node_cut, G, flow_func=flow_func) def test_node_cutset_random_graphs(): for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" for i in range(3): - G = nx.fast_gnp_random_graph(50, 0.25) + G = nx.fast_gnp_random_graph(50, 0.25, seed=42) if not nx.is_connected(G): ccs = iter(nx.connected_components(G)) start = arbitrary_element(next(ccs)) G.add_edges_from((start, arbitrary_element(c)) for c in ccs) cutset = nx.minimum_node_cut(G, flow_func=flow_func) - assert_equal(nx.node_connectivity(G), len(cutset), - msg=msg.format(flow_func.__name__)) + assert nx.node_connectivity(G) == len(cutset), errmsg G.remove_nodes_from(cutset) - assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(G), errmsg def test_edge_cutset_random_graphs(): for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" for i in range(3): - G = nx.fast_gnp_random_graph(50, 0.25) + G = nx.fast_gnp_random_graph(50, 0.25, seed=42) if not nx.is_connected(G): ccs = iter(nx.connected_components(G)) start = arbitrary_element(next(ccs)) G.add_edges_from((start, arbitrary_element(c)) for c in ccs) cutset = nx.minimum_edge_cut(G, flow_func=flow_func) - assert_equal(nx.edge_connectivity(G), len(cutset), - msg=msg.format(flow_func.__name__)) + assert nx.edge_connectivity(G) == len(cutset), errmsg G.remove_edges_from(cutset) - assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(G), errmsg def test_empty_graphs(): @@ -200,32 +220,36 @@ def test_empty_graphs(): D = nx.DiGraph() for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: for flow_func in flow_funcs: - assert_raises(nx.NetworkXPointlessConcept, interface_func, G, - flow_func=flow_func) - assert_raises(nx.NetworkXPointlessConcept, interface_func, D, - flow_func=flow_func) + pytest.raises( + nx.NetworkXPointlessConcept, interface_func, G, flow_func=flow_func + ) + pytest.raises( + nx.NetworkXPointlessConcept, interface_func, D, flow_func=flow_func + ) def test_unbounded(): G = nx.complete_graph(5) for flow_func in flow_funcs: - assert_equal(4, len(minimum_st_edge_cut(G, 1, 4, flow_func=flow_func))) + assert 4 == len(minimum_st_edge_cut(G, 1, 4, flow_func=flow_func)) def test_missing_source(): G = nx.path_graph(4) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, interface_func, G, 10, 1, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, interface_func, G, 10, 1, flow_func=flow_func + ) def test_missing_target(): G = nx.path_graph(4) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, interface_func, G, 1, 10, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, interface_func, G, 1, 10, flow_func=flow_func + ) def test_not_weakly_connected(): @@ -234,8 +258,7 @@ def test_not_weakly_connected(): nx.add_path(G, [4, 5]) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, interface_func, G, - flow_func=flow_func) + pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func) def test_not_connected(): @@ -244,15 +267,14 @@ def test_not_connected(): nx.add_path(G, [4, 5]) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, 
interface_func, G, - flow_func=flow_func) + pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func) def tests_min_cut_complete(): G = nx.complete_graph(5) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - assert_equal(4, len(interface_func(G, flow_func=flow_func))) + assert 4 == len(interface_func(G, flow_func=flow_func)) def tests_min_cut_complete_directed(): @@ -260,7 +282,7 @@ def tests_min_cut_complete_directed(): G = G.to_directed() for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - assert_equal(4, len(interface_func(G, flow_func=flow_func))) + assert 4 == len(interface_func(G, flow_func=flow_func)) def tests_minimum_st_node_cut(): @@ -268,22 +290,21 @@ def tests_minimum_st_node_cut(): G.add_nodes_from([0, 1, 2, 3, 7, 8, 11, 12]) G.add_edges_from([(7, 11), (1, 11), (1, 12), (12, 8), (0, 1)]) nodelist = minimum_st_node_cut(G, 7, 11) - assert(nodelist == []) + assert nodelist == {} def test_invalid_auxiliary(): G = nx.complete_graph(5) - assert_raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3, - auxiliary=G) + pytest.raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3, auxiliary=G) def test_interface_only_source(): G = nx.complete_graph(5) for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: - assert_raises(nx.NetworkXError, interface_func, G, s=0) + pytest.raises(nx.NetworkXError, interface_func, G, s=0) def test_interface_only_target(): G = nx.complete_graph(5) for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: - assert_raises(nx.NetworkXError, interface_func, G, t=3) + pytest.raises(nx.NetworkXError, interface_func, G, t=3) diff --git a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py index fd46ed0..74bb3f2 100644 --- a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py +++ b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py @@ -1,12 +1,4 @@ -# test_disjoint_paths.py - Tests for flow based node and edge disjoint paths. -# -# Copyright 2016 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
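These conversions use the call form pytest.raises(Exc, func, *args, **kwargs); the context-manager form is equivalent and often reads better when the call is long. An illustrative (hypothetical) test in that style:

    import pytest
    import networkx as nx

    def test_missing_source_ctx():
        G = nx.path_graph(4)
        with pytest.raises(nx.NetworkXError):
            nx.node_connectivity(G, 10, 1)

Likewise, the seed=42 arguments added to the random-graph generators make each run deterministic: a fixed seed yields the same graph every time, for example:

    import networkx as nx

    G1 = nx.fast_gnp_random_graph(100, 0.0575, seed=42)
    G2 = nx.fast_gnp_random_graph(100, 0.0575, seed=42)
    assert sorted(G1.edges()) == sorted(G2.edges())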
-from nose.tools import assert_equal, assert_true, assert_false, raises +import pytest import networkx as nx from networkx.algorithms import flow @@ -20,8 +12,6 @@ flow.shortest_augmenting_path, ] -msg = "Assertion failed in function: {0}" - def is_path(G, path): return all(v in G[u] for u, v in pairwise(path)) @@ -31,7 +21,7 @@ def are_edge_disjoint_paths(G, paths): if not paths: return False for path in paths: - assert_true(is_path(G, path)) + assert is_path(G, path) paths_edges = [list(pairwise(p)) for p in paths] num_of_edges = sum(len(e) for e in paths_edges) num_unique_edges = len(set.union(*[set(es) for es in paths_edges])) @@ -44,7 +34,7 @@ def are_node_disjoint_paths(G, paths): if not paths: return False for path in paths: - assert_true(is_path(G, path)) + assert is_path(G, path) # first and last nodes are source and target st = {paths[0][0], paths[0][-1]} num_of_nodes = len([n for path in paths for n in path if n not in st]) @@ -56,206 +46,204 @@ def are_node_disjoint_paths(G, paths): def test_graph_from_pr_2053(): G = nx.Graph() - G.add_edges_from([ - ('A', 'B'), ('A', 'D'), ('A', 'F'), ('A', 'G'), - ('B', 'C'), ('B', 'D'), ('B', 'G'), ('C', 'D'), - ('C', 'E'), ('C', 'Z'), ('D', 'E'), ('D', 'F'), - ('E', 'F'), ('E', 'Z'), ('F', 'Z'), ('G', 'Z')]) + G.add_edges_from( + [ + ("A", "B"), + ("A", "D"), + ("A", "F"), + ("A", "G"), + ("B", "C"), + ("B", "D"), + ("B", "G"), + ("C", "D"), + ("C", "E"), + ("C", "Z"), + ("D", "E"), + ("D", "F"), + ("E", "F"), + ("E", "Z"), + ("F", "Z"), + ("G", "Z"), + ] + ) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths - edge_paths = list(nx.edge_disjoint_paths(G, 'A', 'Z', **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_paths), msg=msg.format(flow_func.__name__)) - assert_equal( - nx.edge_connectivity(G, 'A', 'Z'), - len(edge_paths), - msg=msg.format(flow_func.__name__), - ) + edge_paths = list(nx.edge_disjoint_paths(G, "A", "Z", **kwargs)) + assert are_edge_disjoint_paths(G, edge_paths), errmsg + assert nx.edge_connectivity(G, "A", "Z") == len(edge_paths), errmsg # node disjoint paths - node_paths = list(nx.node_disjoint_paths(G, 'A', 'Z', **kwargs)) - assert_true(are_node_disjoint_paths(G, node_paths), msg=msg.format(flow_func.__name__)) - assert_equal( - nx.node_connectivity(G, 'A', 'Z'), - len(node_paths), - msg=msg.format(flow_func.__name__), - ) + node_paths = list(nx.node_disjoint_paths(G, "A", "Z", **kwargs)) + assert are_node_disjoint_paths(G, node_paths), errmsg + assert nx.node_connectivity(G, "A", "Z") == len(node_paths), errmsg def test_florentine_families(): G = nx.florentine_families_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths - edge_dpaths = list(nx.edge_disjoint_paths(G, 'Medici', 'Strozzi', **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal( - nx.edge_connectivity(G, 'Medici', 'Strozzi'), - len(edge_dpaths), - msg=msg.format(flow_func.__name__), - ) + edge_dpaths = list(nx.edge_disjoint_paths(G, "Medici", "Strozzi", **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert nx.edge_connectivity(G, "Medici", "Strozzi") == len(edge_dpaths), errmsg # node disjoint paths - node_dpaths = list(nx.node_disjoint_paths(G, 'Medici', 'Strozzi', **kwargs)) - assert_true(are_node_disjoint_paths(G, node_dpaths), 
msg=msg.format(flow_func.__name__)) - assert_equal( - nx.node_connectivity(G, 'Medici', 'Strozzi'), - len(node_dpaths), - msg=msg.format(flow_func.__name__), - ) + node_dpaths = list(nx.node_disjoint_paths(G, "Medici", "Strozzi", **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert nx.node_connectivity(G, "Medici", "Strozzi") == len(node_dpaths), errmsg def test_karate(): G = nx.karate_club_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 33, **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal( - nx.edge_connectivity(G, 0, 33), - len(edge_dpaths), - msg=msg.format(flow_func.__name__), - ) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert nx.edge_connectivity(G, 0, 33) == len(edge_dpaths), errmsg # node disjoint paths node_dpaths = list(nx.node_disjoint_paths(G, 0, 33, **kwargs)) - assert_true(are_node_disjoint_paths(G, node_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal( - nx.node_connectivity(G, 0, 33), - len(node_dpaths), - msg=msg.format(flow_func.__name__), - ) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert nx.node_connectivity(G, 0, 33) == len(node_dpaths), errmsg def test_petersen_disjoint_paths(): G = nx.petersen_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(3, len(edge_dpaths), msg=msg.format(flow_func.__name__)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert 3 == len(edge_dpaths), errmsg # node disjoint paths node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs)) - assert_true(are_node_disjoint_paths(G, node_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(3, len(node_dpaths), msg=msg.format(flow_func.__name__)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert 3 == len(node_dpaths), errmsg def test_octahedral_disjoint_paths(): G = nx.octahedral_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 5, **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(4, len(edge_dpaths), msg=msg.format(flow_func.__name__)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert 4 == len(edge_dpaths), errmsg # node disjoint paths node_dpaths = list(nx.node_disjoint_paths(G, 0, 5, **kwargs)) - assert_true(are_node_disjoint_paths(G, node_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(4, len(node_dpaths), msg=msg.format(flow_func.__name__)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert 4 == len(node_dpaths), errmsg def test_icosahedral_disjoint_paths(): G = nx.icosahedral_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(5, len(edge_dpaths), 
msg=msg.format(flow_func.__name__)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert 5 == len(edge_dpaths), errmsg # node disjoint paths node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs)) - assert_true(are_node_disjoint_paths(G, node_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(5, len(node_dpaths), msg=msg.format(flow_func.__name__)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert 5 == len(node_dpaths), errmsg def test_cutoff_disjoint_paths(): G = nx.icosahedral_graph() for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) + errmsg = f"Assertion failed in function: {flow_func.__name__}" for cutoff in [2, 4]: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff # edge disjoint paths edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) - assert_true(are_edge_disjoint_paths(G, edge_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(cutoff, len(edge_dpaths), msg=msg.format(flow_func.__name__)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert cutoff == len(edge_dpaths), errmsg # node disjoint paths node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs)) - assert_true(are_node_disjoint_paths(G, node_dpaths), msg=msg.format(flow_func.__name__)) - assert_equal(cutoff, len(node_dpaths), msg=msg.format(flow_func.__name__)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert cutoff == len(node_dpaths), errmsg -@raises(nx.NetworkXError) def test_missing_source_edge_paths(): - G = nx.path_graph(4) - list(nx.edge_disjoint_paths(G, 10, 1)) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.edge_disjoint_paths(G, 10, 1)) -@raises(nx.NetworkXError) def test_missing_source_node_paths(): - G = nx.path_graph(4) - list(nx.node_disjoint_paths(G, 10, 1)) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.node_disjoint_paths(G, 10, 1)) -@raises(nx.NetworkXError) def test_missing_target_edge_paths(): - G = nx.path_graph(4) - list(nx.edge_disjoint_paths(G, 1, 10)) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.edge_disjoint_paths(G, 1, 10)) -@raises(nx.NetworkXError) def test_missing_target_node_paths(): - G = nx.path_graph(4) - list(nx.node_disjoint_paths(G, 1, 10)) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.node_disjoint_paths(G, 1, 10)) -@raises(nx.NetworkXNoPath) def test_not_weakly_connected_edges(): - G = nx.DiGraph() - nx.add_path(G, [1, 2, 3]) - nx.add_path(G, [4, 5]) - list(nx.edge_disjoint_paths(G, 1, 5)) + with pytest.raises(nx.NetworkXNoPath): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.edge_disjoint_paths(G, 1, 5)) -@raises(nx.NetworkXNoPath) def test_not_weakly_connected_nodes(): - G = nx.DiGraph() - nx.add_path(G, [1, 2, 3]) - nx.add_path(G, [4, 5]) - list(nx.node_disjoint_paths(G, 1, 5)) + with pytest.raises(nx.NetworkXNoPath): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.node_disjoint_paths(G, 1, 5)) -@raises(nx.NetworkXNoPath) def test_not_connected_edges(): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - nx.add_path(G, [4, 5]) - list(nx.edge_disjoint_paths(G, 1, 5)) + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.edge_disjoint_paths(G, 1, 5)) -@raises(nx.NetworkXNoPath) def test_not_connected_nodes(): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - nx.add_path(G, [4, 5]) - list(nx.node_disjoint_paths(G, 1, 5)) + with 
pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.node_disjoint_paths(G, 1, 5)) -@raises(nx.NetworkXNoPath) def test_isolated_edges(): - G = nx.Graph() - G.add_node(1) - nx.add_path(G, [4, 5]) - list(nx.edge_disjoint_paths(G, 1, 5)) + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + G.add_node(1) + nx.add_path(G, [4, 5]) + list(nx.edge_disjoint_paths(G, 1, 5)) -@raises(nx.NetworkXNoPath) def test_isolated_nodes(): - G = nx.Graph() - G.add_node(1) - nx.add_path(G, [4, 5]) - list(nx.node_disjoint_paths(G, 1, 5)) + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + G.add_node(1) + nx.add_path(G, [4, 5]) + list(nx.node_disjoint_paths(G, 1, 5)) -@raises(nx.NetworkXError) def test_invalid_auxiliary(): - G = nx.complete_graph(5) - list(nx.node_disjoint_paths(G, 0, 3, auxiliary=G)) + with pytest.raises(nx.NetworkXError): + G = nx.complete_graph(5) + list(nx.node_disjoint_paths(G, 0, 3, auxiliary=G)) diff --git a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py index 3790a86..dfef363 100644 --- a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py +++ b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py @@ -1,14 +1,9 @@ -# -*- coding: utf-8 -*- import random import networkx as nx import itertools as it from networkx.utils import pairwise -from nose.tools import (assert_equal, assert_false, assert_true, - assert_greater_equal, assert_less, assert_less_equal, - assert_raises) -from networkx.algorithms.connectivity import ( - k_edge_augmentation, -) +import pytest +from networkx.algorithms.connectivity import k_edge_augmentation from networkx.algorithms.connectivity.edge_augmentation import ( collapse, complement_edges, @@ -28,8 +23,13 @@ def tarjan_bridge_graph(): # Information Processing Letters, 1974 - Elsevier # doi:10.1016/0020-0190(74)90003-9. 
# define 2-connected components and bridges - ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8), - (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)] + ccs = [ + (1, 2, 4, 3, 1, 4), + (5, 6, 7, 5), + (8, 9, 10, 8), + (17, 18, 16, 15, 17), + (11, 12, 14, 13, 11, 14), + ] bridges = [(4, 8), (3, 5), (3, 17)] G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges))) return G @@ -37,69 +37,66 @@ def tarjan_bridge_graph(): def test_weight_key(): G = nx.Graph() - G.add_nodes_from([ - 1, 2, 3, 4, 5, 6, 7, 8, 9]) + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) G.add_edges_from([(3, 8), (1, 2), (2, 3)]) impossible = {(3, 6), (3, 9)} rng = random.Random(0) avail_uv = list(set(complement_edges(G)) - impossible) - avail = [(u, v, {'cost': rng.random()}) for u, v in avail_uv] + avail = [(u, v, {"cost": rng.random()}) for u, v in avail_uv] _augment_and_check(G, k=1) _augment_and_check(G, k=1, avail=avail_uv) - _augment_and_check(G, k=1, avail=avail, weight='cost') + _augment_and_check(G, k=1, avail=avail, weight="cost") - _check_augmentations(G, avail, weight='cost') + _check_augmentations(G, avail, weight="cost") def test_is_locally_k_edge_connected_exceptions(): - assert_raises(nx.NetworkXNotImplemented, - is_k_edge_connected, - nx.DiGraph(), k=0) - assert_raises(nx.NetworkXNotImplemented, - is_k_edge_connected, - nx.MultiGraph(), k=0) - assert_raises(ValueError, is_k_edge_connected, - nx.Graph(), k=0) + pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.DiGraph(), k=0) + pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.MultiGraph(), k=0) + pytest.raises(ValueError, is_k_edge_connected, nx.Graph(), k=0) def test_is_k_edge_connected(): G = nx.barbell_graph(10, 0) - assert_true(is_k_edge_connected(G, k=1)) - assert_false(is_k_edge_connected(G, k=2)) + assert is_k_edge_connected(G, k=1) + assert not is_k_edge_connected(G, k=2) G = nx.Graph() G.add_nodes_from([5, 15]) - assert_false(is_k_edge_connected(G, k=1)) - assert_false(is_k_edge_connected(G, k=2)) + assert not is_k_edge_connected(G, k=1) + assert not is_k_edge_connected(G, k=2) G = nx.complete_graph(5) - assert_true(is_k_edge_connected(G, k=1)) - assert_true(is_k_edge_connected(G, k=2)) - assert_true(is_k_edge_connected(G, k=3)) - assert_true(is_k_edge_connected(G, k=4)) + assert is_k_edge_connected(G, k=1) + assert is_k_edge_connected(G, k=2) + assert is_k_edge_connected(G, k=3) + assert is_k_edge_connected(G, k=4) def test_is_k_edge_connected_exceptions(): - assert_raises(nx.NetworkXNotImplemented, - is_locally_k_edge_connected, - nx.DiGraph(), 1, 2, k=0) - assert_raises(nx.NetworkXNotImplemented, - is_locally_k_edge_connected, - nx.MultiGraph(), 1, 2, k=0) - assert_raises(ValueError, - is_locally_k_edge_connected, - nx.Graph(), 1, 2, k=0) + pytest.raises( + nx.NetworkXNotImplemented, is_locally_k_edge_connected, nx.DiGraph(), 1, 2, k=0 + ) + pytest.raises( + nx.NetworkXNotImplemented, + is_locally_k_edge_connected, + nx.MultiGraph(), + 1, + 2, + k=0, + ) + pytest.raises(ValueError, is_locally_k_edge_connected, nx.Graph(), 1, 2, k=0) def test_is_locally_k_edge_connected(): G = nx.barbell_graph(10, 0) - assert_true(is_locally_k_edge_connected(G, 5, 15, k=1)) - assert_false(is_locally_k_edge_connected(G, 5, 15, k=2)) + assert is_locally_k_edge_connected(G, 5, 15, k=1) + assert not is_locally_k_edge_connected(G, 5, 15, k=2) G = nx.Graph() G.add_nodes_from([5, 15]) - assert_false(is_locally_k_edge_connected(G, 5, 15, k=2)) + assert not is_locally_k_edge_connected(G, 5, 15, k=2) def test_null_graph(): @@ 
-134,24 +131,23 @@ def test_edgeless_graph(): def test_invalid_k(): G = nx.Graph() - assert_raises(ValueError, list, k_edge_augmentation(G, k=-1)) - assert_raises(ValueError, list, k_edge_augmentation(G, k=0)) + pytest.raises(ValueError, list, k_edge_augmentation(G, k=-1)) + pytest.raises(ValueError, list, k_edge_augmentation(G, k=0)) def test_unfeasible(): G = tarjan_bridge_graph() - assert_raises(nx.NetworkXUnfeasible, list, - k_edge_augmentation(G, k=1, avail=[])) + pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=1, avail=[])) - assert_raises(nx.NetworkXUnfeasible, list, - k_edge_augmentation(G, k=2, avail=[])) + pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[])) - assert_raises(nx.NetworkXUnfeasible, list, - k_edge_augmentation(G, k=2, avail=[(7, 9)])) + pytest.raises( + nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[(7, 9)]) + ) # partial solutions should not error if real solutions are infeasible aug_edges = list(k_edge_augmentation(G, k=2, avail=[(7, 9)], partial=True)) - assert_equal(aug_edges, [(7, 9)]) + assert aug_edges == [(7, 9)] _check_augmentations(G, avail=[], max_k=MAX_EFFICIENT_K + 2) @@ -162,24 +158,35 @@ def test_tarjan(): G = tarjan_bridge_graph() aug_edges = set(_augment_and_check(G, k=2)[0]) - print('aug_edges = {!r}'.format(aug_edges)) + print(f"aug_edges = {aug_edges!r}") # can't assert exact edge equality due to non-deterministic edge order # but we do know the size of the solution must be 3 - assert_equal(len(aug_edges), 3) - - avail = [(9, 7), (8, 5), (2, 10), (6, 13), (11, 18), (1, 17), (2, 3), - (16, 17), (18, 14), (15, 14)] + assert len(aug_edges) == 3 + + avail = [ + (9, 7), + (8, 5), + (2, 10), + (6, 13), + (11, 18), + (1, 17), + (2, 3), + (16, 17), + (18, 14), + (15, 14), + ] aug_edges = set(_augment_and_check(G, avail=avail, k=2)[0]) # Can't assert exact length since approximation depends on the order of a # dict traversal.
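The size-3 bound follows from the Eswaran and Tarjan result that _check_unconstrained_bridge_property verifies further down this file: collapsing the bridges of tarjan_bridge_graph leaves a forest with p = 3 leaf components and q = 1 isolated component, so an optimal unconstrained 2-edge-augmentation has ceil(p/2) + q = 3 edges. A standalone sketch of that fact, using only public networkx calls and not part of the patch itself:

import itertools as it
import networkx as nx
from networkx.utils import pairwise

# Five 2-edge-connected cycles joined by three bridges, as in
# tarjan_bridge_graph above.
ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8),
       (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)]
bridges = [(4, 8), (3, 5), (3, 17)]
G = nx.Graph(it.chain(*(pairwise(p) for p in ccs + bridges)))

# The unconstrained optimum: ceil(3 / 2) + 1 == 3 new edges.
aug = list(nx.k_edge_augmentation(G, k=2))
assert len(aug) == 3

# Adding them makes the graph 2-edge-connected (bridgeless).
H = G.copy()
H.add_edges_from(aug)
assert nx.edge_connectivity(H) >= 2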
- assert_less_equal(len(aug_edges), 3 * 2) + assert len(aug_edges) <= 3 * 2 _check_augmentations(G, avail) def test_configuration(): - seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497] + # seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497] + seeds = [1001, 1002, 1003, 1004] for seed in seeds: deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000) G = nx.Graph(nx.configuration_model(deg_seq, seed=seed)) @@ -189,7 +196,7 @@ def test_configuration(): def test_shell(): # seeds = [2057382236, 3331169846, 1840105863, 476020778, 2247498425] - seeds = [1840105863] + seeds = [18] for seed in seeds: constructor = [(12, 70, 0.8), (15, 40, 0.6)] G = nx.random_shell_graph(constructor, seed=seed) @@ -235,32 +242,33 @@ def test_gnp_augmentation(): rng = random.Random(0) G = nx.gnp_random_graph(30, 0.005, seed=0) # Randomly make edges available - avail = {(u, v): 1 + rng.random() - for u, v in complement_edges(G) - if rng.random() < .25} + avail = { + (u, v): 1 + rng.random() for u, v in complement_edges(G) if rng.random() < 0.25 + } _check_augmentations(G, avail) def _assert_solution_properties(G, aug_edges, avail_dict=None): """ Checks that aug_edges are consistently formatted """ if avail_dict is not None: - assert_true(all(e in avail_dict for e in aug_edges), - 'when avail is specified aug-edges should be in avail') + assert all( + e in avail_dict for e in aug_edges + ), "when avail is specified aug-edges should be in avail" unique_aug = set(map(tuple, map(sorted, aug_edges))) unique_aug = list(map(tuple, map(sorted, aug_edges))) - assert_true(len(aug_edges) == len(unique_aug), - 'edges should be unique') + assert len(aug_edges) == len(unique_aug), "edges should be unique" - assert_false(any(u == v for u, v in unique_aug), - 'should be no self-edges') + assert not any(u == v for u, v in unique_aug), "should be no self-edges" - assert_false(any(G.has_edge(u, v) for u, v in unique_aug), - 'aug edges and G.edges should be disjoint') + assert not any( + G.has_edge(u, v) for u, v in unique_aug + ), "aug edges and G.edges should be disjoint" -def _augment_and_check(G, k, avail=None, weight=None, verbose=False, - orig_k=None, max_aug_k=None): +def _augment_and_check( + G, k, avail=None, weight=None, verbose=False, orig_k=None, max_aug_k=None +): """ Does one specific augmentation and checks for properties of the result """ @@ -273,31 +281,27 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, try: if avail is not None: # ensure avail is in dict form - avail_dict = dict(zip(*_unpack_available_edges(avail, - weight=weight))) + avail_dict = dict(zip(*_unpack_available_edges(avail, weight=weight))) else: avail_dict = None try: # Find the augmentation if possible - generator = nx.k_edge_augmentation(G, k=k, weight=weight, - avail=avail) - assert_false(isinstance(generator, list), - 'should always return an iter') + generator = nx.k_edge_augmentation(G, k=k, weight=weight, avail=avail) + assert not isinstance(generator, list), "should always return an iter" aug_edges = [] for edge in generator: aug_edges.append(edge) except nx.NetworkXUnfeasible: infeasible = True - info['infeasible'] = True - assert_equal(len(aug_edges), 0, - 'should not generate anything if unfeasible') + info["infeasible"] = True + assert len(aug_edges) == 0, "should not generate anything if unfeasible" if avail is None: n_nodes = G.number_of_nodes() - assert_less_equal(n_nodes, k, ( - 'unconstrained cases are only unfeasible if |V| <= k. 
' - 'Got |V|={} and k={}'.format(n_nodes, k) - )) + assert n_nodes <= k, ( + "unconstrained cases are only unfeasible if |V| <= k. " + f"Got |V|={n_nodes} and k={k}" + ) else: if max_aug_k is None: G_aug_all = G.copy() @@ -307,19 +311,22 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, except nx.NetworkXPointlessConcept: max_aug_k = 0 - assert_less(max_aug_k, k, ( - 'avail should only be unfeasible if using all edges ' - 'does not achieve k-edge-connectivity')) + assert max_aug_k < k, ( + "avail should only be unfeasible if using all edges " + "does not achieve k-edge-connectivity" + ) # Test for a partial solution - partial_edges = list(nx.k_edge_augmentation( - G, k=k, weight=weight, partial=True, avail=avail)) + partial_edges = list( + nx.k_edge_augmentation(G, k=k, weight=weight, partial=True, avail=avail) + ) - info['n_partial_edges'] = len(partial_edges) + info["n_partial_edges"] = len(partial_edges) if avail_dict is None: - assert_equal(set(partial_edges), set(complement_edges(G)), ( - 'unweighted partial solutions should be the complement')) + assert set(partial_edges) == set( + complement_edges(G) + ), "unweighted partial solutions should be the complement" elif len(avail_dict) > 0: H = G.copy() @@ -332,8 +339,9 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, # Full connectivity should be no better than our partial # solution. - assert_equal(partial_conn, full_conn, - 'adding more edges should not increase k-conn') + assert ( + partial_conn == full_conn + ), "adding more edges should not increase k-conn" # Find the new edge-connectivity after adding the augmenting edges aug_edges = partial_edges @@ -347,8 +355,8 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, else: total_weight = num_edges - info['total_weight'] = total_weight - info['num_edges'] = num_edges + info["total_weight"] = total_weight + info["num_edges"] = num_edges # Find the new edge-connectivity after adding the augmenting edges G_aug = G.copy() @@ -357,36 +365,33 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, aug_k = nx.edge_connectivity(G_aug) except nx.NetworkXPointlessConcept: aug_k = 0 - info['aug_k'] = aug_k + info["aug_k"] = aug_k # Do checks if not infeasible and orig_k < k: - assert_greater_equal(info['aug_k'], k, ( - 'connectivity should increase to k={} or more'.format(k))) + assert info["aug_k"] >= k, f"connectivity should increase to k={k} or more" - assert_greater_equal(info['aug_k'], orig_k, ( - 'augmenting should never reduce connectivity')) + assert info["aug_k"] >= orig_k, "augmenting should never reduce connectivity" _assert_solution_properties(G, aug_edges, avail_dict) except Exception: - info['failed'] = True - print('edges = {}'.format(list(G.edges()))) - print('nodes = {}'.format(list(G.nodes()))) - print('aug_edges = {}'.format(list(aug_edges))) - print('info = {}'.format(info)) + info["failed"] = True + print(f"edges = {list(G.edges())}") + print(f"nodes = {list(G.nodes())}") + print(f"aug_edges = {list(aug_edges)}") + print(f"info = {info}") raise else: if verbose: - print('info = {}'.format(info)) + print(f"info = {info}") if infeasible: aug_edges = None return aug_edges, info -def _check_augmentations(G, avail=None, max_k=None, weight=None, - verbose=False): +def _check_augmentations(G, avail=None, max_k=None, weight=None, verbose=False): """ Helper to check weighted/unweighted cases with multiple values of k """ # Using all available edges, find the maximum edge-connectivity try: @@ -411,63 
+416,71 @@ def _check_augmentations(G, avail=None, max_k=None, weight=None, avail_uniform = {e: 1 for e in complement_edges(G)} if verbose: - print('\n=== CHECK_AUGMENTATION ===') - print('G.number_of_nodes = {!r}'.format(G.number_of_nodes())) - print('G.number_of_edges = {!r}'.format(G.number_of_edges())) - print('max_k = {!r}'.format(max_k)) - print('max_aug_k = {!r}'.format(max_aug_k)) - print('orig_k = {!r}'.format(orig_k)) + print("\n=== CHECK_AUGMENTATION ===") + print(f"G.number_of_nodes = {G.number_of_nodes()!r}") + print(f"G.number_of_edges = {G.number_of_edges()!r}") + print(f"max_k = {max_k!r}") + print(f"max_aug_k = {max_aug_k!r}") + print(f"orig_k = {orig_k!r}") # check augmentation for multiple values of k for k in range(1, max_k + 1): if verbose: - print('---------------') - print('Checking k = {}'.format(k)) + print("---------------") + print(f"Checking k = {k}") # Check the unweighted version if verbose: - print('unweighted case') - aug_edges1, info1 = _augment_and_check( - G, k=k, verbose=verbose, orig_k=orig_k) + print("unweighted case") + aug_edges1, info1 = _augment_and_check(G, k=k, verbose=verbose, orig_k=orig_k) # Check that the weighted version with all available edges and uniform # weights gives a similar solution to the unweighted case. if verbose: - print('weighted uniform case') + print("weighted uniform case") aug_edges2, info2 = _augment_and_check( - G, k=k, avail=avail_uniform, verbose=verbose, + G, + k=k, + avail=avail_uniform, + verbose=verbose, orig_k=orig_k, - max_aug_k=G.number_of_nodes() - 1) + max_aug_k=G.number_of_nodes() - 1, + ) # Check the weighted version if avail is not None: if verbose: - print('weighted case') + print("weighted case") aug_edges3, info3 = _augment_and_check( - G, k=k, avail=avail, weight=weight, verbose=verbose, - max_aug_k=max_aug_k, orig_k=orig_k) + G, + k=k, + avail=avail, + weight=weight, + verbose=verbose, + max_aug_k=max_aug_k, + orig_k=orig_k, + ) if aug_edges1 is not None: # Check approximation ratios if k == 1: # when k=1, both solutions should be optimal - assert_equal(info2['total_weight'], info1['total_weight']) + assert info2["total_weight"] == info1["total_weight"] if k == 2: # when k=2, the weighted version is an approximation if orig_k == 0: # the approximation ratio is 3 if G is not connected - assert_less_equal(info2['total_weight'], - info1['total_weight'] * 3) + assert info2["total_weight"] <= info1["total_weight"] * 3 else: # the approximation ratio is 2 if G was connected - assert_less_equal(info2['total_weight'], - info1['total_weight'] * 2) + assert info2["total_weight"] <= info1["total_weight"] * 2 _check_unconstrained_bridge_property(G, info1) def _check_unconstrained_bridge_property(G, info1): # Check Theorem 5 from Eswaran and Tarjan (1975), Augmentation problems
import math + bridge_ccs = list(nx.connectivity.bridge_components(G)) # condense G into a forest C C = collapse(G, bridge_ccs) @@ -476,6 +489,7 @@ def _check_unconstrained_bridge_property(G, info1): q = len([n for n, d in C.degree() if d == 0]) # isolated if p + q > 1: size_target = int(math.ceil(p / 2.0)) + q - size_aug = info1['num_edges'] - assert_equal(size_aug, size_target, ( - 'augmentation size is different from what theory predicts')) + size_aug = info1["num_edges"] + assert ( + size_aug == size_target + ), "augmentation size is different from what theory predicts" diff --git a/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py b/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py index 711b796..d0bcf12 100644 --- a/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py +++ b/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py @@ -1,22 +1,16 @@ -# -*- coding: utf-8 -*- import networkx as nx import itertools as it -from nose.tools import (assert_equal, assert_not_equal, assert_greater_equal, - assert_raises, assert_in) +import pytest from networkx.utils import pairwise -from networkx.algorithms.connectivity import ( - bridge_components, - EdgeComponentAuxGraph, -) -from networkx.algorithms.connectivity.edge_kcomponents import ( - general_k_edge_subgraphs, -) +from networkx.algorithms.connectivity import bridge_components, EdgeComponentAuxGraph +from networkx.algorithms.connectivity.edge_kcomponents import general_k_edge_subgraphs # ---------------- # Helper functions # ---------------- + def fset(list_of_sets): """ allows == to be used for list of sets """ return set(map(frozenset, list_of_sets)) @@ -33,7 +27,7 @@ def _assert_subgraph_edge_connectivity(G, ccs_subgraph, k): C = G.subgraph(cc) if len(cc) > 1: connectivity = nx.edge_connectivity(C) - assert_greater_equal(connectivity, k) + assert connectivity >= k def _memo_connectivity(G, u, v, memo): @@ -55,7 +49,7 @@ def _all_pairs_connectivity(G, cc, k, memo): connectivity = _memo_connectivity(G, u, v, memo) if G.is_directed(): connectivity = min(connectivity, _memo_connectivity(G, v, u, memo)) - assert_greater_equal(connectivity, k) + assert connectivity >= k def _assert_local_cc_edge_connectivity(G, ccs_local, k, memo): @@ -100,33 +94,33 @@ def _check_edge_connectivity(G): _assert_subgraph_edge_connectivity(G, ccs_subgraph, k) if k == 1 or k == 2 and not G.is_directed(): - assert_equal(ccs_local, ccs_subgraph, - 'Subgraphs and components should be the same ' - 'when k == 1 or (k == 2 and not G.directed())') + assert ( + ccs_local == ccs_subgraph + ), "Subgraphs and components should be the same when k == 1 or (k == 2 and not G.directed())" if G.is_directed(): # Test special case methods are the same as the aux graph if k == 1: alt_sccs = fset(nx.strongly_connected_components(G)) - assert_equal(alt_sccs, ccs_local, 'k=1 failed alt') - assert_equal(alt_sccs, ccs_subgraph, 'k=1 failed alt') + assert alt_sccs == ccs_local, "k=1 failed alt" + assert alt_sccs == ccs_subgraph, "k=1 failed alt" else: # Test special case methods are the same as the aux graph if k == 1: alt_ccs = fset(nx.connected_components(G)) - assert_equal(alt_ccs, ccs_local, 'k=1 failed alt') - assert_equal(alt_ccs, ccs_subgraph, 'k=1 failed alt') + assert alt_ccs == ccs_local, "k=1 failed alt" + assert alt_ccs == ccs_subgraph, "k=1 failed alt" elif k == 2: alt_bridge_ccs = fset(bridge_components(G)) - assert_equal(alt_bridge_ccs, ccs_local, 'k=2 failed alt') - assert_equal(alt_bridge_ccs,
ccs_subgraph, 'k=2 failed alt') + assert alt_bridge_ccs == ccs_local, "k=2 failed alt" + assert alt_bridge_ccs == ccs_subgraph, "k=2 failed alt" # if new methods for k == 3 or k == 4 are implemented add them here # Check the general subgraph method works by itself - alt_subgraph_ccs = fset([set(C.nodes()) for C in - general_k_edge_subgraphs(G, k=k)]) - assert_equal(alt_subgraph_ccs, ccs_subgraph, - 'alt subgraph method failed') + alt_subgraph_ccs = fset( + [set(C.nodes()) for C in general_k_edge_subgraphs(G, k=k)] + ) + assert alt_subgraph_ccs == ccs_subgraph, "alt subgraph method failed" # Stop once k is larger than all special case methods # and we cannot break down ccs any further. @@ -138,37 +132,38 @@ def _check_edge_connectivity(G): # Misc tests # ---------------- + def test_zero_k_exception(): G = nx.Graph() # functions that return generators error immediately - assert_raises(ValueError, nx.k_edge_components, G, k=0) - assert_raises(ValueError, nx.k_edge_subgraphs, G, k=0) + pytest.raises(ValueError, nx.k_edge_components, G, k=0) + pytest.raises(ValueError, nx.k_edge_subgraphs, G, k=0) # actual generators only error when you get the first item aux_graph = EdgeComponentAuxGraph.construct(G) - assert_raises(ValueError, list, aux_graph.k_edge_components(k=0)) - assert_raises(ValueError, list, aux_graph.k_edge_subgraphs(k=0)) + pytest.raises(ValueError, list, aux_graph.k_edge_components(k=0)) + pytest.raises(ValueError, list, aux_graph.k_edge_subgraphs(k=0)) - assert_raises(ValueError, list, general_k_edge_subgraphs(G, k=0)) + pytest.raises(ValueError, list, general_k_edge_subgraphs(G, k=0)) def test_empty_input(): G = nx.Graph() - assert_equal([], list(nx.k_edge_components(G, k=5))) - assert_equal([], list(nx.k_edge_subgraphs(G, k=5))) + assert [] == list(nx.k_edge_components(G, k=5)) + assert [] == list(nx.k_edge_subgraphs(G, k=5)) G = nx.DiGraph() - assert_equal([], list(nx.k_edge_components(G, k=5))) - assert_equal([], list(nx.k_edge_subgraphs(G, k=5))) + assert [] == list(nx.k_edge_components(G, k=5)) + assert [] == list(nx.k_edge_subgraphs(G, k=5)) def test_not_implemented(): G = nx.MultiGraph() - assert_raises(nx.NetworkXNotImplemented, EdgeComponentAuxGraph.construct, G) - assert_raises(nx.NetworkXNotImplemented, nx.k_edge_components, G, k=2) - assert_raises(nx.NetworkXNotImplemented, nx.k_edge_subgraphs, G, k=2) - assert_raises(nx.NetworkXNotImplemented, bridge_components, G) - assert_raises(nx.NetworkXNotImplemented, bridge_components, nx.DiGraph()) + pytest.raises(nx.NetworkXNotImplemented, EdgeComponentAuxGraph.construct, G) + pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_components, G, k=2) + pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_subgraphs, G, k=2) + pytest.raises(nx.NetworkXNotImplemented, bridge_components, G) + pytest.raises(nx.NetworkXNotImplemented, bridge_components, nx.DiGraph()) def test_general_k_edge_subgraph_quick_return(): @@ -176,24 +171,25 @@ def test_general_k_edge_subgraph_quick_return(): G = nx.Graph() G.add_node(0) subgraphs = list(general_k_edge_subgraphs(G, k=1)) - assert_equal(len(subgraphs), 1) + assert len(subgraphs) == 1 for subgraph in subgraphs: - assert_equal(subgraph.number_of_nodes(), 1) + assert subgraph.number_of_nodes() == 1 G.add_node(1) subgraphs = list(general_k_edge_subgraphs(G, k=1)) - assert_equal(len(subgraphs), 2) + assert len(subgraphs) == 2 for subgraph in subgraphs: - assert_equal(subgraph.number_of_nodes(), 1) + assert subgraph.number_of_nodes() == 1 # ---------------- # Undirected tests # ---------------- + def 
test_random_gnp(): # seeds = [1550709854, 1309423156, 4208992358, 2785630813, 1915069929] - seeds = [2785630813, 1915069929] + seeds = [12, 13] for seed in seeds: G = nx.gnp_random_graph(20, 0.2, seed=seed) @@ -201,7 +197,8 @@ def test_random_gnp(): def test_configuration(): - seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497] + # seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497] + seeds = [14, 15] for seed in seeds: deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000) G = nx.Graph(nx.configuration_model(deg_seq, seed=seed)) @@ -211,7 +208,7 @@ def test_configuration(): def test_shell(): # seeds = [2057382236, 3331169846, 1840105863, 476020778, 2247498425] - seeds = [2057382236] + seeds = [20] for seed in seeds: constructor = [(12, 70, 0.8), (15, 40, 0.6)] G = nx.random_shell_graph(constructor, seed=seed) @@ -229,8 +226,13 @@ def test_tarjan_bridge(): # Information Processing Letters, 1974 - Elsevier # doi:10.1016/0020-0190(74)90003-9. # define 2-connected components and bridges - ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8), - (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)] + ccs = [ + (1, 2, 4, 3, 1, 4), + (5, 6, 7, 5), + (8, 9, 10, 8), + (17, 18, 16, 15, 17), + (11, 12, 14, 13, 11, 14), + ] bridges = [(4, 8), (3, 5), (3, 17)] G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges))) _check_edge_connectivity(G) @@ -242,52 +244,51 @@ def test_bridge_cc(): bridges = [(4, 8), (3, 5), (20, 21), (22, 23, 24)] G = nx.Graph(it.chain(*(pairwise(path) for path in cc2 + bridges))) bridge_ccs = fset(bridge_components(G)) - target_ccs = fset([ - {1, 2, 3, 4}, {5}, {8, 9, 10}, {11, 12, 13}, {20}, - {21}, {22}, {23}, {24} - ]) - assert_equal(bridge_ccs, target_ccs) + target_ccs = fset( + [{1, 2, 3, 4}, {5}, {8, 9, 10}, {11, 12, 13}, {20}, {21}, {22}, {23}, {24}] + ) + assert bridge_ccs == target_ccs _check_edge_connectivity(G) def test_undirected_aux_graph(): # Graph similar to the one in # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264 - a, b, c, d, e, f, g, h, i = 'abcdefghi' + a, b, c, d, e, f, g, h, i = "abcdefghi" paths = [ (a, d, b, f, c), (a, e, b), (a, e, b, c, g, b, a), (c, b), (f, g, f), - (h, i) + (h, i), ] G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) aux_graph = EdgeComponentAuxGraph.construct(G) components_1 = fset(aux_graph.k_edge_subgraphs(k=1)) target_1 = fset([{a, b, c, d, e, f, g}, {h, i}]) - assert_equal(target_1, components_1) + assert target_1 == components_1 # Check that the undirected case for k=1 agrees with CCs alt_1 = fset(nx.k_edge_subgraphs(G, k=1)) - assert_equal(alt_1, components_1) + assert alt_1 == components_1 components_2 = fset(aux_graph.k_edge_subgraphs(k=2)) target_2 = fset([{a, b, c, d, e, f, g}, {h}, {i}]) - assert_equal(target_2, components_2) + assert target_2 == components_2 # Check that the undirected case for k=2 agrees with bridge components alt_2 = fset(nx.k_edge_subgraphs(G, k=2)) - assert_equal(alt_2, components_2) + assert alt_2 == components_2 components_3 = fset(aux_graph.k_edge_subgraphs(k=3)) target_3 = fset([{a}, {b, c, f, g}, {d}, {e}, {h}, {i}]) - assert_equal(target_3, components_3) + assert target_3 == components_3 components_4 = fset(aux_graph.k_edge_subgraphs(k=4)) target_4 = fset([{a}, {b}, {c}, {d}, {e}, {f}, {g}, {h}, {i}]) - assert_equal(target_4, components_4) + assert target_4 == components_4 _check_edge_connectivity(G) @@ -307,42 +308,30 @@ def test_local_subgraph_difference(): # Each clique is returned separately in 
k-edge-subgraphs subgraph_ccs = fset(aux_graph.k_edge_subgraphs(3)) - subgraph_target = fset([{101}, {102}, {103}, {104}, - {21, 22, 23, 24}, {11, 12, 13, 14}]) - assert_equal(subgraph_ccs, subgraph_target) + subgraph_target = fset( + [{101}, {102}, {103}, {104}, {21, 22, 23, 24}, {11, 12, 13, 14}] + ) + assert subgraph_ccs == subgraph_target # But in k-edge-ccs they are returned together # because they are locally 3-edge-connected local_ccs = fset(aux_graph.k_edge_components(3)) - local_target = fset([{101}, {102}, {103}, {104}, - {11, 12, 13, 14, 21, 22, 23, 24}]) - assert_equal(local_ccs, local_target) + local_target = fset([{101}, {102}, {103}, {104}, {11, 12, 13, 14, 21, 22, 23, 24}]) + assert local_ccs == local_target def test_local_subgraph_difference_directed(): - dipaths = [ - (1, 2, 3, 4, 1), - (1, 3, 1), - ] + dipaths = [(1, 2, 3, 4, 1), (1, 3, 1)] G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths])) - assert_equal( - fset(nx.k_edge_components(G, k=1)), - fset(nx.k_edge_subgraphs(G, k=1)) - ) + assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1)) # Unlike undirected graphs, when k=2, for directed graphs there is a case # where the k-edge-ccs are not the same as the k-edge-subgraphs. # (in undirected graphs ccs and subgraphs are the same when k=2) - assert_not_equal( - fset(nx.k_edge_components(G, k=2)), - fset(nx.k_edge_subgraphs(G, k=2)) - ) + assert fset(nx.k_edge_components(G, k=2)) != fset(nx.k_edge_subgraphs(G, k=2)) - assert_equal( - fset(nx.k_edge_components(G, k=3)), - fset(nx.k_edge_subgraphs(G, k=3)) - ) + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) _check_edge_connectivity(G) @@ -356,20 +345,11 @@ def test_triangles(): G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) # subgraph and ccs are the same in all cases here - assert_equal( - fset(nx.k_edge_components(G, k=1)), - fset(nx.k_edge_subgraphs(G, k=1)) - ) + assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1)) - assert_equal( - fset(nx.k_edge_components(G, k=2)), - fset(nx.k_edge_subgraphs(G, k=2)) - ) + assert fset(nx.k_edge_components(G, k=2)) == fset(nx.k_edge_subgraphs(G, k=2)) - assert_equal( - fset(nx.k_edge_components(G, k=3)), - fset(nx.k_edge_subgraphs(G, k=3)) - ) + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) _check_edge_connectivity(G) @@ -391,18 +371,18 @@ def test_four_clique(): # The subgraphs and ccs are different for k=3 local_ccs = fset(nx.k_edge_components(G, k=3)) subgraphs = fset(nx.k_edge_subgraphs(G, k=3)) - assert_not_equal(local_ccs, subgraphs) + assert local_ccs != subgraphs # The cliques are in the same cc clique1 = frozenset(paths[0]) clique2 = frozenset(paths[1]) - assert_in(clique1.union(clique2).union({100}), local_ccs) + assert clique1.union(clique2).union({100}) in local_ccs # but different subgraphs - assert_in(clique1, subgraphs) - assert_in(clique2, subgraphs) + assert clique1 in subgraphs + assert clique2 in subgraphs - assert_equal(G.degree(100), 3) + assert G.degree(100) == 3 _check_edge_connectivity(G) @@ -413,35 +393,26 @@ def test_five_clique(): G = nx.disjoint_union(nx.complete_graph(5), nx.complete_graph(5)) paths = [ # add aux-connections - (1, 100, 6), (2, 100, 7), (3, 200, 8), (4, 200, 100), + (1, 100, 6), + (2, 100, 7), + (3, 200, 8), + (4, 200, 100), ] G.add_edges_from(it.chain(*[pairwise(path) for path in paths])) - assert_equal(min(dict(nx.degree(G)).values()), 4) + assert min(dict(nx.degree(G)).values()) == 4 # For k=3 they
are the same - assert_equal( - fset(nx.k_edge_components(G, k=3)), - fset(nx.k_edge_subgraphs(G, k=3)) - ) + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) # For k=4 they are different # the aux nodes are in the same CC as clique 1 but not the same subgraph - assert_not_equal( - fset(nx.k_edge_components(G, k=4)), - fset(nx.k_edge_subgraphs(G, k=4)) - ) + assert fset(nx.k_edge_components(G, k=4)) != fset(nx.k_edge_subgraphs(G, k=4)) # For k=5 they are not the same - assert_not_equal( - fset(nx.k_edge_components(G, k=5)), - fset(nx.k_edge_subgraphs(G, k=5)) - ) + assert fset(nx.k_edge_components(G, k=5)) != fset(nx.k_edge_subgraphs(G, k=5)) # For k=6 they are the same - assert_equal( - fset(nx.k_edge_components(G, k=6)), - fset(nx.k_edge_subgraphs(G, k=6)) - ) + assert fset(nx.k_edge_components(G, k=6)) == fset(nx.k_edge_subgraphs(G, k=6)) _check_edge_connectivity(G) @@ -449,41 +420,42 @@ def test_five_clique(): # Directed tests # ---------------- + def test_directed_aux_graph(): # Graph similar to the one in # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264 - a, b, c, d, e, f, g, h, i = 'abcdefghi' + a, b, c, d, e, f, g, h, i = "abcdefghi" dipaths = [ (a, d, b, f, c), (a, e, b), (a, e, b, c, g, b, a), (c, b), (f, g, f), - (h, i) + (h, i), ] G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths])) aux_graph = EdgeComponentAuxGraph.construct(G) components_1 = fset(aux_graph.k_edge_subgraphs(k=1)) target_1 = fset([{a, b, c, d, e, f, g}, {h}, {i}]) - assert_equal(target_1, components_1) + assert target_1 == components_1 # Check that the directed case for k=1 agrees with SCCs alt_1 = fset(nx.strongly_connected_components(G)) - assert_equal(alt_1, components_1) + assert alt_1 == components_1 components_2 = fset(aux_graph.k_edge_subgraphs(k=2)) target_2 = fset([{i}, {e}, {d}, {b, c, f, g}, {h}, {a}]) - assert_equal(target_2, components_2) + assert target_2 == components_2 components_3 = fset(aux_graph.k_edge_subgraphs(k=3)) target_3 = fset([{a}, {b}, {c}, {d}, {e}, {f}, {g}, {h}, {i}]) - assert_equal(target_3, components_3) + assert target_3 == components_3 def test_random_gnp_directed(): # seeds = [3894723670, 500186844, 267231174, 2181982262, 1116750056] - seeds = [2181982262] + seeds = [21] for seed in seeds: G = nx.gnp_random_graph(20, 0.2, directed=True, seed=seed) _check_edge_connectivity(G) @@ -491,7 +463,7 @@ def test_random_gnp_directed(): def test_configuration_directed(): # seeds = [671221681, 2403749451, 124433910, 672335939, 1193127215] - seeds = [672335939] + seeds = [67] for seed in seeds: deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000) G = nx.DiGraph(nx.configuration_model(deg_seq, seed=seed)) @@ -501,7 +473,7 @@ def test_shell_directed(): # seeds = [3134027055, 4079264063, 1350769518, 1405643020, 530038094] - seeds = [3134027055] + seeds = [31] for seed in seeds: constructor = [(12, 70, 0.8), (15, 40, 0.6)] G = nx.random_shell_graph(constructor, seed=seed).to_directed() diff --git a/networkx/algorithms/connectivity/tests/test_kcomponents.py b/networkx/algorithms/connectivity/tests/test_kcomponents.py index 14c09dd..123e27c 100644 --- a/networkx/algorithms/connectivity/tests/test_kcomponents.py +++ b/networkx/algorithms/connectivity/tests/test_kcomponents.py @@ -1,5 +1,5 @@ # Test for Moody and White k-components algorithm -from nose.tools import assert_equal, assert_true, raises, assert_greater_equal +import pytest import networkx as nx from
networkx.algorithms.connectivity.kcomponents import ( build_k_number_dict, @@ -14,14 +14,12 @@ def torrents_and_ferraro_graph(): # Graph from https://arxiv.org/pdf/1503.04476v1 p.26 G = nx.convert_node_labels_to_integers( - nx.grid_graph([5, 5]), - label_attribute='labels', + nx.grid_graph([5, 5]), label_attribute="labels" ) - rlabels = nx.get_node_attributes(G, 'labels') + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -46,8 +44,7 @@ def torrents_and_ferraro_graph(): # needed because K5s share only one node. G.add_edge(new_node + 16, new_node + 8) - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(3, 0)], labels[(4, 0)])]: + for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -75,149 +72,178 @@ def torrents_and_ferraro_graph(): return G -@raises(nx.NetworkXNotImplemented) def test_directed(): - G = nx.gnp_random_graph(10, 0.2, directed=True) - nx.k_components(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.gnp_random_graph(10, 0.2, directed=True, seed=42) + nx.k_components(G) # Helper function -def _check_connectivity(G): - result = nx.k_components(G) - for k, components in result.items(): +def _check_connectivity(G, k_components): + for k, components in k_components.items(): if k < 3: continue + # check that k-components have node connectivity >= k. for component in components: C = G.subgraph(component) K = nx.node_connectivity(C) - assert_greater_equal(K, k) + assert K >= k +@pytest.mark.slow def test_torrents_and_ferraro_graph(): G = torrents_and_ferraro_graph() - _check_connectivity(G) + result = nx.k_components(G) + _check_connectivity(G, result) + + # In this example graph there are 8 3-components, 4 with 15 nodes + # and 4 with 5 nodes. + assert len(result[3]) == 8 + assert len([c for c in result[3] if len(c) == 15]) == 4 + assert len([c for c in result[3] if len(c) == 5]) == 4 + # There are also 8 4-components all with 5 nodes. 
+ assert len(result[4]) == 8 + assert all(len(c) == 5 for c in result[4]) +@pytest.mark.slow def test_random_gnp(): - G = nx.gnp_random_graph(50, 0.2) - _check_connectivity(G) + G = nx.gnp_random_graph(50, 0.2, seed=42) + result = nx.k_components(G) + _check_connectivity(G, result) +@pytest.mark.slow def test_shell(): constructor = [(20, 80, 0.8), (80, 180, 0.6)] - G = nx.random_shell_graph(constructor) - _check_connectivity(G) + G = nx.random_shell_graph(constructor, seed=42) + result = nx.k_components(G) + _check_connectivity(G, result) def test_configuration(): - deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5000) + deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5, seed=72) G = nx.Graph(nx.configuration_model(deg_seq)) G.remove_edges_from(nx.selfloop_edges(G)) - _check_connectivity(G) + result = nx.k_components(G) + _check_connectivity(G, result) def test_karate(): G = nx.karate_club_graph() - _check_connectivity(G) + result = nx.k_components(G) + _check_connectivity(G, result) def test_karate_component_number(): karate_k_num = { - 0: 4, 1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 3, 7: 4, 8: 4, 9: 2, - 10: 3, 11: 1, 12: 2, 13: 4, 14: 2, 15: 2, 16: 2, 17: 2, - 18: 2, 19: 3, 20: 2, 21: 2, 22: 2, 23: 3, 24: 3, 25: 3, - 26: 2, 27: 3, 28: 3, 29: 3, 30: 4, 31: 3, 32: 4, 33: 4 + 0: 4, + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 3, + 7: 4, + 8: 4, + 9: 2, + 10: 3, + 11: 1, + 12: 2, + 13: 4, + 14: 2, + 15: 2, + 16: 2, + 17: 2, + 18: 2, + 19: 3, + 20: 2, + 21: 2, + 22: 2, + 23: 3, + 24: 3, + 25: 3, + 26: 2, + 27: 3, + 28: 3, + 29: 3, + 30: 4, + 31: 3, + 32: 4, + 33: 4, } G = nx.karate_club_graph() k_components = nx.k_components(G) k_num = build_k_number_dict(k_components) - assert_equal(karate_k_num, k_num) - - -def test_torrents_and_ferraro_detail_3_and_4(): - G = torrents_and_ferraro_graph() - result = nx.k_components(G) - # In this example graph there are 8 3-components, 4 with 15 nodes - # and 4 with 5 nodes. - assert_equal(len(result[3]), 8) - assert_equal(len([c for c in result[3] if len(c) == 15]), 4) - assert_equal(len([c for c in result[3] if len(c) == 5]), 4) - # There are also 8 4-components all with 5 nodes. - assert_equal(len(result[4]), 8) - assert_true(all(len(c) == 5 for c in result[4])) - # Finally check that the k-components detected have actually node - # connectivity >= k. 
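For reference, the k-number mapping asserted in test_karate_component_number can be reproduced directly from the public API; a minimal sketch, not part of the patch:

import networkx as nx
from networkx.algorithms.connectivity.kcomponents import build_k_number_dict

G = nx.karate_club_graph()
k_components = nx.k_components(G)          # maps k -> list of node sets
k_num = build_k_number_dict(k_components)  # maps node -> largest such k
assert k_num[11] == 1  # node 11 hangs off the graph by a single edge
assert k_num[0] == 4   # the hub nodes sit inside a 4-component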
- for k, components in result.items(): - if k < 3: - continue - for component in components: - K = nx.node_connectivity(G.subgraph(component)) - assert_greater_equal(K, k) + assert karate_k_num == k_num def test_davis_southern_women(): G = nx.davis_southern_women_graph() - _check_connectivity(G) + result = nx.k_components(G) + _check_connectivity(G, result) def test_davis_southern_women_detail_3_and_4(): solution = { - 3: [{ - 'Nora Fayette', - 'E10', - 'Myra Liddel', - 'E12', - 'E14', - 'Frances Anderson', - 'Evelyn Jefferson', - 'Ruth DeSand', - 'Helen Lloyd', - 'Eleanor Nye', - 'E9', - 'E8', - 'E5', - 'E4', - 'E7', - 'E6', - 'E1', - 'Verne Sanderson', - 'E3', - 'E2', - 'Theresa Anderson', - 'Pearl Oglethorpe', - 'Katherina Rogers', - 'Brenda Rogers', - 'E13', - 'Charlotte McDowd', - 'Sylvia Avondale', - 'Laura Mandeville', - }, + 3: [ + { + "Nora Fayette", + "E10", + "Myra Liddel", + "E12", + "E14", + "Frances Anderson", + "Evelyn Jefferson", + "Ruth DeSand", + "Helen Lloyd", + "Eleanor Nye", + "E9", + "E8", + "E5", + "E4", + "E7", + "E6", + "E1", + "Verne Sanderson", + "E3", + "E2", + "Theresa Anderson", + "Pearl Oglethorpe", + "Katherina Rogers", + "Brenda Rogers", + "E13", + "Charlotte McDowd", + "Sylvia Avondale", + "Laura Mandeville", + } ], - 4: [{ - 'Nora Fayette', - 'E10', - 'Verne Sanderson', - 'E12', - 'Frances Anderson', - 'Evelyn Jefferson', - 'Ruth DeSand', - 'Helen Lloyd', - 'Eleanor Nye', - 'E9', - 'E8', - 'E5', - 'E4', - 'E7', - 'E6', - 'Myra Liddel', - 'E3', - 'Theresa Anderson', - 'Katherina Rogers', - 'Brenda Rogers', - 'Charlotte McDowd', - 'Sylvia Avondale', - 'Laura Mandeville', - }, + 4: [ + { + "Nora Fayette", + "E10", + "Verne Sanderson", + "E12", + "Frances Anderson", + "Evelyn Jefferson", + "Ruth DeSand", + "Helen Lloyd", + "Eleanor Nye", + "E9", + "E8", + "E5", + "E4", + "E7", + "E6", + "Myra Liddel", + "E3", + "Theresa Anderson", + "Katherina Rogers", + "Brenda Rogers", + "Charlotte McDowd", + "Sylvia Avondale", + "Laura Mandeville", + } ], } G = nx.davis_southern_women_graph() @@ -225,33 +251,45 @@ def test_davis_southern_women_detail_3_and_4(): for k, components in result.items(): if k < 3: continue - assert_true(len(components) == len(solution[k])) + assert len(components) == len(solution[k]) for component in components: - assert_true(component in solution[k]) + assert component in solution[k] def test_set_consolidation_rosettacode(): # Tests from http://rosettacode.org/wiki/Set_consolidation def list_of_sets_equal(result, solution): - assert_equal( - {frozenset(s) for s in result}, - {frozenset(s) for s in solution} - ) - question = [{'A', 'B'}, {'C', 'D'}] - solution = [{'A', 'B'}, {'C', 'D'}] + assert {frozenset(s) for s in result} == {frozenset(s) for s in solution} + + question = [{"A", "B"}, {"C", "D"}] + solution = [{"A", "B"}, {"C", "D"}] list_of_sets_equal(_consolidate(question, 1), solution) - question = [{'A', 'B'}, {'B', 'C'}] - solution = [{'A', 'B', 'C'}] + question = [{"A", "B"}, {"B", "C"}] + solution = [{"A", "B", "C"}] list_of_sets_equal(_consolidate(question, 1), solution) - question = [{'A', 'B'}, {'C', 'D'}, {'D', 'B'}] - solution = [{'A', 'C', 'B', 'D'}] + question = [{"A", "B"}, {"C", "D"}, {"D", "B"}] + solution = [{"A", "C", "B", "D"}] list_of_sets_equal(_consolidate(question, 1), solution) - question = [{'H', 'I', 'K'}, {'A', 'B'}, {'C', 'D'}, {'D', 'B'}, {'F', 'G', 'H'}] - solution = [{'A', 'C', 'B', 'D'}, {'G', 'F', 'I', 'H', 'K'}] + question = [{"H", "I", "K"}, {"A", "B"}, {"C", "D"}, {"D", "B"}, {"F", "G", "H"}] + solution = 
[{"A", "C", "B", "D"}, {"G", "F", "I", "H", "K"}] list_of_sets_equal(_consolidate(question, 1), solution) - question = [{'A', 'H'}, {'H', 'I', 'K'}, {'A', 'B'}, {'C', 'D'}, {'D', 'B'}, {'F', 'G', 'H'}] - solution = [{'A', 'C', 'B', 'D', 'G', 'F', 'I', 'H', 'K'}] + question = [ + {"A", "H"}, + {"H", "I", "K"}, + {"A", "B"}, + {"C", "D"}, + {"D", "B"}, + {"F", "G", "H"}, + ] + solution = [{"A", "C", "B", "D", "G", "F", "I", "H", "K"}] list_of_sets_equal(_consolidate(question, 1), solution) - question = [{'H', 'I', 'K'}, {'A', 'B'}, {'C', 'D'}, {'D', 'B'}, {'F', 'G', 'H'}, {'A', 'H'}] - solution = [{'A', 'C', 'B', 'D', 'G', 'F', 'I', 'H', 'K'}] + question = [ + {"H", "I", "K"}, + {"A", "B"}, + {"C", "D"}, + {"D", "B"}, + {"F", "G", "H"}, + {"A", "H"}, + ] + solution = [{"A", "C", "B", "D", "G", "F", "I", "H", "K"}] list_of_sets_equal(_consolidate(question, 1), solution) diff --git a/networkx/algorithms/connectivity/tests/test_kcutsets.py b/networkx/algorithms/connectivity/tests/test_kcutsets.py index d9aea28..b39752c 100644 --- a/networkx/algorithms/connectivity/tests/test_kcutsets.py +++ b/networkx/algorithms/connectivity/tests/test_kcutsets.py @@ -1,11 +1,13 @@ # Jordi Torrents # Test for k-cutsets -from nose.tools import assert_equal, assert_false, assert_true, assert_raises +import itertools +import pytest import networkx as nx from networkx.algorithms import flow from networkx.algorithms.connectivity.kcutsets import _is_separating_set +MAX_CUTSETS_TO_TEST = 4 # originally 100. cut to decrease testing time flow_funcs = [ flow.boykov_kolmogorov, @@ -20,15 +22,18 @@ # Some nice synthetic graphs ## def graph_example_1(): - G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]), - label_attribute='labels') - rlabels = nx.get_node_attributes(G, 'labels') + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 0)], labels[(4, 0)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [ + (labels[(0, 0)], labels[(1, 0)]), + (labels[(0, 4)], labels[(1, 4)]), + (labels[(3, 0)], labels[(4, 0)]), + (labels[(3, 4)], labels[(4, 4)]), + ]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -54,13 +59,13 @@ def graph_example_1(): def torrents_and_ferraro_graph(): - G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]), - label_attribute='labels') - rlabels = nx.get_node_attributes(G, 'labels') + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -85,8 +90,7 @@ def torrents_and_ferraro_graph(): # This stupid mistake make one reviewer very angry :P G.add_edge(new_node + 16, new_node + 8) - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(3, 0)], labels[(4, 0)])]: + for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -116,17 +120,19 @@ def torrents_and_ferraro_graph(): # Helper function def _check_separating_sets(G): - for Gc 
in nx.connected_component_subgraphs(G): - if len(Gc) < 3: + for cc in nx.connected_components(G): + if len(cc) < 3: continue + Gc = G.subgraph(cc) node_conn = nx.node_connectivity(Gc) - for cut in nx.all_node_cuts(Gc): - assert_equal(node_conn, len(cut)) - H = Gc.copy() - H.remove_nodes_from(cut) - assert_false(nx.is_connected(H)) + all_cuts = nx.all_node_cuts(Gc) + # Only test a limited number of cut sets to reduce test time. + for cut in itertools.islice(all_cuts, MAX_CUTSETS_TO_TEST): + assert node_conn == len(cut) + assert not nx.is_connected(nx.restricted_view(G, cut, [])) +@pytest.mark.slow def test_torrents_and_ferraro_graph(): G = torrents_and_ferraro_graph() _check_separating_sets(G) @@ -138,18 +144,18 @@ def test_example_1(): def test_random_gnp(): - G = nx.gnp_random_graph(100, 0.1) + G = nx.gnp_random_graph(100, 0.1, seed=42) _check_separating_sets(G) def test_shell(): constructor = [(20, 80, 0.8), (80, 180, 0.6)] - G = nx.random_shell_graph(constructor) + G = nx.random_shell_graph(constructor, seed=42) _check_separating_sets(G) def test_configuration(): - deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5000) + deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5, seed=72) G = nx.Graph(nx.configuration_model(deg_seq)) G.remove_edges_from(nx.selfloop_edges(G)) _check_separating_sets(G) @@ -163,14 +169,14 @@ def test_karate(): def _generate_no_biconnected(max_attempts=50): attempts = 0 while True: - G = nx.fast_gnp_random_graph(100, 0.0575) + G = nx.fast_gnp_random_graph(100, 0.0575, seed=42) if nx.is_connected(G) and not nx.is_biconnected(G): attempts = 0 yield G else: if attempts >= max_attempts: - msg = "Tried %d times: no suitable Graph." % attempts - raise Exception(msg % max_attempts) + msg = f"Tried {attempts} times: no suitable Graph." + raise Exception(msg) else: attempts += 1 @@ -181,7 +187,7 @@ def test_articulation_points(): G = next(Ggen) articulation_points = list({a} for a in nx.articulation_points(G)) for cut in nx.all_node_cuts(G): - assert_true(cut in articulation_points) + assert cut in articulation_points def test_grid_2d_graph(): @@ -189,82 +195,72 @@ def test_grid_2d_graph(): # are the four pairs of nodes that are # neighbors of the four corner nodes. G = nx.grid_2d_graph(5, 5) - solution = [ - set([(0, 1), (1, 0)]), - set([(3, 0), (4, 1)]), - set([(3, 4), (4, 3)]), - set([(0, 3), (1, 4)]), - ] + solution = [{(0, 1), (1, 0)}, {(3, 0), (4, 1)}, {(3, 4), (4, 3)}, {(0, 3), (1, 4)}] for cut in nx.all_node_cuts(G): - assert_true(cut in solution) + assert cut in solution def test_disconnected_graph(): - G = nx.fast_gnp_random_graph(100, 0.01) + G = nx.fast_gnp_random_graph(100, 0.01, seed=42) cuts = nx.all_node_cuts(G) - assert_raises(nx.NetworkXError, next, cuts) + pytest.raises(nx.NetworkXError, next, cuts) +@pytest.mark.slow def test_alternative_flow_functions(): - graph_funcs = [graph_example_1, nx.davis_southern_women_graph] - for graph_func in graph_funcs: - G = graph_func() + graphs = [nx.grid_2d_graph(4, 4), nx.cycle_graph(5)] + for G in graphs: node_conn = nx.node_connectivity(G) for flow_func in flow_funcs: - for cut in nx.all_node_cuts(G, flow_func=flow_func): - assert_equal(node_conn, len(cut)) - H = G.copy() - H.remove_nodes_from(cut) - assert_false(nx.is_connected(H)) + all_cuts = nx.all_node_cuts(G, flow_func=flow_func) + # Only test a limited number of cut sets to reduce test time. 
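+            # islice consumes the all_node_cuts generator lazily, so the
+            # remaining cut sets are never actually computed.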
+ for cut in itertools.islice(all_cuts, MAX_CUTSETS_TO_TEST): + assert node_conn == len(cut) + assert not nx.is_connected(nx.restricted_view(G, cut, [])) def test_is_separating_set_complete_graph(): G = nx.complete_graph(5) - assert_true(_is_separating_set(G, {0, 1, 2, 3})) + assert _is_separating_set(G, {0, 1, 2, 3}) def test_is_separating_set(): for i in [5, 10, 15]: G = nx.star_graph(i) max_degree_node = max(G, key=G.degree) - assert_true(_is_separating_set(G, {max_degree_node})) + assert _is_separating_set(G, {max_degree_node}) def test_non_repeated_cuts(): # The algorithm was repeating the cut {0, 1} for the giant biconnected # component of the Karate club graph. K = nx.karate_club_graph() - G = max(list(nx.biconnected_component_subgraphs(K)), key=len) + bcc = max(list(nx.biconnected_components(K)), key=len) + G = K.subgraph(bcc) solution = [{32, 33}, {2, 33}, {0, 3}, {0, 1}, {29, 33}] cuts = list(nx.all_node_cuts(G)) if len(solution) != len(cuts): print(nx.info(G)) - print("Solution: {}".format(solution)) - print("Result: {}".format(cuts)) - assert_true(len(solution) == len(cuts)) + print(f"Solution: {solution}") + print(f"Result: {cuts}") + assert len(solution) == len(cuts) for cut in cuts: - assert_true(cut in solution) + assert cut in solution def test_cycle_graph(): G = nx.cycle_graph(5) solution = [{0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 4}] cuts = list(nx.all_node_cuts(G)) - assert_true(len(solution) == len(cuts)) + assert len(solution) == len(cuts) for cut in cuts: - assert_true(cut in solution) + assert cut in solution def test_complete_graph(): G = nx.complete_graph(5) - solution = [ - {0, 1, 2, 3}, - {0, 1, 2, 4}, - {0, 1, 3, 4}, - {0, 2, 3, 4}, - {1, 2, 3, 4}, - ] + solution = [{0, 1, 2, 3}, {0, 1, 2, 4}, {0, 1, 3, 4}, {0, 2, 3, 4}, {1, 2, 3, 4}] cuts = list(nx.all_node_cuts(G)) - assert_true(len(solution) == len(cuts)) + assert len(solution) == len(cuts) for cut in cuts: - assert_true(cut in solution) + assert cut in solution diff --git a/networkx/algorithms/connectivity/tests/test_stoer_wagner.py b/networkx/algorithms/connectivity/tests/test_stoer_wagner.py index ee5d3ac..6836081 100644 --- a/networkx/algorithms/connectivity/tests/test_stoer_wagner.py +++ b/networkx/algorithms/connectivity/tests/test_stoer_wagner.py @@ -1,59 +1,57 @@ from itertools import chain import networkx as nx -from nose.tools import * +import pytest def _check_partition(G, cut_value, partition, weight): - ok_(isinstance(partition, tuple)) - assert_equal(len(partition), 2) - ok_(isinstance(partition[0], list)) - ok_(isinstance(partition[1], list)) - ok_(len(partition[0]) > 0) - ok_(len(partition[1]) > 0) - assert_equal(sum(map(len, partition)), len(G)) - assert_equal(set(chain.from_iterable(partition)), set(G)) + assert isinstance(partition, tuple) + assert len(partition) == 2 + assert isinstance(partition[0], list) + assert isinstance(partition[1], list) + assert len(partition[0]) > 0 + assert len(partition[1]) > 0 + assert sum(map(len, partition)) == len(G) + assert set(chain.from_iterable(partition)) == set(G) partition = tuple(map(set, partition)) w = 0 for u, v, e in G.edges(data=True): if (u in partition[0]) == (v in partition[1]): w += e.get(weight, 1) - assert_equal(w, cut_value) + assert w == cut_value -def _test_stoer_wagner(G, answer, weight='weight'): - cut_value, partition = nx.stoer_wagner(G, weight, - heap=nx.utils.PairingHeap) - assert_equal(cut_value, answer) +def _test_stoer_wagner(G, answer, weight="weight"): + cut_value, partition = nx.stoer_wagner(G, weight, 
heap=nx.utils.PairingHeap) + assert cut_value == answer _check_partition(G, cut_value, partition, weight) - cut_value, partition = nx.stoer_wagner(G, weight, - heap=nx.utils.BinaryHeap) - assert_equal(cut_value, answer) + cut_value, partition = nx.stoer_wagner(G, weight, heap=nx.utils.BinaryHeap) + assert cut_value == answer _check_partition(G, cut_value, partition, weight) def test_graph1(): G = nx.Graph() - G.add_edge('x', 'a', weight=3) - G.add_edge('x', 'b', weight=1) - G.add_edge('a', 'c', weight=3) - G.add_edge('b', 'c', weight=5) - G.add_edge('b', 'd', weight=4) - G.add_edge('d', 'e', weight=2) - G.add_edge('c', 'y', weight=2) - G.add_edge('e', 'y', weight=3) + G.add_edge("x", "a", weight=3) + G.add_edge("x", "b", weight=1) + G.add_edge("a", "c", weight=3) + G.add_edge("b", "c", weight=5) + G.add_edge("b", "d", weight=4) + G.add_edge("d", "e", weight=2) + G.add_edge("c", "y", weight=2) + G.add_edge("e", "y", weight=3) _test_stoer_wagner(G, 4) def test_graph2(): G = nx.Graph() - G.add_edge('x', 'a') - G.add_edge('x', 'b') - G.add_edge('a', 'c') - G.add_edge('b', 'c') - G.add_edge('b', 'd') - G.add_edge('d', 'e') - G.add_edge('c', 'y') - G.add_edge('e', 'y') + G.add_edge("x", "a") + G.add_edge("x", "b") + G.add_edge("a", "c") + G.add_edge("b", "c") + G.add_edge("b", "d") + G.add_edge("d", "e") + G.add_edge("c", "y") + G.add_edge("e", "y") _test_stoer_wagner(G, 2) @@ -82,21 +80,21 @@ def test_weight_name(): G.add_edge(1, 2, weight=1, cost=8) G.add_edge(1, 3, cost=2) G.add_edge(2, 3, cost=4) - _test_stoer_wagner(G, 6, weight='cost') + _test_stoer_wagner(G, 6, weight="cost") def test_exceptions(): G = nx.Graph() - assert_raises(nx.NetworkXError, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) G.add_node(1) - assert_raises(nx.NetworkXError, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) G.add_node(2) - assert_raises(nx.NetworkXError, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) G.add_edge(1, 2, weight=-2) - assert_raises(nx.NetworkXError, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) G = nx.DiGraph() - assert_raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) G = nx.MultiGraph() - assert_raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) G = nx.MultiDiGraph() - assert_raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) + pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) diff --git a/networkx/algorithms/connectivity/utils.py b/networkx/algorithms/connectivity/utils.py index d6582ed..6896eea 100644 --- a/networkx/algorithms/connectivity/utils.py +++ b/networkx/algorithms/connectivity/utils.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- """ Utilities for connectivity package """ import networkx as nx -__author__ = '\n'.join(['Jordi Torrents ']) - -__all__ = ['build_auxiliary_node_connectivity', - 'build_auxiliary_edge_connectivity'] +__all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"] def build_auxiliary_node_connectivity(G): @@ -24,7 +20,7 @@ def build_auxiliary_node_connectivity(G): For a directed graph having `n` nodes and `m` arcs we derive a directed graph D with `2n` nodes and `m+n` arcs by replacing each original node `v` with two nodes `vA`, `vB` linked by an (internal) - arc (`vA`, `vB`) in D. Then for each arc (`u`, `v`) in G we add one + arc (`vA`, `vB`) in D. 
Then for each arc (`u`, `v`) in G we add one arc (`uB`, `vA`) in D. Finally we set the attribute capacity = 1 for each arc in D. @@ -46,19 +42,19 @@ def build_auxiliary_node_connectivity(G): for i, node in enumerate(G): mapping[node] = i - H.add_node('%dA' % i, id=node) - H.add_node('%dB' % i, id=node) - H.add_edge('%dA' % i, '%dB' % i, capacity=1) + H.add_node(f"{i}A", id=node) + H.add_node(f"{i}B", id=node) + H.add_edge(f"{i}A", f"{i}B", capacity=1) edges = [] for (source, target) in G.edges(): - edges.append(('%sB' % mapping[source], '%sA' % mapping[target])) + edges.append((f"{mapping[source]}B", f"{mapping[target]}A")) if not directed: - edges.append(('%sB' % mapping[target], '%sA' % mapping[source])) + edges.append((f"{mapping[target]}B", f"{mapping[source]}A")) H.add_edges_from(edges, capacity=1) # Store mapping as graph attribute - H.graph['mapping'] = mapping + H.graph["mapping"] = mapping return H diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 699b59c..f95ba79 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -1,13 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Dan Schult (dschult@colgate.edu) -# Jason Grout (jason-sage@creativetrax.com) -# Aric Hagberg (hagberg@lanl.gov) """ Find the k-cores of a graph. @@ -30,18 +20,33 @@ D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011. http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf + +Multi-scale structure and topological anomaly detection via a new network \ +statistic: The onion decomposition +L. Hébert-Dufresne, J. A. Grochow, and A. Allard +Scientific Reports 6, 31708 (2016) +http://doi.org/10.1038/srep31708 + """ import networkx as nx from networkx.exception import NetworkXError from networkx.utils import not_implemented_for -__all__ = ['core_number', 'find_cores', 'k_core', - 'k_shell', 'k_crust', 'k_corona'] +__all__ = [ + "core_number", + "find_cores", + "k_core", + "k_shell", + "k_crust", + "k_corona", + "k_truss", + "onion_layers", +] -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def core_number(G): - """Return the core number for each vertex. + """Returns the core number for each vertex. A k-core is a maximal subgraph that contains nodes of degree k or more. @@ -78,8 +83,10 @@ def core_number(G): https://arxiv.org/abs/cs.DS/0310049 """ if nx.number_of_selfloops(G) > 0: - msg = ('Input graph has self loops which is not permitted; ' - 'Consider using G.remove_edges_from(nx.selfloop_edges(G)).') + msg = ( + "Input graph has self loops which is not permitted; " + "Consider using G.remove_edges_from(nx.selfloop_edges(G))." + ) raise NetworkXError(msg) degrees = dict(G.degree()) # Sort nodes by degree. @@ -139,7 +146,7 @@ def _core_subgraph(G, k_filter, k=None, core=None): def k_core(G, k=None, core_number=None): - """Return the k-core of G. + """Returns the k-core of G. A k-core is a maximal subgraph that contains nodes of degree k or more. @@ -183,13 +190,15 @@ def k_core(G, k=None, core_number=None): Vladimir Batagelj and Matjaz Zaversnik, 2003. https://arxiv.org/abs/cs.DS/0310049 """ + def k_filter(v, k, c): return c[v] >= k + return _core_subgraph(G, k_filter, k, core_number) def k_shell(G, k=None, core_number=None): - """Return the k-shell of G. + """Returns the k-shell of G. The k-shell is the subgraph induced by nodes with core number k. 
That is, nodes in the k-core that are not in the (k+1)-core. @@ -240,13 +249,15 @@ def k_shell(G, k=None, core_number=None): and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 http://www.pnas.org/content/104/27/11150.full """ + def k_filter(v, k, c): return c[v] == k + return _core_subgraph(G, k_filter, k, core_number) def k_crust(G, k=None, core_number=None): - """Return the k-crust of G. + """Returns the k-crust of G. The k-crust is the graph G with the k-core removed. @@ -304,7 +315,7 @@ def k_crust(G, k=None, core_number=None): def k_corona(G, k, core_number=None): - """Return the k-corona of G. + """Returns the k-corona of G. The k-corona is the subgraph of nodes in the k-core which have exactly k neighbours in the k-core. @@ -350,6 +361,174 @@ def k_corona(G, k, core_number=None): Phys. Rev. E 73, 056101 (2006) http://link.aps.org/doi/10.1103/PhysRevE.73.056101 """ + def func(v, k, c): return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k) + return _core_subgraph(G, func, k, core_number) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def k_truss(G, k): + """Returns the k-truss of `G`. + + The k-truss is the maximal induced subgraph of `G` which contains at least + three vertices where every edge is incident to at least `k-2` triangles. + + Parameters + ---------- + G : NetworkX graph + An undirected graph + k : int + The order of the truss + + Returns + ------- + H : NetworkX graph + The k-truss subgraph + + Raises + ------ + NetworkXError + + The k-truss is not defined for graphs with self loops or parallel edges + or directed graphs. + + Notes + ----- + A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core. + + Not implemented for digraphs or graphs with parallel edges or self loops. + + Graph, node, and edge attributes are copied to the subgraph. + + K-trusses were originally defined in [2] which states that the k-truss + is the maximal induced subgraph where each edge belongs to at least + `k-2` triangles. A more recent paper, [1], uses a slightly different + definition requiring that each edge belong to at least `k` triangles. + This implementation uses the original definition of `k-2` triangles. + + References + ---------- + .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber, + David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2 + .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan + Cohen, 2005. + """ + H = G.copy() + + n_dropped = 1 + while n_dropped > 0: + n_dropped = 0 + to_drop = [] + seen = set() + for u in H: + nbrs_u = set(H[u]) + seen.add(u) + new_nbrs = [v for v in nbrs_u if v not in seen] + for v in new_nbrs: + if len(nbrs_u & set(H[v])) < (k - 2): + to_drop.append((u, v)) + H.remove_edges_from(to_drop) + n_dropped = len(to_drop) + H.remove_nodes_from(list(nx.isolates(H))) + + return H + + +@not_implemented_for("multigraph") +@not_implemented_for("directed") +def onion_layers(G): + """Returns the layer of each vertex in an onion decomposition of the graph. + + The onion decomposition refines the k-core decomposition by providing + information on the internal organization of each k-shell. It is usually + used alongside the `core numbers`. + + Parameters + ---------- + G : NetworkX graph + A simple graph without self loops or parallel edges + + Returns + ------- + od_layers : dictionary + A dictionary keyed by vertex to the onion layer. The layers are + contiguous integers starting at 1. 
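+
+    Examples
+    --------
+    A minimal sketch (a 4-cycle with one pendant node; the pendant node
+    peels off alone in the first layer):
+
+    >>> G = nx.cycle_graph(4)
+    >>> G.add_edge(0, 4)
+    >>> sorted(nx.onion_layers(G).items())
+    [(0, 2), (1, 2), (2, 2), (3, 2), (4, 1)]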
+
+    Raises
+    ------
+    NetworkXError
+        The onion decomposition is not implemented for graphs with self loops
+        or parallel edges or for directed graphs.
+
+    Notes
+    -----
+    Not implemented for graphs with parallel edges or self loops.
+
+    Not implemented for directed graphs.
+
+    See Also
+    --------
+    core_number
+
+    References
+    ----------
+    .. [1] Multi-scale structure and topological anomaly detection via a new
+       network statistic: The onion decomposition
+       L. Hébert-Dufresne, J. A. Grochow, and A. Allard
+       Scientific Reports 6, 31708 (2016)
+       http://doi.org/10.1038/srep31708
+    .. [2] Percolation and the effective structure of complex networks
+       A. Allard and L. Hébert-Dufresne
+       Physical Review X 9, 011023 (2019)
+       http://doi.org/10.1103/PhysRevX.9.011023
+    """
+    if nx.number_of_selfloops(G) > 0:
+        msg = (
+            "Input graph contains self loops which is not permitted; "
+            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
+        )
+        raise NetworkXError(msg)
+    # Dictionary to register the onion decomposition.
+    od_layers = {}
+    # Adjacency list
+    neighbors = {v: list(nx.all_neighbors(G, v)) for v in G}
+    # Effective degree of nodes.
+    degrees = dict(G.degree())
+    # Performs the onion decomposition.
+    current_core = 1
+    current_layer = 1
+    # Sets vertices of degree 0 to layer 1, if any.
+    isolated_nodes = [v for v in nx.isolates(G)]
+    if len(isolated_nodes) > 0:
+        for v in isolated_nodes:
+            od_layers[v] = current_layer
+            degrees.pop(v)
+        current_layer = 2
+    # Finds the layer for the remaining nodes.
+    while len(degrees) > 0:
+        # Sets the order for looking at nodes.
+        nodes = sorted(degrees, key=degrees.get)
+        # Properly sets the current core.
+        min_degree = degrees[nodes[0]]
+        if min_degree > current_core:
+            current_core = min_degree
+        # Identifies vertices in the current layer.
+        this_layer = []
+        for n in nodes:
+            if degrees[n] > current_core:
+                break
+            this_layer.append(n)
+        # Identifies the core/layer of the vertices in the current layer.
+        for v in this_layer:
+            od_layers[v] = current_layer
+            for n in neighbors[v]:
+                neighbors[n].remove(v)
+                degrees[n] = degrees[n] - 1
+            degrees.pop(v)
+        # Updates the layer count.
+        current_layer = current_layer + 1
+    # Returns the dictionary containing the onion layer of each vertex.
+    return od_layers
diff --git a/networkx/algorithms/covering.py b/networkx/algorithms/covering.py
index 2bfce3e..1740442 100644
--- a/networkx/algorithms/covering.py
+++ b/networkx/algorithms/covering.py
@@ -1,9 +1,3 @@
-# Copyright 2016-2018 NetworkX developers.
-# Copyright (C) 2016 by
-# Nishant Nikhil
-# All rights reserved.
-# BSD license.
-
 """ Functions related to graph covers."""

 import networkx as nx
@@ -12,11 +6,11 @@
 from itertools import chain


-__all__ = ['min_edge_cover', 'is_edge_cover']
+__all__ = ["min_edge_cover", "is_edge_cover"]


-@not_implemented_for('directed')
-@not_implemented_for('multigraph')
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
 def min_edge_cover(G, matching_algorithm=None):
     """Returns a set of edges which constitutes
     the minimum edge cover of the graph.
@@ -65,15 +59,16 @@
     if nx.number_of_isolates(G) > 0:
         # ``min_cover`` does not exist as there is an isolated node
         raise nx.NetworkXException(
-            "Graph has a node with no edge incident on it, "
-            "so no edge cover exists.")
+            "Graph has a node with no edge incident on it, " "so no edge cover exists."
+ ) if matching_algorithm is None: - matching_algorithm = partial(nx.max_weight_matching, - maxcardinality=True) + matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True) maximum_matching = matching_algorithm(G) # ``min_cover`` is superset of ``maximum_matching`` try: - min_cover = set(maximum_matching.items()) # bipartite matching case returns dict + min_cover = set( + maximum_matching.items() + ) # bipartite matching case returns dict except AttributeError: min_cover = maximum_matching # iterate for uncovered nodes @@ -91,7 +86,7 @@ def min_edge_cover(G, matching_algorithm=None): return min_cover -@not_implemented_for('directed') +@not_implemented_for("directed") def is_edge_cover(G, cover): """Decides whether a set of edges is a valid edge cover of the graph. diff --git a/networkx/algorithms/cuts.py b/networkx/algorithms/cuts.py index 733fde9..e528582 100644 --- a/networkx/algorithms/cuts.py +++ b/networkx/algorithms/cuts.py @@ -1,30 +1,26 @@ -# -*- coding: utf-8 -*- -# cuts.py - functions for computing and evaluating cuts -# -# Copyright 2011 Ben Edwards . -# Copyright 2011 Aric Hagberg . -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for finding and evaluating cuts in a graph. """ -from __future__ import division from itertools import chain import networkx as nx -__all__ = ['boundary_expansion', 'conductance', 'cut_size', 'edge_expansion', - 'mixing_expansion', 'node_expansion', 'normalized_cut_size', - 'volume'] +__all__ = [ + "boundary_expansion", + "conductance", + "cut_size", + "edge_expansion", + "mixing_expansion", + "node_expansion", + "normalized_cut_size", + "volume", +] # TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION! + def cut_size(G, S, T=None, weight=None): """Returns the size of the cut between two sets of nodes. @@ -69,9 +65,9 @@ def cut_size(G, S, T=None, weight=None): Each parallel edge in a multigraph is counted when determining the cut size:: - >>> G = nx.MultiGraph(['ab', 'ab']) - >>> S = {'a'} - >>> T = {'b'} + >>> G = nx.MultiGraph(["ab", "ab"]) + >>> S = {"a"} + >>> T = {"b"} >>> nx.cut_size(G, S, T) 2 @@ -363,8 +359,8 @@ def node_expansion(G, S): def boundary_expansion(G, S): """Returns the boundary expansion of the set `S`. - The *boundary expansion* is the quotient of the size of the edge - boundary and the cardinality of *S*. [1] + The *boundary expansion* is the quotient of the size + of the node boundary and the cardinality of *S*. [1] Parameters ---------- diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py index b1fe7b0..61d696c 100644 --- a/networkx/algorithms/cycles.py +++ b/networkx/algorithms/cycles.py @@ -1,14 +1,3 @@ -# Copyright (C) 2010-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Authors: Jon Olav Vik -# Dan Schult -# Aric Hagberg -# Debsankha Manik """ ======================== Cycle finding algorithms @@ -16,21 +5,21 @@ """ from collections import defaultdict -from itertools import tee import networkx as nx from networkx.utils import not_implemented_for, pairwise -from networkx.algorithms.traversal.edgedfs import helper_funcs __all__ = [ - 'cycle_basis', 'simple_cycles', - 'recursive_simple_cycles', 'find_cycle', - 'minimum_cycle_basis', + "cycle_basis", + "simple_cycles", + "recursive_simple_cycles", + "find_cycle", + "minimum_cycle_basis", ] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def cycle_basis(G, root=None): """ Returns a list of cycles which form a basis for cycles of G. @@ -85,11 +74,11 @@ def cycle_basis(G, root=None): z = stack.pop() # use last-in so cycles easier to find zused = used[z] for nbr in G[z]: - if nbr not in used: # new node + if nbr not in used: # new node pred[nbr] = z stack.append(nbr) - used[nbr] = set([z]) - elif nbr == z: # self loops + used[nbr] = {z} + elif nbr == z: # self loops cycles.append([z]) elif nbr not in zused: # found a cycle pn = used[nbr] @@ -106,7 +95,7 @@ def cycle_basis(G, root=None): return cycles -@not_implemented_for('undirected') +@not_implemented_for("undirected") def simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. @@ -167,8 +156,9 @@ def simple_cycles(G): -------- cycle_basis """ + def _unblock(thisnode, blocked, B): - stack = set([thisnode]) + stack = {thisnode} while stack: node = stack.pop() if node in blocked: @@ -182,18 +172,28 @@ def _unblock(thisnode, blocked, B): # Also we save the actual graph so we can mutate it. We only take the # edges because we do not want to copy edge and node attributes here. subG = type(G)(G.edges()) - sccs = list(nx.strongly_connected_components(subG)) + sccs = [scc for scc in nx.strongly_connected_components(subG) if len(scc) > 1] + + # Johnson's algorithm exclude self cycle edges like (v, v) + # To be backward compatible, we record those cycles in advance + # and then remove from subG + for v in subG: + if subG.has_edge(v, v): + yield [v] + subG.remove_edge(v, v) + while sccs: scc = sccs.pop() + sccG = subG.subgraph(scc) # order of scc determines ordering of nodes startnode = scc.pop() # Processing node runs "circuit" routine from recursive version path = [startnode] blocked = set() # vertex: blocked from search? 
- closed = set() # nodes involved in a cycle + closed = set() # nodes involved in a cycle blocked.add(startnode) B = defaultdict(set) # graph portions that yield no elementary circuit - stack = [(startnode, list(subG[startnode]))] # subG gives comp nbrs + stack = [(startnode, list(sccG[startnode]))] # sccG gives comp nbrs while stack: thisnode, nbrs = stack[-1] if nbrs: @@ -201,10 +201,10 @@ def _unblock(thisnode, blocked, B): if nextnode == startnode: yield path[:] closed.update(path) -# print "Found a cycle", path, closed + # print "Found a cycle", path, closed elif nextnode not in blocked: path.append(nextnode) - stack.append((nextnode, list(subG[nextnode]))) + stack.append((nextnode, list(sccG[nextnode]))) closed.discard(nextnode) blocked.add(nextnode) continue @@ -213,19 +213,18 @@ def _unblock(thisnode, blocked, B): if thisnode in closed: _unblock(thisnode, blocked, B) else: - for nbr in subG[thisnode]: + for nbr in sccG[thisnode]: if thisnode not in B[nbr]: B[nbr].add(thisnode) stack.pop() -# assert path[-1] == thisnode + # assert path[-1] == thisnode path.pop() # done processing this node - subG.remove_node(startnode) H = subG.subgraph(scc) # make smaller to avoid work in SCC routine - sccs.extend(list(nx.strongly_connected_components(H))) + sccs.extend(scc for scc in nx.strongly_connected_components(H) if len(scc) > 1) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def recursive_simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. @@ -252,7 +251,7 @@ def recursive_simple_cycles(G): >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)] >>> G = nx.DiGraph(edges) >>> nx.recursive_simple_cycles(G) - [[0], [0, 1, 2], [0, 2], [1, 2], [2]] + [[0], [2], [0, 1, 2], [0, 2], [1, 2]] See Also -------- @@ -303,23 +302,31 @@ def circuit(thisnode, startnode, component): path.pop() # remove thisnode from path return closed - path = [] # stack of nodes in current path + path = [] # stack of nodes in current path blocked = defaultdict(bool) # vertex: blocked from search? B = defaultdict(list) # graph portions that yield no elementary circuit - result = [] # list to accumulate the circuits found + result = [] # list to accumulate the circuits found + + # Johnson's algorithm exclude self cycle edges like (v, v) + # To be backward compatible, we record those cycles in advance + # and then remove from subG + for v in G: + if G.has_edge(v, v): + result.append([v]) + G.remove_edge(v, v) + # Johnson's algorithm requires some ordering of the nodes. # They might not be sortable so we assign an arbitrary ordering. 
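+    # (Each node's position in G's iteration order serves as that ordering.)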
ordering = dict(zip(G, range(len(G)))) for s in ordering: # Build the subgraph induced by s and following nodes in the ordering - subgraph = G.subgraph(node for node in G - if ordering[node] >= ordering[s]) + subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s]) # Find the strongly connected component in the subgraph # that contains the least node according to the ordering strongcomp = nx.strongly_connected_components(subgraph) mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns)) component = G.subgraph(mincomp) - if component: + if len(component) > 1: # smallest node in the component according to the ordering startnode = min(component, key=ordering.__getitem__) for node in component: @@ -329,9 +336,11 @@ def circuit(thisnode, startnode, component): return result -def find_cycle(G, source=None, orientation='original'): - """ - Returns the edges of a cycle found via a directed, depth-first traversal. +def find_cycle(G, source=None, orientation=None): + """Returns a cycle found via depth-first traversal. + + The cycle is a list of edges indicating the cyclic path. + Orientation of directed edges is controlled by `orientation`. Parameters ---------- @@ -343,30 +352,29 @@ def find_cycle(G, source=None, orientation='original'): is chosen arbitrarily and repeatedly until all edges from each node in the graph are searched. - orientation : 'original' | 'reverse' | 'ignore' + orientation : None | 'original' | 'reverse' | 'ignore' (default: None) For directed graphs and directed multigraphs, edge traversals need not - respect the original orientation of the edges. When set to 'reverse', - then every edge will be traversed in the reverse direction. When set to - 'ignore', then each directed edge is treated as a single undirected - edge that can be traversed in either direction. For undirected graphs - and undirected multigraphs, this parameter is meaningless and is not - consulted by the algorithm. + respect the original orientation of the edges. + When set to 'reverse' every edge is traversed in the reverse direction. + When set to 'ignore', every edge is treated as undirected. + When set to 'original', every edge is treated as directed. + In all three cases, the yielded edge tuples add a last entry to + indicate the direction in which that edge was traversed. + If orientation is None, the yielded edge has no direction indicated. + The direction is respected, but not reported. Returns ------- edges : directed edges - A list of directed edges indicating the path taken for the loop. If - no cycle is found, then an exception is raised. For graphs, an - edge is of the form `(u, v)` where `u` and `v` are the tail and head - of the edge as determined by the traversal. For multigraphs, an edge is - of the form `(u, v, key)`, where `key` is the key of the edge. When the - graph is directed, then `u` and `v` are always in the order of the - actual directed edge. If orientation is 'ignore', then an edge takes - the form `(u, v, key, direction)` where direction indicates if the edge - was followed in the forward (tail to head) or reverse (head to tail) - direction. When the direction is forward, the value of `direction` - is 'forward'. When the direction is reverse, the value of `direction` - is 'reverse'. + A list of directed edges indicating the path taken for the loop. + If no cycle is found, then an exception is raised. + For graphs, an edge is of the form `(u, v)` where `u` and `v` + are the tail and head of the edge as determined by the traversal. 
+ For multigraphs, an edge is of the form `(u, v, key)`, where `key` is + the key of the edge. When the graph is directed, then `u` and `v` + are always in the order of the actual directed edge. + If orientation is not None then the edge tuple is extended to include + the direction of traversal ('forward' or 'reverse') on that edge. Raises ------ @@ -383,18 +391,35 @@ def find_cycle(G, source=None, orientation='original'): This means that this DAG structure does not form a directed tree (which is also known as a polytree). - >>> import networkx as nx >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)]) >>> try: - ... nx.find_cycle(G, orientation='original') + ... nx.find_cycle(G, orientation="original") ... except: - ... pass + ... pass ... - >>> list(nx.find_cycle(G, orientation='ignore')) + >>> list(nx.find_cycle(G, orientation="ignore")) [(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')] + See Also + -------- + simple_cycles """ - out_edge, key, tailhead = helper_funcs(G, orientation) + if not G.is_directed() or orientation in (None, "original"): + + def tailhead(edge): + return edge[:2] + + elif orientation == "reverse": + + def tailhead(edge): + return edge[1], edge[0] + + elif orientation == "ignore": + + def tailhead(edge): + if edge[-1] == "reverse": + return edge[1], edge[0] + return edge[:2] explored = set() cycle = [] @@ -457,8 +482,8 @@ def find_cycle(G, source=None, orientation='original'): explored.update(seen) else: - assert(len(cycle) == 0) - raise nx.exception.NetworkXNoCycle('No cycle found.') + assert len(cycle) == 0 + raise nx.exception.NetworkXNoCycle("No cycle found.") # We now have a list of edges which ends on a cycle. # So we need to remove from the beginning edges that are not relevant. @@ -471,8 +496,8 @@ def find_cycle(G, source=None, orientation='original'): return cycle[i:] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def minimum_cycle_basis(G, weight=None): """ Returns a minimum weight cycle basis for G @@ -493,10 +518,10 @@ def minimum_cycle_basis(G, weight=None): Examples -------- - >>> G=nx.Graph() - >>> G.add_cycle([0,1,2,3]) - >>> G.add_cycle([0,3,4,5]) - >>> print(nx.minimum_cycle_basis(G)) + >>> G = nx.Graph() + >>> nx.add_cycle(G, [0, 1, 2, 3]) + >>> nx.add_cycle(G, [0, 3, 4, 5]) + >>> print([sorted(c) for c in nx.minimum_cycle_basis(G)]) [[0, 1, 2, 3], [0, 3, 4, 5]] References: @@ -511,8 +536,10 @@ def minimum_cycle_basis(G, weight=None): simple_cycles, cycle_basis """ # We first split the graph in commected subgraphs - return sum((_min_cycle_basis(c, weight) for c in - nx.connected_component_subgraphs(G)), []) + return sum( + (_min_cycle_basis(G.subgraph(c), weight) for c in nx.connected_components(G)), + [], + ) def _min_cycle_basis(comp, weight): @@ -520,14 +547,12 @@ def _min_cycle_basis(comp, weight): # We extract the edges not in a spanning tree. We do not really need a # *minimum* spanning tree. That is why we call the next function with # weight=None. 
Depending on implementation, it may be faster as well
-    spanning_tree_edges = list(nx.minimum_spanning_edges(comp, weight=None,
-                                                         data=False))
-    edges_excl = [frozenset(e) for e in comp.edges()
-                  if e not in spanning_tree_edges]
+    spanning_tree_edges = list(nx.minimum_spanning_edges(comp, weight=None, data=False))
+    edges_excl = [frozenset(e) for e in comp.edges() if e not in spanning_tree_edges]
     N = len(edges_excl)

     # We maintain a set of vectors orthogonal to the cycles found so far
-    set_orth = [set([edge]) for edge in edges_excl]
+    set_orth = [{edge} for edge in edges_excl]
     for k in range(N):
         # kth cycle is "parallel" to kth vector in set_orth
         new_cycle = _min_cycle(comp, set_orth[k], weight=weight)
@@ -535,8 +560,10 @@
         # now update set_orth so that k+1,k+2... th elements are
         # orthogonal to the newly found cycle, as per [p. 336, 1]
         base = set_orth[k]
-        set_orth[k + 1:] = [orth ^ base if len(orth & new_cycle) % 2 else orth
-                            for orth in set_orth[k + 1:]]
+        set_orth[k + 1 :] = [
+            orth ^ base if len(orth & new_cycle) % 2 else orth
+            for orth in set_orth[k + 1 :]
+        ]
     return cb
@@ -559,23 +586,25 @@
             edge_w = data.get(weight, 1)
             if frozenset((u, v)) in orth:
                 T.add_edges_from(
-                    [(uidx, nnodes + vidx), (nnodes + uidx, vidx)], weight=edge_w)
+                    [(uidx, nnodes + vidx), (nnodes + uidx, vidx)], weight=edge_w
+                )
             else:
                 T.add_edges_from(
-                    [(uidx, vidx), (nnodes + uidx, nnodes + vidx)], weight=edge_w)
+                    [(uidx, vidx), (nnodes + uidx, nnodes + vidx)], weight=edge_w
+                )

         all_shortest_pathlens = dict(nx.shortest_path_length(T, weight=weight))
-        cross_paths_w_lens = {n: all_shortest_pathlens[n][nnodes + n]
-                              for n in range(nnodes)}
+        cross_paths_w_lens = {
+            n: all_shortest_pathlens[n][nnodes + n] for n in range(nnodes)
+        }

         # Now compute shortest paths in T, which translates to cycles in G
         start = min(cross_paths_w_lens, key=cross_paths_w_lens.get)
         end = nnodes + start
-        min_path = nx.shortest_path(T, source=start, target=end, weight='weight')
+        min_path = nx.shortest_path(T, source=start, target=end, weight="weight")

         # Now we obtain the actual path, re-map nodes in T to those in G
-        min_path_nodes = [node if node < nnodes else node - nnodes
-                          for node in min_path]
+        min_path_nodes = [node if node < nnodes else node - nnodes for node in min_path]

         # Now remove the edges that occur two times
         mcycle_pruned = _path_to_cycle(min_path_nodes)
diff --git a/networkx/algorithms/d_separation.py b/networkx/algorithms/d_separation.py
new file mode 100644
index 0000000..964a168
--- /dev/null
+++ b/networkx/algorithms/d_separation.py
@@ -0,0 +1,136 @@
+"""
+Algorithm for testing d-separation in DAGs.
+
+*d-separation* is a test for conditional independence in probability
+distributions that can be factorized using DAGs. It is a purely
+graphical test that uses the underlying graph and makes no reference
+to the actual distribution parameters. See [1]_ for a formal
+definition.
+
+The implementation is based on the conceptually simple linear time
+algorithm presented in [2]_. Refer to [3]_, [4]_ for a couple of
+alternative algorithms.
+
+
+Examples
+--------
+
+>>>
+>>> # HMM graph with five states and observation nodes
+... g = nx.DiGraph()
+>>> g.add_edges_from(
+...     [
+...         ("S1", "S2"),
+...         ("S2", "S3"),
+...         ("S3", "S4"),
+...         ("S4", "S5"),
+...         ("S1", "O1"),
+...         ("S2", "O2"),
+...         ("S3", "O3"),
+...         ("S4", "O4"),
+...         ("S5", "O5"),
+...     ]
+... )
+>>>
+>>> # states/obs before 'S3' are d-separated from states/obs after 'S3'
+...
nx.d_separated(g, {"S1", "S2", "O1", "O2"}, {"S4", "S5", "O4", "O5"}, {"S3"}) +True + + +References +---------- + +.. [1] Pearl, J. (2009). Causality. Cambridge: Cambridge University Press. + +.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks. Cambridge: Cambridge University Press. + +.. [3] Shachter, R. D. (1998). Bayes-ball: rational pastime (for determining irrelevance and requisite information in belief networks and influence diagrams). In , Proceedings of the Fourteenth Conference on Uncertainty in Artificial Intelligence (pp. 480–487). San Francisco, CA, USA: Morgan Kaufmann Publishers Inc. + +.. [4] Koller, D., & Friedman, N. (2009). Probabilistic graphical models: principles and techniques. The MIT Press. + +""" + +from collections import deque +from typing import AbstractSet + +import networkx as nx +from networkx.utils import not_implemented_for, UnionFind + +__all__ = ["d_separated"] + + +@not_implemented_for("undirected") +def d_separated(G: nx.DiGraph, x: AbstractSet, y: AbstractSet, z: AbstractSet) -> bool: + """ + Return whether node sets ``x`` and ``y`` are d-separated by ``z``. + + Parameters + ---------- + G : graph + A NetworkX DAG. + + x : set + First set of nodes in ``G``. + + y : set + Second set of nodes in ``G``. + + z : set + Set of conditioning nodes in ``G``. Can be empty set. + + Returns + ------- + b : bool + A boolean that is true if ``x`` is d-separated from ``y`` given ``z`` in ``G``. + + Raises + ------ + NetworkXError + The *d-separation* test is commonly used with directed + graphical models which are acyclic. Accordingly, the algorithm + raises a :exc:`NetworkXError` if the input graph is not a DAG. + + NodeNotFound + If any of the input nodes are not found in the graph, + a :exc:`NodeNotFound` exception is raised. + + """ + + if not nx.is_directed_acyclic_graph(G): + raise nx.NetworkXError("graph should be directed acyclic") + + union_xyz = x.union(y).union(z) + + if any(n not in G.nodes for n in union_xyz): + raise nx.NodeNotFound("one or more specified nodes not found in the graph") + + G_copy = G.copy() + + # transform the graph by removing leaves that are not in x | y | z + # until no more leaves can be removed. + leaves = deque([n for n in G_copy.nodes if G_copy.out_degree[n] == 0]) + while len(leaves) > 0: + leaf = leaves.popleft() + if leaf not in union_xyz: + for p in G_copy.predecessors(leaf): + if G_copy.out_degree[p] == 1: + leaves.append(p) + G_copy.remove_node(leaf) + + # transform the graph by removing outgoing edges from the + # conditioning set. + edges_to_remove = list(G_copy.out_edges(z)) + G_copy.remove_edges_from(edges_to_remove) + + # use disjoint-set data structure to check if any node in `x` + # occurs in the same weakly connected component as a node in `y`. + disjoint_set = UnionFind(G_copy.nodes()) + for component in nx.weakly_connected_components(G_copy): + disjoint_set.union(*component) + disjoint_set.union(*x) + disjoint_set.union(*y) + + if x and y and disjoint_set[next(iter(x))] == disjoint_set[next(iter(y))]: + return False + else: + return True diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index b4c66ca..f034ce0 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -1,17 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: -# Aric Hagberg -# Dan Schult -# Ben Edwards -# Neil Girdhar -# """Algorithms for directed acyclic graphs (DAGs). 
Note that most of these functions are only guaranteed to work for DAGs. @@ -19,8 +5,8 @@ to the user to check for that. """ -from collections import defaultdict -from fractions import gcd +from collections import deque +from math import gcd from functools import partial from itertools import chain from itertools import product @@ -28,31 +14,35 @@ import heapq import networkx as nx +from networkx.algorithms.traversal.breadth_first_search import descendants_at_distance from networkx.generators.trees import NIL from networkx.utils import arbitrary_element from networkx.utils import consume from networkx.utils import pairwise -from networkx.utils import generate_unique_node from networkx.utils import not_implemented_for -__all__ = ['descendants', - 'ancestors', - 'topological_sort', - 'lexicographical_topological_sort', - 'is_directed_acyclic_graph', - 'is_aperiodic', - 'transitive_closure', - 'transitive_reduction', - 'antichains', - 'dag_longest_path', - 'dag_longest_path_length', - 'dag_to_branching'] +__all__ = [ + "descendants", + "ancestors", + "topological_sort", + "lexicographical_topological_sort", + "all_topological_sorts", + "is_directed_acyclic_graph", + "is_aperiodic", + "transitive_closure", + "transitive_closure_dag", + "transitive_reduction", + "antichains", + "dag_longest_path", + "dag_longest_path_length", + "dag_to_branching", +] chaini = chain.from_iterable def descendants(G, source): - """Return all nodes reachable from `source` in `G`. + """Returns all nodes reachable from `source` in `G`. Parameters ---------- @@ -66,13 +56,13 @@ def descendants(G, source): The descendants of `source` in `G` """ if not G.has_node(source): - raise nx.NetworkXError("The node %s is not in the graph." % source) - des = set(n for n, d in nx.shortest_path_length(G, source=source).items()) + raise nx.NetworkXError(f"The node {source} is not in the graph.") + des = {n for n, d in nx.shortest_path_length(G, source=source).items()} return des - {source} def ancestors(G, source): - """Return all nodes having a path to `source` in `G`. + """Returns all nodes having a path to `source` in `G`. Parameters ---------- @@ -86,8 +76,8 @@ def ancestors(G, source): The ancestors of source in G """ if not G.has_node(source): - raise nx.NetworkXError("The node %s is not in the graph." % source) - anc = set(n for n, d in nx.shortest_path_length(G, target=source).items()) + raise nx.NetworkXError(f"The node {source} is not in the graph.") + anc = {n for n, d in nx.shortest_path_length(G, target=source).items()} return anc - {source} @@ -102,7 +92,7 @@ def has_cycle(G): def is_directed_acyclic_graph(G): - """Return True if the graph `G` is a directed acyclic graph (DAG) or + """Returns True if the graph `G` is a directed acyclic graph (DAG) or False if not. Parameters @@ -118,7 +108,7 @@ def is_directed_acyclic_graph(G): def topological_sort(G): - """Return a generator of nodes in topologically sorted order. + """Returns a generator of nodes in topologically sorted order. A topological sort is a nonunique permutation of the nodes such that an edge from u to v implies that u appears before v in the topological sort @@ -143,7 +133,7 @@ def topological_sort(G): NetworkXUnfeasible If `G` is not a directed acyclic graph (DAG) no topological sort exists and a :exc:`NetworkXUnfeasible` exception is raised. This can also be - raised if `G` is changed while the returned iterator is being processed. 
+ raised if `G` is changed while the returned iterator is being processed RuntimeError If `G` is changed while the returned iterator is being processed. @@ -156,6 +146,16 @@ def topological_sort(G): >>> list(reversed(list(nx.topological_sort(DG)))) [3, 2, 1] + If your DiGraph naturally has the edges representing tasks/inputs + and nodes representing people/processes that initiate tasks, then + topological_sort is not quite what you need. You will have to change + the tasks to nodes with dependence reflected by edges. The result is + a kind of topological sort of the edges. This can be done + with :func:`networkx.line_graph` as follows: + + >>> list(nx.topological_sort(nx.line_graph(DG))) + [(1, 2), (2, 3)] + Notes ----- This algorithm is based on a description and proof in @@ -171,8 +171,7 @@ def topological_sort(G): *Introduction to Algorithms - A Creative Approach.* Addison-Wesley. """ if not G.is_directed(): - raise nx.NetworkXError( - "Topological sort not defined on undirected graphs.") + raise nx.NetworkXError("Topological sort not defined on undirected graphs.") indegree_map = {v: d for v, d in G.in_degree() if d > 0} # These nodes have zero indegree and ready to be returned. @@ -185,8 +184,8 @@ def topological_sort(G): for _, child in G.edges(node): try: indegree_map[child] -= 1 - except KeyError: - raise RuntimeError("Graph changed during iteration") + except KeyError as e: + raise RuntimeError("Graph changed during iteration") from e if indegree_map[child] == 0: zero_indegree.append(child) del indegree_map[child] @@ -194,12 +193,13 @@ def topological_sort(G): yield node if indegree_map: - raise nx.NetworkXUnfeasible("Graph contains a cycle or graph changed " - "during iteration") + raise nx.NetworkXUnfeasible( + "Graph contains a cycle or graph changed " "during iteration" + ) def lexicographical_topological_sort(G, key=None): - """Return a generator of nodes in lexicographically topologically sorted + """Returns a generator of nodes in lexicographically topologically sorted order. A topological sort is a nonunique permutation of the nodes such that an @@ -229,7 +229,7 @@ def lexicographical_topological_sort(G, key=None): NetworkXUnfeasible If `G` is not a directed acyclic graph (DAG) no topological sort exists and a :exc:`NetworkXUnfeasible` exception is raised. This can also be - raised if `G` is changed while the returned iterator is being processed. + raised if `G` is changed while the returned iterator is being processed RuntimeError If `G` is changed while the returned iterator is being processed. @@ -249,14 +249,18 @@ def lexicographical_topological_sort(G, key=None): *Introduction to Algorithms - A Creative Approach.* Addison-Wesley. """ if not G.is_directed(): - raise nx.NetworkXError( - "Topological sort not defined on undirected graphs.") + msg = "Topological sort not defined on undirected graphs." + raise nx.NetworkXError(msg) if key is None: - def key(x): return x + + def key(node): + return node + + nodeid_map = {n: i for i, n in enumerate(G)} def create_tuple(node): - return key(node), node + return key(node), nodeid_map[node], node indegree_map = {v: d for v, d in G.in_degree() if d > 0} # These nodes have zero indegree and ready to be returned. 
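The integer from `nodeid_map` in `create_tuple` keeps `heapq` from ever
comparing node objects directly when two sort keys tie. A minimal sketch,
using a hypothetical graph that mixes unorderable node types with a
constant key:

    >>> G = nx.DiGraph([((1,), 3), (2, 3)])
    >>> list(nx.lexicographical_topological_sort(G, key=lambda n: 0))
    [(1,), 2, 3]

Without the tie-breaking entry, the heap would fall back to evaluating
`(1,) < 2` and raise a `TypeError`.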
@@ -264,15 +268,15 @@ def create_tuple(node): heapq.heapify(zero_indegree) while zero_indegree: - _, node = heapq.heappop(zero_indegree) + _, _, node = heapq.heappop(zero_indegree) if node not in G: raise RuntimeError("Graph changed during iteration") for _, child in G.edges(node): try: indegree_map[child] -= 1 - except KeyError: - raise RuntimeError("Graph changed during iteration") + except KeyError as e: + raise RuntimeError("Graph changed during iteration") from e if indegree_map[child] == 0: heapq.heappush(zero_indegree, create_tuple(child)) del indegree_map[child] @@ -280,12 +284,131 @@ def create_tuple(node): yield node if indegree_map: - raise nx.NetworkXUnfeasible("Graph contains a cycle or graph changed " - "during iteration") + msg = "Graph contains a cycle or graph changed during iteration" + raise nx.NetworkXUnfeasible(msg) + + +@not_implemented_for("undirected") +def all_topological_sorts(G): + """Returns a generator of _all_ topological sorts of the directed graph G. + + A topological sort is a nonunique permutation of the nodes such that an + edge from u to v implies that u appears before v in the topological sort + order. + + Parameters + ---------- + G : NetworkX DiGraph + A directed graph + + Returns + ------- + generator + All topological sorts of the digraph G + + Raises + ------ + NetworkXNotImplemented + If `G` is not directed + NetworkXUnfeasible + If `G` is not acyclic + + Examples + -------- + To enumerate all topological sorts of directed graph: + + >>> DG = nx.DiGraph([(1, 2), (2, 3), (2, 4)]) + >>> list(nx.all_topological_sorts(DG)) + [[1, 2, 4, 3], [1, 2, 3, 4]] + + Notes + ----- + Implements an iterative version of the algorithm given in [1]. + + References + ---------- + .. [1] Knuth, Donald E., Szwarcfiter, Jayme L. (1974). + "A Structured Program to Generate All Topological Sorting Arrangements" + Information Processing Letters, Volume 2, Issue 6, 1974, Pages 153-157, + ISSN 0020-0190, + https://doi.org/10.1016/0020-0190(74)90001-5. 
+ Elsevier (North-Holland), Amsterdam + """ + if not G.is_directed(): + raise nx.NetworkXError("Topological sort not defined on undirected graphs.") + + # the names of count and D are chosen to match the global variables in [1] + # number of edges originating in a vertex v + count = dict(G.in_degree()) + # vertices with indegree 0 + D = deque([v for v, d in G.in_degree() if d == 0]) + # stack of first value chosen at a position k in the topological sort + bases = [] + current_sort = [] + + # do-while construct + while True: + assert all([count[v] == 0 for v in D]) + + if len(current_sort) == len(G): + yield list(current_sort) + + # clean-up stack + while len(current_sort) > 0: + assert len(bases) == len(current_sort) + q = current_sort.pop() + + # "restores" all edges (q, x) + # NOTE: it is important to iterate over edges instead + # of successors, so count is updated correctly in multigraphs + for _, j in G.out_edges(q): + count[j] += 1 + assert count[j] >= 0 + # remove entries from D + while len(D) > 0 and count[D[-1]] > 0: + D.pop() + + # corresponds to a circular shift of the values in D + # if the first value chosen (the base) is in the first + # position of D again, we are done and need to consider the + # previous condition + D.appendleft(q) + if D[-1] == bases[-1]: + # all possible values have been chosen at current position + # remove corresponding marker + bases.pop() + else: + # there are still elements that have not been fixed + # at the current position in the topological sort + # stop removing elements, escape inner loop + break + + else: + if len(D) == 0: + raise nx.NetworkXUnfeasible("Graph contains a cycle.") + + # choose next node + q = D.pop() + # "erase" all edges (q, x) + # NOTE: it is important to iterate over edges instead + # of successors, so count is updated correctly in multigraphs + for _, j in G.out_edges(q): + count[j] -= 1 + assert count[j] >= 0 + if count[j] == 0: + D.append(j) + current_sort.append(q) + + # base for current position might _not_ be fixed yet + if len(bases) < len(current_sort): + bases.append(q) + + if len(bases) == 0: + break def is_aperiodic(G): - """Return True if `G` is aperiodic. + """Returns True if `G` is aperiodic. A directed graph is aperiodic if there is no integer k > 1 that divides the length of every cycle in the graph. @@ -319,14 +442,13 @@ def is_aperiodic(G): A Multidisciplinary Approach, CRC Press. """ if not G.is_directed(): - raise nx.NetworkXError( - "is_aperiodic not defined for undirected graphs") + raise nx.NetworkXError("is_aperiodic not defined for undirected graphs") s = arbitrary_element(G) levels = {s: 0} this_level = [s] g = 0 - l = 1 + lev = 1 while this_level: next_level = [] for u in this_level: @@ -335,27 +457,39 @@ def is_aperiodic(G): g = gcd(g, levels[u] - levels[v] + 1) else: # Tree Edge next_level.append(v) - levels[v] = l + levels[v] = lev this_level = next_level - l += 1 + lev += 1 if len(levels) == len(G): # All nodes in tree return g == 1 else: return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels))) -@not_implemented_for('undirected') -def transitive_closure(G): +@not_implemented_for("undirected") +def transitive_closure(G, reflexive=False): """ Returns transitive closure of a directed graph The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that - for all v,w in V there is an edge (v,w) in E+ if and only if there - is a non-null path from v to w in G. + for all v, w in V there is an edge (v, w) in E+ if and only if there + is a path from v to w in G. 
+
+    Handling of paths from v to v has some flexibility within this definition.
+    A reflexive transitive closure creates a self-loop for the path
+    from v to v of length 0. The usual transitive closure creates a
+    self-loop only if a cycle exists (a path from v to v with length > 0).
+    We also allow an option for no self-loops.

     Parameters
     ----------
     G : NetworkX DiGraph
         A directed graph
+    reflexive : Bool or None, optional (default: False)
+        Determines when cycles create self-loops in the Transitive Closure.
+        If True, trivial cycles (length 0) create self-loops. The result
+        is a reflexive transitive closure of G.
+        If False (the default), non-trivial cycles create self-loops.
+        If None, self-loops are not created.

     Returns
     -------
@@ -371,15 +505,79 @@
     ----------
     .. [1] http://www.ics.uci.edu/~eppstein/PADS/PartialOrder.py

+    TODO this function applies to all directed graphs and is probably misplaced
+    here in dag.py
     """
+    if reflexive is None:
+        TC = G.copy()
+        for v in G:
+            edges = ((v, u) for u in nx.dfs_preorder_nodes(G, v) if v != u)
+            TC.add_edges_from(edges)
+        return TC
+    if reflexive is True:
+        TC = G.copy()
+        for v in G:
+            edges = ((v, u) for u in nx.dfs_preorder_nodes(G, v))
+            TC.add_edges_from(edges)
+        return TC
+    # reflexive is False
     TC = G.copy()
     for v in G:
-        TC.add_edges_from((v, u) for u in nx.dfs_preorder_nodes(G, source=v)
-                          if v != u)
+        edges = ((v, w) for u, w in nx.edge_dfs(G, v))
+        TC.add_edges_from(edges)
     return TC


-@not_implemented_for('undirected')
+@not_implemented_for("undirected")
+def transitive_closure_dag(G, topo_order=None):
+    """ Returns the transitive closure of a directed acyclic graph.
+
+    This function is faster than the function `transitive_closure`, but fails
+    if the graph has a cycle.
+
+    The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
+    for all v, w in V there is an edge (v, w) in E+ if and only if there
+    is a non-null path from v to w in G.
+
+    Parameters
+    ----------
+    G : NetworkX DiGraph
+        A directed acyclic graph (DAG)
+
+    topo_order: list or tuple, optional
+        A topological order for G (if None, the function will compute one)
+
+    Returns
+    -------
+    NetworkX DiGraph
+        The transitive closure of `G`
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is not directed
+    NetworkXUnfeasible
+        If `G` has a cycle
+
+    Notes
+    -----
+    This algorithm is probably simple enough to be well-known but I didn't find
+    a mention in the literature.
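+
+    Examples
+    --------
+    A short sketch on a three-node path, where the closure adds the one
+    missing reachability edge:
+
+    >>> DG = nx.DiGraph([(1, 2), (2, 3)])
+    >>> sorted(nx.transitive_closure_dag(DG).edges())
+    [(1, 2), (1, 3), (2, 3)]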
+ """ + if topo_order is None: + topo_order = list(topological_sort(G)) + + TC = G.copy() + + # idea: traverse vertices following a reverse topological order, connecting + # each vertex to its descendants at distance 2 as we go + for v in reversed(topo_order): + TC.add_edges_from((v, u) for u in descendants_at_distance(TC, v, 2)) + + return TC + + +@not_implemented_for("undirected") def transitive_reduction(G): """ Returns transitive reduction of a directed graph @@ -409,20 +607,29 @@ def transitive_reduction(G): """ if not is_directed_acyclic_graph(G): - raise nx.NetworkXError( - "Transitive reduction only uniquely defined on directed acyclic graphs.") + msg = "Directed Acyclic Graph required for transitive_reduction" + raise nx.NetworkXError(msg) TR = nx.DiGraph() TR.add_nodes_from(G.nodes()) + descendants = {} + # count before removing set stored in descendants + check_count = dict(G.in_degree) for u in G: - u_edges = set(G[u]) + u_nbrs = set(G[u]) for v in G[u]: - u_edges -= {y for x, y in nx.dfs_edges(G, v)} - TR.add_edges_from((u, v) for v in u_edges) + if v in u_nbrs: + if v not in descendants: + descendants[v] = {y for x, y in nx.dfs_edges(G, v)} + u_nbrs -= descendants[v] + check_count[v] -= 1 + if check_count[v] == 0: + del descendants[v] + TR.add_edges_from((u, v) for v in u_nbrs) return TR -@not_implemented_for('undirected') -def antichains(G): +@not_implemented_for("undirected") +def antichains(G, topo_order=None): """Generates antichains from a directed acyclic graph (DAG). An antichain is a subset of a partially ordered set such that any @@ -433,6 +640,9 @@ def antichains(G): G : NetworkX DiGraph A directed acyclic graph (DAG) + topo_order: list or tuple, optional + A topological order for G (if None, the function will compute one) + Returns ------- generator object @@ -458,8 +668,12 @@ def antichains(G): .. [1] Free Lattices, by R. Freese, J. Jezek and J. B. Nation, AMS, Vol 42, 1995, p. 226. """ - TC = nx.transitive_closure(G) - antichains_stacks = [([], list(reversed(list(nx.topological_sort(G)))))] + if topo_order is None: + topo_order = list(nx.topological_sort(G)) + + TC = nx.transitive_closure_dag(G, topo_order) + antichains_stacks = [([], list(reversed(topo_order)))] + while antichains_stacks: (antichain, stack) = antichains_stacks.pop() # Invariant: @@ -469,13 +683,12 @@ def antichains(G): while stack: x = stack.pop() new_antichain = antichain + [x] - new_stack = [ - t for t in stack if not ((t in TC[x]) or (x in TC[t]))] + new_stack = [t for t in stack if not ((t in TC[x]) or (x in TC[t]))] antichains_stacks.append((new_antichain, new_stack)) -@not_implemented_for('undirected') -def dag_longest_path(G, weight='weight', default_weight=1): +@not_implemented_for("undirected") +def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): """Returns the longest path in a directed acyclic graph (DAG). 
If `G` has edges with `weight` attribute the edge data are used as @@ -492,6 +705,9 @@ def dag_longest_path(G, weight='weight', default_weight=1): default_weight : int, optional The weight of edges that do not have a weight attribute + topo_order: list or tuple, optional + A topological order for G (if None, the function will compute one) + Returns ------- list @@ -509,14 +725,22 @@ def dag_longest_path(G, weight='weight', default_weight=1): """ if not G: return [] + + if topo_order is None: + topo_order = nx.topological_sort(G) + dist = {} # stores {v : (length, u)} - for v in nx.topological_sort(G): - us = [(dist[u][0] + data.get(weight, default_weight), u) - for u, data in G.pred[v].items()] + for v in topo_order: + us = [ + (dist[u][0] + data.get(weight, default_weight), u) + for u, data in G.pred[v].items() + ] + # Use the best predecessor if there is one and its distance is # non-negative, otherwise terminate. maxu = max(us, key=lambda x: x[0]) if us else (0, v) dist[v] = maxu if maxu[0] >= 0 else (0, v) + u = None v = max(dist, key=lambda x: dist[x][0]) path = [] @@ -524,12 +748,13 @@ def dag_longest_path(G, weight='weight', default_weight=1): path.append(v) u = v v = dist[v][1] + path.reverse() return path -@not_implemented_for('undirected') -def dag_longest_path_length(G, weight='weight', default_weight=1): +@not_implemented_for("undirected") +def dag_longest_path_length(G, weight="weight", default_weight=1): """Returns the longest path length in a DAG Parameters @@ -583,8 +808,8 @@ def root_to_leaf_paths(G): return chaini(starmap(all_paths, product(roots, leaves))) -@not_implemented_for('multigraph') -@not_implemented_for('undirected') +@not_implemented_for("multigraph") +@not_implemented_for("undirected") def dag_to_branching(G): """Returns a branching representing all (overlapping) paths from root nodes to leaf nodes in the given directed acyclic graph. @@ -637,16 +862,16 @@ def dag_to_branching(G): >>> from collections import defaultdict >>> from operator import itemgetter >>> - >>> G = nx.DiGraph(nx.utils.pairwise('abd')) - >>> G.add_edges_from(nx.utils.pairwise('acd')) + >>> G = nx.DiGraph(nx.utils.pairwise("abd")) + >>> G.add_edges_from(nx.utils.pairwise("acd")) >>> B = nx.dag_to_branching(G) >>> >>> sources = defaultdict(set) - >>> for v, source in B.nodes(data='source'): + >>> for v, source in B.nodes(data="source"): ... sources[source].add(v) - >>> len(sources['a']) + >>> len(sources["a"]) 1 - >>> len(sources['d']) + >>> len(sources["d"]) 2 To copy node attributes from the original graph to the new graph, @@ -655,7 +880,7 @@ def dag_to_branching(G): >>> for source, nodes in sources.items(): ... for v in nodes: - ... B.node[v].update(G.node[source]) + ... B.nodes[v].update(G.nodes[source]) Notes ----- @@ -671,7 +896,7 @@ def dag_to_branching(G): """ if has_cycle(G): - msg = 'dag_to_branching is only defined for acyclic graphs' + msg = "dag_to_branching is only defined for acyclic graphs" raise nx.HasACycle(msg) paths = root_to_leaf_paths(G) B, root = nx.prefix_tree(paths) diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index d4a5f54..6e97279 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -1,18 +1,18 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
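Since this dag.py hunk touches `transitive_reduction` and the longest-path helpers together, one sketch covers both: on a triangle DAG the edge (1, 3) is redundant, and the longest path visits all three nodes.

    import networkx as nx

    G = nx.DiGraph([(1, 2), (2, 3), (1, 3)])
    # (1, 3) is implied by the path 1 -> 2 -> 3, so the reduction drops it.
    assert sorted(nx.transitive_reduction(G).edges()) == [(1, 2), (2, 3)]
    # Unweighted: the longest path is the one with the most edges.
    assert nx.dag_longest_path(G) == [1, 2, 3]
    assert nx.dag_longest_path_length(G) == 2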
-# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Dan Schult (dschult@colgate.edu) """Graph diameter, radius, eccentricity and other properties.""" -import networkx -__all__ = ['extrema_bounding', 'eccentricity', 'diameter', - 'radius', 'periphery', 'center'] +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "extrema_bounding", + "eccentricity", + "diameter", + "radius", + "periphery", + "center", + "barycenter", + "resistance_distance", +] def extrema_bounding(G, compute="diameter"): @@ -89,17 +89,17 @@ def extrema_bounding(G, compute="diameter"): high = not high # get distances from/to current node and derive eccentricity - dist = dict(networkx.single_source_shortest_path_length(G, current)) + dist = dict(nx.single_source_shortest_path_length(G, current)) if len(dist) != N: - msg = ('Cannot compute metric because graph is not connected.') - raise networkx.NetworkXError(msg) + msg = "Cannot compute metric because graph is not connected." + raise nx.NetworkXError(msg) current_ecc = max(dist.values()) # print status update -# print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/" -# + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is " -# + str(current_ecc)) -# print(ecc_upper) + # print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/" + # + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is " + # + str(current_ecc)) + # print(ecc_upper) # (re)set bound extremes maxuppernode = None @@ -119,79 +119,101 @@ def extrema_bounding(G, compute="diameter"): maxupper = max(ecc_upper[i], maxupper) # update candidate set - if compute == 'diameter': - ruled_out = {i for i in candidates if ecc_upper[i] <= maxlower and - 2 * ecc_lower[i] >= maxupper} - - elif compute == 'radius': - ruled_out = {i for i in candidates if ecc_lower[i] >= minupper and - ecc_upper[i] + 1 <= 2 * minlower} - - elif compute == 'periphery': - ruled_out = {i for i in candidates if ecc_upper[i] < maxlower and - (maxlower == maxupper or ecc_lower[i] > maxupper)} - - elif compute == 'center': - ruled_out = {i for i in candidates if ecc_lower[i] > minupper and - (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower)} - - elif compute == 'eccentricities': + if compute == "diameter": + ruled_out = { + i + for i in candidates + if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper + } + + elif compute == "radius": + ruled_out = { + i + for i in candidates + if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower + } + + elif compute == "periphery": + ruled_out = { + i + for i in candidates + if ecc_upper[i] < maxlower + and (maxlower == maxupper or ecc_lower[i] > maxupper) + } + + elif compute == "center": + ruled_out = { + i + for i in candidates + if ecc_lower[i] > minupper + and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower) + } + + elif compute == "eccentricities": ruled_out = {} ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i]) candidates -= ruled_out -# for i in ruled_out: -# print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% -# (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper)) -# print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% -# (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper)) -# print("NODE 4: %g"%(ecc_upper[4] <= maxlower)) -# print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper)) -# print("NODE 4: %g"%(ecc_upper[4] <= maxlower -# and 2 * ecc_lower[4] >= maxupper)) + # for i in ruled_out: + # print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # 
(i,ecc_upper[i],maxlower,ecc_lower[i],maxupper)) + # print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower)) + # print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower + # and 2 * ecc_lower[4] >= maxupper)) # updating maxuppernode and minlowernode for selection in next round for i in candidates: - if minlowernode is None \ - or (ecc_lower[i] == ecc_lower[minlowernode] - and degrees[i] > degrees[minlowernode]) \ - or (ecc_lower[i] < ecc_lower[minlowernode]): + if ( + minlowernode is None + or ( + ecc_lower[i] == ecc_lower[minlowernode] + and degrees[i] > degrees[minlowernode] + ) + or (ecc_lower[i] < ecc_lower[minlowernode]) + ): minlowernode = i - if maxuppernode is None \ - or (ecc_upper[i] == ecc_upper[maxuppernode] - and degrees[i] > degrees[maxuppernode]) \ - or (ecc_upper[i] > ecc_upper[maxuppernode]): + if ( + maxuppernode is None + or ( + ecc_upper[i] == ecc_upper[maxuppernode] + and degrees[i] > degrees[maxuppernode] + ) + or (ecc_upper[i] > ecc_upper[maxuppernode]) + ): maxuppernode = i # print status update -# print (" min=" + str(minlower) + "/" + str(minupper) + -# " max=" + str(maxlower) + "/" + str(maxupper) + -# " candidates: " + str(len(candidates))) -# print("cand:",candidates) -# print("ecc_l",ecc_lower) -# print("ecc_u",ecc_upper) -# wait = input("press Enter to continue") + # print (" min=" + str(minlower) + "/" + str(minupper) + + # " max=" + str(maxlower) + "/" + str(maxupper) + + # " candidates: " + str(len(candidates))) + # print("cand:",candidates) + # print("ecc_l",ecc_lower) + # print("ecc_u",ecc_upper) + # wait = input("press Enter to continue") # return the correct value of the requested metric - if compute == 'diameter': + if compute == "diameter": return maxlower - elif compute == 'radius': + elif compute == "radius": return minupper - elif compute == 'periphery': + elif compute == "periphery": p = [v for v in G if ecc_lower[v] == maxlower] return p - elif compute == 'center': + elif compute == "center": c = [v for v in G if ecc_upper[v] == minupper] return c - elif compute == 'eccentricities': + elif compute == "eccentricities": return ecc_lower return None def eccentricity(G, v=None, sp=None): - """Return the eccentricity of nodes in G. + """Returns the eccentricity of nodes in G. The eccentricity of a node v is the maximum distance from v to all other nodes in G. @@ -212,33 +234,34 @@ def eccentricity(G, v=None, sp=None): ecc : dictionary A dictionary of eccentricity values keyed by node. 
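These eccentricity-based measures are easiest to sanity-check on a five-node path, where the middle node is the unique center (and, per the `barycenter` function added further down in this file, also the median):

    import networkx as nx

    G = nx.path_graph(5)  # 0 - 1 - 2 - 3 - 4
    assert nx.eccentricity(G, 0) == 4
    assert nx.diameter(G) == 4 and nx.radius(G) == 2
    assert nx.center(G) == [2] and nx.periphery(G) == [0, 4]
    assert nx.barycenter(G) == [2]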
""" -# if v is None: # none, use entire graph -# nodes=G.nodes() -# elif v in G: # is v a single node -# nodes=[v] -# else: # assume v is a container of nodes -# nodes=v + # if v is None: # none, use entire graph + # nodes=G.nodes() + # elif v in G: # is v a single node + # nodes=[v] + # else: # assume v is a container of nodes + # nodes=v order = G.order() e = {} for n in G.nbunch_iter(v): if sp is None: - length = networkx.single_source_shortest_path_length(G, n) + length = nx.single_source_shortest_path_length(G, n) L = len(length) else: try: length = sp[n] L = len(length) - except TypeError: - raise networkx.NetworkXError('Format of "sp" is invalid.') + except TypeError as e: + raise nx.NetworkXError('Format of "sp" is invalid.') from e if L != order: if G.is_directed(): - msg = ('Found infinite path length because the digraph is not' - ' strongly connected') + msg = ( + "Found infinite path length because the digraph is not" + " strongly connected" + ) else: - msg = ('Found infinite path length because the graph is not' - ' connected') - raise networkx.NetworkXError(msg) + msg = "Found infinite path length because the graph is not" " connected" + raise nx.NetworkXError(msg) e[n] = max(length.values()) @@ -249,7 +272,7 @@ def eccentricity(G, v=None, sp=None): def diameter(G, e=None, usebounds=False): - """Return the diameter of the graph G. + """Returns the diameter of the graph G. The diameter is the maximum eccentricity. @@ -278,7 +301,7 @@ def diameter(G, e=None, usebounds=False): def periphery(G, e=None, usebounds=False): - """Return the periphery of the graph G. + """Returns the periphery of the graph G. The periphery is the set of nodes with eccentricity equal to the diameter. @@ -294,6 +317,11 @@ def periphery(G, e=None, usebounds=False): ------- p : list List of nodes in periphery + + See Also + -------- + barycenter + center """ if usebounds is True and e is None and not G.is_directed(): return extrema_bounding(G, compute="periphery") @@ -305,7 +333,7 @@ def periphery(G, e=None, usebounds=False): def radius(G, e=None, usebounds=False): - """Return the radius of the graph G. + """Returns the radius of the graph G. The radius is the minimum eccentricity. @@ -330,7 +358,7 @@ def radius(G, e=None, usebounds=False): def center(G, e=None, usebounds=False): - """Return the center of the graph G. + """Returns the center of the graph G. The center is the set of nodes with eccentricity equal to radius. @@ -346,6 +374,11 @@ def center(G, e=None, usebounds=False): ------- c : list List of nodes in center + + See Also + -------- + barycenter + periphery """ if usebounds is True and e is None and not G.is_directed(): return extrema_bounding(G, compute="center") @@ -354,3 +387,221 @@ def center(G, e=None, usebounds=False): radius = min(e.values()) p = [v for v in e if e[v] == radius] return p + + +def barycenter(G, weight=None, attr=None, sp=None): + r"""Calculate barycenter of a connected graph, optionally with edge weights. + + The :dfn:`barycenter` a + :func:`connected ` graph + :math:`G` is the subgraph induced by the set of its nodes :math:`v` + minimizing the objective function + + .. math:: + + \sum_{u \in V(G)} d_G(u, v), + + where :math:`d_G` is the (possibly weighted) :func:`path length + `. + The barycenter is also called the :dfn:`median`. See [West01]_, p. 78. + + Parameters + ---------- + G : :class:`networkx.Graph` + The connected graph :math:`G`. + weight : :class:`str`, optional + Passed through to + :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`. 
+ attr : :class:`str`, optional + If given, write the value of the objective function to each node's + `attr` attribute. Otherwise do not store the value. + sp : dict of dicts, optional + All pairs shortest path lengths as a dictionary of dictionaries + + Returns + ------- + list + Nodes of `G` that induce the barycenter of `G`. + + Raises + ------ + NetworkXNoPath + If `G` is disconnected. `G` may appear disconnected to + :func:`barycenter` if `sp` is given but is missing shortest path + lengths for any pairs. + ValueError + If `sp` and `weight` are both given. + + See Also + -------- + center + periphery + """ + if sp is None: + sp = nx.shortest_path_length(G, weight=weight) + else: + sp = sp.items() + if weight is not None: + raise ValueError("Cannot use both sp, weight arguments together") + smallest, barycenter_vertices, n = float("inf"), [], len(G) + for v, dists in sp: + if len(dists) < n: + raise nx.NetworkXNoPath( + f"Input graph {G} is disconnected, so every induced subgraph " + "has infinite barycentricity." + ) + barycentricity = sum(dists.values()) + if attr is not None: + G.nodes[v][attr] = barycentricity + if barycentricity < smallest: + smallest = barycentricity + barycenter_vertices = [v] + elif barycentricity == smallest: + barycenter_vertices.append(v) + return barycenter_vertices + + +def _laplacian_submatrix(node, mat, node_list): + """Removes row/col from a sparse matrix and returns the submatrix + """ + j = node_list.index(node) + n = list(range(len(node_list))) + n.pop(j) + + if mat.shape[0] != mat.shape[1]: + raise nx.NetworkXError("Matrix must be square") + elif len(node_list) != mat.shape[0]: + msg = "Node list length does not match matrix dimensions" + raise nx.NetworkXError(msg) + + mat = mat.tocsr() + mat = mat[n, :] + + mat = mat.tocsc() + mat = mat[:, n] + + node_list.pop(j) + + return mat, node_list + + +def _count_lu_permutations(perm_array): + """Counts the number of permutations in SuperLU perm_c or perm_r + """ + perm_cnt = 0 + arr = perm_array.tolist() + for i in range(len(arr)): + if i != arr[i]: + perm_cnt += 1 + n = arr.index(i) + arr[n] = arr[i] + arr[i] = i + + return perm_cnt + + +@not_implemented_for("directed") +def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True): + """Returns the resistance distance between node A and node B on graph G. + + The resistance distance between two nodes of a graph is akin to treating + the graph as a grid of resistors with a resistance equal to the provided + weight. + + If weight is not provided, then a weight of 1 is used for all edges. + + Parameters + ---------- + G : NetworkX graph + A graph + + nodeA : node + A node within graph G. + + nodeB : node + A node within graph G, exclusive of Node A. + + weight : string or None, optional (default=None) + The edge data key used to compute the resistance distance. + If None, then each edge has weight 1. + + invert_weight : boolean (default=True) + Proper calculation of resistance distance requires building the + Laplacian matrix with the reciprocal of the weight. Not required + if the weight is already inverted. Weight cannot be zero.
+ + Returns + ------- + rd : float + Value of effective resistance distance + + Notes + ----- + Overview discussion: + * https://en.wikipedia.org/wiki/Resistance_distance + * http://mathworld.wolfram.com/ResistanceDistance.html + + Additional details: + Vaya Sapobi Samui Vos, “Methods for determining the effective resistance,” M.S., + Mathematisch Instituut, Universiteit Leiden, Leiden, Netherlands, 2016 + Available: `Link to thesis `_ + """ + import numpy as np + import scipy.sparse + import scipy.sparse.linalg # splu is not pulled in by scipy.sparse alone + + if not nx.is_connected(G): + msg = "Graph G must be connected." + raise nx.NetworkXError(msg) + elif nodeA not in G: + msg = "Node A is not in graph G." + raise nx.NetworkXError(msg) + elif nodeB not in G: + msg = "Node B is not in graph G." + raise nx.NetworkXError(msg) + elif nodeA == nodeB: + msg = "Node A and Node B cannot be the same." + raise nx.NetworkXError(msg) + + G = G.copy() + node_list = list(G) + + if invert_weight and weight is not None: + if G.is_multigraph(): + for (u, v, k, d) in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for (u, v, d) in G.edges(data=True): + d[weight] = 1 / d[weight] + # Replace with collapsing topology or approximated zero? + + # Using determinants to compute the effective resistance is more memory + # efficient than directly calculating the pseudo-inverse + L = nx.laplacian_matrix(G, node_list, weight=weight) + + Lsub_a, node_list_a = _laplacian_submatrix(nodeA, L.copy(), node_list[:]) + Lsub_ab, node_list_ab = _laplacian_submatrix(nodeB, Lsub_a.copy(), node_list_a[:]) + + # Factorize Laplacian submatrices and extract diagonals + # Order the diagonals to minimize the likelihood of overflow + # when computing the determinant + lu_a = scipy.sparse.linalg.splu(Lsub_a, options=dict(SymmetricMode=True)) + LdiagA = lu_a.U.diagonal() + LdiagA_s = np.product(np.sign(LdiagA)) * np.product(lu_a.L.diagonal()) + LdiagA_s *= (-1) ** _count_lu_permutations(lu_a.perm_r) + LdiagA_s *= (-1) ** _count_lu_permutations(lu_a.perm_c) + LdiagA = np.absolute(LdiagA) + LdiagA = np.sort(LdiagA) + + lu_ab = scipy.sparse.linalg.splu(Lsub_ab, options=dict(SymmetricMode=True)) + LdiagAB = lu_ab.U.diagonal() + LdiagAB_s = np.product(np.sign(LdiagAB)) * np.product(lu_ab.L.diagonal()) + LdiagAB_s *= (-1) ** _count_lu_permutations(lu_ab.perm_r) + LdiagAB_s *= (-1) ** _count_lu_permutations(lu_ab.perm_c) + LdiagAB = np.absolute(LdiagAB) + LdiagAB = np.sort(LdiagAB) + + # Calculate the ratio of determinant, rd = det(Lsub_ab)/det(Lsub_a) + Ldet = np.product(np.divide(np.append(LdiagAB, [1]), LdiagA)) + rd = Ldet * LdiagAB_s / LdiagA_s + + return rd diff --git a/networkx/algorithms/distance_regular.py b/networkx/algorithms/distance_regular.py index d2f6546..0b4ebbf 100644 --- a/networkx/algorithms/distance_regular.py +++ b/networkx/algorithms/distance_regular.py @@ -1,8 +1,3 @@ -# Copyright (C) 2011 by -# Dheeraj M R -# Aric Hagberg -# All rights reserved. -# BSD license.
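The determinant-based computation above is easy to cross-check against circuit theory (a sketch, assuming scipy is installed since the function imports it): on a 4-cycle of unit resistors, opposite corners see two 2-ohm branches in parallel.

    import networkx as nx

    G = nx.cycle_graph(4)
    # Two parallel branches of two unit resistors: (2 * 2) / (2 + 2) = 1.
    assert abs(nx.resistance_distance(G, 0, 2) - 1.0) < 1e-12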
""" ======================= Distance-regular graphs @@ -13,11 +8,12 @@ from networkx.utils import not_implemented_for from .distance_measures import diameter -__author__ = """\n""".join(['Dheeraj M R ', - 'Aric Hagberg ']) - -__all__ = ['is_distance_regular', 'is_strongly_regular', - 'intersection_array', 'global_parameters'] +__all__ = [ + "is_distance_regular", + "is_strongly_regular", + "intersection_array", + "global_parameters", +] def is_distance_regular(G): @@ -40,7 +36,7 @@ def is_distance_regular(G): Examples -------- - >>> G=nx.hypercube_graph(6) + >>> G = nx.hypercube_graph(6) >>> nx.is_distance_regular(G) True @@ -68,7 +64,7 @@ def is_distance_regular(G): def global_parameters(b, c): - """Return global parameters for a given intersection array. + """Returns global parameters for a given intersection array. Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d such that for any 2 vertices x,y in G at a distance i=d(x,y), there @@ -111,7 +107,7 @@ def global_parameters(b, c): return ((y, b[0] - x - y, x) for x, y in zip(b + [0], [0] + c)) -@not_implemented_for('directed', 'multigraph') +@not_implemented_for("directed", "multigraph") def intersection_array(G): """Returns the intersection array of a distance-regular graph. @@ -133,7 +129,7 @@ def intersection_array(G): Examples -------- - >>> G=nx.icosahedral_graph() + >>> G = nx.icosahedral_graph() >>> nx.intersection_array(G) ([5, 2, 1], [1, 2, 5]) @@ -152,7 +148,7 @@ def intersection_array(G): (_, k) = next(degree) for _, knext in degree: if knext != k: - raise nx.NetworkXError('Graph is not distance regular.') + raise nx.NetworkXError("Graph is not distance regular.") k = knext path_length = dict(nx.all_pairs_shortest_path_length(G)) diameter = max([max(path_length[n].values()) for n in path_length]) @@ -162,23 +158,25 @@ def intersection_array(G): for v in G: try: i = path_length[u][v] - except KeyError: # graph must be connected - raise nx.NetworkXError('Graph is not distance regular.') + except KeyError as e: # graph must be connected + raise nx.NetworkXError("Graph is not distance regular.") from e # number of neighbors of v at a distance of i-1 from u c = len([n for n in G[v] if path_length[n][u] == i - 1]) # number of neighbors of v at a distance of i+1 from u b = len([n for n in G[v] if path_length[n][u] == i + 1]) # b,c are independent of u and v if cint.get(i, c) != c or bint.get(i, b) != b: - raise nx.NetworkXError('Graph is not distance regular') + raise nx.NetworkXError("Graph is not distance regular") bint[i] = b cint[i] = c - return ([bint.get(j, 0) for j in range(diameter)], - [cint.get(j + 1, 0) for j in range(diameter)]) + return ( + [bint.get(j, 0) for j in range(diameter)], + [cint.get(j + 1, 0) for j in range(diameter)], + ) # TODO There is a definition for directed strongly regular graphs. -@not_implemented_for('directed', 'multigraph') +@not_implemented_for("directed", "multigraph") def is_strongly_regular(G): """Returns True if and only if the given graph is strongly regular. 
@@ -213,7 +211,6 @@ def is_strongly_regular(G): two-regular, each pair of adjacent vertices has no shared neighbors, and each pair of nonadjacent vertices has one shared neighbor:: - >>> import networkx as nx >>> G = nx.cycle_graph(5) >>> nx.is_strongly_regular(G) True diff --git a/networkx/algorithms/dominance.py b/networkx/algorithms/dominance.py index fb437af..63ed7a8 100644 --- a/networkx/algorithms/dominance.py +++ b/networkx/algorithms/dominance.py @@ -1,11 +1,3 @@ -# Copyright (C) 2014-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: ysitu (ysitu@users.noreply.github.com) """ Dominance algorithms. """ @@ -14,10 +6,10 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['immediate_dominators', 'dominance_frontiers'] +__all__ = ["immediate_dominators", "dominance_frontiers"] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def immediate_dominators(G, start): """Returns the immediate dominators of all nodes of a directed graph. @@ -61,7 +53,7 @@ def immediate_dominators(G, start): Software Practice & Experience, 4:110, 2001. """ if start not in G: - raise nx.NetworkXError('start is not in G') + raise nx.NetworkXError("start is not in G") idom = {start: start} diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py index 4289f89..32fff4d 100644 --- a/networkx/algorithms/dominating.py +++ b/networkx/algorithms/dominating.py @@ -1,12 +1,10 @@ -# -*- coding: utf-8 -*- """Functions for computing dominating sets in a graph.""" from itertools import chain import networkx as nx from networkx.utils import arbitrary_element -__author__ = '\n'.join(['Jordi Torrents ']) -__all__ = ['dominating_set', 'is_dominating_set'] +__all__ = ["dominating_set", "is_dominating_set"] def dominating_set(G, start_with=None): @@ -49,7 +47,7 @@ def dominating_set(G, start_with=None): if start_with is None: start_with = arbitrary_element(all_nodes) if start_with not in G: - raise nx.NetworkXError('node {} is not in G'.format(start_with)) + raise nx.NetworkXError(f"node {start_with} is not in G") dominating_set = {start_with} dominated_nodes = set(G[start_with]) remaining_nodes = all_nodes - dominated_nodes - dominating_set @@ -89,6 +87,6 @@ def is_dominating_set(G, nbunch): .. [1] https://en.wikipedia.org/wiki/Dominating_set """ - testset = set(n for n in nbunch if n in G) + testset = {n for n in nbunch if n in G} nbrs = set(chain.from_iterable(G[n] for n in testset)) return len(set(G) - testset - nbrs) == 0 diff --git a/networkx/algorithms/efficiency.py b/networkx/algorithms/efficiency_measures.py similarity index 85% rename from networkx/algorithms/efficiency.py rename to networkx/algorithms/efficiency_measures.py index 5f6a396..b50b751 100644 --- a/networkx/algorithms/efficiency.py +++ b/networkx/algorithms/efficiency_measures.py @@ -1,24 +1,13 @@ -# efficiency.py - functions for computing node, edge, and graph efficiency -# -# Copyright 2011, 2012, 2013, 2014, 2015 NetworkX developers -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
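The greedy `dominating_set` above guarantees a valid dominating set, not a minimum one, so a check of the invariant is more robust than asserting an exact answer:

    import networkx as nx

    G = nx.path_graph(5)
    D = nx.dominating_set(G, start_with=0)
    # Every node is in D or adjacent to a member of D.
    assert nx.is_dominating_set(G, D)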
"""Provides functions for computing the efficiency of nodes and graphs.""" -from __future__ import division - -from itertools import permutations import networkx as nx from networkx.exception import NetworkXNoPath from ..utils import not_implemented_for -__all__ = ['efficiency', 'local_efficiency', 'global_efficiency'] +__all__ = ["efficiency", "local_efficiency", "global_efficiency"] -@not_implemented_for('directed') +@not_implemented_for("directed") def efficiency(G, u, v): """Returns the efficiency of a pair of nodes in a graph. @@ -62,7 +51,7 @@ def efficiency(G, u, v): return eff -@not_implemented_for('directed') +@not_implemented_for("directed") def global_efficiency(G): """Returns the average global efficiency of the graph. @@ -100,17 +89,23 @@ def global_efficiency(G): n = len(G) denom = n * (n - 1) if denom != 0: - g_eff = sum(efficiency(G, u, v) for u, v in permutations(G, 2)) / denom + lengths = nx.all_pairs_shortest_path_length(G) + g_eff = 0 + for source, targets in lengths: + for target, distance in targets.items(): + if distance > 0: + g_eff += 1 / distance + g_eff /= denom + # g_eff = sum(1 / d for s, tgts in lengths + # for t, d in tgts.items() if d > 0) / denom else: g_eff = 0 # TODO This can be made more efficient by computing all pairs shortest # path lengths in parallel. - # - # TODO This summation can be trivially parallelized. return g_eff -@not_implemented_for('directed') +@not_implemented_for("directed") def local_efficiency(G): """Returns the average local efficiency of the graph. diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py index 4a49b50..6f8283f 100644 --- a/networkx/algorithms/euler.py +++ b/networkx/algorithms/euler.py @@ -1,23 +1,19 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2010 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: -# Nima Mohammadi -# Aric Hagberg """ Eulerian circuits and graphs. """ -import networkx as nx +from itertools import combinations -from ..utils import arbitrary_element +import networkx as nx +from ..utils import arbitrary_element, not_implemented_for -__all__ = ['is_eulerian', 'eulerian_circuit'] +__all__ = [ + "is_eulerian", + "eulerian_circuit", + "eulerize", + "is_semieulerian", + "has_eulerian_path", + "eulerian_path", +] def is_eulerian(G): @@ -50,13 +46,47 @@ def is_eulerian(G): if G.is_directed(): # Every node must have equal in degree and out degree and the # graph must be strongly connected - return (all(G.in_degree(n) == G.out_degree(n) for n in G) and - nx.is_strongly_connected(G)) + return all( + G.in_degree(n) == G.out_degree(n) for n in G + ) and nx.is_strongly_connected(G) # An undirected Eulerian graph has no vertices of odd degree and # must be connected. return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G) +def is_semieulerian(G): + """Return True iff `G` is semi-Eulerian. + + G is semi-Eulerian if it has an Eulerian path but no Eulerian circuit. + """ + return has_eulerian_path(G) and not is_eulerian(G) + + +def _find_path_start(G): + """Return a suitable starting vertex for an Eulerian path. + + If no path exists, return None. 
+ """ + if not has_eulerian_path(G): + return None + + if is_eulerian(G): + return arbitrary_element(G) + + if G.is_directed(): + v1, v2 = [v for v in G if G.in_degree(v) != G.out_degree(v)] + # Determines which is the 'start' node (as opposed to the 'end') + if G.out_degree(v1) > G.in_degree(v1): + return v1 + else: + return v2 + + else: + # In an undirected graph randomly choose one of the possibilities + start = [v for v in G if G.degree(v) % 2 != 0][0] + return start + + def _simplegraph_eulerian_circuit(G, source): if G.is_directed(): degree = G.out_degree @@ -97,7 +127,8 @@ def _multigraph_eulerian_circuit(G, source): last_vertex, last_key = current_vertex, current_key vertex_stack.pop() else: - _, next_vertex, next_key = arbitrary_element(edges(current_vertex, keys=True)) + triple = arbitrary_element(edges(current_vertex, keys=True)) + _, next_vertex, next_key = triple vertex_stack.append((next_vertex, next_key)) G.remove_edge(current_vertex, next_vertex, next_key) @@ -179,5 +210,162 @@ def eulerian_circuit(G, source=None, keys=False): else: yield u, v else: - for u, v in _simplegraph_eulerian_circuit(G, source): - yield u, v + yield from _simplegraph_eulerian_circuit(G, source) + + +def has_eulerian_path(G): + """Return True iff `G` has an Eulerian path. + + An Eulerian path is a path in a graph which uses each edge of a graph + exactly once. + + A directed graph has an Eulerian path iff: + - at most one vertex has out_degree - in_degree = 1, + - at most one vertex has in_degree - out_degree = 1, + - every other vertex has equal in_degree and out_degree, + - and all of its vertices with nonzero degree belong to a + - single connected component of the underlying undirected graph. + + An undirected graph has an Eulerian path iff: + - exactly zero or two vertices have odd degree, + - and all of its vertices with nonzero degree belong to a + - single connected component. + + Parameters + ---------- + G : NetworkX Graph + The graph to find an euler path in. + + Returns + ------- + Bool : True if G has an eulerian path. + + See Also + -------- + is_eulerian + eulerian_path + """ + if G.is_directed(): + ins = G.in_degree + outs = G.out_degree + semibalanced_ins = sum(ins(v) - outs(v) == 1 for v in G) + semibalanced_outs = sum(outs(v) - ins(v) == 1 for v in G) + return ( + semibalanced_ins <= 1 + and semibalanced_outs <= 1 + and sum(G.in_degree(v) != G.out_degree(v) for v in G) <= 2 + and nx.is_weakly_connected(G) + ) + else: + return sum(d % 2 == 1 for v, d in G.degree()) in (0, 2) and nx.is_connected(G) + + +def eulerian_path(G, source=None, keys=False): + """Return an iterator over the edges of an Eulerian path in `G`. + + Parameters + ---------- + G : NetworkX Graph + The graph in which to look for an eulerian path. + source : node or None (default: None) + The node at which to start the search. None means search over all + starting nodes. + keys : Bool (default: False) + Indicates whether to yield edge 3-tuples (u, v, edge_key). + The default yields edge 2-tuples + + Yields + ------ + Edge tuples along the eulerian path. + + Warning: If `source` provided is not the start node of an Euler path + will raise error even if an Euler Path exists. 
+ """ + if not has_eulerian_path(G): + raise nx.NetworkXError("Graph has no Eulerian paths.") + if G.is_directed(): + G = G.reverse() + else: + G = G.copy() + if source is None: + source = _find_path_start(G) + if G.is_multigraph(): + for u, v, k in _multigraph_eulerian_circuit(G, source): + if keys: + yield u, v, k + else: + yield u, v + else: + yield from _simplegraph_eulerian_circuit(G, source) + + +@not_implemented_for("directed") +def eulerize(G): + """Transforms a graph into an Eulerian graph + + Parameters + ---------- + G : NetworkX graph + An undirected graph + + Returns + ------- + G : NetworkX multigraph + + Raises + ------ + NetworkXError + If the graph is not connected. + + See Also + -------- + is_eulerian + eulerian_circuit + + References + ---------- + .. [1] J. Edmonds, E. L. Johnson. + Matching, Euler tours and the Chinese postman. + Mathematical programming, Volume 5, Issue 1 (1973), 111-114. + [2] https://en.wikipedia.org/wiki/Eulerian_path + .. [3] http://web.math.princeton.edu/math_alive/5/Notes1.pdf + + Examples + -------- + >>> G = nx.complete_graph(10) + >>> H = nx.eulerize(G) + >>> nx.is_eulerian(H) + True + + """ + if G.order() == 0: + raise nx.NetworkXPointlessConcept("Cannot Eulerize null graph") + if not nx.is_connected(G): + raise nx.NetworkXError("G is not connected") + odd_degree_nodes = [n for n, d in G.degree() if d % 2 == 1] + G = nx.MultiGraph(G) + if len(odd_degree_nodes) == 0: + return G + + # get all shortest paths between vertices of odd degree + odd_deg_pairs_paths = [ + (m, {n: nx.shortest_path(G, source=m, target=n)}) + for m, n in combinations(odd_degree_nodes, 2) + ] + + # use inverse path lengths as edge-weights in a new graph + # store the paths in the graph for easy indexing later + Gp = nx.Graph() + for n, Ps in odd_deg_pairs_paths: + for m, P in Ps.items(): + if n != m: + Gp.add_edge(m, n, weight=1 / len(P), path=P) + + # find the minimum weight matching of edges in the weighted graph + best_matching = nx.Graph(list(nx.max_weight_matching(Gp))) + + # duplicate each edge along each path in the set of paths in Gp + for m, n in best_matching.edges(): + path = Gp[m][n]["path"] + G.add_edges_from(nx.utils.pairwise(path)) + return G diff --git a/networkx/algorithms/flow/boykovkolmogorov.py b/networkx/algorithms/flow/boykovkolmogorov.py index 4c834c2..ea8ae3d 100644 --- a/networkx/algorithms/flow/boykovkolmogorov.py +++ b/networkx/algorithms/flow/boykovkolmogorov.py @@ -1,13 +1,3 @@ -# boykovkolmogorov.py - Boykov Kolmogorov algorithm for maximum flow problems. -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Author: Jordi Torrents """ Boykov-Kolmogorov algorithm for maximum flow problems. """ @@ -17,11 +7,12 @@ import networkx as nx from networkx.algorithms.flow.utils import build_residual_network -__all__ = ['boykov_kolmogorov'] +__all__ = ["boykov_kolmogorov"] -def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, - value_only=False, cutoff=None): +def boykov_kolmogorov( + G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None +): r"""Find a maximum single-commodity flow using Boykov-Kolmogorov algorithm. 
This function returns the residual network resulting after computing @@ -114,7 +105,6 @@ def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, Examples -------- - >>> import networkx as nx >>> from networkx.algorithms.flow import boykov_kolmogorov The functions that implement flow algorithms and output a residual @@ -122,19 +112,19 @@ def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, namespace, so you have to explicitly import them from the flow package. >>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) - >>> R = boykov_kolmogorov(G, 'x', 'y') - >>> flow_value = nx.maximum_flow_value(G, 'x', 'y') + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = boykov_kolmogorov(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") >>> flow_value 3.0 - >>> flow_value == R.graph['flow_value'] + >>> flow_value == R.graph["flow_value"] True A nice feature of the Boykov-Kolmogorov algorithm is that a partition @@ -142,7 +132,7 @@ def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, on the search trees used during the algorithm. These trees are stored in the graph attribute `trees` of the residual network. - >>> source_tree, target_tree = R.graph['trees'] + >>> source_tree, target_tree = R.graph["trees"] >>> partition = (set(source_tree), set(G) - set(source_tree)) Or equivalently: @@ -164,17 +154,17 @@ def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, """ R = boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff) - R.graph['algorithm'] = 'boykov_kolmogorov' + R.graph["algorithm"] = "boykov_kolmogorov" return R def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): if s not in G: - raise nx.NetworkXError('node %s not in graph' % str(s)) + raise nx.NetworkXError(f"node {str(s)} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % str(t)) + raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -183,14 +173,14 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): # Initialize/reset the residual network. # This is way too slow - #nx.set_edge_attributes(R, 0, 'flow') + # nx.set_edge_attributes(R, 0, 'flow') for u in R: for e in R[u].values(): - e['flow'] = 0 + e["flow"] = 0 # Use an arbitrary high value as infinite. It is computed # when building the residual network. - INF = R.graph['inf'] + INF = R.graph["inf"] if cutoff is None: cutoff = INF @@ -218,7 +208,7 @@ def grow(): other_tree = source_tree neighbors = R_pred for v, attr in neighbors[u].items(): - if attr['capacity'] - attr['flow'] > 0: + if attr["capacity"] - attr["flow"] > 0: if v not in this_tree: if v in other_tree: return (u, v) if this_tree is source_tree else (v, u) @@ -243,7 +233,7 @@ def augment(u, v): and the input of this function. 
""" attr = R_succ[u][v] - flow = min(INF, attr['capacity'] - attr['flow']) + flow = min(INF, attr["capacity"] - attr["flow"]) path = [u] # Trace a path from u to s in source_tree. w = u @@ -251,7 +241,7 @@ def augment(u, v): n = w w = source_tree[n] attr = R_pred[n][w] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) path.append(w) path.reverse() # Trace a path from v to t in target_tree. @@ -261,16 +251,16 @@ def augment(u, v): n = w w = target_tree[n] attr = R_succ[n][w] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) path.append(w) # Augment flow along the path and check for saturated edges. it = iter(path) u = next(it) these_orphans = [] for v in it: - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow - if R_succ[u][v]['flow'] == R_succ[u][v]['capacity']: + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + if R_succ[u][v]["flow"] == R_succ[u][v]["capacity"]: if v in source_tree: source_tree[v] = None these_orphans.append(v) @@ -299,20 +289,20 @@ def adopt(): else: tree = target_tree neighbors = R_succ - nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() - if n in tree) + nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree) for v, attr, d in sorted(nbrs, key=itemgetter(2)): - if attr['capacity'] - attr['flow'] > 0: + if attr["capacity"] - attr["flow"] > 0: if _has_valid_root(v, tree): tree[u] = v dist[u] = dist[v] + 1 timestamp[u] = time break else: - nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() - if n in tree) + nbrs = ( + (n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree + ) for v, attr, d in sorted(nbrs, key=itemgetter(2)): - if attr['capacity'] - attr['flow'] > 0: + if attr["capacity"] - attr["flow"] > 0: if v not in active: active.append(v) if tree[v] == u: @@ -366,12 +356,12 @@ def _is_closer(u, v): adopt() if flow_value * 2 > INF: - raise nx.NetworkXUnbounded('Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") # Add source and target tree in a graph attribute. # A partition that defines a minimum cut can be directly # computed from the search trees as explained in the docstrings. - R.graph['trees'] = (source_tree, target_tree) + R.graph["trees"] = (source_tree, target_tree) # Add the standard flow_value graph attribute. - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R diff --git a/networkx/algorithms/flow/capacityscaling.py b/networkx/algorithms/flow/capacityscaling.py index dd390fd..1f1eb47 100644 --- a/networkx/algorithms/flow/capacityscaling.py +++ b/networkx/algorithms/flow/capacityscaling.py @@ -1,14 +1,8 @@ -# -*- coding: utf-8 -*- """ Capacity scaling minimum cost flow algorithm. """ -__author__ = """ysitu """ -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. - -__all__ = ['capacity_scaling'] +__all__ = ["capacity_scaling"] from itertools import chain from math import log @@ -27,26 +21,27 @@ def _detect_unboundedness(R): G.add_nodes_from(R) # Value simulating infinity. - inf = R.graph['inf'] + inf = R.graph["inf"] # True infinity. - f_inf = float('inf') + f_inf = float("inf") for u in R: for v, e in R[u].items(): # Compute the minimum weight of infinite-capacity (u, v) edges. 
w = f_inf for k, e in e.items(): - if e['capacity'] == inf: - w = min(w, e['weight']) + if e["capacity"] == inf: + w = min(w, e["weight"]) if w != f_inf: G.add_edge(u, v, weight=w) if nx.negative_edge_cycle(G): raise nx.NetworkXUnbounded( - 'Negative cost cycle of infinite capacity found. ' - 'Min cost flow may be unbounded below.') + "Negative cost cycle of infinite capacity found. " + "Min cost flow may be unbounded below." + ) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def _build_residual_network(G, demand, capacity, weight): """Build a residual network and initialize a zero flow. """ @@ -54,33 +49,49 @@ raise nx.NetworkXUnfeasible("Sum of the demands should be 0.") R = nx.MultiDiGraph() - R.add_nodes_from((u, {'excess': -G.nodes[u].get(demand, 0), - 'potential': 0}) for u in G) + R.add_nodes_from( + (u, {"excess": -G.nodes[u].get(demand, 0), "potential": 0}) for u in G + ) - inf = float('inf') + inf = float("inf") # Detect selfloops with infinite capacities and negative weights. for u, v, e in nx.selfloop_edges(G, data=True): if e.get(weight, 0) < 0 and e.get(capacity, inf) == inf: raise nx.NetworkXUnbounded( - 'Negative cost cycle of infinite capacity found. ' - 'Min cost flow may be unbounded below.') + "Negative cost cycle of infinite capacity found. " + "Min cost flow may be unbounded below." + ) # Extract edges with positive capacities. Self loops excluded. if G.is_multigraph(): - edge_list = [(u, v, k, e) - for u, v, k, e in G.edges(data=True, keys=True) - if u != v and e.get(capacity, inf) > 0] + edge_list = [ + (u, v, k, e) + for u, v, k, e in G.edges(data=True, keys=True) + if u != v and e.get(capacity, inf) > 0 + ] else: - edge_list = [(u, v, 0, e) for u, v, e in G.edges(data=True) - if u != v and e.get(capacity, inf) > 0] + edge_list = [ + (u, v, 0, e) + for u, v, e in G.edges(data=True) + if u != v and e.get(capacity, inf) > 0 + ] # Simulate infinity with the larger of the sum of absolute node imbalances, # the sum of finite edge capacities, or any positive value if both sums are # zero. This allows the infinite-capacity edges to be distinguished for # unboundedness detection and directly participate in residual capacity # calculation. - inf = max(sum(abs(R.nodes[u]['excess']) for u in R), - 2 * sum(e[capacity] for u, v, k, e in edge_list - if capacity in e and e[capacity] != inf)) or 1 + inf = ( + max( + sum(abs(R.nodes[u]["excess"]) for u in R), + 2 + * sum( + e[capacity] + for u, v, k, e in edge_list + if capacity in e and e[capacity] != inf + ), + ) + or 1 + ) for u, v, k, e in edge_list: r = min(e.get(capacity, inf), inf) w = e.get(weight, 0) @@ -91,7 +102,7 @@ def _build_residual_network(G, demand, capacity, weight): R.add_edge(v, u, key=(k, False), capacity=0, weight=-w, flow=0) # Record the value simulating infinity. - R.graph['inf'] = inf + R.graph["inf"] = inf _detect_unboundedness(R) @@ -101,36 +112,51 @@ def _build_flow_dict(G, R, capacity, weight): """Build a flow dictionary from a residual network. """ - inf = float('inf') + inf = float("inf") flow_dict = {} if G.is_multigraph(): for u in G: flow_dict[u] = {} for v, es in G[u].items(): - flow_dict[u][v] = dict( + flow_dict[u][v] = { # Always saturate negative selfloops.
- (k, (0 if (u != v or e.get(capacity, inf) <= 0 or - e.get(weight, 0) >= 0) else e[capacity])) - for k, e in es.items()) + k: ( + 0 + if ( + u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 + ) + else e[capacity] + ) + for k, e in es.items() + } for v, es in R[u].items(): if v in flow_dict[u]: - flow_dict[u][v].update((k[0], e['flow']) - for k, e in es.items() - if e['flow'] > 0) + flow_dict[u][v].update( + (k[0], e["flow"]) for k, e in es.items() if e["flow"] > 0 + ) else: for u in G: - flow_dict[u] = dict( + flow_dict[u] = { # Always saturate negative selfloops. - (v, (0 if (u != v or e.get(capacity, inf) <= 0 or - e.get(weight, 0) >= 0) else e[capacity])) - for v, e in G[u].items()) - flow_dict[u].update((v, e['flow']) for v, es in R[u].items() - for e in es.values() if e['flow'] > 0) + v: ( + 0 + if (u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0) + else e[capacity] + ) + for v, e in G[u].items() + } + flow_dict[u].update( + (v, e["flow"]) + for v, es in R[u].items() + for e in es.values() + if e["flow"] > 0 + ) return flow_dict -def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', - heap=BinaryHeap): +def capacity_scaling( + G, demand="demand", capacity="capacity", weight="weight", heap=BinaryHeap +): r"""Find a minimum cost flow satisfying all demands in digraph G. This is a capacity scaling successive shortest augmenting path algorithm. @@ -220,40 +246,39 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', -------- A simple example of a min cost flow problem. - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_node('a', demand = -5) - >>> G.add_node('d', demand = 5) - >>> G.add_edge('a', 'b', weight = 3, capacity = 4) - >>> G.add_edge('a', 'c', weight = 6, capacity = 10) - >>> G.add_edge('b', 'd', weight = 1, capacity = 9) - >>> G.add_edge('c', 'd', weight = 2, capacity = 5) + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) >>> flowCost, flowDict = nx.capacity_scaling(G) >>> flowCost 24 - >>> flowDict # doctest: +SKIP + >>> flowDict # doctest: +SKIP {'a': {'c': 1, 'b': 4}, 'c': {'d': 1}, 'b': {'d': 4}, 'd': {}} It is possible to change the name of the attributes used for the algorithm. >>> G = nx.DiGraph() - >>> G.add_node('p', spam = -4) - >>> G.add_node('q', spam = 2) - >>> G.add_node('a', spam = -2) - >>> G.add_node('d', spam = -1) - >>> G.add_node('t', spam = 2) - >>> G.add_node('w', spam = 3) - >>> G.add_edge('p', 'q', cost = 7, vacancies = 5) - >>> G.add_edge('p', 'a', cost = 1, vacancies = 4) - >>> G.add_edge('q', 'd', cost = 2, vacancies = 3) - >>> G.add_edge('t', 'q', cost = 1, vacancies = 2) - >>> G.add_edge('a', 't', cost = 2, vacancies = 4) - >>> G.add_edge('d', 'w', cost = 3, vacancies = 4) - >>> G.add_edge('t', 'w', cost = 4, vacancies = 1) - >>> flowCost, flowDict = nx.capacity_scaling(G, demand = 'spam', - ... capacity = 'vacancies', - ... 
weight = 'cost') + >>> G.add_node("p", spam=-4) + >>> G.add_node("q", spam=2) + >>> G.add_node("a", spam=-2) + >>> G.add_node("d", spam=-1) + >>> G.add_node("t", spam=2) + >>> G.add_node("w", spam=3) + >>> G.add_edge("p", "q", cost=7, vacancies=5) + >>> G.add_edge("p", "a", cost=1, vacancies=4) + >>> G.add_edge("q", "d", cost=2, vacancies=3) + >>> G.add_edge("t", "q", cost=1, vacancies=2) + >>> G.add_edge("a", "t", cost=2, vacancies=4) + >>> G.add_edge("d", "w", cost=3, vacancies=4) + >>> G.add_edge("t", "w", cost=4, vacancies=1) + >>> flowCost, flowDict = nx.capacity_scaling( + ... G, demand="spam", capacity="vacancies", weight="cost" + ... ) >>> flowCost 37 >>> flowDict # doctest: +SKIP @@ -261,16 +286,17 @@ """ R = _build_residual_network(G, demand, capacity, weight) - inf = float('inf') + inf = float("inf") # Account for the cost of negative selfloops. flow_cost = sum( - 0 if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 + 0 + if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 else e[capacity] * e[weight] - for u, v, e in nx.selfloop_edges(G, data=True)) + for u, v, e in nx.selfloop_edges(G, data=True) + ) # Determine the maximum edge capacity. - wmax = max(chain([-inf], - (e['capacity'] for u, v, e in R.edges(data=True)))) + wmax = max(chain([-inf], (e["capacity"] for u, v, e in R.edges(data=True)))) if wmax == -inf: # Residual network has no edges. return flow_cost, _build_flow_dict(G, R, capacity, weight) @@ -283,17 +309,17 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', # Saturate Δ-residual edges with negative reduced costs to achieve # Δ-optimality. for u in R: - p_u = R_nodes[u]['potential'] + p_u = R_nodes[u]["potential"] for v, es in R_succ[u].items(): for k, e in es.items(): - flow = e['capacity'] - e['flow'] - if e['weight'] - p_u + R_nodes[v]['potential'] < 0: - flow = e['capacity'] - e['flow'] + flow = e["capacity"] - e["flow"] + if e["weight"] - p_u + R_nodes[v]["potential"] < 0: + flow = e["capacity"] - e["flow"] if flow >= delta: - e['flow'] += flow - R_succ[v][u][(k[0], not k[1])]['flow'] -= flow - R_nodes[u]['excess'] -= flow - R_nodes[v]['excess'] += flow + e["flow"] += flow + R_succ[v][u][(k[0], not k[1])]["flow"] -= flow + R_nodes[u]["excess"] -= flow + R_nodes[v]["excess"] += flow # Determine the Δ-active nodes. S = set() T = set() @@ -302,7 +328,7 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', T_add = T.add T_remove = T.remove for u in R: - excess = R_nodes[u]['excess'] + excess = R_nodes[u]["excess"] if excess >= delta: S_add(u) elif excess <= -delta: @@ -327,15 +353,15 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', # Path found. t = u break - p_u = R_nodes[u]['potential'] + p_u = R_nodes[u]["potential"] for v, es in R_succ[u].items(): if v in d: continue wmin = inf # Find the minimum-weighted (u, v) Δ-residual edge. for k, e in es.items(): - if e['capacity'] - e['flow'] >= delta: - w = e['weight'] + if e["capacity"] - e["flow"] >= delta: + w = e["weight"] if w < wmin: wmin = w kmin = k @@ -343,7 +369,7 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', if wmin == inf: continue # Update the distance label of v.
- d_v = d_u + wmin - p_u + R_nodes[v]['potential'] + d_v = d_u + wmin - p_u + R_nodes[v]["potential"] if h_insert(v, d_v): pred[v] = (u, kmin, emin) if t is not None: @@ -351,33 +377,33 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', while u != s: v = u u, k, e = pred[v] - e['flow'] += delta - R_succ[v][u][(k[0], not k[1])]['flow'] -= delta + e["flow"] += delta + R_succ[v][u][(k[0], not k[1])]["flow"] -= delta # Account node excess and deficit. - R_nodes[s]['excess'] -= delta - R_nodes[t]['excess'] += delta - if R_nodes[s]['excess'] < delta: + R_nodes[s]["excess"] -= delta + R_nodes[t]["excess"] += delta + if R_nodes[s]["excess"] < delta: S_remove(s) - if R_nodes[t]['excess'] > -delta: + if R_nodes[t]["excess"] > -delta: T_remove(t) # Update node potentials. d_t = d[t] for u, d_u in d.items(): - R_nodes[u]['potential'] -= d_u - d_t + R_nodes[u]["potential"] -= d_u - d_t else: # Path not found. S_remove(s) delta //= 2 - if any(R.nodes[u]['excess'] != 0 for u in R): - raise nx.NetworkXUnfeasible('No flow satisfying all demands.') + if any(R.nodes[u]["excess"] != 0 for u in R): + raise nx.NetworkXUnfeasible("No flow satisfying all demands.") # Calculate the flow cost. for u in R: for v, es in R_succ[u].items(): for e in es.values(): - flow = e['flow'] + flow = e["flow"] if flow > 0: - flow_cost += flow * e['weight'] + flow_cost += flow * e["weight"] return flow_cost, _build_flow_dict(G, R, capacity, weight) diff --git a/networkx/algorithms/flow/dinitz_alg.py b/networkx/algorithms/flow/dinitz_alg.py index 8eb6b07..e996e14 100644 --- a/networkx/algorithms/flow/dinitz_alg.py +++ b/networkx/algorithms/flow/dinitz_alg.py @@ -1,13 +1,3 @@ -# dinitz.py - Dinitz' algorithm for maximum flow problems. -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Author: Jordi Torrents """ Dinitz' algorithm for maximum flow problems. """ @@ -17,10 +7,10 @@ from networkx.algorithms.flow.utils import build_residual_network from networkx.utils import pairwise -__all__ = ['dinitz'] +__all__ = ["dinitz"] -def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff=None): +def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None): """Find a maximum single-commodity flow using Dinitz' algorithm. This function returns the residual network resulting after computing @@ -112,7 +102,6 @@ def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff Examples -------- - >>> import networkx as nx >>> from networkx.algorithms.flow import dinitz The functions that implement flow algorithms and output a residual @@ -120,19 +109,19 @@ def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff namespace, so you have to explicitly import them from the flow package. 
>>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) - >>> R = dinitz(G, 'x', 'y') - >>> flow_value = nx.maximum_flow_value(G, 'x', 'y') + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = dinitz(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") >>> flow_value 3.0 - >>> flow_value == R.graph['flow_value'] + >>> flow_value == R.graph["flow_value"] True References @@ -144,17 +133,17 @@ def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff """ R = dinitz_impl(G, s, t, capacity, residual, cutoff) - R.graph['algorithm'] = 'dinitz' + R.graph["algorithm"] = "dinitz" return R def dinitz_impl(G, s, t, capacity, residual, cutoff): if s not in G: - raise nx.NetworkXError('node %s not in graph' % str(s)) + raise nx.NetworkXError(f"node {str(s)} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % str(t)) + raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -164,11 +153,11 @@ def dinitz_impl(G, s, t, capacity, residual, cutoff): # Initialize/reset the residual network. for u in R: for e in R[u].values(): - e['flow'] = 0 + e["flow"] = 0 # Use an arbitrary high value as infinite. It is computed # when building the residual network. - INF = R.graph['inf'] + INF = R.graph["inf"] if cutoff is None: cutoff = INF @@ -185,7 +174,7 @@ def breath_first_search(): u = queue.popleft() for v in R_succ[u]: attr = R_succ[u][v] - if v not in parents and attr['capacity'] - attr['flow'] > 0: + if v not in parents and attr["capacity"] - attr["flow"] > 0: parents[v] = u queue.append(v) return parents @@ -198,14 +187,14 @@ def depth_first_search(parents): while u != s: path.append(u) v = parents[u] - flow = min(flow, R_pred[u][v]['capacity'] - R_pred[u][v]['flow']) + flow = min(flow, R_pred[u][v]["capacity"] - R_pred[u][v]["flow"]) u = v path.append(s) # Augment the flow along the path found if flow > 0: for u, v in pairwise(path): - R_pred[u][v]['flow'] += flow - R_pred[v][u]['flow'] -= flow + R_pred[u][v]["flow"] += flow + R_pred[v][u]["flow"] -= flow return flow flow_value = 0 @@ -215,9 +204,8 @@ def depth_first_search(parents): break this_flow = depth_first_search(parents) if this_flow * 2 > INF: - raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") flow_value += this_flow - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R diff --git a/networkx/algorithms/flow/edmondskarp.py b/networkx/algorithms/flow/edmondskarp.py index aa94f98..f0cc94c 100644 --- a/networkx/algorithms/flow/edmondskarp.py +++ b/networkx/algorithms/flow/edmondskarp.py @@ -1,17 +1,11 @@ -# -*- coding: utf-8 -*- """ Edmonds-Karp algorithm for maximum flow problems. 
""" -__author__ = """ysitu """ -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. - import networkx as nx -from networkx.algorithms.flow.utils import * +from networkx.algorithms.flow.utils import build_residual_network -__all__ = ['edmonds_karp'] +__all__ = ["edmonds_karp"] def edmonds_karp_core(R, s, t, cutoff): @@ -21,7 +15,7 @@ def edmonds_karp_core(R, s, t, cutoff): R_pred = R.pred R_succ = R.succ - inf = R.graph['inf'] + inf = R.graph["inf"] def augment(path): """Augment flow along a path from s to t. @@ -32,17 +26,16 @@ def augment(path): u = next(it) for v in it: attr = R_succ[u][v] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) u = v if flow * 2 > inf: - raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") # Augment flow along the path. it = iter(path) u = next(it) for v in it: - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow u = v return flow @@ -58,7 +51,7 @@ def bidirectional_bfs(): if len(q_s) <= len(q_t): for u in q_s: for v, attr in R_succ[u].items(): - if v not in pred and attr['flow'] < attr['capacity']: + if v not in pred and attr["flow"] < attr["capacity"]: pred[v] = u if v in succ: return v, pred, succ @@ -69,7 +62,7 @@ def bidirectional_bfs(): else: for u in q_t: for v, attr in R_pred[u].items(): - if v not in succ and attr['flow'] < attr['capacity']: + if v not in succ and attr["flow"] < attr["capacity"]: succ[v] = u if v in pred: return v, pred, succ @@ -105,11 +98,11 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): """Implementation of the Edmonds-Karp algorithm. """ if s not in G: - raise nx.NetworkXError('node %s not in graph' % str(s)) + raise nx.NetworkXError(f"node {str(s)} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % str(t)) + raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -119,17 +112,18 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): # Initialize/reset the residual network. for u in R: for e in R[u].values(): - e['flow'] = 0 + e["flow"] = 0 if cutoff is None: - cutoff = float('inf') - R.graph['flow_value'] = edmonds_karp_core(R, s, t, cutoff) + cutoff = float("inf") + R.graph["flow_value"] = edmonds_karp_core(R, s, t, cutoff) return R -def edmonds_karp(G, s, t, capacity='capacity', residual=None, value_only=False, - cutoff=None): +def edmonds_karp( + G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None +): """Find a maximum single-commodity flow using the Edmonds-Karp algorithm. This function returns the residual network resulting after computing @@ -221,7 +215,6 @@ def edmonds_karp(G, s, t, capacity='capacity', residual=None, value_only=False, Examples -------- - >>> import networkx as nx >>> from networkx.algorithms.flow import edmonds_karp The functions that implement flow algorithms and output a residual @@ -229,22 +222,22 @@ def edmonds_karp(G, s, t, capacity='capacity', residual=None, value_only=False, namespace, so you have to explicitly import them from the flow package. 
>>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) - >>> R = edmonds_karp(G, 'x', 'y') - >>> flow_value = nx.maximum_flow_value(G, 'x', 'y') + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = edmonds_karp(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") >>> flow_value 3.0 - >>> flow_value == R.graph['flow_value'] + >>> flow_value == R.graph["flow_value"] True """ R = edmonds_karp_impl(G, s, t, capacity, residual, cutoff) - R.graph['algorithm'] = 'edmonds_karp' + R.graph["algorithm"] = "edmonds_karp" return R diff --git a/networkx/algorithms/flow/gomory_hu.py b/networkx/algorithms/flow/gomory_hu.py index 7ef3d20..f244339 100644 --- a/networkx/algorithms/flow/gomory_hu.py +++ b/networkx/algorithms/flow/gomory_hu.py @@ -1,14 +1,3 @@ -# -*- coding: utf-8 -*- -# gomory_hu.py - function for computing Gomory Hu trees -# -# Copyright 2017-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Author: Jordi Torrents """ Gomory-Hu tree of undirected Graphs. """ @@ -20,11 +9,11 @@ default_flow_func = edmonds_karp -__all__ = ['gomory_hu_tree'] +__all__ = ["gomory_hu_tree"] -@not_implemented_for('directed') -def gomory_hu_tree(G, capacity='capacity', flow_func=None): +@not_implemented_for("directed") +def gomory_hu_tree(G, capacity="capacity", flow_func=None): r"""Returns the Gomory-Hu tree of an undirected graph G. A Gomory-Hu tree of an undirected graph with capacities is a @@ -70,24 +59,24 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None): Raises ------ - NetworkXNotImplemented : Exception + NetworkXNotImplemented Raised if the input graph is directed. - NetworkXError: Exception + NetworkXError Raised if the input graph is an empty Graph. Examples -------- >>> G = nx.karate_club_graph() - >>> nx.set_edge_attributes(G, 1, 'capacity') + >>> nx.set_edge_attributes(G, 1, "capacity") >>> T = nx.gomory_hu_tree(G) >>> # The value of the minimum cut between any pair ... # of nodes in G is the minimum edge weight in the ... # shortest path between the two nodes in the ... # Gomory-Hu tree. ... def minimum_edge_weight_in_shortest_path(T, u, v): - ... path = nx.shortest_path(T, u, v, weight='weight') - ... return min((T[u][v]['weight'], (u,v)) for (u, v) in zip(path, path[1:])) + ... path = nx.shortest_path(T, u, v, weight="weight") + ... return min((T[u][v]["weight"], (u, v)) for (u, v) in zip(path, path[1:])) >>> u, v = 0, 33 >>> cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v) >>> cut_value @@ -104,7 +93,7 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None): >>> U, V = list(nx.connected_components(T)) >>> # Thus U and V form a partition that defines a minimum cut ... # between u and v in G. You can compute the edge cut set, - ... # that is, the set of edges that if removed from G will + ... # that is, the set of edges that if removed from G will ... 
# disconnect u from v in G, with this information: ... cutset = set() >>> for x, nbrs in ((n, G[n]) for n in U): @@ -144,7 +133,7 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None): flow_func = default_flow_func if len(G) == 0: # empty graph - msg = 'Empty Graph does not have a Gomory-Hu tree representation' + msg = "Empty Graph does not have a Gomory-Hu tree representation" raise nx.NetworkXError(msg) # Start the tree as a star graph with an arbitrary node at the center @@ -163,18 +152,25 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None): # Find neighbor in the tree target = tree[source] # compute minimum cut - cut_value, partition = nx.minimum_cut(G, source, target, - capacity=capacity, flow_func=flow_func, - residual=R) + cut_value, partition = nx.minimum_cut( + G, source, target, capacity=capacity, flow_func=flow_func, residual=R + ) labels[(source, target)] = cut_value # Update the tree # Source will always be in partition[0] and target in partition[1] for node in partition[0]: if node != source and node in tree and tree[node] == target: tree[node] = source - labels[(node, source)] = labels.get((node, target), cut_value) + labels[node, source] = labels.get((node, target), cut_value) + # + if target != root and tree[target] in partition[0]: + labels[source, tree[target]] = labels[target, tree[target]] + labels[target, source] = cut_value + tree[source] = tree[target] + tree[target] = source + # Build the tree T = nx.Graph() T.add_nodes_from(G) - T.add_weighted_edges_from(((u, v, labels[(u, v)]) for u, v in tree.items())) + T.add_weighted_edges_from(((u, v, labels[u, v]) for u, v in tree.items())) return T diff --git a/networkx/algorithms/flow/maxflow.py b/networkx/algorithms/flow/maxflow.py index b307359..8d2fb8f 100644 --- a/networkx/algorithms/flow/maxflow.py +++ b/networkx/algorithms/flow/maxflow.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Maximum flow (and minimum cut) algorithms on capacitated graphs. """ @@ -10,6 +9,7 @@ from .preflowpush import preflow_push from .shortestaugmentingpath import shortest_augmenting_path from .utils import build_flow_dict + # Define the default flow function for computing maximum flow. default_flow_func = preflow_push # Functions that don't support cutoff for minimum cut computations. @@ -21,14 +21,10 @@ shortest_augmenting_path, ] -__all__ = ['maximum_flow', - 'maximum_flow_value', - 'minimum_cut', - 'minimum_cut_value'] +__all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"] -def maximum_flow(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find a maximum single-commodity flow. 
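A sketch of the property the tree construction above guarantees (the pair (0, 33) and capacity 1 setup come from the doctest; (5, 16) is an arbitrary extra pair): for every node pair, the minimum cut value in G equals the lightest edge on the path between the pair in the tree.

import networkx as nx

G = nx.karate_club_graph()
nx.set_edge_attributes(G, 1, "capacity")
T = nx.gomory_hu_tree(G)
for u, v in [(0, 33), (5, 16)]:
    path = nx.shortest_path(T, u, v, weight="weight")
    tree_cut = min(T[a][b]["weight"] for a, b in zip(path, path[1:]))
    assert tree_cut == nx.minimum_cut_value(G, u, v)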
Parameters @@ -128,39 +124,41 @@ def maximum_flow(flowG, _s, _t, Examples -------- - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) maximum_flow returns both the value of the maximum flow and a dictionary with all flows. - >>> flow_value, flow_dict = nx.maximum_flow(G, 'x', 'y') + >>> flow_value, flow_dict = nx.maximum_flow(G, "x", "y") >>> flow_value 3.0 - >>> print(flow_dict['x']['b']) + >>> print(flow_dict["x"]["b"]) 1.0 You can also use alternative algorithms for computing the maximum flow by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> flow_value == nx.maximum_flow(G, 'x', 'y', - ... flow_func=shortest_augmenting_path)[0] + >>> flow_value == nx.maximum_flow(G, "x", "y", flow_func=shortest_augmenting_path)[ + ... 0 + ... ] True """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) flow_func = default_flow_func if not callable(flow_func): @@ -169,11 +167,10 @@ def maximum_flow(flowG, _s, _t, R = flow_func(flowG, _s, _t, capacity=capacity, value_only=False, **kwargs) flow_dict = build_flow_dict(flowG, R) - return (R.graph['flow_value'], flow_dict) + return (R.graph["flow_value"], flow_dict) -def maximum_flow_value(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find the value of maximum single-commodity flow. Parameters @@ -269,21 +266,20 @@ def maximum_flow_value(flowG, _s, _t, Examples -------- - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) maximum_flow_value computes only the value of the maximum flow: - >>> flow_value = nx.maximum_flow_value(G, 'x', 'y') + >>> flow_value = nx.maximum_flow_value(G, "x", "y") >>> flow_value 3.0 @@ -291,15 +287,18 @@ def maximum_flow_value(flowG, _s, _t, maximum flow by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> flow_value == nx.maximum_flow_value(G, 'x', 'y', - ... 
flow_func=shortest_augmenting_path) + >>> flow_value == nx.maximum_flow_value( + ... G, "x", "y", flow_func=shortest_augmenting_path + ... ) True """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) flow_func = default_flow_func if not callable(flow_func): @@ -307,11 +306,10 @@ def maximum_flow_value(flowG, _s, _t, R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) - return R.graph['flow_value'] + return R.graph["flow_value"] -def minimum_cut(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value and the node partition of a minimum (s, t)-cut. Use the max-flow min-cut theorem, i.e., the capacity of a minimum @@ -407,21 +405,20 @@ def minimum_cut(flowG, _s, _t, Examples -------- - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity = 3.0) - >>> G.add_edge('x','b', capacity = 1.0) - >>> G.add_edge('a','c', capacity = 3.0) - >>> G.add_edge('b','c', capacity = 5.0) - >>> G.add_edge('b','d', capacity = 4.0) - >>> G.add_edge('d','e', capacity = 2.0) - >>> G.add_edge('c','y', capacity = 2.0) - >>> G.add_edge('e','y', capacity = 3.0) + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) minimum_cut computes both the value of the minimum cut and the node partition: - >>> cut_value, partition = nx.minimum_cut(G, 'x', 'y') + >>> cut_value, partition = nx.minimum_cut(G, "x", "y") >>> reachable, non_reachable = partition 'partition' here is a tuple with the two sets of nodes that define @@ -433,34 +430,34 @@ def minimum_cut(flowG, _s, _t, ... cutset.update((u, v) for v in nbrs if v in non_reachable) >>> print(sorted(cutset)) [('c', 'y'), ('x', 'b')] - >>> cut_value == sum(G.edges[u, v]['capacity'] for (u, v) in cutset) + >>> cut_value == sum(G.edges[u, v]["capacity"] for (u, v) in cutset) True You can also use alternative algorithms for computing the minimum cut by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> cut_value == nx.minimum_cut(G, 'x', 'y', - ... flow_func=shortest_augmenting_path)[0] + >>> cut_value == nx.minimum_cut(G, "x", "y", flow_func=shortest_augmenting_path)[0] True """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." 
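A sketch of what that guard enforces (two_phase is a keyword of shortest_augmenting_path, defined later in this patch): extra keyword arguments are only forwarded once a flow_func is named explicitly.

import networkx as nx
from networkx.algorithms.flow import shortest_augmenting_path

G = nx.DiGraph()
G.add_edge("x", "y", capacity=1.0)
try:
    nx.minimum_cut(G, "x", "y", two_phase=True)        # kwargs without flow_func
except nx.NetworkXError:
    pass                                               # rejected, as above
cut_value, _ = nx.minimum_cut(G, "x", "y", flow_func=shortest_augmenting_path,
                              two_phase=True)          # forwarded to flow_func
assert cut_value == 1.0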
+ ) flow_func = default_flow_func if not callable(flow_func): raise nx.NetworkXError("flow_func has to be callable.") - if kwargs.get('cutoff') is not None and flow_func in flow_funcs: + if kwargs.get("cutoff") is not None and flow_func in flow_funcs: raise nx.NetworkXError("cutoff should not be specified.") R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) # Remove saturated edges from the residual network - cutset = [(u, v, d) for u, v, d in R.edges(data=True) - if d['flow'] == d['capacity']] + cutset = [(u, v, d) for u, v, d in R.edges(data=True) if d["flow"] == d["capacity"]] R.remove_edges_from(cutset) # Then, reachable and non reachable nodes from source in the @@ -472,11 +469,10 @@ def minimum_cut(flowG, _s, _t, # sure that it is reusable. if cutset is not None: R.add_edges_from(cutset) - return (R.graph['flow_value'], partition) + return (R.graph["flow_value"], partition) -def minimum_cut_value(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value of a minimum (s, t)-cut. Use the max-flow min-cut theorem, i.e., the capacity of a minimum @@ -569,21 +565,20 @@ def minimum_cut_value(flowG, _s, _t, Examples -------- - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity = 3.0) - >>> G.add_edge('x','b', capacity = 1.0) - >>> G.add_edge('a','c', capacity = 3.0) - >>> G.add_edge('b','c', capacity = 5.0) - >>> G.add_edge('b','d', capacity = 4.0) - >>> G.add_edge('d','e', capacity = 2.0) - >>> G.add_edge('c','y', capacity = 2.0) - >>> G.add_edge('e','y', capacity = 3.0) + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) minimum_cut_value computes only the value of the minimum cut: - >>> cut_value = nx.minimum_cut_value(G, 'x', 'y') + >>> cut_value = nx.minimum_cut_value(G, "x", "y") >>> cut_value 3.0 @@ -591,23 +586,26 @@ def minimum_cut_value(flowG, _s, _t, minimum cut by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> cut_value == nx.minimum_cut_value(G, 'x', 'y', - ... flow_func=shortest_augmenting_path) + >>> cut_value == nx.minimum_cut_value( + ... G, "x", "y", flow_func=shortest_augmenting_path + ... ) True """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." 
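The partition construction in minimum_cut above can be reproduced by hand. A sketch on the doctest graph (this mirrors, not replaces, what the function does internally): drop the saturated residual arcs, then take everything still reachable from the source.

import networkx as nx
from networkx.algorithms.flow import edmonds_karp

G = nx.DiGraph()
for u, v, c in [("x", "a", 3.0), ("x", "b", 1.0), ("a", "c", 3.0), ("b", "c", 5.0),
                ("b", "d", 4.0), ("d", "e", 2.0), ("c", "y", 2.0), ("e", "y", 3.0)]:
    G.add_edge(u, v, capacity=c)
R = edmonds_karp(G, "x", "y")
saturated = [(u, v) for u, v, d in R.edges(data=True) if d["flow"] == d["capacity"]]
R.remove_edges_from(saturated)
reachable = set(nx.descendants(R, "x")) | {"x"}
assert reachable == nx.minimum_cut(G, "x", "y")[1][0]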
+ ) flow_func = default_flow_func if not callable(flow_func): raise nx.NetworkXError("flow_func has to be callable.") - if kwargs.get('cutoff') is not None and flow_func in flow_funcs: + if kwargs.get("cutoff") is not None and flow_func in flow_funcs: raise nx.NetworkXError("cutoff should not be specified.") R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) - return R.graph['flow_value'] + return R.graph["flow_value"] diff --git a/networkx/algorithms/flow/mincost.py b/networkx/algorithms/flow/mincost.py index 44cdc45..6089c71 100644 --- a/networkx/algorithms/flow/mincost.py +++ b/networkx/algorithms/flow/mincost.py @@ -1,24 +1,13 @@ -# -*- coding: utf-8 -*- """ Minimum cost flow algorithms on directed connected graphs. """ -__author__ = """Loïc Séguin-C. """ -# Copyright (C) 2010 Loïc Séguin-C. -# All rights reserved. -# BSD license. - - -__all__ = ['min_cost_flow_cost', - 'min_cost_flow', - 'cost_of_flow', - 'max_flow_min_cost'] +__all__ = ["min_cost_flow_cost", "min_cost_flow", "cost_of_flow", "max_flow_min_cost"] import networkx as nx -def min_cost_flow_cost(G, demand='demand', capacity='capacity', - weight='weight'): +def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight"): r"""Find the cost of a minimum cost flow satisfying all demands in digraph G. G is a digraph with edge costs and capacities and in which nodes @@ -93,25 +82,22 @@ def min_cost_flow_cost(G, demand='demand', capacity='capacity', -------- A simple example of a min cost flow problem. - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_node('a', demand = -5) - >>> G.add_node('d', demand = 5) - >>> G.add_edge('a', 'b', weight = 3, capacity = 4) - >>> G.add_edge('a', 'c', weight = 6, capacity = 10) - >>> G.add_edge('b', 'd', weight = 1, capacity = 9) - >>> G.add_edge('c', 'd', weight = 2, capacity = 5) + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) >>> flowCost = nx.min_cost_flow_cost(G) >>> flowCost 24 """ - return nx.network_simplex(G, demand=demand, capacity=capacity, - weight=weight)[0] + return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[0] -def min_cost_flow(G, demand='demand', capacity='capacity', - weight='weight'): - r"""Return a minimum cost flow satisfying all demands in digraph G. +def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): + r"""Returns a minimum cost flow satisfying all demands in digraph G. G is a digraph with edge costs and capacities and in which nodes have demand, i.e., they want to send or receive some amount of @@ -186,21 +172,19 @@ def min_cost_flow(G, demand='demand', capacity='capacity', -------- A simple example of a min cost flow problem. 
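A compact standalone version of the doctest that follows, cross-checking the mincost entry points against each other (the optimal cost 24 is the value given below):

import networkx as nx

G = nx.DiGraph()
G.add_node("a", demand=-5)
G.add_node("d", demand=5)
G.add_edge("a", "b", weight=3, capacity=4)
G.add_edge("a", "c", weight=6, capacity=10)
G.add_edge("b", "d", weight=1, capacity=9)
G.add_edge("c", "d", weight=2, capacity=5)
flowDict = nx.min_cost_flow(G)
assert nx.cost_of_flow(G, flowDict) == nx.min_cost_flow_cost(G) == 24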
- >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_node('a', demand = -5) - >>> G.add_node('d', demand = 5) - >>> G.add_edge('a', 'b', weight = 3, capacity = 4) - >>> G.add_edge('a', 'c', weight = 6, capacity = 10) - >>> G.add_edge('b', 'd', weight = 1, capacity = 9) - >>> G.add_edge('c', 'd', weight = 2, capacity = 5) + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) >>> flowDict = nx.min_cost_flow(G) """ - return nx.network_simplex(G, demand=demand, capacity=capacity, - weight=weight)[1] + return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1] -def cost_of_flow(G, flowDict, weight='weight'): +def cost_of_flow(G, flowDict, weight="weight"): """Compute the cost of the flow given by flowDict on graph G. Note that this function does not check for the validity of the @@ -241,12 +225,11 @@ def cost_of_flow(G, flowDict, weight='weight'): multiplying the relevant edge attributes by a convenient constant factor (eg 100). """ - return sum((flowDict[u][v] * d.get(weight, 0) - for u, v, d in G.edges(data=True))) + return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True))) -def max_flow_min_cost(G, s, t, capacity='capacity', weight='weight'): - """Return a maximum (s, t)-flow of minimum cost. +def max_flow_min_cost(G, s, t, capacity="capacity", weight="weight"): + """Returns a maximum (s, t)-flow of minimum cost. G is a digraph with edge costs and capacities. There is a source node s and a sink node t. This function finds a maximum flow from @@ -310,18 +293,22 @@ def max_flow_min_cost(G, s, t, capacity='capacity', weight='weight'): Examples -------- >>> G = nx.DiGraph() - >>> G.add_edges_from([(1, 2, {'capacity': 12, 'weight': 4}), - ... (1, 3, {'capacity': 20, 'weight': 6}), - ... (2, 3, {'capacity': 6, 'weight': -3}), - ... (2, 6, {'capacity': 14, 'weight': 1}), - ... (3, 4, {'weight': 9}), - ... (3, 5, {'capacity': 10, 'weight': 5}), - ... (4, 2, {'capacity': 19, 'weight': 13}), - ... (4, 5, {'capacity': 4, 'weight': 0}), - ... (5, 7, {'capacity': 28, 'weight': 2}), - ... (6, 5, {'capacity': 11, 'weight': 1}), - ... (6, 7, {'weight': 8}), - ... (7, 4, {'capacity': 6, 'weight': 6})]) + >>> G.add_edges_from( + ... [ + ... (1, 2, {"capacity": 12, "weight": 4}), + ... (1, 3, {"capacity": 20, "weight": 6}), + ... (2, 3, {"capacity": 6, "weight": -3}), + ... (2, 6, {"capacity": 14, "weight": 1}), + ... (3, 4, {"weight": 9}), + ... (3, 5, {"capacity": 10, "weight": 5}), + ... (4, 2, {"capacity": 19, "weight": 13}), + ... (4, 5, {"capacity": 4, "weight": 0}), + ... (5, 7, {"capacity": 28, "weight": 2}), + ... (6, 5, {"capacity": 11, "weight": 1}), + ... (6, 7, {"weight": 8}), + ... (7, 4, {"capacity": 6, "weight": 6}), + ... ] + ... ) >>> mincostFlow = nx.max_flow_min_cost(G, 1, 7) >>> mincost = nx.cost_of_flow(G, mincostFlow) >>> mincost @@ -330,8 +317,9 @@ def max_flow_min_cost(G, s, t, capacity='capacity', weight='weight'): >>> maxFlow = maximum_flow(G, 1, 7)[1] >>> nx.cost_of_flow(G, maxFlow) >= mincost True - >>> mincostFlowValue = (sum((mincostFlow[u][7] for u in G.predecessors(7))) - ... - sum((mincostFlow[7][v] for v in G.successors(7)))) + >>> mincostFlowValue = sum((mincostFlow[u][7] for u in G.predecessors(7))) - sum( + ... (mincostFlow[7][v] for v in G.successors(7)) + ... 
) >>> mincostFlowValue == nx.maximum_flow_value(G, 1, 7) True diff --git a/networkx/algorithms/flow/networksimplex.py b/networkx/algorithms/flow/networksimplex.py index 8ac5bb7..bdf84f1 100644 --- a/networkx/algorithms/flow/networksimplex.py +++ b/networkx/algorithms/flow/networksimplex.py @@ -1,32 +1,17 @@ -# -*- coding: utf-8 -*- """ Minimum cost flow algorithms on directed connected graphs. """ -__author__ = """Loïc Séguin-C. """ -# Copyright (C) 2010 Loïc Séguin-C. -# All rights reserved. -# BSD license. - -__all__ = ['network_simplex'] +__all__ = ["network_simplex"] from itertools import chain, islice, repeat from math import ceil, sqrt import networkx as nx from networkx.utils import not_implemented_for -try: - from itertools import izip as zip -except ImportError: - pass -try: - range = xrange -except NameError: - pass - -@not_implemented_for('undirected') -def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): +@not_implemented_for("undirected") +def network_simplex(G, demand="demand", capacity="capacity", weight="weight"): r"""Find a minimum cost flow satisfying all demands in digraph G. This is a primal network simplex algorithm that uses the leaving @@ -108,18 +93,17 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): -------- A simple example of a min cost flow problem. - >>> import networkx as nx >>> G = nx.DiGraph() - >>> G.add_node('a', demand=-5) - >>> G.add_node('d', demand=5) - >>> G.add_edge('a', 'b', weight=3, capacity=4) - >>> G.add_edge('a', 'c', weight=6, capacity=10) - >>> G.add_edge('b', 'd', weight=1, capacity=9) - >>> G.add_edge('c', 'd', weight=2, capacity=5) + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) >>> flowCost, flowDict = nx.network_simplex(G) >>> flowCost 24 - >>> flowDict # doctest: +SKIP + >>> flowDict # doctest: +SKIP {'a': {'c': 1, 'b': 4}, 'c': {'d': 1}, 'b': {'d': 4}, 'd': {}} The mincost flow algorithm can also be used to solve shortest path @@ -129,42 +113,51 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): min cost flow will be the distance between u and v and edges carrying positive flow will indicate the path. - >>> G=nx.DiGraph() - >>> G.add_weighted_edges_from([('s', 'u' ,10), ('s' ,'x' ,5), - ... ('u', 'v' ,1), ('u' ,'x' ,2), - ... ('v', 'y' ,1), ('x' ,'u' ,3), - ... ('x', 'v' ,5), ('x' ,'y' ,2), - ... ('y', 's' ,7), ('y' ,'v' ,6)]) - >>> G.add_node('s', demand = -1) - >>> G.add_node('v', demand = 1) + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... [ + ... ("s", "u", 10), + ... ("s", "x", 5), + ... ("u", "v", 1), + ... ("u", "x", 2), + ... ("v", "y", 1), + ... ("x", "u", 3), + ... ("x", "v", 5), + ... ("x", "y", 2), + ... ("y", "s", 7), + ... ("y", "v", 6), + ... ] + ... 
) + >>> G.add_node("s", demand=-1) + >>> G.add_node("v", demand=1) >>> flowCost, flowDict = nx.network_simplex(G) - >>> flowCost == nx.shortest_path_length(G, 's', 'v', weight='weight') + >>> flowCost == nx.shortest_path_length(G, "s", "v", weight="weight") True >>> sorted([(u, v) for u in flowDict for v in flowDict[u] if flowDict[u][v] > 0]) [('s', 'x'), ('u', 'v'), ('x', 'u')] - >>> nx.shortest_path(G, 's', 'v', weight = 'weight') + >>> nx.shortest_path(G, "s", "v", weight="weight") ['s', 'x', 'u', 'v'] It is possible to change the name of the attributes used for the algorithm. >>> G = nx.DiGraph() - >>> G.add_node('p', spam=-4) - >>> G.add_node('q', spam=2) - >>> G.add_node('a', spam=-2) - >>> G.add_node('d', spam=-1) - >>> G.add_node('t', spam=2) - >>> G.add_node('w', spam=3) - >>> G.add_edge('p', 'q', cost=7, vacancies=5) - >>> G.add_edge('p', 'a', cost=1, vacancies=4) - >>> G.add_edge('q', 'd', cost=2, vacancies=3) - >>> G.add_edge('t', 'q', cost=1, vacancies=2) - >>> G.add_edge('a', 't', cost=2, vacancies=4) - >>> G.add_edge('d', 'w', cost=3, vacancies=4) - >>> G.add_edge('t', 'w', cost=4, vacancies=1) - >>> flowCost, flowDict = nx.network_simplex(G, demand='spam', - ... capacity='vacancies', - ... weight='cost') + >>> G.add_node("p", spam=-4) + >>> G.add_node("q", spam=2) + >>> G.add_node("a", spam=-2) + >>> G.add_node("d", spam=-1) + >>> G.add_node("t", spam=2) + >>> G.add_node("w", spam=3) + >>> G.add_edge("p", "q", cost=7, vacancies=5) + >>> G.add_edge("p", "a", cost=1, vacancies=4) + >>> G.add_edge("q", "d", cost=2, vacancies=3) + >>> G.add_edge("t", "q", cost=1, vacancies=2) + >>> G.add_edge("a", "t", cost=2, vacancies=4) + >>> G.add_edge("d", "w", cost=3, vacancies=4) + >>> G.add_edge("t", "w", cost=4, vacancies=1) + >>> flowCost, flowDict = nx.network_simplex( + ... G, demand="spam", capacity="vacancies", weight="cost" + ... 
) >>> flowCost 37 >>> flowDict # doctest: +SKIP @@ -185,19 +178,19 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): ########################################################################### if len(G) == 0: - raise nx.NetworkXError('graph has no nodes') + raise nx.NetworkXError("graph has no nodes") # Number all nodes and edges and hereafter reference them using ONLY their # numbers - N = list(G) # nodes - I = {u: i for i, u in enumerate(N)} # node indices + N = list(G) # nodes + I = {u: i for i, u in enumerate(N)} # node indices D = [G.nodes[u].get(demand, 0) for u in N] # node demands - inf = float('inf') + inf = float("inf") for p, b in zip(N, D): if abs(b) == inf: - raise nx.NetworkXError('node %r has infinite demand' % (p,)) + raise nx.NetworkXError(f"node {p!r} has infinite demand") multigraph = G.is_multigraph() S = [] # edge sources @@ -212,8 +205,7 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): edges = G.edges(data=True) else: edges = G.edges(data=True, keys=True) - edges = (e for e in edges - if e[0] != e[1] and e[-1].get(capacity, inf) != 0) + edges = (e for e in edges if e[0] != e[1] and e[-1].get(capacity, inf) != 0) for i, e in enumerate(edges): S.append(I[e[0]]) T.append(I[e[1]]) @@ -225,32 +217,31 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): for e, c in zip(E, C): if abs(c) == inf: - raise nx.NetworkXError('edge %r has infinite weight' % (e,)) + raise nx.NetworkXError(f"edge {e!r} has infinite weight") if not multigraph: edges = nx.selfloop_edges(G, data=True) else: edges = nx.selfloop_edges(G, data=True, keys=True) for e in edges: if abs(e[-1].get(weight, 0)) == inf: - raise nx.NetworkXError('edge %r has infinite weight' % (e[:-1],)) + raise nx.NetworkXError(f"edge {e[:-1]!r} has infinite weight") ########################################################################### # Quick infeasibility detection ########################################################################### if sum(D) != 0: - raise nx.NetworkXUnfeasible('total node demand is not zero') + raise nx.NetworkXUnfeasible("total node demand is not zero") for e, u in zip(E, U): if u < 0: - raise nx.NetworkXUnfeasible('edge %r has negative capacity' % (e,)) + raise nx.NetworkXUnfeasible(f"edge {e!r} has negative capacity") if not multigraph: edges = nx.selfloop_edges(G, data=True) else: edges = nx.selfloop_edges(G, data=True, keys=True) for e in edges: if e[-1].get(capacity, inf) < 0: - raise nx.NetworkXUnfeasible( - 'edge %r has negative capacity' % (e[:-1],)) + raise nx.NetworkXUnfeasible(f"edge {e[:-1]!r} has negative capacity") ########################################################################### # Initialization @@ -263,37 +254,45 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): # feasible spanning tree. n = len(N) # number of nodes for p, d in enumerate(D): - if d > 0: # Must be greater-than here. Zero-demand nodes must have - # edges pointing towards the root to ensure strong - # feasibility. + # Must be greater-than here. Zero-demand nodes must have + # edges pointing towards the root to ensure strong + # feasibility. 
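The "faux infinity" computed just below deserves a worked instance. A sketch with hypothetical arrays shaped like U, C and D here: three times the largest of the finite-capacity sum, the absolute-cost sum and the absolute demands dominates anything the pivots can produce.

from itertools import chain

inf = float("inf")
U = [4, 10, 9, 5]   # hypothetical finite edge capacities
C = [3, 6, 1, 2]    # hypothetical edge costs
D = [-5, 0, 0, 5]   # hypothetical node demands
faux_inf = 3 * max(chain([sum(u for u in U if u < inf), sum(abs(c) for c in C)],
                         (abs(d) for d in D))) or 1
assert faux_inf == 3 * max(28, 12, 5) == 84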
+ if d > 0: S.append(-1) T.append(p) else: S.append(p) T.append(-1) - faux_inf = 3 * max(chain([sum(u for u in U if u < inf), - sum(abs(c) for c in C)], - (abs(d) for d in D))) or 1 + faux_inf = ( + 3 + * max( + chain( + [sum(u for u in U if u < inf), sum(abs(c) for c in C)], + (abs(d) for d in D), + ) + ) + or 1 + ) C.extend(repeat(faux_inf, n)) U.extend(repeat(faux_inf, n)) # Construct the initial spanning tree. - e = len(E) # number of edges - x = list(chain(repeat(0, e), (abs(d) for d in D))) # edge flows + e = len(E) # number of edges + x = list(chain(repeat(0, e), (abs(d) for d in D))) # edge flows pi = [faux_inf if d <= 0 else -faux_inf for d in D] # node potentials parent = list(chain(repeat(-1, n), [None])) # parent nodes - edge = list(range(e, e + n)) # edges to parents - size = list(chain(repeat(1, n), [n + 1])) # subtree sizes - next = list(chain(range(1, n), [-1, 0])) # next nodes in depth-first thread - prev = list(range(-1, n)) # previous nodes in depth-first thread - last = list(chain(range(n), [n - 1])) # last descendants in depth-first thread + edge = list(range(e, e + n)) # edges to parents + size = list(chain(repeat(1, n), [n + 1])) # subtree sizes + next = list(chain(range(1, n), [-1, 0])) # next nodes in depth-first thread + prev = list(range(-1, n)) # previous nodes in depth-first thread + last = list(chain(range(n), [n - 1])) # last descendants in depth-first thread ########################################################################### # Pivot loop ########################################################################### def reduced_cost(i): - """Return the reduced cost of an edge i. + """Returns the reduced cost of an edge i. """ c = C[i] - pi[S[i]] + pi[T[i]] return c if x[i] == 0 else -c @@ -309,10 +308,10 @@ def find_entering_edges(): # each block, Dantzig's rule is applied to find an entering edge. The # blocks to search is determined following Bland's rule. B = int(ceil(sqrt(e))) # pivot block size - M = (e + B - 1) // B # number of blocks needed to cover all edges - m = 0 # number of consecutive blocks without eligible + M = (e + B - 1) // B # number of blocks needed to cover all edges + m = 0 # number of consecutive blocks without eligible # entering edges - f = 0 # first edge in block + f = 0 # first edge in block while m < M: # Determine the next block of edges. l = f + B @@ -364,7 +363,7 @@ def find_apex(p, q): return p def trace_path(p, w): - """Return the nodes and edges on the path from node p to its ancestor + """Returns the nodes and edges on the path from node p to its ancestor w. """ Wn = [p] @@ -376,7 +375,7 @@ def trace_path(p, w): return Wn, We def find_cycle(i, p, q): - """Return the nodes and edges on the cycle containing edge i == (p, q) + """Returns the nodes and edges on the cycle containing edge i == (p, q) when the latter is added to the spanning tree. The cycle is oriented in the direction from p to q. @@ -385,7 +384,8 @@ def find_cycle(i, p, q): Wn, We = trace_path(p, w) Wn.reverse() We.reverse() - We.append(i) + if We != [i]: + We.append(i) WnR, WeR = trace_path(q, w) del WnR[-1] Wn += WnR @@ -393,16 +393,17 @@ def find_cycle(i, p, q): return Wn, We def residual_capacity(i, p): - """Return the residual capacity of an edge i in the direction away + """Returns the residual capacity of an edge i in the direction away from its endpoint p. """ return U[i] - x[i] if S[i] == p else x[i] def find_leaving_edge(Wn, We): - """Return the leaving edge in a cycle represented by Wn and We. 
+ """Returns the leaving edge in a cycle represented by Wn and We. """ - j, s = min(zip(reversed(We), reversed(Wn)), - key=lambda i_p: residual_capacity(*i_p)) + j, s = min( + zip(reversed(We), reversed(Wn)), key=lambda i_p: residual_capacity(*i_p) + ) t = T[j] if S[j] == s else S[j] return j, s, t @@ -524,8 +525,8 @@ def update_potentials(i, p, q): Wn, We = find_cycle(i, p, q) j, s, t = find_leaving_edge(Wn, We) augment_flow(Wn, We, residual_capacity(j, s)) - if i != j: # Do nothing more if the entering edge is the same as the - # the leaving edge. + # Do nothing more if the entering edge is the same as the leaving edge. + if i != j: if parent[t] != s: # Ensure that s is the parent of t. s, t = t, s @@ -542,13 +543,13 @@ def update_potentials(i, p, q): ########################################################################### if any(x[i] != 0 for i in range(-n, 0)): - raise nx.NetworkXUnfeasible('no flow satisfies all node demands') + raise nx.NetworkXUnfeasible("no flow satisfies all node demands") - if (any(x[i] * 2 >= faux_inf for i in range(e)) or - any(e[-1].get(capacity, inf) == inf and e[-1].get(weight, 0) < 0 - for e in nx.selfloop_edges(G, data=True))): - raise nx.NetworkXUnbounded( - 'negative cycle with infinite capacity found') + if any(x[i] * 2 >= faux_inf for i in range(e)) or any( + e[-1].get(capacity, inf) == inf and e[-1].get(weight, 0) < 0 + for e in nx.selfloop_edges(G, data=True) + ): + raise nx.NetworkXUnbounded("negative cycle with infinite capacity found") ########################################################################### # Flow cost calculation and flow dict construction diff --git a/networkx/algorithms/flow/preflowpush.py b/networkx/algorithms/flow/preflowpush.py index aaa3acc..dfbd82c 100644 --- a/networkx/algorithms/flow/preflowpush.py +++ b/networkx/algorithms/flow/preflowpush.py @@ -1,17 +1,10 @@ -# -*- coding: utf-8 -*- """ Highest-label preflow-push algorithm for maximum flow problems. """ -__author__ = """ysitu """ -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. - from collections import deque from itertools import islice import networkx as nx -#from networkx.algorithms.flow.utils import * from ...utils import arbitrary_element from .utils import build_residual_network from .utils import CurrentEdge @@ -19,24 +12,23 @@ from .utils import GlobalRelabelThreshold from .utils import Level -__all__ = ['preflow_push'] +__all__ = ["preflow_push"] -def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, - value_only): +def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only): """Implementation of the highest-label preflow-push algorithm. """ if s not in G: - raise nx.NetworkXError('node %s not in graph' % str(s)) + raise nx.NetworkXError(f"node {str(s)} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % str(t)) + raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if global_relabel_freq is None: global_relabel_freq = 0 if global_relabel_freq < 0: - raise nx.NetworkXError('global_relabel_freq must be nonnegative.') + raise nx.NetworkXError("global_relabel_freq must be nonnegative.") if residual is None: R = build_residual_network(G, capacity) @@ -51,9 +43,9 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # Initialize/reset the residual network. 
for u in R: - R_nodes[u]['excess'] = 0 + R_nodes[u]["excess"] = 0 for e in R_succ[u].values(): - e['flow'] = 0 + e["flow"] = 0 def reverse_bfs(src): """Perform a reverse breadth-first search from src in the residual @@ -65,7 +57,7 @@ def reverse_bfs(src): u, height = q.popleft() height += 1 for v, attr in R_pred[u].items(): - if v not in heights and attr['flow'] < attr['capacity']: + if v not in heights and attr["flow"] < attr["capacity"]: heights[v] = height q.append((v, height)) return heights @@ -76,7 +68,7 @@ def reverse_bfs(src): if s not in heights: # t is not reachable from s in the residual network. The maximum flow # must be zero. - R.graph['flow_value'] = 0 + R.graph["flow_value"] = 0 return R n = len(R) @@ -89,21 +81,21 @@ def reverse_bfs(src): # Initialize heights and 'current edge' data structures of the nodes. for u in R: - R_nodes[u]['height'] = heights[u] if u in heights else n + 1 - R_nodes[u]['curr_edge'] = CurrentEdge(R_succ[u]) + R_nodes[u]["height"] = heights[u] if u in heights else n + 1 + R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u]) def push(u, v, flow): """Push flow units of flow from u to v. """ - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow - R_nodes[u]['excess'] -= flow - R_nodes[v]['excess'] += flow + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + R_nodes[u]["excess"] -= flow + R_nodes[v]["excess"] += flow # The maximum flow must be nonzero now. Initialize the preflow by # saturating all edges emanating from s. for u, attr in R_succ[s].items(): - flow = attr['capacity'] + flow = attr["capacity"] if flow > 0: push(s, u, flow) @@ -111,8 +103,8 @@ def push(u, v, flow): levels = [Level() for i in range(2 * n)] for u in R: if u != s and u != t: - level = levels[R_nodes[u]['height']] - if R_nodes[u]['excess'] > 0: + level = levels[R_nodes[u]["height"]] + if R_nodes[u]["excess"] > 0: level.active.add(u) else: level.inactive.add(u) @@ -121,7 +113,7 @@ def activate(v): """Move a node from the inactive set to the active set of its level. """ if v != s and v != t: - level = levels[R_nodes[v]['height']] + level = levels[R_nodes[v]["height"]] if v in level.inactive: level.inactive.remove(v) level.active.add(v) @@ -130,29 +122,33 @@ def relabel(u): """Relabel a node to create an admissible edge. """ grt.add_work(len(R_succ[u])) - return min(R_nodes[v]['height'] for v, attr in R_succ[u].items() - if attr['flow'] < attr['capacity']) + 1 + return ( + min( + R_nodes[v]["height"] + for v, attr in R_succ[u].items() + if attr["flow"] < attr["capacity"] + ) + + 1 + ) def discharge(u, is_phase1): """Discharge a node until it becomes inactive or, during phase 1 (see below), its height reaches at least n. The node is known to have the largest height among active nodes. """ - height = R_nodes[u]['height'] - curr_edge = R_nodes[u]['curr_edge'] + height = R_nodes[u]["height"] + curr_edge = R_nodes[u]["curr_edge"] # next_height represents the next height to examine after discharging # the current node. During phase 1, it is capped to below n. next_height = height levels[height].active.remove(u) while True: v, attr = curr_edge.get() - if (height == R_nodes[v]['height'] + 1 and - attr['flow'] < attr['capacity']): - flow = min(R_nodes[u]['excess'], - attr['capacity'] - attr['flow']) + if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]: + flow = min(R_nodes[u]["excess"], attr["capacity"] - attr["flow"]) push(u, v, flow) activate(v) - if R_nodes[u]['excess'] == 0: + if R_nodes[u]["excess"] == 0: # The node has become inactive. 
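The push primitive above preserves skew symmetry between each arc and its reversal. A sketch on a hand-built two-node fragment (hypothetical data mirroring the R_succ and excess layout):

R_succ = {"s": {"a": {"flow": 0, "capacity": 5}},
          "a": {"s": {"flow": 0, "capacity": 0}}}
excess = {"s": 0, "a": 0}

def push(u, v, flow):
    R_succ[u][v]["flow"] += flow   # forward arc gains flow
    R_succ[v][u]["flow"] -= flow   # reverse arc mirrors it
    excess[u] -= flow
    excess[v] += flow

push("s", "a", 5)
assert R_succ["a"]["s"]["flow"] == -5 and excess["a"] == 5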
levels[height].inactive.add(u) break @@ -173,7 +169,7 @@ def discharge(u, is_phase1): # structure is not rewound. Use height instead of (height - 1) # in case other active nodes at the same level are missed. next_height = height - R_nodes[u]['height'] = height + R_nodes[u]["height"] = height return next_height def gap_heuristic(height): @@ -182,9 +178,9 @@ def gap_heuristic(height): # Move all nodes at levels (height + 1) to max_height to level n + 1. for level in islice(levels, height + 1, max_height + 1): for u in level.active: - R_nodes[u]['height'] = n + 1 + R_nodes[u]["height"] = n + 1 for u in level.inactive: - R_nodes[u]['height'] = n + 1 + R_nodes[u]["height"] = n + 1 levels[n + 1].active.update(level.active) level.active.clear() levels[n + 1].inactive.update(level.inactive) @@ -203,7 +199,7 @@ def global_relabel(from_sink): # Also mark nodes from which t is unreachable for relabeling. This # serves the same purpose as the gap heuristic. for u in R: - if u not in heights and R_nodes[u]['height'] < n: + if u not in heights and R_nodes[u]["height"] < n: heights[u] = n + 1 else: # Shift the computed heights because the height of s is n. @@ -212,7 +208,7 @@ def global_relabel(from_sink): max_height += n del heights[src] for u, new_height in heights.items(): - old_height = R_nodes[u]['height'] + old_height = R_nodes[u]["height"] if new_height != old_height: if u in levels[old_height].active: levels[old_height].active.remove(u) @@ -220,7 +216,7 @@ def global_relabel(from_sink): else: levels[old_height].inactive.remove(u) levels[new_height].inactive.add(u) - R_nodes[u]['height'] = new_height + R_nodes[u]["height"] = new_height return max_height # Phase 1: Find the maximum preflow by pushing as much flow as possible to @@ -263,7 +259,7 @@ def global_relabel(from_sink): # A maximum preflow has been found. The excess at t is the maximum flow # value. if value_only: - R.graph['flow_value'] = R_nodes[t]['excess'] + R.graph["flow_value"] = R_nodes[t]["excess"] return R # Phase 2: Convert the maximum preflow into a maximum flow by returning the @@ -290,13 +286,14 @@ def global_relabel(from_sink): height = global_relabel(False) grt.clear_work() - R.graph['flow_value'] = R_nodes[t]['excess'] + R.graph["flow_value"] = R_nodes[t]["excess"] return R -def preflow_push(G, s, t, capacity='capacity', residual=None, - global_relabel_freq=1, value_only=False): - """Find a maximum single-commodity flow using the highest-label +def preflow_push( + G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False +): + r"""Find a maximum single-commodity flow using the highest-label preflow-push algorithm. This function returns the residual network resulting after computing @@ -391,7 +388,6 @@ def preflow_push(G, s, t, capacity='capacity', residual=None, Examples -------- - >>> import networkx as nx >>> from networkx.algorithms.flow import preflow_push The functions that implement flow algorithms and output a residual @@ -399,32 +395,31 @@ def preflow_push(G, s, t, capacity='capacity', residual=None, namespace, so you have to explicitly import them from the flow package. 
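A sketch of the global_relabel_freq knob (assuming, per the parameter description, that None disables the global relabeling heuristic): the computed value is the same, only the amount of relabeling work differs.

import networkx as nx
from networkx.algorithms.flow import preflow_push

G = nx.DiGraph()
for u, v, c in [("x", "a", 3.0), ("x", "b", 1.0), ("a", "c", 3.0), ("b", "c", 5.0),
                ("b", "d", 4.0), ("d", "e", 2.0), ("c", "y", 2.0), ("e", "y", 3.0)]:
    G.add_edge(u, v, capacity=c)
R1 = preflow_push(G, "x", "y")                            # default frequency 1
R2 = preflow_push(G, "x", "y", global_relabel_freq=None)  # heuristic disabled
assert R1.graph["flow_value"] == R2.graph["flow_value"] == 3.0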
>>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) - >>> R = preflow_push(G, 'x', 'y') - >>> flow_value = nx.maximum_flow_value(G, 'x', 'y') - >>> flow_value == R.graph['flow_value'] + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = preflow_push(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value == R.graph["flow_value"] True >>> # preflow_push also stores the maximum flow value >>> # in the excess attribute of the sink node t - >>> flow_value == R.nodes['y']['excess'] + >>> flow_value == R.nodes["y"]["excess"] True >>> # For some problems, you might only want to compute a >>> # maximum preflow. - >>> R = preflow_push(G, 'x', 'y', value_only=True) - >>> flow_value == R.graph['flow_value'] + >>> R = preflow_push(G, "x", "y", value_only=True) + >>> flow_value == R.graph["flow_value"] True - >>> flow_value == R.nodes['y']['excess'] + >>> flow_value == R.nodes["y"]["excess"] True """ - R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, - value_only) - R.graph['algorithm'] = 'preflow_push' + R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only) + R.graph["algorithm"] = "preflow_push" return R diff --git a/networkx/algorithms/flow/shortestaugmentingpath.py b/networkx/algorithms/flow/shortestaugmentingpath.py index bc550ef..23db544 100644 --- a/networkx/algorithms/flow/shortestaugmentingpath.py +++ b/networkx/algorithms/flow/shortestaugmentingpath.py @@ -1,31 +1,24 @@ -# -*- coding: utf-8 -*- """ Shortest augmenting path algorithm for maximum flow problems. """ -__author__ = """ysitu """ -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. - from collections import deque import networkx as nx -from .utils import * +from .utils import build_residual_network, CurrentEdge from .edmondskarp import edmonds_karp_core -__all__ = ['shortest_augmenting_path'] +__all__ = ["shortest_augmenting_path"] -def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, - cutoff): +def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff): """Implementation of the shortest augmenting path algorithm. """ if s not in G: - raise nx.NetworkXError('node %s not in graph' % str(s)) + raise nx.NetworkXError(f"node {str(s)} not in graph") if t not in G: - raise nx.NetworkXError('node %s not in graph' % str(t)) + raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -39,7 +32,7 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # Initialize/reset the residual network. for u in R: for e in R_succ[u].values(): - e['flow'] = 0 + e["flow"] = 0 # Initialize heights of the nodes. 
heights = {t: 0} @@ -48,14 +41,14 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, u, height = q.popleft() height += 1 for v, attr in R_pred[u].items(): - if v not in heights and attr['flow'] < attr['capacity']: + if v not in heights and attr["flow"] < attr["capacity"]: heights[v] = height q.append((v, height)) if s not in heights: # t is not reachable from s in the residual network. The maximum flow # must be zero. - R.graph['flow_value'] = 0 + R.graph["flow_value"] = 0 return R n = len(G) @@ -63,15 +56,15 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # Initialize heights and 'current edge' data structures of the nodes. for u in R: - R_nodes[u]['height'] = heights[u] if u in heights else n - R_nodes[u]['curr_edge'] = CurrentEdge(R_succ[u]) + R_nodes[u]["height"] = heights[u] if u in heights else n + R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u]) # Initialize counts of nodes in each level. counts = [0] * (2 * n - 1) for u in R: - counts[R_nodes[u]['height']] += 1 + counts[R_nodes[u]["height"]] += 1 - inf = R.graph['inf'] + inf = R.graph["inf"] def augment(path): """Augment flow along a path from s to t. @@ -82,17 +75,16 @@ def augment(path): u = next(it) for v in it: attr = R_succ[u][v] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) u = v if flow * 2 > inf: - raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") # Augment flow along the path. it = iter(path) u = next(it) for v in it: - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow u = v return flow @@ -101,28 +93,27 @@ def relabel(u): """ height = n - 1 for v, attr in R_succ[u].items(): - if attr['flow'] < attr['capacity']: - height = min(height, R_nodes[v]['height']) + if attr["flow"] < attr["capacity"]: + height = min(height, R_nodes[v]["height"]) return height + 1 if cutoff is None: - cutoff = float('inf') + cutoff = float("inf") # Phase 1: Look for shortest augmenting paths using depth-first search. flow_value = 0 path = [s] u = s - d = n if not two_phase else int(min(m ** 0.5, 2 * n ** (2. / 3))) - done = R_nodes[s]['height'] >= d + d = n if not two_phase else int(min(m ** 0.5, 2 * n ** (2.0 / 3))) + done = R_nodes[s]["height"] >= d while not done: - height = R_nodes[u]['height'] - curr_edge = R_nodes[u]['curr_edge'] + height = R_nodes[u]["height"] + curr_edge = R_nodes[u]["curr_edge"] # Depth-first search for the next node on the path to t. while True: v, attr = curr_edge.get() - if (height == R_nodes[v]['height'] + 1 and - attr['flow'] < attr['capacity']): + if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]: # Advance to the next node following an admissible edge. path.append(v) u = v @@ -135,21 +126,21 @@ def relabel(u): # Gap heuristic: If relabeling causes a level to become # empty, a minimum cut has been identified. The algorithm # can now be terminated. - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R height = relabel(u) if u == s and height >= d: if not two_phase: # t is disconnected from s in the residual network. No # more augmenting paths exist. - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R else: # t is at least d steps away from s. End of phase 1. 
done = True break counts[height] += 1 - R_nodes[u]['height'] = height + R_nodes[u]["height"] = height if u != s: # After relabeling, the last edge on the path is no longer # admissible. Retreat one step to look for an alternative. @@ -161,7 +152,7 @@ def relabel(u): # depth-first search. flow_value += augment(path) if flow_value >= cutoff: - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R path = [s] u = s @@ -169,13 +160,21 @@ def relabel(u): # Phase 2: Look for shortest augmenting paths using breadth-first search. flow_value += edmonds_karp_core(R, s, t, cutoff - flow_value) - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R -def shortest_augmenting_path(G, s, t, capacity='capacity', residual=None, - value_only=False, two_phase=False, cutoff=None): - """Find a maximum single-commodity flow using the shortest augmenting path +def shortest_augmenting_path( + G, + s, + t, + capacity="capacity", + residual=None, + value_only=False, + two_phase=False, + cutoff=None, +): + r"""Find a maximum single-commodity flow using the shortest augmenting path algorithm. This function returns the residual network resulting after computing @@ -272,7 +271,6 @@ def shortest_augmenting_path(G, s, t, capacity='capacity', residual=None, Examples -------- - >>> import networkx as nx >>> from networkx.algorithms.flow import shortest_augmenting_path The functions that implement flow algorithms and output a residual @@ -280,23 +278,22 @@ def shortest_augmenting_path(G, s, t, capacity='capacity', residual=None, namespace, so you have to explicitly import them from the flow package. >>> G = nx.DiGraph() - >>> G.add_edge('x','a', capacity=3.0) - >>> G.add_edge('x','b', capacity=1.0) - >>> G.add_edge('a','c', capacity=3.0) - >>> G.add_edge('b','c', capacity=5.0) - >>> G.add_edge('b','d', capacity=4.0) - >>> G.add_edge('d','e', capacity=2.0) - >>> G.add_edge('c','y', capacity=2.0) - >>> G.add_edge('e','y', capacity=3.0) - >>> R = shortest_augmenting_path(G, 'x', 'y') - >>> flow_value = nx.maximum_flow_value(G, 'x', 'y') + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = shortest_augmenting_path(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") >>> flow_value 3.0 - >>> flow_value == R.graph['flow_value'] + >>> flow_value == R.graph["flow_value"] True """ - R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, - cutoff) - R.graph['algorithm'] = 'shortest_augmenting_path' + R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff) + R.graph["algorithm"] = "shortest_augmenting_path" return R diff --git a/networkx/algorithms/flow/tests/__init__.py b/networkx/algorithms/flow/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/flow/tests/test_gomory_hu.py b/networkx/algorithms/flow/tests/test_gomory_hu.py index 4ce2c75..33249c9 100644 --- a/networkx/algorithms/flow/tests/test_gomory_hu.py +++ b/networkx/algorithms/flow/tests/test_gomory_hu.py @@ -1,5 +1,5 @@ from itertools import combinations -from nose.tools import assert_equal, assert_true, raises +import pytest import networkx as nx from networkx.algorithms.flow import boykov_kolmogorov @@ -18,10 +18,9 @@ class TestGomoryHuTree: 
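    # Every test in this class exercises the defining property of a
    # Gomory-Hu tree T of G: for any pair of nodes u, v, the minimum
    # u-v cut value in G equals the weight of the lightest edge on the
    # unique u-v path in T.  A minimal doctest-style sketch of that
    # check (a toy graph with unit capacities; standard networkx calls
    # only, assuming networkx imported as nx):
    #
    #     >>> G = nx.cycle_graph(4)
    #     >>> nx.set_edge_attributes(G, 1, "capacity")
    #     >>> T = nx.gomory_hu_tree(G)
    #     >>> path = nx.shortest_path(T, 0, 2, weight="weight")
    #     >>> min(T[u][v]["weight"] for u, v in zip(path, path[1:]))
    #     2
    #     >>> nx.minimum_cut_value(G, 0, 2)
    #     2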
- def minimum_edge_weight(self, T, u, v): - path = nx.shortest_path(T, u, v, weight='weight') - return min((T[u][v]['weight'], (u, v)) for (u, v) in zip(path, path[1:])) + path = nx.shortest_path(T, u, v, weight="weight") + return min((T[u][v]["weight"], (u, v)) for (u, v) in zip(path, path[1:])) def compute_cutset(self, G, T_orig, edge): T = T_orig.copy() @@ -34,79 +33,93 @@ def compute_cutset(self, G, T_orig, edge): def test_default_flow_function_karate_club_graph(self): G = nx.karate_club_graph() - nx.set_edge_attributes(G, 1, 'capacity') + nx.set_edge_attributes(G, 1, "capacity") T = nx.gomory_hu_tree(G) - assert_true(nx.is_tree(T)) + assert nx.is_tree(T) for u, v in combinations(G, 2): cut_value, edge = self.minimum_edge_weight(T, u, v) - assert_equal(nx.minimum_cut_value(G, u, v), - cut_value) + assert nx.minimum_cut_value(G, u, v) == cut_value def test_karate_club_graph(self): G = nx.karate_club_graph() - nx.set_edge_attributes(G, 1, 'capacity') + nx.set_edge_attributes(G, 1, "capacity") for flow_func in flow_funcs: T = nx.gomory_hu_tree(G, flow_func=flow_func) - assert_true(nx.is_tree(T)) + assert nx.is_tree(T) for u, v in combinations(G, 2): cut_value, edge = self.minimum_edge_weight(T, u, v) - assert_equal(nx.minimum_cut_value(G, u, v), - cut_value) + assert nx.minimum_cut_value(G, u, v) == cut_value def test_davis_southern_women_graph(self): G = nx.davis_southern_women_graph() - nx.set_edge_attributes(G, 1, 'capacity') + nx.set_edge_attributes(G, 1, "capacity") for flow_func in flow_funcs: T = nx.gomory_hu_tree(G, flow_func=flow_func) - assert_true(nx.is_tree(T)) + assert nx.is_tree(T) for u, v in combinations(G, 2): cut_value, edge = self.minimum_edge_weight(T, u, v) - assert_equal(nx.minimum_cut_value(G, u, v), - cut_value) + assert nx.minimum_cut_value(G, u, v) == cut_value def test_florentine_families_graph(self): G = nx.florentine_families_graph() - nx.set_edge_attributes(G, 1, 'capacity') + nx.set_edge_attributes(G, 1, "capacity") + for flow_func in flow_funcs: + T = nx.gomory_hu_tree(G, flow_func=flow_func) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v) == cut_value + + @pytest.mark.slow + def test_les_miserables_graph_cutset(self): + G = nx.les_miserables_graph() + nx.set_edge_attributes(G, 1, "capacity") for flow_func in flow_funcs: T = nx.gomory_hu_tree(G, flow_func=flow_func) - assert_true(nx.is_tree(T)) + assert nx.is_tree(T) for u, v in combinations(G, 2): cut_value, edge = self.minimum_edge_weight(T, u, v) - assert_equal(nx.minimum_cut_value(G, u, v), - cut_value) + assert nx.minimum_cut_value(G, u, v) == cut_value def test_karate_club_graph_cutset(self): G = nx.karate_club_graph() - nx.set_edge_attributes(G, 1, 'capacity') + nx.set_edge_attributes(G, 1, "capacity") T = nx.gomory_hu_tree(G) - assert_true(nx.is_tree(T)) + assert nx.is_tree(T) u, v = 0, 33 cut_value, edge = self.minimum_edge_weight(T, u, v) cutset = self.compute_cutset(G, T, edge) - assert_equal(cut_value, len(cutset)) + assert cut_value == len(cutset) def test_wikipedia_example(self): # Example from https://en.wikipedia.org/wiki/Gomory%E2%80%93Hu_tree G = nx.Graph() - G.add_weighted_edges_from(( - (0, 1, 1), (0, 2, 7), (1, 2, 1), - (1, 3, 3), (1, 4, 2), (2, 4, 4), - (3, 4, 1), (3, 5, 6), (4, 5, 2), - )) + G.add_weighted_edges_from( + ( + (0, 1, 1), + (0, 2, 7), + (1, 2, 1), + (1, 3, 3), + (1, 4, 2), + (2, 4, 4), + (3, 4, 1), + (3, 5, 6), + (4, 5, 2), + ) + ) for flow_func in flow_funcs: - T = 
nx.gomory_hu_tree(G, capacity='weight', flow_func=flow_func) - assert_true(nx.is_tree(T)) + T = nx.gomory_hu_tree(G, capacity="weight", flow_func=flow_func) + assert nx.is_tree(T) for u, v in combinations(G, 2): cut_value, edge = self.minimum_edge_weight(T, u, v) - assert_equal(nx.minimum_cut_value(G, u, v, capacity='weight'), - cut_value) + assert nx.minimum_cut_value(G, u, v, capacity="weight") == cut_value - @raises(nx.NetworkXNotImplemented) def test_directed_raises(self): - G = nx.DiGraph() - T = nx.gomory_hu_tree(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + T = nx.gomory_hu_tree(G) - @raises(nx.NetworkXError) def test_empty_raises(self): - G = nx.empty_graph() - T = nx.gomory_hu_tree(G) + with pytest.raises(nx.NetworkXError): + G = nx.empty_graph() + T = nx.gomory_hu_tree(G) diff --git a/networkx/algorithms/flow/tests/test_maxflow.py b/networkx/algorithms/flow/tests/test_maxflow.py index 09df99f..17b82b4 100644 --- a/networkx/algorithms/flow/tests/test_maxflow.py +++ b/networkx/algorithms/flow/tests/test_maxflow.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- """Maximum flow algorithms test suite. """ -from nose.tools import * +import pytest import networkx as nx from networkx.algorithms.flow import build_flow_dict, build_residual_network @@ -11,15 +10,18 @@ from networkx.algorithms.flow import shortest_augmenting_path from networkx.algorithms.flow import dinitz -flow_funcs = [boykov_kolmogorov, dinitz, edmonds_karp, preflow_push, shortest_augmenting_path] +flow_funcs = [ + boykov_kolmogorov, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +] max_min_funcs = [nx.maximum_flow, nx.minimum_cut] flow_value_funcs = [nx.maximum_flow_value, nx.minimum_cut_value] interface_funcs = sum([max_min_funcs, flow_value_funcs], []) all_funcs = sum([flow_funcs, interface_funcs], []) -msg = "Assertion failed in function: {0}" -msgi = "Assertion failed in function: {0} in interface {1}" - def compute_cutset(G, partition): reachable, non_reachable = partition @@ -30,69 +32,65 @@ def compute_cutset(G, partition): def validate_flows(G, s, t, flowDict, solnValue, capacity, flow_func): - assert_equal(set(G), set(flowDict), msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert set(G) == set(flowDict), errmsg for u in G: - assert_equal(set(G[u]), set(flowDict[u]), - msg=msg.format(flow_func.__name__)) + assert set(G[u]) == set(flowDict[u]), errmsg excess = {u: 0 for u in flowDict} for u in flowDict: for v, flow in flowDict[u].items(): if capacity in G[u][v]: - ok_(flow <= G[u][v][capacity]) - ok_(flow >= 0, msg=msg.format(flow_func.__name__)) + assert flow <= G[u][v][capacity] + assert flow >= 0, errmsg excess[u] -= flow excess[v] += flow for u, exc in excess.items(): if u == s: - assert_equal(exc, -solnValue, msg=msg.format(flow_func.__name__)) + assert exc == -solnValue, errmsg elif u == t: - assert_equal(exc, solnValue, msg=msg.format(flow_func.__name__)) + assert exc == solnValue, errmsg else: - assert_equal(exc, 0, msg=msg.format(flow_func.__name__)) + assert exc == 0, errmsg def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func): - assert_true(all(n in G for n in partition[0]), - msg=msg.format(flow_func.__name__)) - assert_true(all(n in G for n in partition[1]), - msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert all(n in G for n in partition[0]), errmsg + assert all(n in G for n in partition[1]), errmsg cutset = 
compute_cutset(G, partition) - assert_true(all(G.has_edge(u, v) for (u, v) in cutset), - msg=msg.format(flow_func.__name__)) - assert_equal(solnValue, sum(G[u][v][capacity] for (u, v) in cutset), - msg=msg.format(flow_func.__name__)) + assert all(G.has_edge(u, v) for (u, v) in cutset), errmsg + assert solnValue == sum(G[u][v][capacity] for (u, v) in cutset), errmsg H = G.copy() H.remove_edges_from(cutset) if not G.is_directed(): - assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__)) + assert not nx.is_connected(H), errmsg else: - assert_false(nx.is_strongly_connected(H), - msg=msg.format(flow_func.__name__)) + assert not nx.is_strongly_connected(H), errmsg -def compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity='capacity'): +def compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity="capacity"): for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" R = flow_func(G, s, t, capacity) # Test both legacy and new implementations. - flow_value = R.graph['flow_value'] + flow_value = R.graph["flow_value"] flow_dict = build_flow_dict(G, R) - assert_equal(flow_value, solnValue, msg=msg.format(flow_func.__name__)) + assert flow_value == solnValue, errmsg validate_flows(G, s, t, flow_dict, solnValue, capacity, flow_func) # Minimum cut - cut_value, partition = nx.minimum_cut(G, s, t, capacity=capacity, - flow_func=flow_func) + cut_value, partition = nx.minimum_cut( + G, s, t, capacity=capacity, flow_func=flow_func + ) validate_cuts(G, s, t, solnValue, partition, capacity, flow_func) class TestMaxflowMinCutCommon: - def test_graph1(self): # Trivial undirected graph G = nx.Graph() G.add_edge(1, 2, capacity=1.0) - solnFlows = {1: {2: 1.0}, - 2: {1: 1.0}} + solnFlows = {1: {2: 1.0}, 2: {1: 1.0}} compare_flows_and_cuts(G, 1, 2, solnFlows, 1.0) @@ -100,281 +98,301 @@ def test_graph2(self): # A more complex undirected graph # adapted from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow G = nx.Graph() - G.add_edge('x', 'a', capacity=3.0) - G.add_edge('x', 'b', capacity=1.0) - G.add_edge('a', 'c', capacity=3.0) - G.add_edge('b', 'c', capacity=5.0) - G.add_edge('b', 'd', capacity=4.0) - G.add_edge('d', 'e', capacity=2.0) - G.add_edge('c', 'y', capacity=2.0) - G.add_edge('e', 'y', capacity=3.0) - - H = {'x': {'a': 3, 'b': 1}, - 'a': {'c': 3, 'x': 3}, - 'b': {'c': 1, 'd': 2, 'x': 1}, - 'c': {'a': 3, 'b': 1, 'y': 2}, - 'd': {'b': 2, 'e': 2}, - 'e': {'d': 2, 'y': 2}, - 'y': {'c': 2, 'e': 2}} - - compare_flows_and_cuts(G, 'x', 'y', H, 4.0) + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + + H = { + "x": {"a": 3, "b": 1}, + "a": {"c": 3, "x": 3}, + "b": {"c": 1, "d": 2, "x": 1}, + "c": {"a": 3, "b": 1, "y": 2}, + "d": {"b": 2, "e": 2}, + "e": {"d": 2, "y": 2}, + "y": {"c": 2, "e": 2}, + } + + compare_flows_and_cuts(G, "x", "y", H, 4.0) def test_digraph1(self): # The classic directed graph example G = nx.DiGraph() - G.add_edge('a', 'b', capacity=1000.0) - G.add_edge('a', 'c', capacity=1000.0) - G.add_edge('b', 'c', capacity=1.0) - G.add_edge('b', 'd', capacity=1000.0) - G.add_edge('c', 'd', capacity=1000.0) + G.add_edge("a", "b", capacity=1000.0) + G.add_edge("a", "c", capacity=1000.0) + G.add_edge("b", "c", capacity=1.0) + G.add_edge("b", "d", capacity=1000.0) + G.add_edge("c", "d", 
capacity=1000.0) - H = {'a': {'b': 1000.0, 'c': 1000.0}, - 'b': {'c': 0, 'd': 1000.0}, - 'c': {'d': 1000.0}, - 'd': {}} + H = { + "a": {"b": 1000.0, "c": 1000.0}, + "b": {"c": 0, "d": 1000.0}, + "c": {"d": 1000.0}, + "d": {}, + } - compare_flows_and_cuts(G, 'a', 'd', H, 2000.0) + compare_flows_and_cuts(G, "a", "d", H, 2000.0) def test_digraph2(self): # An example in which some edges end up with zero flow. G = nx.DiGraph() - G.add_edge('s', 'b', capacity=2) - G.add_edge('s', 'c', capacity=1) - G.add_edge('c', 'd', capacity=1) - G.add_edge('d', 'a', capacity=1) - G.add_edge('b', 'a', capacity=2) - G.add_edge('a', 't', capacity=2) - - H = {'s': {'b': 2, 'c': 0}, - 'c': {'d': 0}, - 'd': {'a': 0}, - 'b': {'a': 2}, - 'a': {'t': 2}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 2) + G.add_edge("s", "b", capacity=2) + G.add_edge("s", "c", capacity=1) + G.add_edge("c", "d", capacity=1) + G.add_edge("d", "a", capacity=1) + G.add_edge("b", "a", capacity=2) + G.add_edge("a", "t", capacity=2) + + H = { + "s": {"b": 2, "c": 0}, + "c": {"d": 0}, + "d": {"a": 0}, + "b": {"a": 2}, + "a": {"t": 2}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 2) def test_digraph3(self): # A directed graph example from Cormen et al. G = nx.DiGraph() - G.add_edge('s', 'v1', capacity=16.0) - G.add_edge('s', 'v2', capacity=13.0) - G.add_edge('v1', 'v2', capacity=10.0) - G.add_edge('v2', 'v1', capacity=4.0) - G.add_edge('v1', 'v3', capacity=12.0) - G.add_edge('v3', 'v2', capacity=9.0) - G.add_edge('v2', 'v4', capacity=14.0) - G.add_edge('v4', 'v3', capacity=7.0) - G.add_edge('v3', 't', capacity=20.0) - G.add_edge('v4', 't', capacity=4.0) - - H = {'s': {'v1': 12.0, 'v2': 11.0}, - 'v2': {'v1': 0, 'v4': 11.0}, - 'v1': {'v2': 0, 'v3': 12.0}, - 'v3': {'v2': 0, 't': 19.0}, - 'v4': {'v3': 7.0, 't': 4.0}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 23.0) + G.add_edge("s", "v1", capacity=16.0) + G.add_edge("s", "v2", capacity=13.0) + G.add_edge("v1", "v2", capacity=10.0) + G.add_edge("v2", "v1", capacity=4.0) + G.add_edge("v1", "v3", capacity=12.0) + G.add_edge("v3", "v2", capacity=9.0) + G.add_edge("v2", "v4", capacity=14.0) + G.add_edge("v4", "v3", capacity=7.0) + G.add_edge("v3", "t", capacity=20.0) + G.add_edge("v4", "t", capacity=4.0) + + H = { + "s": {"v1": 12.0, "v2": 11.0}, + "v2": {"v1": 0, "v4": 11.0}, + "v1": {"v2": 0, "v3": 12.0}, + "v3": {"v2": 0, "t": 19.0}, + "v4": {"v3": 7.0, "t": 4.0}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 23.0) def test_digraph4(self): # A more complex directed graph # from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow G = nx.DiGraph() - G.add_edge('x', 'a', capacity=3.0) - G.add_edge('x', 'b', capacity=1.0) - G.add_edge('a', 'c', capacity=3.0) - G.add_edge('b', 'c', capacity=5.0) - G.add_edge('b', 'd', capacity=4.0) - G.add_edge('d', 'e', capacity=2.0) - G.add_edge('c', 'y', capacity=2.0) - G.add_edge('e', 'y', capacity=3.0) - - H = {'x': {'a': 2.0, 'b': 1.0}, - 'a': {'c': 2.0}, - 'b': {'c': 0, 'd': 1.0}, - 'c': {'y': 2.0}, - 'd': {'e': 1.0}, - 'e': {'y': 1.0}, - 'y': {}} - - compare_flows_and_cuts(G, 'x', 'y', H, 3.0) + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + + H = { + "x": {"a": 2.0, "b": 1.0}, + "a": {"c": 2.0}, + "b": {"c": 0, "d": 1.0}, + "c": {"y": 2.0}, + "d": {"e": 1.0}, + "e": 
{"y": 1.0}, + "y": {}, + } + + compare_flows_and_cuts(G, "x", "y", H, 3.0) def test_wikipedia_dinitz_example(self): # Nice example from https://en.wikipedia.org/wiki/Dinic's_algorithm G = nx.DiGraph() - G.add_edge('s', 1, capacity=10) - G.add_edge('s', 2, capacity=10) + G.add_edge("s", 1, capacity=10) + G.add_edge("s", 2, capacity=10) G.add_edge(1, 3, capacity=4) G.add_edge(1, 4, capacity=8) G.add_edge(1, 2, capacity=2) G.add_edge(2, 4, capacity=9) - G.add_edge(3, 't', capacity=10) + G.add_edge(3, "t", capacity=10) G.add_edge(4, 3, capacity=6) - G.add_edge(4, 't', capacity=10) + G.add_edge(4, "t", capacity=10) - solnFlows = {1: {2: 0, 3: 4, 4: 6}, - 2: {4: 9}, - 3: {'t': 9}, - 4: {3: 5, 't': 10}, - 's': {1: 10, 2: 9}, - 't': {}} + solnFlows = { + 1: {2: 0, 3: 4, 4: 6}, + 2: {4: 9}, + 3: {"t": 9}, + 4: {3: 5, "t": 10}, + "s": {1: 10, 2: 9}, + "t": {}, + } - compare_flows_and_cuts(G, 's', 't', solnFlows, 19) + compare_flows_and_cuts(G, "s", "t", solnFlows, 19) def test_optional_capacity(self): # Test optional capacity parameter. G = nx.DiGraph() - G.add_edge('x', 'a', spam=3.0) - G.add_edge('x', 'b', spam=1.0) - G.add_edge('a', 'c', spam=3.0) - G.add_edge('b', 'c', spam=5.0) - G.add_edge('b', 'd', spam=4.0) - G.add_edge('d', 'e', spam=2.0) - G.add_edge('c', 'y', spam=2.0) - G.add_edge('e', 'y', spam=3.0) - - solnFlows = {'x': {'a': 2.0, 'b': 1.0}, - 'a': {'c': 2.0}, - 'b': {'c': 0, 'd': 1.0}, - 'c': {'y': 2.0}, - 'd': {'e': 1.0}, - 'e': {'y': 1.0}, - 'y': {}} + G.add_edge("x", "a", spam=3.0) + G.add_edge("x", "b", spam=1.0) + G.add_edge("a", "c", spam=3.0) + G.add_edge("b", "c", spam=5.0) + G.add_edge("b", "d", spam=4.0) + G.add_edge("d", "e", spam=2.0) + G.add_edge("c", "y", spam=2.0) + G.add_edge("e", "y", spam=3.0) + + solnFlows = { + "x": {"a": 2.0, "b": 1.0}, + "a": {"c": 2.0}, + "b": {"c": 0, "d": 1.0}, + "c": {"y": 2.0}, + "d": {"e": 1.0}, + "e": {"y": 1.0}, + "y": {}, + } solnValue = 3.0 - s = 'x' - t = 'y' + s = "x" + t = "y" - compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity='spam') + compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity="spam") def test_digraph_infcap_edges(self): # DiGraph with infinite capacity edges G = nx.DiGraph() - G.add_edge('s', 'a') - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c', capacity=25) - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't') - - H = {'s': {'a': 85, 'b': 12}, - 'a': {'c': 25, 't': 60}, - 'b': {'c': 12}, - 'c': {'t': 37}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 97) + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c", capacity=25) + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + H = { + "s": {"a": 85, "b": 12}, + "a": {"c": 25, "t": 60}, + "b": {"c": 12}, + "c": {"t": 37}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 97) # DiGraph with infinite capacity digon G = nx.DiGraph() - G.add_edge('s', 'a', capacity=85) - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c') - G.add_edge('c', 'a') - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't', capacity=37) - - H = {'s': {'a': 85, 'b': 12}, - 'a': {'c': 25, 't': 60}, - 'c': {'a': 0, 't': 37}, - 'b': {'c': 12}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 97) + G.add_edge("s", "a", capacity=85) + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c") + G.add_edge("c", "a") + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + 
G.add_edge("c", "t", capacity=37) + + H = { + "s": {"a": 85, "b": 12}, + "a": {"c": 25, "t": 60}, + "c": {"a": 0, "t": 37}, + "b": {"c": 12}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 97) def test_digraph_infcap_path(self): # Graph with infinite capacity (s, t)-path G = nx.DiGraph() - G.add_edge('s', 'a') - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c') - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't') + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c") + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") for flow_func in all_funcs: - assert_raises(nx.NetworkXUnbounded, - flow_func, G, 's', 't') + pytest.raises(nx.NetworkXUnbounded, flow_func, G, "s", "t") def test_graph_infcap_edges(self): # Undirected graph with infinite capacity edges G = nx.Graph() - G.add_edge('s', 'a') - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c', capacity=25) - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't') - - H = {'s': {'a': 85, 'b': 12}, - 'a': {'c': 25, 's': 85, 't': 60}, - 'b': {'c': 12, 's': 12}, - 'c': {'a': 25, 'b': 12, 't': 37}, - 't': {'a': 60, 'c': 37}} - - compare_flows_and_cuts(G, 's', 't', H, 97) + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c", capacity=25) + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + H = { + "s": {"a": 85, "b": 12}, + "a": {"c": 25, "s": 85, "t": 60}, + "b": {"c": 12, "s": 12}, + "c": {"a": 25, "b": 12, "t": 37}, + "t": {"a": 60, "c": 37}, + } + + compare_flows_and_cuts(G, "s", "t", H, 97) def test_digraph5(self): # From ticket #429 by mfrasca. G = nx.DiGraph() - G.add_edge('s', 'a', capacity=2) - G.add_edge('s', 'b', capacity=2) - G.add_edge('a', 'b', capacity=5) - G.add_edge('a', 't', capacity=1) - G.add_edge('b', 'a', capacity=1) - G.add_edge('b', 't', capacity=3) - flowSoln = {'a': {'b': 1, 't': 1}, - 'b': {'a': 0, 't': 3}, - 's': {'a': 2, 'b': 2}, - 't': {}} - compare_flows_and_cuts(G, 's', 't', flowSoln, 4) + G.add_edge("s", "a", capacity=2) + G.add_edge("s", "b", capacity=2) + G.add_edge("a", "b", capacity=5) + G.add_edge("a", "t", capacity=1) + G.add_edge("b", "a", capacity=1) + G.add_edge("b", "t", capacity=3) + flowSoln = { + "a": {"b": 1, "t": 1}, + "b": {"a": 0, "t": 3}, + "s": {"a": 2, "b": 2}, + "t": {}, + } + compare_flows_and_cuts(G, "s", "t", flowSoln, 4) def test_disconnected(self): G = nx.Graph() - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") G.remove_node(1) - assert_equal(nx.maximum_flow_value(G, 0, 3), 0) + assert nx.maximum_flow_value(G, 0, 3) == 0 flowSoln = {0: {}, 2: {3: 0}, 3: {2: 0}} compare_flows_and_cuts(G, 0, 3, flowSoln, 0) def test_source_target_not_in_graph(self): G = nx.Graph() - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") G.remove_node(0) for flow_func in all_funcs: - assert_raises(nx.NetworkXError, flow_func, G, 0, 3) - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + pytest.raises(nx.NetworkXError, flow_func, G, 0, 3) + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") G.remove_node(3) for flow_func in all_funcs: - assert_raises(nx.NetworkXError, flow_func, G, 0, 3) + 
pytest.raises(nx.NetworkXError, flow_func, G, 0, 3) def test_source_target_coincide(self): G = nx.Graph() G.add_node(0) for flow_func in all_funcs: - assert_raises(nx.NetworkXError, flow_func, G, 0, 0) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 0) def test_multigraphs_raise(self): G = nx.MultiGraph() M = nx.MultiDiGraph() G.add_edges_from([(0, 1), (1, 0)], capacity=True) for flow_func in all_funcs: - assert_raises(nx.NetworkXError, flow_func, G, 0, 0) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 0) class TestMaxFlowMinCutInterface: - def setup(self): G = nx.DiGraph() - G.add_edge('x', 'a', capacity=3.0) - G.add_edge('x', 'b', capacity=1.0) - G.add_edge('a', 'c', capacity=3.0) - G.add_edge('b', 'c', capacity=5.0) - G.add_edge('b', 'd', capacity=4.0) - G.add_edge('d', 'e', capacity=2.0) - G.add_edge('c', 'y', capacity=2.0) - G.add_edge('e', 'y', capacity=3.0) + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) self.G = G H = nx.DiGraph() H.add_edge(0, 1, capacity=1.0) @@ -382,34 +400,49 @@ def setup(self): self.H = H def test_flow_func_not_callable(self): - elements = ['this_should_be_callable', 10, set([1, 2, 3])] + elements = ["this_should_be_callable", 10, {1, 2, 3}] G = nx.Graph() - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") for flow_func in interface_funcs: for element in elements: - assert_raises(nx.NetworkXError, - flow_func, G, 0, 1, flow_func=element) - assert_raises(nx.NetworkXError, - flow_func, G, 0, 1, flow_func=element) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element) def test_flow_func_parameters(self): G = self.G fv = 3.0 for interface_func in interface_funcs: for flow_func in flow_funcs: - result = interface_func(G, 'x', 'y', flow_func=flow_func) + errmsg = ( + f"Assertion failed in function: {flow_func.__name__} " + f"in interface {interface_func.__name__}" + ) + result = interface_func(G, "x", "y", flow_func=flow_func) if interface_func in max_min_funcs: result = result[0] - assert_equal(fv, result, msg=msgi.format(flow_func.__name__, - interface_func.__name__)) + assert fv == result, errmsg def test_minimum_cut_no_cutoff(self): G = self.G for flow_func in flow_funcs: - assert_raises(nx.NetworkXError, nx.minimum_cut, G, 'x', 'y', - flow_func=flow_func, cutoff=1.0) - assert_raises(nx.NetworkXError, nx.minimum_cut_value, G, 'x', 'y', - flow_func=flow_func, cutoff=1.0) + pytest.raises( + nx.NetworkXError, + nx.minimum_cut, + G, + "x", + "y", + flow_func=flow_func, + cutoff=1.0, + ) + pytest.raises( + nx.NetworkXError, + nx.minimum_cut_value, + G, + "x", + "y", + flow_func=flow_func, + cutoff=1.0, + ) def test_kwargs(self): G = self.H @@ -420,33 +453,40 @@ def test_kwargs(self): ) for interface_func in interface_funcs: for flow_func, kwargs in to_test: + errmsg = ( + f"Assertion failed in function: {flow_func.__name__} " + f"in interface {interface_func.__name__}" + ) result = interface_func(G, 0, 2, flow_func=flow_func, **kwargs) if interface_func in max_min_funcs: result = result[0] - assert_equal(fv, result, msg=msgi.format(flow_func.__name__, - interface_func.__name__)) + assert fv == result, errmsg def 
test_kwargs_default_flow_func(self): G = self.H for interface_func in interface_funcs: - assert_raises(nx.NetworkXError, interface_func, - G, 0, 1, global_relabel_freq=2) + pytest.raises( + nx.NetworkXError, interface_func, G, 0, 1, global_relabel_freq=2 + ) def test_reusing_residual(self): G = self.G fv = 3.0 - s, t = 'x', 'y' - R = build_residual_network(G, 'capacity') + s, t = "x", "y" + R = build_residual_network(G, "capacity") for interface_func in interface_funcs: for flow_func in flow_funcs: + errmsg = ( + f"Assertion failed in function: {flow_func.__name__} " + f"in interface {interface_func.__name__}" + ) for i in range(3): - result = interface_func(G, 'x', 'y', flow_func=flow_func, - residual=R) + result = interface_func( + G, "x", "y", flow_func=flow_func, residual=R + ) if interface_func in max_min_funcs: result = result[0] - assert_equal(fv, result, - msg=msgi.format(flow_func.__name__, - interface_func.__name__)) + assert fv == result, errmsg # Tests specific to one algorithm @@ -454,9 +494,8 @@ def test_preflow_push_global_relabel_freq(): G = nx.DiGraph() G.add_edge(1, 2, capacity=1) R = preflow_push(G, 1, 2, global_relabel_freq=None) - assert_equal(R.graph['flow_value'], 1) - assert_raises(nx.NetworkXError, preflow_push, G, 1, 2, - global_relabel_freq=-1) + assert R.graph["flow_value"] == 1 + pytest.raises(nx.NetworkXError, preflow_push, G, 1, 2, global_relabel_freq=-1) def test_preflow_push_makes_enough_space(): @@ -465,7 +504,7 @@ def test_preflow_push_makes_enough_space(): nx.add_path(G, [0, 1, 3], capacity=1) nx.add_path(G, [1, 2, 3], capacity=1) R = preflow_push(G, 0, 3, value_only=False) - assert_equal(R.graph['flow_value'], 1) + assert R.graph["flow_value"] == 1 def test_shortest_augmenting_path_two_phase(): @@ -473,39 +512,37 @@ def test_shortest_augmenting_path_two_phase(): p = 1000 G = nx.DiGraph() for i in range(k): - G.add_edge('s', (i, 0), capacity=1) + G.add_edge("s", (i, 0), capacity=1) nx.add_path(G, ((i, j) for j in range(p)), capacity=1) - G.add_edge((i, p - 1), 't', capacity=1) - R = shortest_augmenting_path(G, 's', 't', two_phase=True) - assert_equal(R.graph['flow_value'], k) - R = shortest_augmenting_path(G, 's', 't', two_phase=False) - assert_equal(R.graph['flow_value'], k) + G.add_edge((i, p - 1), "t", capacity=1) + R = shortest_augmenting_path(G, "s", "t", two_phase=True) + assert R.graph["flow_value"] == k + R = shortest_augmenting_path(G, "s", "t", two_phase=False) + assert R.graph["flow_value"] == k class TestCutoff: - def test_cutoff(self): k = 5 p = 1000 G = nx.DiGraph() for i in range(k): - G.add_edge('s', (i, 0), capacity=2) + G.add_edge("s", (i, 0), capacity=2) nx.add_path(G, ((i, j) for j in range(p)), capacity=2) - G.add_edge((i, p - 1), 't', capacity=2) - R = shortest_augmenting_path(G, 's', 't', two_phase=True, cutoff=k) - ok_(k <= R.graph['flow_value'] <= 2 * k) - R = shortest_augmenting_path(G, 's', 't', two_phase=False, cutoff=k) - ok_(k <= R.graph['flow_value'] <= 2 * k) - R = edmonds_karp(G, 's', 't', cutoff=k) - ok_(k <= R.graph['flow_value'] <= 2 * k) + G.add_edge((i, p - 1), "t", capacity=2) + R = shortest_augmenting_path(G, "s", "t", two_phase=True, cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = shortest_augmenting_path(G, "s", "t", two_phase=False, cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = edmonds_karp(G, "s", "t", cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) def test_complete_graph_cutoff(self): G = nx.complete_graph(5) - nx.set_edge_attributes(G, {(u, v): 1 for u, v in 
G.edges()}, - 'capacity') + nx.set_edge_attributes(G, {(u, v): 1 for u, v in G.edges()}, "capacity") for flow_func in [shortest_augmenting_path, edmonds_karp]: for cutoff in [3, 2, 1]: - result = nx.maximum_flow_value(G, 0, 4, flow_func=flow_func, - cutoff=cutoff) - assert_equal(cutoff, result, - msg="cutoff error in {0}".format(flow_func.__name__)) + result = nx.maximum_flow_value( + G, 0, 4, flow_func=flow_func, cutoff=cutoff + ) + assert cutoff == result, f"cutoff error in {flow_func.__name__}" diff --git a/networkx/algorithms/flow/tests/test_maxflow_large_graph.py b/networkx/algorithms/flow/tests/test_maxflow_large_graph.py index 818a9e0..73f2e0a 100644 --- a/networkx/algorithms/flow/tests/test_maxflow_large_graph.py +++ b/networkx/algorithms/flow/tests/test_maxflow_large_graph.py @@ -1,13 +1,8 @@ -# -*- coding: utf-8 -*- """Maximum flow algorithms test suite on large graphs. """ -__author__ = """Loïc Séguin-C. """ -# Copyright (C) 2010 Loïc Séguin-C. -# All rights reserved. -# BSD license. import os -from nose.tools import * +import pytest import networkx as nx from networkx.algorithms.flow import build_flow_dict, build_residual_network @@ -16,6 +11,7 @@ from networkx.algorithms.flow import edmonds_karp from networkx.algorithms.flow import preflow_push from networkx.algorithms.flow import shortest_augmenting_path +from networkx.testing import almost_equal flow_funcs = [ boykov_kolmogorov, @@ -25,8 +21,6 @@ shortest_augmenting_path, ] -msg = "Assertion failed in function: {0}" - def gen_pyramid(N): # This graph admits a flow of value 1 for which every arc is at @@ -35,122 +29,118 @@ def gen_pyramid(N): G = nx.DiGraph() for i in range(N - 1): - cap = 1. / (i + 2) + cap = 1.0 / (i + 2) for j in range(i + 1): - G.add_edge((i, j), (i + 1, j), - capacity=cap) - cap = 1. / (i + 1) - cap - G.add_edge((i, j), (i + 1, j + 1), - capacity=cap) - cap = 1. 
/ (i + 2) - cap + G.add_edge((i, j), (i + 1, j), capacity=cap) + cap = 1.0 / (i + 1) - cap + G.add_edge((i, j), (i + 1, j + 1), capacity=cap) + cap = 1.0 / (i + 2) - cap for j in range(N): - G.add_edge((N - 1, j), 't') + G.add_edge((N - 1, j), "t") return G def read_graph(name): dirname = os.path.dirname(__file__) - path = os.path.join(dirname, name + '.gpickle.bz2') + path = os.path.join(dirname, name + ".gpickle.bz2") return nx.read_gpickle(path) def validate_flows(G, s, t, soln_value, R, flow_func): - flow_value = R.graph['flow_value'] + flow_value = R.graph["flow_value"] flow_dict = build_flow_dict(G, R) - assert_equal(soln_value, flow_value, msg=msg.format(flow_func.__name__)) - assert_equal(set(G), set(flow_dict), msg=msg.format(flow_func.__name__)) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert soln_value == flow_value, errmsg + assert set(G) == set(flow_dict), errmsg for u in G: - assert_equal(set(G[u]), set(flow_dict[u]), - msg=msg.format(flow_func.__name__)) + assert set(G[u]) == set(flow_dict[u]), errmsg excess = {u: 0 for u in flow_dict} for u in flow_dict: for v, flow in flow_dict[u].items(): - ok_(flow <= G[u][v].get('capacity', float('inf')), - msg=msg.format(flow_func.__name__)) - ok_(flow >= 0, msg=msg.format(flow_func.__name__)) + assert flow <= G[u][v].get("capacity", float("inf")), errmsg + assert flow >= 0, errmsg excess[u] -= flow excess[v] += flow for u, exc in excess.items(): if u == s: - assert_equal(exc, -soln_value, msg=msg.format(flow_func.__name__)) + assert exc == -soln_value, errmsg elif u == t: - assert_equal(exc, soln_value, msg=msg.format(flow_func.__name__)) + assert exc == soln_value, errmsg else: - assert_equal(exc, 0, msg=msg.format(flow_func.__name__)) + assert exc == 0, errmsg class TestMaxflowLargeGraph: - def test_complete_graph(self): N = 50 G = nx.complete_graph(N) - nx.set_edge_attributes(G, 5, 'capacity') - R = build_residual_network(G, 'capacity') + nx.set_edge_attributes(G, 5, "capacity") + R = build_residual_network(G, "capacity") kwargs = dict(residual=R) for flow_func in flow_funcs: - kwargs['flow_func'] = flow_func + kwargs["flow_func"] = flow_func + errmsg = f"Assertion failed in function: {flow_func.__name__}" flow_value = nx.maximum_flow_value(G, 1, 2, **kwargs) - assert_equal(flow_value, 5 * (N - 1), - msg=msg.format(flow_func.__name__)) + assert flow_value == 5 * (N - 1), errmsg def test_pyramid(self): N = 10 # N = 100 # this gives a graph with 5051 nodes G = gen_pyramid(N) - R = build_residual_network(G, 'capacity') + R = build_residual_network(G, "capacity") kwargs = dict(residual=R) for flow_func in flow_funcs: - kwargs['flow_func'] = flow_func - flow_value = nx.maximum_flow_value(G, (0, 0), 't', **kwargs) - assert_almost_equal(flow_value, 1., - msg=msg.format(flow_func.__name__)) + kwargs["flow_func"] = flow_func + errmsg = f"Assertion failed in function: {flow_func.__name__}" + flow_value = nx.maximum_flow_value(G, (0, 0), "t", **kwargs) + assert almost_equal(flow_value, 1.0), errmsg def test_gl1(self): - G = read_graph('gl1') + G = read_graph("gl1") s = 1 t = len(G) - R = build_residual_network(G, 'capacity') + R = build_residual_network(G, "capacity") kwargs = dict(residual=R) # do one flow_func to save time flow_func = flow_funcs[0] - validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs), - flow_func) -# for flow_func in flow_funcs: -# validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs), -# flow_func) + validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs), flow_func) + # for 
flow_func in flow_funcs: + # validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs), + # flow_func) + + @pytest.mark.slow def test_gw1(self): - G = read_graph('gw1') + G = read_graph("gw1") s = 1 t = len(G) - R = build_residual_network(G, 'capacity') + R = build_residual_network(G, "capacity") kwargs = dict(residual=R) for flow_func in flow_funcs: - validate_flows(G, s, t, 1202018, flow_func(G, s, t, **kwargs), - flow_func) + validate_flows(G, s, t, 1202018, flow_func(G, s, t, **kwargs), flow_func) def test_wlm3(self): - G = read_graph('wlm3') + G = read_graph("wlm3") s = 1 t = len(G) - R = build_residual_network(G, 'capacity') + R = build_residual_network(G, "capacity") kwargs = dict(residual=R) # do one flow_func to save time flow_func = flow_funcs[0] - validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs), - flow_func) -# for flow_func in flow_funcs: -# validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs), -# flow_func) + validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs), flow_func) + + # for flow_func in flow_funcs: + # validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs), + # flow_func) def test_preflow_push_global_relabel(self): - G = read_graph('gw1') + G = read_graph("gw1") R = preflow_push(G, 1, len(G), global_relabel_freq=50) - assert_equal(R.graph['flow_value'], 1202018) + assert R.graph["flow_value"] == 1202018 diff --git a/networkx/algorithms/flow/tests/test_mincost.py b/networkx/algorithms/flow/tests/test_mincost.py index e53fd50..10f7617 100644 --- a/networkx/algorithms/flow/tests/test_mincost.py +++ b/networkx/algorithms/flow/tests/test_mincost.py @@ -1,148 +1,145 @@ -# -*- coding: utf-8 -*- - import networkx as nx -from nose.tools import assert_equal, assert_raises +import pytest import os class TestMinCostFlow: def test_simple_digraph(self): G = nx.DiGraph() - G.add_node('a', demand=-5) - G.add_node('d', demand=5) - G.add_edge('a', 'b', weight=3, capacity=4) - G.add_edge('a', 'c', weight=6, capacity=10) - G.add_edge('b', 'd', weight=1, capacity=9) - G.add_edge('c', 'd', weight=2, capacity=5) + G.add_node("a", demand=-5) + G.add_node("d", demand=5) + G.add_edge("a", "b", weight=3, capacity=4) + G.add_edge("a", "c", weight=6, capacity=10) + G.add_edge("b", "d", weight=1, capacity=9) + G.add_edge("c", "d", weight=2, capacity=5) flowCost, H = nx.network_simplex(G) - soln = {'a': {'b': 4, 'c': 1}, - 'b': {'d': 4}, - 'c': {'d': 1}, - 'd': {}} - assert_equal(flowCost, 24) - assert_equal(nx.min_cost_flow_cost(G), 24) - assert_equal(H, soln) - assert_equal(nx.min_cost_flow(G), soln) - assert_equal(nx.cost_of_flow(G, H), 24) + soln = {"a": {"b": 4, "c": 1}, "b": {"d": 4}, "c": {"d": 1}, "d": {}} + assert flowCost == 24 + assert nx.min_cost_flow_cost(G) == 24 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 24 flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 24) - assert_equal(nx.cost_of_flow(G, H), 24) - assert_equal(H, soln) + assert flowCost == 24 + assert nx.cost_of_flow(G, H) == 24 + assert H == soln def test_negcycle_infcap(self): G = nx.DiGraph() - G.add_node('s', demand=-5) - G.add_node('t', demand=5) - G.add_edge('s', 'a', weight=1, capacity=3) - G.add_edge('a', 'b', weight=3) - G.add_edge('c', 'a', weight=-6) - G.add_edge('b', 'd', weight=1) - G.add_edge('d', 'c', weight=-2) - G.add_edge('d', 't', weight=1, capacity=3) - assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) - assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + G.add_node("s", demand=-5) + 
G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("c", "a", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("d", "c", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) def test_sum_demands_not_zero(self): G = nx.DiGraph() - G.add_node('s', demand=-5) - G.add_node('t', demand=4) - G.add_edge('s', 'a', weight=1, capacity=3) - G.add_edge('a', 'b', weight=3) - G.add_edge('a', 'c', weight=-6) - G.add_edge('b', 'd', weight=1) - G.add_edge('c', 'd', weight=-2) - G.add_edge('d', 't', weight=1, capacity=3) - assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) - assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G.add_node("s", demand=-5) + G.add_node("t", demand=4) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) def test_no_flow_satisfying_demands(self): G = nx.DiGraph() - G.add_node('s', demand=-5) - G.add_node('t', demand=5) - G.add_edge('s', 'a', weight=1, capacity=3) - G.add_edge('a', 'b', weight=3) - G.add_edge('a', 'c', weight=-6) - G.add_edge('b', 'd', weight=1) - G.add_edge('c', 'd', weight=-2) - G.add_edge('d', 't', weight=1, capacity=3) - assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) - assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) def test_transshipment(self): G = nx.DiGraph() - G.add_node('a', demand=1) - G.add_node('b', demand=-2) - G.add_node('c', demand=-2) - G.add_node('d', demand=3) - G.add_node('e', demand=-4) - G.add_node('f', demand=-4) - G.add_node('g', demand=3) - G.add_node('h', demand=2) - G.add_node('r', demand=3) - G.add_edge('a', 'c', weight=3) - G.add_edge('r', 'a', weight=2) - G.add_edge('b', 'a', weight=9) - G.add_edge('r', 'c', weight=0) - G.add_edge('b', 'r', weight=-6) - G.add_edge('c', 'd', weight=5) - G.add_edge('e', 'r', weight=4) - G.add_edge('e', 'f', weight=3) - G.add_edge('h', 'b', weight=4) - G.add_edge('f', 'd', weight=7) - G.add_edge('f', 'h', weight=12) - G.add_edge('g', 'd', weight=12) - G.add_edge('f', 'g', weight=-1) - G.add_edge('h', 'g', weight=-10) + G.add_node("a", demand=1) + G.add_node("b", demand=-2) + G.add_node("c", demand=-2) + G.add_node("d", demand=3) + G.add_node("e", demand=-4) + G.add_node("f", demand=-4) + G.add_node("g", demand=3) + G.add_node("h", demand=2) + G.add_node("r", demand=3) + G.add_edge("a", "c", weight=3) + G.add_edge("r", "a", weight=2) + G.add_edge("b", "a", weight=9) + G.add_edge("r", "c", weight=0) + G.add_edge("b", "r", weight=-6) + G.add_edge("c", "d", weight=5) + G.add_edge("e", "r", weight=4) + G.add_edge("e", "f", weight=3) + G.add_edge("h", "b", weight=4) + G.add_edge("f", "d", weight=7) + G.add_edge("f", "h", weight=12) + G.add_edge("g", "d", weight=12) + 
G.add_edge("f", "g", weight=-1) + G.add_edge("h", "g", weight=-10) flowCost, H = nx.network_simplex(G) - soln = {'a': {'c': 0}, - 'b': {'a': 0, 'r': 2}, - 'c': {'d': 3}, - 'd': {}, - 'e': {'r': 3, 'f': 1}, - 'f': {'d': 0, 'g': 3, 'h': 2}, - 'g': {'d': 0}, - 'h': {'b': 0, 'g': 0}, - 'r': {'a': 1, 'c': 1}} - assert_equal(flowCost, 41) - assert_equal(nx.min_cost_flow_cost(G), 41) - assert_equal(H, soln) - assert_equal(nx.min_cost_flow(G), soln) - assert_equal(nx.cost_of_flow(G, H), 41) + soln = { + "a": {"c": 0}, + "b": {"a": 0, "r": 2}, + "c": {"d": 3}, + "d": {}, + "e": {"r": 3, "f": 1}, + "f": {"d": 0, "g": 3, "h": 2}, + "g": {"d": 0}, + "h": {"b": 0, "g": 0}, + "r": {"a": 1, "c": 1}, + } + assert flowCost == 41 + assert nx.min_cost_flow_cost(G) == 41 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 41 flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 41) - assert_equal(nx.cost_of_flow(G, H), 41) - assert_equal(H, soln) + assert flowCost == 41 + assert nx.cost_of_flow(G, H) == 41 + assert H == soln def test_max_flow_min_cost(self): G = nx.DiGraph() - G.add_edge('s', 'a', bandwidth=6) - G.add_edge('s', 'c', bandwidth=10, cost=10) - G.add_edge('a', 'b', cost=6) - G.add_edge('b', 'd', bandwidth=8, cost=7) - G.add_edge('c', 'd', cost=10) - G.add_edge('d', 't', bandwidth=5, cost=5) - soln = {'s': {'a': 5, 'c': 0}, - 'a': {'b': 5}, - 'b': {'d': 5}, - 'c': {'d': 0}, - 'd': {'t': 5}, - 't': {}} - flow = nx.max_flow_min_cost(G, 's', 't', capacity='bandwidth', - weight='cost') - assert_equal(flow, soln) - assert_equal(nx.cost_of_flow(G, flow, weight='cost'), 90) - - G.add_edge('t', 's', cost=-100) - flowCost, flow = nx.capacity_scaling(G, capacity='bandwidth', - weight='cost') - G.remove_edge('t', 's') - assert_equal(flowCost, -410) - assert_equal(flow['t']['s'], 5) - del flow['t']['s'] - assert_equal(flow, soln) - assert_equal(nx.cost_of_flow(G, flow, weight='cost'), 90) + G.add_edge("s", "a", bandwidth=6) + G.add_edge("s", "c", bandwidth=10, cost=10) + G.add_edge("a", "b", cost=6) + G.add_edge("b", "d", bandwidth=8, cost=7) + G.add_edge("c", "d", cost=10) + G.add_edge("d", "t", bandwidth=5, cost=5) + soln = { + "s": {"a": 5, "c": 0}, + "a": {"b": 5}, + "b": {"d": 5}, + "c": {"d": 0}, + "d": {"t": 5}, + "t": {}, + } + flow = nx.max_flow_min_cost(G, "s", "t", capacity="bandwidth", weight="cost") + assert flow == soln + assert nx.cost_of_flow(G, flow, weight="cost") == 90 + + G.add_edge("t", "s", cost=-100) + flowCost, flow = nx.capacity_scaling(G, capacity="bandwidth", weight="cost") + G.remove_edge("t", "s") + assert flowCost == -410 + assert flow["t"]["s"] == 5 + del flow["t"]["s"] + assert flow == soln + assert nx.cost_of_flow(G, flow, weight="cost") == 90 def test_digraph1(self): # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. 
Applied @@ -151,39 +148,45 @@ def test_digraph1(self): G.add_node(1, demand=-20) G.add_node(4, demand=5) G.add_node(5, demand=15) - G.add_edges_from([(1, 2, {'capacity': 15, 'weight': 4}), - (1, 3, {'capacity': 8, 'weight': 4}), - (2, 3, {'weight': 2}), - (2, 4, {'capacity': 4, 'weight': 2}), - (2, 5, {'capacity': 10, 'weight': 6}), - (3, 4, {'capacity': 15, 'weight': 1}), - (3, 5, {'capacity': 5, 'weight': 3}), - (4, 5, {'weight': 2}), - (5, 3, {'capacity': 4, 'weight': 1})]) + G.add_edges_from( + [ + (1, 2, {"capacity": 15, "weight": 4}), + (1, 3, {"capacity": 8, "weight": 4}), + (2, 3, {"weight": 2}), + (2, 4, {"capacity": 4, "weight": 2}), + (2, 5, {"capacity": 10, "weight": 6}), + (3, 4, {"capacity": 15, "weight": 1}), + (3, 5, {"capacity": 5, "weight": 3}), + (4, 5, {"weight": 2}), + (5, 3, {"capacity": 4, "weight": 1}), + ] + ) flowCost, H = nx.network_simplex(G) - soln = {1: {2: 12, 3: 8}, - 2: {3: 8, 4: 4, 5: 0}, - 3: {4: 11, 5: 5}, - 4: {5: 10}, - 5: {3: 0}} - assert_equal(flowCost, 150) - assert_equal(nx.min_cost_flow_cost(G), 150) - assert_equal(H, soln) - assert_equal(nx.min_cost_flow(G), soln) - assert_equal(nx.cost_of_flow(G, H), 150) + soln = { + 1: {2: 12, 3: 8}, + 2: {3: 8, 4: 4, 5: 0}, + 3: {4: 11, 5: 5}, + 4: {5: 10}, + 5: {3: 0}, + } + assert flowCost == 150 + assert nx.min_cost_flow_cost(G) == 150 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 150 flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 150) - assert_equal(H, soln) - assert_equal(nx.cost_of_flow(G, H), 150) + assert flowCost == 150 + assert H == soln + assert nx.cost_of_flow(G, H) == 150 def test_digraph2(self): # Example from ticket #430 from mfrasca. Original source: # http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11. 
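        # Note the pattern used at the end of this test: adding an
        # uncapacitated t -> s arc with a large negative weight turns
        # max-flow-min-cost into a plain min-cost circulation, which
        # capacity_scaling can solve directly.  A minimal sketch of the
        # reduction (illustrative numbers only; assumes networkx
        # imported as nx):
        #
        #     >>> G = nx.DiGraph()
        #     >>> G.add_edge("s", "t", capacity=5, weight=1)
        #     >>> G.add_edge("t", "s", weight=-100)
        #     >>> flowCost, flow = nx.capacity_scaling(G)
        #     >>> flow["t"]["s"]  # circulation value == maximum s-t flow
        #     5
        #     >>> flowCost  # 5 * (1 - 100)
        #     -495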
G = nx.DiGraph() - G.add_edge('s', 1, capacity=12) - G.add_edge('s', 2, capacity=6) - G.add_edge('s', 3, capacity=14) + G.add_edge("s", 1, capacity=12) + G.add_edge("s", 2, capacity=6) + G.add_edge("s", 3, capacity=14) G.add_edge(1, 2, capacity=11, weight=4) G.add_edge(2, 3, capacity=9, weight=6) G.add_edge(1, 4, capacity=5, weight=5) @@ -193,28 +196,30 @@ def test_digraph2(self): G.add_edge(3, 6, capacity=31, weight=3) G.add_edge(4, 5, capacity=18, weight=4) G.add_edge(5, 6, capacity=9, weight=5) - G.add_edge(4, 't', capacity=3) - G.add_edge(5, 't', capacity=7) - G.add_edge(6, 't', capacity=22) - flow = nx.max_flow_min_cost(G, 's', 't') - soln = {1: {2: 6, 4: 5, 5: 1}, - 2: {3: 6, 5: 4, 6: 2}, - 3: {6: 20}, - 4: {5: 2, 't': 3}, - 5: {6: 0, 't': 7}, - 6: {'t': 22}, - 's': {1: 12, 2: 6, 3: 14}, - 't': {}} - assert_equal(flow, soln) - - G.add_edge('t', 's', weight=-100) + G.add_edge(4, "t", capacity=3) + G.add_edge(5, "t", capacity=7) + G.add_edge(6, "t", capacity=22) + flow = nx.max_flow_min_cost(G, "s", "t") + soln = { + 1: {2: 6, 4: 5, 5: 1}, + 2: {3: 6, 5: 4, 6: 2}, + 3: {6: 20}, + 4: {5: 2, "t": 3}, + 5: {6: 0, "t": 7}, + 6: {"t": 22}, + "s": {1: 12, 2: 6, 3: 14}, + "t": {}, + } + assert flow == soln + + G.add_edge("t", "s", weight=-100) flowCost, flow = nx.capacity_scaling(G) - G.remove_edge('t', 's') - assert_equal(flow['t']['s'], 32) - assert_equal(flowCost, -3007) - del flow['t']['s'] - assert_equal(flow, soln) - assert_equal(nx.cost_of_flow(G, flow), 193) + G.remove_edge("t", "s") + assert flow["t"]["s"] == 32 + assert flowCost == -3007 + del flow["t"]["s"] + assert flow == soln + assert nx.cost_of_flow(G, flow) == 193 def test_digraph3(self): """Combinatorial Optimization: Algorithms and Complexity, @@ -223,146 +228,152 @@ def test_digraph3(self): by mfrasca.""" G = nx.DiGraph() - G.add_edge('s', 'a') - G['s']['a'].update({0: 2, 1: 4}) - G.add_edge('s', 'b') - G['s']['b'].update({0: 2, 1: 1}) - G.add_edge('a', 'b') - G['a']['b'].update({0: 5, 1: 2}) - G.add_edge('a', 't') - G['a']['t'].update({0: 1, 1: 5}) - G.add_edge('b', 'a') - G['b']['a'].update({0: 1, 1: 3}) - G.add_edge('b', 't') - G['b']['t'].update({0: 3, 1: 2}) + G.add_edge("s", "a") + G["s"]["a"].update({0: 2, 1: 4}) + G.add_edge("s", "b") + G["s"]["b"].update({0: 2, 1: 1}) + G.add_edge("a", "b") + G["a"]["b"].update({0: 5, 1: 2}) + G.add_edge("a", "t") + G["a"]["t"].update({0: 1, 1: 5}) + G.add_edge("b", "a") + G["b"]["a"].update({0: 1, 1: 3}) + G.add_edge("b", "t") + G["b"]["t"].update({0: 3, 1: 2}) "PS.ex.7.1: testing main function" - sol = nx.max_flow_min_cost(G, 's', 't', capacity=0, weight=1) - flow = sum(v for v in sol['s'].values()) - assert_equal(4, flow) - assert_equal(23, nx.cost_of_flow(G, sol, weight=1)) - assert_equal(sol['s'], {'a': 2, 'b': 2}) - assert_equal(sol['a'], {'b': 1, 't': 1}) - assert_equal(sol['b'], {'a': 0, 't': 3}) - assert_equal(sol['t'], {}) - - G.add_edge('t', 's') - G['t']['s'].update({1: -100}) + sol = nx.max_flow_min_cost(G, "s", "t", capacity=0, weight=1) + flow = sum(v for v in sol["s"].values()) + assert 4 == flow + assert 23 == nx.cost_of_flow(G, sol, weight=1) + assert sol["s"] == {"a": 2, "b": 2} + assert sol["a"] == {"b": 1, "t": 1} + assert sol["b"] == {"a": 0, "t": 3} + assert sol["t"] == {} + + G.add_edge("t", "s") + G["t"]["s"].update({1: -100}) flowCost, sol = nx.capacity_scaling(G, capacity=0, weight=1) - G.remove_edge('t', 's') - flow = sum(v for v in sol['s'].values()) - assert_equal(4, flow) - assert_equal(sol['t']['s'], 4) - assert_equal(flowCost, -377) - del 
sol['t']['s'] - assert_equal(sol['s'], {'a': 2, 'b': 2}) - assert_equal(sol['a'], {'b': 1, 't': 1}) - assert_equal(sol['b'], {'a': 0, 't': 3}) - assert_equal(sol['t'], {}) - assert_equal(nx.cost_of_flow(G, sol, weight=1), 23) + G.remove_edge("t", "s") + flow = sum(v for v in sol["s"].values()) + assert 4 == flow + assert sol["t"]["s"] == 4 + assert flowCost == -377 + del sol["t"]["s"] + assert sol["s"] == {"a": 2, "b": 2} + assert sol["a"] == {"b": 1, "t": 1} + assert sol["b"] == {"a": 0, "t": 3} + assert sol["t"] == {} + assert nx.cost_of_flow(G, sol, weight=1) == 23 def test_zero_capacity_edges(self): """Address issue raised in ticket #617 by arv.""" G = nx.DiGraph() - G.add_edges_from([(1, 2, {'capacity': 1, 'weight': 1}), - (1, 5, {'capacity': 1, 'weight': 1}), - (2, 3, {'capacity': 0, 'weight': 1}), - (2, 5, {'capacity': 1, 'weight': 1}), - (5, 3, {'capacity': 2, 'weight': 1}), - (5, 4, {'capacity': 0, 'weight': 1}), - (3, 4, {'capacity': 2, 'weight': 1})]) - G.nodes[1]['demand'] = -1 - G.nodes[2]['demand'] = -1 - G.nodes[4]['demand'] = 2 + G.add_edges_from( + [ + (1, 2, {"capacity": 1, "weight": 1}), + (1, 5, {"capacity": 1, "weight": 1}), + (2, 3, {"capacity": 0, "weight": 1}), + (2, 5, {"capacity": 1, "weight": 1}), + (5, 3, {"capacity": 2, "weight": 1}), + (5, 4, {"capacity": 0, "weight": 1}), + (3, 4, {"capacity": 2, "weight": 1}), + ] + ) + G.nodes[1]["demand"] = -1 + G.nodes[2]["demand"] = -1 + G.nodes[4]["demand"] = 2 flowCost, H = nx.network_simplex(G) - soln = {1: {2: 0, 5: 1}, - 2: {3: 0, 5: 1}, - 3: {4: 2}, - 4: {}, - 5: {3: 2, 4: 0}} - assert_equal(flowCost, 6) - assert_equal(nx.min_cost_flow_cost(G), 6) - assert_equal(H, soln) - assert_equal(nx.min_cost_flow(G), soln) - assert_equal(nx.cost_of_flow(G, H), 6) + soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}} + assert flowCost == 6 + assert nx.min_cost_flow_cost(G) == 6 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 6 flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 6) - assert_equal(H, soln) - assert_equal(nx.cost_of_flow(G, H), 6) + assert flowCost == 6 + assert H == soln + assert nx.cost_of_flow(G, H) == 6 def test_digon(self): """Check if digons are handled properly. 
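        (A digon is a pair of arcs running in opposite directions
        between the same two nodes, each with its own capacity and
        weight.)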
Taken from ticket #618 by arv.""" - nodes = [(1, {}), - (2, {'demand': -4}), - (3, {'demand': 4}), - ] - edges = [(1, 2, {'capacity': 3, 'weight': 600000}), - (2, 1, {'capacity': 2, 'weight': 0}), - (2, 3, {'capacity': 5, 'weight': 714285}), - (3, 2, {'capacity': 2, 'weight': 0}), - ] + nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})] + edges = [ + (1, 2, {"capacity": 3, "weight": 600000}), + (2, 1, {"capacity": 2, "weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] G = nx.DiGraph(edges) G.add_nodes_from(nodes) flowCost, H = nx.network_simplex(G) - soln = {1: {2: 0}, - 2: {1: 0, 3: 4}, - 3: {2: 0}} - assert_equal(flowCost, 2857140) - assert_equal(nx.min_cost_flow_cost(G), 2857140) - assert_equal(H, soln) - assert_equal(nx.min_cost_flow(G), soln) - assert_equal(nx.cost_of_flow(G, H), 2857140) + soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}} + assert flowCost == 2857140 + assert nx.min_cost_flow_cost(G) == 2857140 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 2857140 flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 2857140) - assert_equal(H, soln) - assert_equal(nx.cost_of_flow(G, H), 2857140) + assert flowCost == 2857140 + assert H == soln + assert nx.cost_of_flow(G, H) == 2857140 + + def test_deadend(self): + """Check if one-node cycles are handled properly. Taken from ticket + #2906 from @sshraven.""" + G = nx.DiGraph() + + G.add_nodes_from(range(5), demand=0) + G.nodes[4]["demand"] = -13 + G.nodes[3]["demand"] = 13 + + G.add_edges_from([(0, 2), (0, 3), (2, 1)], capacity=20, weight=0.1) + pytest.raises(nx.NetworkXUnfeasible, nx.min_cost_flow, G) def test_infinite_capacity_neg_digon(self): """An infinite capacity negative cost digon results in an unbounded instance.""" - nodes = [(1, {}), - (2, {'demand': -4}), - (3, {'demand': 4}), - ] - edges = [(1, 2, {'weight': -600}), - (2, 1, {'weight': 0}), - (2, 3, {'capacity': 5, 'weight': 714285}), - (3, 2, {'capacity': 2, 'weight': 0}), - ] + nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})] + edges = [ + (1, 2, {"weight": -600}), + (2, 1, {"weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] G = nx.DiGraph(edges) G.add_nodes_from(nodes) - assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G) - assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) def test_finite_capacity_neg_digon(self): """The digon should receive the maximum amount of flow it can handle. 
Taken from ticket #749 by @chuongdo.""" G = nx.DiGraph() - G.add_edge('a', 'b', capacity=1, weight=-1) - G.add_edge('b', 'a', capacity=1, weight=-1) + G.add_edge("a", "b", capacity=1, weight=-1) + G.add_edge("b", "a", capacity=1, weight=-1) min_cost = -2 - assert_equal(nx.min_cost_flow_cost(G), min_cost) + assert nx.min_cost_flow_cost(G) == min_cost flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, -2) - assert_equal(H, {'a': {'b': 1}, 'b': {'a': 1}}) - assert_equal(nx.cost_of_flow(G, H), -2) + assert flowCost == -2 + assert H == {"a": {"b": 1}, "b": {"a": 1}} + assert nx.cost_of_flow(G, H) == -2 def test_multidigraph(self): """Multidigraphs are acceptable.""" G = nx.MultiDiGraph() - G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight='capacity') + G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight="capacity") flowCost, H = nx.network_simplex(G) - assert_equal(flowCost, 0) - assert_equal(H, {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}}) + assert flowCost == 0 + assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}} flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 0) - assert_equal(H, {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}}) + assert flowCost == 0 + assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}} def test_negative_selfloops(self): """Negative selfloops should cause an exception if uncapacitated and @@ -370,28 +381,28 @@ def test_negative_selfloops(self): """ G = nx.DiGraph() G.add_edge(1, 1, weight=-1) - assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G) - assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) - G[1][1]['capacity'] = 2 + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + G[1][1]["capacity"] = 2 flowCost, H = nx.network_simplex(G) - assert_equal(flowCost, -2) - assert_equal(H, {1: {1: 2}}) + assert flowCost == -2 + assert H == {1: {1: 2}} flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, -2) - assert_equal(H, {1: {1: 2}}) + assert flowCost == -2 + assert H == {1: {1: 2}} G = nx.MultiDiGraph() - G.add_edge(1, 1, 'x', weight=-1) - G.add_edge(1, 1, 'y', weight=1) - assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G) - assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) - G[1][1]['x']['capacity'] = 2 + G.add_edge(1, 1, "x", weight=-1) + G.add_edge(1, 1, "y", weight=1) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + G[1][1]["x"]["capacity"] = 2 flowCost, H = nx.network_simplex(G) - assert_equal(flowCost, -2) - assert_equal(H, {1: {1: {'x': 2, 'y': 0}}}) + assert flowCost == -2 + assert H == {1: {1: {"x": 2, "y": 0}}} flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, -2) - assert_equal(H, {1: {1: {'x': 2, 'y': 0}}}) + assert flowCost == -2 + assert H == {1: {1: {"x": 2, "y": 0}}} def test_bone_shaped(self): # From #1283 @@ -408,51 +419,49 @@ def test_bone_shaped(self): G.add_edge(5, 3, capacity=4) G.add_edge(0, 3, capacity=0) flowCost, H = nx.network_simplex(G) - assert_equal(flowCost, 0) - assert_equal( - H, {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}) + assert flowCost == 0 + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} flowCost, H = nx.capacity_scaling(G) - assert_equal(flowCost, 0) - assert_equal( - H, {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}) + assert flowCost == 0 + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} def 
test_exceptions(self): G = nx.Graph() - assert_raises(nx.NetworkXNotImplemented, nx.network_simplex, G) - assert_raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G) + pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G) + pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G) G = nx.MultiGraph() - assert_raises(nx.NetworkXNotImplemented, nx.network_simplex, G) - assert_raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G) + pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G) + pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G) G = nx.DiGraph() - assert_raises(nx.NetworkXError, nx.network_simplex, G) - assert_raises(nx.NetworkXError, nx.capacity_scaling, G) - G.add_node(0, demand=float('inf')) - assert_raises(nx.NetworkXError, nx.network_simplex, G) - assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) - G.nodes[0]['demand'] = 0 + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + pytest.raises(nx.NetworkXError, nx.capacity_scaling, G) + G.add_node(0, demand=float("inf")) + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G.nodes[0]["demand"] = 0 G.add_node(1, demand=0) - G.add_edge(0, 1, weight=-float('inf')) - assert_raises(nx.NetworkXError, nx.network_simplex, G) - assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) - G[0][1]['weight'] = 0 - G.add_edge(0, 0, weight=float('inf')) - assert_raises(nx.NetworkXError, nx.network_simplex, G) - #assert_raises(nx.NetworkXError, nx.capacity_scaling, G) - G[0][0]['weight'] = 0 - G[0][1]['capacity'] = -1 - assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) - #assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) - G[0][1]['capacity'] = 0 - G[0][0]['capacity'] = -1 - assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) - #assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G.add_edge(0, 1, weight=-float("inf")) + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G[0][1]["weight"] = 0 + G.add_edge(0, 0, weight=float("inf")) + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G) + G[0][0]["weight"] = 0 + G[0][1]["capacity"] = -1 + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G[0][1]["capacity"] = 0 + G[0][0]["capacity"] = -1 + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) def test_large(self): - fname = os.path.join(os.path.dirname(__file__), 'netgen-2.gpickle.bz2') + fname = os.path.join(os.path.dirname(__file__), "netgen-2.gpickle.bz2") G = nx.read_gpickle(fname) flowCost, flowDict = nx.network_simplex(G) - assert_equal(6749969302, flowCost) - assert_equal(6749969302, nx.cost_of_flow(G, flowDict)) + assert 6749969302 == flowCost + assert 6749969302 == nx.cost_of_flow(G, flowDict) flowCost, flowDict = nx.capacity_scaling(G) - assert_equal(6749969302, flowCost) - assert_equal(6749969302, nx.cost_of_flow(G, flowDict)) + assert 6749969302 == flowCost + assert 6749969302 == nx.cost_of_flow(G, flowDict) diff --git a/networkx/algorithms/flow/utils.py b/networkx/algorithms/flow/utils.py index aea178c..a0b8309 100644 --- a/networkx/algorithms/flow/utils.py +++ b/networkx/algorithms/flow/utils.py @@ -1,25 +1,26 @@ -# -*- coding: utf-8 -*- """ Utility classes and functions for network flow 
algorithms. """ -__author__ = """ysitu """ -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. - from collections import deque import networkx as nx -__all__ = ['CurrentEdge', 'Level', 'GlobalRelabelThreshold', - 'build_residual_network', 'detect_unboundedness', 'build_flow_dict'] +__all__ = [ + "CurrentEdge", + "Level", + "GlobalRelabelThreshold", + "build_residual_network", + "detect_unboundedness", + "build_flow_dict", +] -class CurrentEdge(object): +class CurrentEdge: """Mechanism for iterating over out-edges incident to a node in a circular manner. StopIteration exception is raised when wraparound occurs. """ - __slots__ = ('_edges', '_it', '_curr') + + __slots__ = ("_edges", "_it", "_curr") def __init__(self, edges): self._edges = edges @@ -41,23 +42,24 @@ def _rewind(self): self._curr = next(self._it) -class Level(object): +class Level: """Active and inactive nodes in a level. """ - __slots__ = ('active', 'inactive') + + __slots__ = ("active", "inactive") def __init__(self): self.active = set() self.inactive = set() -class GlobalRelabelThreshold(object): +class GlobalRelabelThreshold: """Measurement of work before the global relabeling heuristic should be applied. """ def __init__(self, n, m, freq): - self._threshold = (n + m) / freq if freq else float('inf') + self._threshold = (n + m) / freq if freq else float("inf") self._work = 0 def add_work(self, work): @@ -96,16 +98,18 @@ def build_residual_network(G, capacity): """ if G.is_multigraph(): - raise nx.NetworkXError( - 'MultiGraph and MultiDiGraph not supported (yet).') + raise nx.NetworkXError("MultiGraph and MultiDiGraph not supported (yet).") R = nx.DiGraph() R.add_nodes_from(G) - inf = float('inf') + inf = float("inf") # Extract edges with positive capacities. Self loops excluded. - edge_list = [(u, v, attr) for u, v, attr in G.edges(data=True) - if u != v and attr.get(capacity, inf) > 0] + edge_list = [ + (u, v, attr) + for u, v, attr in G.edges(data=True) + if u != v and attr.get(capacity, inf) > 0 + ] # Simulate infinity with three times the sum of the finite edge capacities # or any positive value if the sum is zero. This allows the # infinite-capacity edges to be distinguished for unboundedness detection @@ -116,8 +120,15 @@ def build_residual_network(G, capacity): # finite-capacity edge is at most 1/3 of inf, if an operation moves more # than 1/3 of inf units of flow to t, there must be an infinite-capacity # s-t path in G. - inf = 3 * sum(attr[capacity] for u, v, attr in edge_list - if capacity in attr and attr[capacity] != inf) or 1 + inf = ( + 3 + * sum( + attr[capacity] + for u, v, attr in edge_list + if capacity in attr and attr[capacity] != inf + ) + or 1 + ) if G.is_directed(): for u, v, attr in edge_list: r = min(attr.get(capacity, inf), inf) @@ -128,7 +139,7 @@ def build_residual_network(G, capacity): R.add_edge(v, u, capacity=0) else: # The edge (u, v) was added when (v, u) was visited. - R[u][v]['capacity'] = r + R[u][v]["capacity"] = r else: for u, v, attr in edge_list: # Add a pair of edges with equal residual capacities. @@ -137,7 +148,7 @@ def build_residual_network(G, capacity): R.add_edge(v, u, capacity=r) # Record the value simulating infinity. - R.graph['inf'] = inf + R.graph["inf"] = inf return R @@ -146,15 +157,16 @@ def detect_unboundedness(R, s, t): """Detect an infinite-capacity s-t path in R. 
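+
+    Performs a breadth-first search from ``s`` that only follows
+    residual edges whose capacity equals the sentinel value stored in
+    ``R.graph['inf']``; reaching ``t`` along such a path means the
+    flow value is unbounded above.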
""" q = deque([s]) - seen = set([s]) - inf = R.graph['inf'] + seen = {s} + inf = R.graph["inf"] while q: u = q.popleft() for v, attr in R[u].items(): - if attr['capacity'] == inf and v not in seen: + if attr["capacity"] == inf and v not in seen: if v == t: raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + "Infinite capacity path, flow unbounded above." + ) seen.add(v) q.append(v) @@ -165,6 +177,7 @@ def build_flow_dict(G, R): flow_dict = {} for u in G: flow_dict[u] = {v: 0 for v in G[u]} - flow_dict[u].update((v, attr['flow']) for v, attr in R[u].items() - if attr['flow'] > 0) + flow_dict[u].update( + (v, attr["flow"]) for v, attr in R[u].items() if attr["flow"] > 0 + ) return flow_dict diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py new file mode 100644 index 0000000..a03553a --- /dev/null +++ b/networkx/algorithms/graph_hashing.py @@ -0,0 +1,151 @@ +""" +Functions for hashing graphs to strings. +Isomorphic graphs should be assigned identical hashes. +For now, only Weisfeiler-Lehman hashing is implemented. +""" + +from collections import Counter +from hashlib import blake2b + +__all__ = ["weisfeiler_lehman_graph_hash"] + + +def weisfeiler_lehman_graph_hash( + G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 +): + """Return Weisfeiler Lehman (WL) graph hash. + + The function iteratively aggregates and hashes neighbourhoods of each node. + After each node's neighbors are hashed to obtain updated node labels, + a hashed histogram of resulting labels is returned as the final hash. + + Hashes are identical for isomorphic graphs and strong guarantees that + non-isomorphic graphs will get different hashes. See [1] for details. + + Note: Similarity between hashes does not imply similarity between graphs. + + If no node or edge attributes are provided, the degree of each node + is used as its initial label. + Otherwise, node and/or edge labels are used to compute the hash. + + Parameters + ---------- + G: graph + The graph to be hashed. + Can have node and/or edge attributes. Can also have no attributes. + edge_attr: string + The key in edge attribute dictionary to be used for hashing. + If None, edge labels are ignored. + node_attr: string + The key in node attribute dictionary to be used for hashing. + If None, and no edge_attr given, use + degree of node as label. + iterations: int + Number of neighbor aggregations to perform. + Should be larger for larger graphs. + digest_size: int + Size of blake2b hash digest to use for hashing node labels. + + Returns + ------- + h : string + Hexadecimal string corresponding to hash of the input graph. + + Examples + -------- + Two graphs with edge attributes that are isomorphic, except for + differences in the edge labels. + + >>> G1 = nx.Graph() + >>> G1.add_edges_from( + ... [ + ... (1, 2, {"label": "A"}), + ... (2, 3, {"label": "A"}), + ... (3, 1, {"label": "A"}), + ... (1, 4, {"label": "B"}), + ... ] + ... ) + >>> G2 = nx.Graph() + >>> G2.add_edges_from( + ... [ + ... (5, 6, {"label": "B"}), + ... (6, 7, {"label": "A"}), + ... (7, 5, {"label": "A"}), + ... (7, 8, {"label": "A"}), + ... ] + ... ) + + Omitting the `edge_attr` option, results in identical hashes. + + >>> weisfeiler_lehman_graph_hash(G1) + '0db442538bb6dc81d675bd94e6ebb7ca' + >>> weisfeiler_lehman_graph_hash(G2) + '0db442538bb6dc81d675bd94e6ebb7ca' + + With edge labels, the graphs are no longer assigned + the same hash digest. 
+ + >>> weisfeiler_lehman_graph_hash(G1, edge_attr="label") + '408c18537e67d3e56eb7dc92c72cb79e' + >>> weisfeiler_lehman_graph_hash(G2, edge_attr="label") + 'f9e9cb01c6d2f3b17f83ffeaa24e5986' + + References + ------- + .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen, + Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman + Graph Kernels. Journal of Machine Learning Research. 2011. + http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf + """ + + def neighborhood_aggregate(G, node, node_labels, edge_attr=None): + """ + Compute new labels for given node by aggregating + the labels of each node's neighbors. + """ + label_list = [node_labels[node]] + for nei in G.neighbors(node): + prefix = "" if not edge_attr else G[node][nei][edge_attr] + label_list.append(prefix + node_labels[nei]) + return "".join(sorted(label_list)) + + def weisfeiler_lehman_step(G, labels, edge_attr=None, node_attr=None): + """ + Apply neighborhood aggregation to each node + in the graph. + Computes a dictionary with labels for each node. + """ + new_labels = dict() + for node in G.nodes(): + new_labels[node] = neighborhood_aggregate( + G, node, labels, edge_attr=edge_attr + ) + return new_labels + + items = [] + node_labels = dict() + # set initial node labels + for node in G.nodes(): + if (not node_attr) and (not edge_attr): + node_labels[node] = str(G.degree(node)) + elif node_attr: + node_labels[node] = str(G.nodes[node][node_attr]) + else: + node_labels[node] = "" + + for k in range(iterations): + node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr) + counter = Counter() + # count node labels + for node, d in node_labels.items(): + h = blake2b(digest_size=digest_size) + h.update(d.encode("ascii")) + counter.update([h.hexdigest()]) + # sort the counter, extend total counts + items.extend(sorted(counter.items(), key=lambda x: x[0])) + + # hash the final counter + h = blake2b(digest_size=digest_size) + h.update(str(tuple(items)).encode("ascii")) + h = h.hexdigest() + return h diff --git a/networkx/algorithms/graphical.py b/networkx/algorithms/graphical.py index 4de452e..1931db5 100644 --- a/networkx/algorithms/graphical.py +++ b/networkx/algorithms/graphical.py @@ -1,31 +1,19 @@ -# -*- coding: utf-8 -*- """Test sequences for graphiness. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import heapq import networkx as nx -__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult (dschult@colgate.edu)' - 'Joel Miller (joel.c.miller.research@gmail.com)' - 'Ben Edwards' - 'Brian Cloteaux ']) - -__all__ = ['is_graphical', - 'is_multigraphical', - 'is_pseudographical', - 'is_digraphical', - 'is_valid_degree_sequence_erdos_gallai', - 'is_valid_degree_sequence_havel_hakimi', - ] - - -def is_graphical(sequence, method='eg'): + +__all__ = [ + "is_graphical", + "is_multigraphical", + "is_pseudographical", + "is_digraphical", + "is_valid_degree_sequence_erdos_gallai", + "is_valid_degree_sequence_havel_hakimi", +] + + +def is_graphical(sequence, method="eg"): """Returns True if sequence is a valid degree sequence. A degree sequence is valid if some graph can realize it. @@ -35,7 +23,7 @@ def is_graphical(sequence, method='eg'): sequence : list or iterable container A sequence of integer node degrees - method : "eg" | "hh" + method : "eg" | "hh" (default: 'eg') The method used to validate the degree sequence. 
"eg" corresponds to the Erdős-Gallai algorithm, and "hh" to the Havel-Hakimi algorithm. @@ -60,9 +48,9 @@ def is_graphical(sequence, method='eg'): Havel-Hakimi [havel1955]_, [hakimi1962]_, [CL1996]_ """ - if method == 'eg': + if method == "eg": valid = is_valid_degree_sequence_erdos_gallai(list(sequence)) - elif method == 'hh': + elif method == "hh": valid = is_valid_degree_sequence_havel_hakimi(list(sequence)) else: msg = "`method` must be 'eg' or 'hh'" @@ -72,8 +60,7 @@ def is_graphical(sequence, method='eg'): def _basic_graphical_tests(deg_sequence): # Sort and perform some simple tests on the sequence - if not nx.utils.is_list_of_ints(deg_sequence): - raise nx.NetworkXUnfeasible + deg_sequence = nx.utils.make_list_of_ints(deg_sequence) p = len(deg_sequence) num_degs = [0] * p dmax, dmin, dsum, n = 0, p, 0, 0 @@ -225,12 +212,12 @@ def is_valid_degree_sequence_erdos_gallai(deg_sequence): # Perform the EG checks using the reformulation of Zverovich and Zverovich k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0 for dk in range(dmax, dmin - 1, -1): - if dk < k + 1: # Check if already past Durfee index + if dk < k + 1: # Check if already past Durfee index return True if num_degs[dk] > 0: run_size = num_degs[dk] # Process a run of identical-valued degrees - if dk < k + run_size: # Check if end of run is past Durfee index - run_size = dk - k # Adjust back to Durfee index + if dk < k + run_size: # Check if end of run is past Durfee index + run_size = dk - k # Adjust back to Durfee index sum_deg += run_size * dk for v in range(run_size): sum_nj += num_degs[k + v] @@ -264,8 +251,9 @@ def is_multigraphical(sequence): degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506 (1962). """ - deg_sequence = list(sequence) - if not nx.utils.is_list_of_ints(deg_sequence): + try: + deg_sequence = nx.utils.make_list_of_ints(sequence) + except nx.NetworkXError: return False dsum, dmax = 0, 0 for d in deg_sequence: @@ -303,10 +291,11 @@ def is_pseudographical(sequence): and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12), pp. 778-782 (1976). """ - s = list(sequence) - if not nx.utils.is_list_of_ints(s): + try: + deg_sequence = nx.utils.make_list_of_ints(sequence) + except nx.NetworkXError: return False - return sum(s) % 2 == 0 and min(s) >= 0 + return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0 def is_digraphical(in_sequence, out_sequence): @@ -338,11 +327,10 @@ def is_digraphical(in_sequence, out_sequence): Algorithms for Constructing Graphs and Digraphs with Given Valences and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973) """ - in_deg_sequence = list(in_sequence) - out_deg_sequence = list(out_sequence) - if not nx.utils.is_list_of_ints(in_deg_sequence): - return False - if not nx.utils.is_list_of_ints(out_deg_sequence): + try: + in_deg_sequence = nx.utils.make_list_of_ints(in_sequence) + out_deg_sequence = nx.utils.make_list_of_ints(out_sequence) + except nx.NetworkXError: return False # Process the sequences and form two heaps to store degree pairs with # either zero or non-zero out degrees diff --git a/networkx/algorithms/hierarchy.py b/networkx/algorithms/hierarchy.py index a6a8262..fcf25bd 100644 --- a/networkx/algorithms/hierarchy.py +++ b/networkx/algorithms/hierarchy.py @@ -1,16 +1,9 @@ -# -*- coding: utf-8 -*- """ Flow Hierarchy. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
import networkx as nx -__authors__ = "\n".join(['Ben Edwards (bedwards@cs.unm.edu)']) -__all__ = ['flow_hierarchy'] + +__all__ = ["flow_hierarchy"] def flow_hierarchy(G, weight=None): @@ -49,6 +42,6 @@ def flow_hierarchy(G, weight=None): http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf """ if not G.is_directed(): - raise nx.NetworkXError("G must be a digraph in flow_heirarchy") + raise nx.NetworkXError("G must be a digraph in flow_hierarchy") scc = nx.strongly_connected_components(G) - return 1. - sum(G.subgraph(c).size(weight) for c in scc) / float(G.size(weight)) + return 1.0 - sum(G.subgraph(c).size(weight) for c in scc) / float(G.size(weight)) diff --git a/networkx/algorithms/hybrid.py b/networkx/algorithms/hybrid.py index 56a42cd..58868e8 100644 --- a/networkx/algorithms/hybrid.py +++ b/networkx/algorithms/hybrid.py @@ -1,13 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) and Dan Schult (dschult@colgate.edu) -# """ Provides functions for finding and testing for locally `(k, l)`-connected graphs. @@ -16,7 +6,7 @@ import copy import networkx as nx -__all__ = ['kl_connected_subgraph', 'is_kl_connected'] +__all__ = ["kl_connected_subgraph", "is_kl_connected"] def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): @@ -69,7 +59,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): 2004. 89--104. """ - H = copy.deepcopy(G) # subgraph we construct by removing from G + H = copy.deepcopy(G) # subgraph we construct by removing from G graphOK = True deleted_some = True # hack to start off the while loop @@ -83,7 +73,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): (u, v) = edge # Get copy of graph needed for this search if low_memory: - verts = set([u, v]) + verts = {u, v} for i in range(k): for w in verts.copy(): verts.update(G[w]) @@ -105,7 +95,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): if prev != w: G2.remove_edge(prev, w) prev = w -# path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? + # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? try: path = nx.shortest_path(G2, u, v) # ??? should "Cutoff" be k+1? except nx.NetworkXNoPath: @@ -168,7 +158,7 @@ def is_kl_connected(G, k, l, low_memory=False): (u, v) = edge # Get copy of graph needed for this search if low_memory: - verts = set([u, v]) + verts = {u, v} for i in range(k): [verts.update(G.neighbors(w)) for w in verts.copy()] G2 = G.subgraph(verts) @@ -189,7 +179,7 @@ def is_kl_connected(G, k, l, low_memory=False): if w != prev: G2.remove_edge(prev, w) prev = w -# path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? + # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? try: path = nx.shortest_path(G2, u, v) # ??? should "Cutoff" be k+1? except nx.NetworkXNoPath: diff --git a/networkx/algorithms/isolate.py b/networkx/algorithms/isolate.py index 69da2f4..e81e722 100644 --- a/networkx/algorithms/isolate.py +++ b/networkx/algorithms/isolate.py @@ -1,20 +1,8 @@ -# -*- encoding: utf-8 -*- -# Copyright 2015 NetworkX developers. -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """ Functions for identifying isolate (degree zero) nodes. 
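+
+For directed graphs the degree used here is the total degree, so an
+isolate has neither incoming nor outgoing edges.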
""" -import networkx as nx -__author__ = """\n""".join(['Drew Conway ', - 'Aric Hagberg ']) - -__all__ = ['is_isolate', 'isolates', 'number_of_isolates'] +__all__ = ["is_isolate", "isolates", "number_of_isolates"] def is_isolate(G, n): @@ -38,12 +26,12 @@ def is_isolate(G, n): Examples -------- - >>> G=nx.Graph() - >>> G.add_edge(1,2) + >>> G = nx.Graph() + >>> G.add_edge(1, 2) >>> G.add_node(3) - >>> nx.is_isolate(G,2) + >>> nx.is_isolate(G, 2) False - >>> nx.is_isolate(G,3) + >>> nx.is_isolate(G, 3) True """ return G.degree(n) == 0 diff --git a/networkx/algorithms/isomorphism/__init__.py b/networkx/algorithms/isomorphism/__init__.py index 0c441f5..ddcedea 100644 --- a/networkx/algorithms/isomorphism/__init__.py +++ b/networkx/algorithms/isomorphism/__init__.py @@ -2,3 +2,5 @@ from networkx.algorithms.isomorphism.vf2userfunc import * from networkx.algorithms.isomorphism.matchhelpers import * from networkx.algorithms.isomorphism.temporalisomorphvf2 import * +from networkx.algorithms.isomorphism.ismags import * +from networkx.algorithms.isomorphism.tree_isomorphism import * diff --git a/networkx/algorithms/isomorphism/ismags.py b/networkx/algorithms/isomorphism/ismags.py new file mode 100644 index 0000000..d058361 --- /dev/null +++ b/networkx/algorithms/isomorphism/ismags.py @@ -0,0 +1,1153 @@ +""" +**************** +ISMAGS Algorithm +**************** + +Provides a Python implementation of the ISMAGS algorithm. [1]_ + +It is capable of finding (subgraph) isomorphisms between two graphs, taking the +symmetry of the subgraph into account. In most cases the VF2 algorithm is +faster (at least on small graphs) than this implementation, but in some cases +there is an exponential number of isomorphisms that are symmetrically +equivalent. In that case, the ISMAGS algorithm will provide only one solution +per symmetry group. + +>>> petersen = nx.petersen_graph() +>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen) +>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False)) +>>> len(isomorphisms) +120 +>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True)) +>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}] +>>> answer == isomorphisms +True + +In addition, this implementation also provides an interface to find the +largest common induced subgraph [2]_ between any two graphs, again taking +symmetry into account. Given `graph` and `subgraph` the algorithm will remove +nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of +`graph`. Since only the symmetry of `subgraph` is taken into account it is +worth thinking about how you provide your graphs: + +>>> graph1 = nx.path_graph(4) +>>> graph2 = nx.star_graph(3) +>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2) +>>> ismags.is_isomorphic() +False +>>> largest_common_subgraph = list(ismags.largest_common_subgraph()) +>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}] +>>> answer == largest_common_subgraph +True +>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1) +>>> largest_common_subgraph = list(ismags2.largest_common_subgraph()) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 0: 1, 3: 2}, +... {2: 0, 0: 1, 1: 2}, +... {2: 0, 0: 1, 3: 2}, +... {3: 0, 0: 1, 1: 2}, +... {3: 0, 0: 1, 2: 2}, +... ] +>>> answer == largest_common_subgraph +True + +However, when not taking symmetry into account, it doesn't matter: + +>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False)) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 2: 1, 0: 2}, +... 
{2: 0, 1: 1, 3: 2}, +... {2: 0, 3: 1, 1: 2}, +... {1: 0, 0: 1, 2: 3}, +... {1: 0, 2: 1, 0: 3}, +... {2: 0, 1: 1, 3: 3}, +... {2: 0, 3: 1, 1: 3}, +... {1: 0, 0: 2, 2: 3}, +... {1: 0, 2: 2, 0: 3}, +... {2: 0, 1: 2, 3: 3}, +... {2: 0, 3: 2, 1: 3}, +... ] +>>> answer == largest_common_subgraph +True +>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False)) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 0: 1, 3: 2}, +... {2: 0, 0: 1, 1: 2}, +... {2: 0, 0: 1, 3: 2}, +... {3: 0, 0: 1, 1: 2}, +... {3: 0, 0: 1, 2: 2}, +... {1: 1, 0: 2, 2: 3}, +... {1: 1, 0: 2, 3: 3}, +... {2: 1, 0: 2, 1: 3}, +... {2: 1, 0: 2, 3: 3}, +... {3: 1, 0: 2, 1: 3}, +... {3: 1, 0: 2, 2: 3}, +... ] +>>> answer == largest_common_subgraph +True + +Notes +----- + - The current implementation works for undirected graphs only. The algorithm + in general should work for directed graphs as well though. + - Node keys for both provided graphs need to be fully orderable as well as + hashable. + - Node and edge equality is assumed to be transitive: if A is equal to B, and + B is equal to C, then A is equal to C. + +References +---------- + .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle, + M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General + Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph + Enumeration", PLoS One 9(5): e97896, 2014. + https://doi.org/10.1371/journal.pone.0097896 + .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph +""" + +__all__ = ["ISMAGS"] + +from collections import defaultdict, Counter +from functools import reduce, wraps +import itertools + + +def are_all_equal(iterable): + """ + Returns ``True`` if and only if all elements in `iterable` are equal; and + ``False`` otherwise. + + Parameters + ---------- + iterable: collections.abc.Iterable + The container whose elements will be checked. + + Returns + ------- + bool + ``True`` iff all elements in `iterable` compare equal, ``False`` + otherwise. + """ + try: + shape = iterable.shape + except AttributeError: + pass + else: + if len(shape) > 1: + message = "The function does not works on multidimension arrays." + raise NotImplementedError(message) from None + + iterator = iter(iterable) + first = next(iterator, None) + return all(item == first for item in iterator) + + +def make_partitions(items, test): + """ + Partitions items into sets based on the outcome of ``test(item1, item2)``. + Pairs of items for which `test` returns `True` end up in the same set. + + Parameters + ---------- + items : collections.abc.Iterable[collections.abc.Hashable] + Items to partition + test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable] + A function that will be called with 2 arguments, taken from items. + Should return `True` if those 2 items need to end up in the same + partition, and `False` otherwise. + + Returns + ------- + list[set] + A list of sets, with each set containing part of the items in `items`, + such that ``all(test(*pair) for pair in itertools.combinations(set, 2)) + == True`` + + Notes + ----- + The function `test` is assumed to be transitive: if ``test(a, b)`` and + ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``. 
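+
+    Examples
+    --------
+    Partitioning integers by parity keeps odd and even items apart:
+
+    >>> make_partitions([1, 2, 3, 4], lambda a, b: a % 2 == b % 2)
+    [{1, 3}, {2, 4}]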
+ """ + partitions = [] + for item in items: + for partition in partitions: + p_item = next(iter(partition)) + if test(item, p_item): + partition.add(item) + break + else: # No break + partitions.append({item}) + return partitions + + +def partition_to_color(partitions): + """ + Creates a dictionary with for every item in partition for every partition + in partitions the index of partition in partitions. + + Parameters + ---------- + partitions: collections.abc.Sequence[collections.abc.Iterable] + As returned by :func:`make_partitions`. + + Returns + ------- + dict + """ + colors = dict() + for color, keys in enumerate(partitions): + for key in keys: + colors[key] = color + return colors + + +def intersect(collection_of_sets): + """ + Given an collection of sets, returns the intersection of those sets. + + Parameters + ---------- + collection_of_sets: collections.abc.Collection[set] + A collection of sets. + + Returns + ------- + set + An intersection of all sets in `collection_of_sets`. Will have the same + type as the item initially taken from `collection_of_sets`. + """ + collection_of_sets = list(collection_of_sets) + first = collection_of_sets.pop() + out = reduce(set.intersection, collection_of_sets, set(first)) + return type(first)(out) + + +class ISMAGS: + """ + Implements the ISMAGS subgraph matching algorith. [1]_ ISMAGS stands for + "Index-based Subgraph Matching Algorithm with General Symmetries". As the + name implies, it is symmetry aware and will only generate non-symmetric + isomorphisms. + + Notes + ----- + The implementation imposes additional conditions compared to the VF2 + algorithm on the graphs provided and the comparison functions + (:attr:`node_equality` and :attr:`edge_equality`): + + - Node keys in both graphs must be orderable as well as hashable. + - Equality must be transitive: if A is equal to B, and B is equal to C, + then A must be equal to C. + + Attributes + ---------- + graph: networkx.Graph + subgraph: networkx.Graph + node_equality: collections.abc.Callable + The function called to see if two nodes should be considered equal. + It's signature looks like this: + ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``. + `node1` is a node in `graph1`, and `node2` a node in `graph2`. + Constructed from the argument `node_match`. + edge_equality: collections.abc.Callable + The function called to see if two edges should be considered equal. + It's signature looks like this: + ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``. + `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`. + Constructed from the argument `edge_match`. + + References + ---------- + .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle, + M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General + Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph + Enumeration", PLoS One 9(5): e97896, 2014. + https://doi.org/10.1371/journal.pone.0097896 + """ + + def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None): + """ + Parameters + ---------- + graph: networkx.Graph + subgraph: networkx.Graph + node_match: collections.abc.Callable or None + Function used to determine whether two nodes are equivalent. Its + signature should look like ``f(n1: dict, n2: dict) -> bool``, with + `n1` and `n2` node property dicts. See also + :func:`~networkx.algorithms.isomorphism.categorical_node_match` and + friends. + If `None`, all nodes are considered equal. 
+ edge_match: collections.abc.Callable or None + Function used to determine whether two edges are equivalent. Its + signature should look like ``f(e1: dict, e2: dict) -> bool``, with + `e1` and `e2` edge property dicts. See also + :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and + friends. + If `None`, all edges are considered equal. + cache: collections.abc.Mapping + A cache used for caching graph symmetries. + """ + # TODO: graph and subgraph setter methods that invalidate the caches. + # TODO: allow for precomputed partitions and colors + self.graph = graph + self.subgraph = subgraph + self._symmetry_cache = cache + # Naming conventions are taken from the original paper. For your + # sanity: + # sg: subgraph + # g: graph + # e: edge(s) + # n: node(s) + # So: sgn means "subgraph nodes". + self._sgn_partitions_ = None + self._sge_partitions_ = None + + self._sgn_colors_ = None + self._sge_colors_ = None + + self._gn_partitions_ = None + self._ge_partitions_ = None + + self._gn_colors_ = None + self._ge_colors_ = None + + self._node_compat_ = None + self._edge_compat_ = None + + if node_match is None: + self.node_equality = self._node_match_maker(lambda n1, n2: True) + self._sgn_partitions_ = [set(self.subgraph.nodes)] + self._gn_partitions_ = [set(self.graph.nodes)] + self._node_compat_ = {0: 0} + else: + self.node_equality = self._node_match_maker(node_match) + if edge_match is None: + self.edge_equality = self._edge_match_maker(lambda e1, e2: True) + self._sge_partitions_ = [set(self.subgraph.edges)] + self._ge_partitions_ = [set(self.graph.edges)] + self._edge_compat_ = {0: 0} + else: + self.edge_equality = self._edge_match_maker(edge_match) + + @property + def _sgn_partitions(self): + if self._sgn_partitions_ is None: + + def nodematch(node1, node2): + return self.node_equality(self.subgraph, node1, self.subgraph, node2) + + self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch) + return self._sgn_partitions_ + + @property + def _sge_partitions(self): + if self._sge_partitions_ is None: + + def edgematch(edge1, edge2): + return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2) + + self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch) + return self._sge_partitions_ + + @property + def _gn_partitions(self): + if self._gn_partitions_ is None: + + def nodematch(node1, node2): + return self.node_equality(self.graph, node1, self.graph, node2) + + self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch) + return self._gn_partitions_ + + @property + def _ge_partitions(self): + if self._ge_partitions_ is None: + + def edgematch(edge1, edge2): + return self.edge_equality(self.graph, edge1, self.graph, edge2) + + self._ge_partitions_ = make_partitions(self.graph.edges, edgematch) + return self._ge_partitions_ + + @property + def _sgn_colors(self): + if self._sgn_colors_ is None: + self._sgn_colors_ = partition_to_color(self._sgn_partitions) + return self._sgn_colors_ + + @property + def _sge_colors(self): + if self._sge_colors_ is None: + self._sge_colors_ = partition_to_color(self._sge_partitions) + return self._sge_colors_ + + @property + def _gn_colors(self): + if self._gn_colors_ is None: + self._gn_colors_ = partition_to_color(self._gn_partitions) + return self._gn_colors_ + + @property + def _ge_colors(self): + if self._ge_colors_ is None: + self._ge_colors_ = partition_to_color(self._ge_partitions) + return self._ge_colors_ + + @property + def _node_compatibility(self): + if self._node_compat_ is not None: + 
return self._node_compat_ + self._node_compat_ = {} + for sgn_part_color, gn_part_color in itertools.product( + range(len(self._sgn_partitions)), range(len(self._gn_partitions)) + ): + sgn = next(iter(self._sgn_partitions[sgn_part_color])) + gn = next(iter(self._gn_partitions[gn_part_color])) + if self.node_equality(self.subgraph, sgn, self.graph, gn): + self._node_compat_[sgn_part_color] = gn_part_color + return self._node_compat_ + + @property + def _edge_compatibility(self): + if self._edge_compat_ is not None: + return self._edge_compat_ + self._edge_compat_ = {} + for sge_part_color, ge_part_color in itertools.product( + range(len(self._sge_partitions)), range(len(self._ge_partitions)) + ): + sge = next(iter(self._sge_partitions[sge_part_color])) + ge = next(iter(self._ge_partitions[ge_part_color])) + if self.edge_equality(self.subgraph, sge, self.graph, ge): + self._edge_compat_[sge_part_color] = ge_part_color + return self._edge_compat_ + + @staticmethod + def _node_match_maker(cmp): + @wraps(cmp) + def comparer(graph1, node1, graph2, node2): + return cmp(graph1.nodes[node1], graph2.nodes[node2]) + + return comparer + + @staticmethod + def _edge_match_maker(cmp): + @wraps(cmp) + def comparer(graph1, edge1, graph2, edge2): + return cmp(graph1.edges[edge1], graph2.edges[edge2]) + + return comparer + + def find_isomorphisms(self, symmetry=True): + """Find all subgraph isomorphisms between subgraph and graph + + Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`. + + Parameters + ---------- + symmetry: bool + Whether symmetry should be taken into account. If False, found + isomorphisms may be symmetrically equivalent. + + Yields + ------ + dict + The found isomorphism mappings of {graph_node: subgraph_node}. + """ + # The networkx VF2 algorithm is slightly funny in when it yields an + # empty dict and when not. + if not self.subgraph: + yield {} + return + elif not self.graph: + return + elif len(self.graph) < len(self.subgraph): + return + + if symmetry: + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) + constraints = self._make_constraints(cosets) + else: + constraints = [] + + candidates = self._find_nodecolor_candidates() + la_candidates = self._get_lookahead_candidates() + for sgn in self.subgraph: + extra_candidates = la_candidates[sgn] + if extra_candidates: + candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)} + + if any(candidates.values()): + start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len)) + candidates[start_sgn] = (intersect(candidates[start_sgn]),) + yield from self._map_nodes(start_sgn, candidates, constraints) + else: + return + + @staticmethod + def _find_neighbor_color_count(graph, node, node_color, edge_color): + """ + For `node` in `graph`, count the number of edges of a specific color + it has to nodes of a specific color. + """ + counts = Counter() + neighbors = graph[node] + for neighbor in neighbors: + n_color = node_color[neighbor] + if (node, neighbor) in edge_color: + e_color = edge_color[node, neighbor] + else: + e_color = edge_color[neighbor, node] + counts[e_color, n_color] += 1 + return counts + + def _get_lookahead_candidates(self): + """ + Returns a mapping of {subgraph node: collection of graph nodes} for + which the graph nodes are feasible candidates for the subgraph node, as + determined by looking ahead one edge. 
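+
+        A graph node remains a candidate only if, for every
+        (edge color, node color) combination, it has at least as
+        many neighbors of that kind as the subgraph node requires.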
+ """ + g_counts = {} + for gn in self.graph: + g_counts[gn] = self._find_neighbor_color_count( + self.graph, gn, self._gn_colors, self._ge_colors + ) + candidates = defaultdict(set) + for sgn in self.subgraph: + sg_count = self._find_neighbor_color_count( + self.subgraph, sgn, self._sgn_colors, self._sge_colors + ) + new_sg_count = Counter() + for (sge_color, sgn_color), count in sg_count.items(): + try: + ge_color = self._edge_compatibility[sge_color] + gn_color = self._node_compatibility[sgn_color] + except KeyError: + pass + else: + new_sg_count[ge_color, gn_color] = count + + for gn, g_count in g_counts.items(): + if all(new_sg_count[x] <= g_count[x] for x in new_sg_count): + # Valid candidate + candidates[sgn].add(gn) + return candidates + + def largest_common_subgraph(self, symmetry=True): + """ + Find the largest common induced subgraphs between :attr:`subgraph` and + :attr:`graph`. + + Parameters + ---------- + symmetry: bool + Whether symmetry should be taken into account. If False, found + largest common subgraphs may be symmetrically equivalent. + + Yields + ------ + dict + The found isomorphism mappings of {graph_node: subgraph_node}. + """ + # The networkx VF2 algorithm is slightly funny in when it yields an + # empty dict and when not. + if not self.subgraph: + yield {} + return + elif not self.graph: + return + + if symmetry: + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) + constraints = self._make_constraints(cosets) + else: + constraints = [] + + candidates = self._find_nodecolor_candidates() + + if any(candidates.values()): + yield from self._largest_common_subgraph(candidates, constraints) + else: + return + + def analyze_symmetry(self, graph, node_partitions, edge_colors): + """ + Find a minimal set of permutations and corresponding co-sets that + describe the symmetry of :attr:`subgraph`. + + Returns + ------- + set[frozenset] + The found permutations. This is a set of frozenset of pairs of node + keys which can be exchanged without changing :attr:`subgraph`. + dict[collections.abc.Hashable, set[collections.abc.Hashable]] + The found co-sets. The co-sets is a dictionary of {node key: + set of node keys}. Every key-value pair describes which `values` + can be interchanged without changing nodes less than `key`. + """ + if self._symmetry_cache is not None: + key = hash( + ( + tuple(graph.nodes), + tuple(graph.edges), + tuple(map(tuple, node_partitions)), + tuple(edge_colors.items()), + ) + ) + if key in self._symmetry_cache: + return self._symmetry_cache[key] + node_partitions = list( + self._refine_node_partitions(graph, node_partitions, edge_colors) + ) + assert len(node_partitions) == 1 + node_partitions = node_partitions[0] + permutations, cosets = self._process_ordered_pair_partitions( + graph, node_partitions, node_partitions, edge_colors + ) + if self._symmetry_cache is not None: + self._symmetry_cache[key] = permutations, cosets + return permutations, cosets + + def is_isomorphic(self, symmetry=False): + """ + Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and + False otherwise. + + Returns + ------- + bool + """ + return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic( + symmetry + ) + + def subgraph_is_isomorphic(self, symmetry=False): + """ + Returns True if a subgraph of :attr:`graph` is isomorphic to + :attr:`subgraph` and False otherwise. 
+ + Returns + ------- + bool + """ + # symmetry=False, since we only need to know whether there is any + # example; figuring out all symmetry elements probably costs more time + # than it gains. + isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None) + return isom is not None + + def isomorphisms_iter(self, symmetry=True): + """ + Does the same as :meth:`find_isomorphisms` if :attr:`graph` and + :attr:`subgraph` have the same number of nodes. + """ + if len(self.graph) == len(self.subgraph): + yield from self.subgraph_isomorphisms_iter(symmetry=symmetry) + + def subgraph_isomorphisms_iter(self, symmetry=True): + """Alternative name for :meth:`find_isomorphisms`.""" + return self.find_isomorphisms(symmetry) + + def _find_nodecolor_candidates(self): + """ + Per node in subgraph find all nodes in graph that have the same color. + """ + candidates = defaultdict(set) + for sgn in self.subgraph.nodes: + sgn_color = self._sgn_colors[sgn] + if sgn_color in self._node_compatibility: + gn_color = self._node_compatibility[sgn_color] + candidates[sgn].add(frozenset(self._gn_partitions[gn_color])) + else: + candidates[sgn].add(frozenset()) + candidates = dict(candidates) + for sgn, options in candidates.items(): + candidates[sgn] = frozenset(options) + return candidates + + @staticmethod + def _make_constraints(cosets): + """ + Turn cosets into constraints. + """ + constraints = [] + for node_i, node_ts in cosets.items(): + for node_t in node_ts: + if node_i != node_t: + # Node i must be smaller than node t. + constraints.append((node_i, node_t)) + return constraints + + @staticmethod + def _find_node_edge_color(graph, node_colors, edge_colors): + """ + For every node in graph, come up with a color that combines 1) the + color of the node, and 2) the number of edges of a color to each type + of node. + """ + counts = defaultdict(lambda: defaultdict(int)) + for node1, node2 in graph.edges: + if (node1, node2) in edge_colors: + # FIXME directed graphs + ecolor = edge_colors[node1, node2] + else: + ecolor = edge_colors[node2, node1] + # Count per node how many edges it has of what color to nodes of + # what color + counts[node1][ecolor, node_colors[node2]] += 1 + counts[node2][ecolor, node_colors[node1]] += 1 + + node_edge_colors = dict() + for node in graph.nodes: + node_edge_colors[node] = node_colors[node], set(counts[node].items()) + + return node_edge_colors + + @staticmethod + def _get_permutations_by_length(items): + """ + Get all permutations of items, but only permute items with the same + length. + + >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]])) + >>> answer = [ + ... (([1], [2]), ([3, 4], [4, 5])), + ... (([1], [2]), ([4, 5], [3, 4])), + ... (([2], [1]), ([3, 4], [4, 5])), + ... (([2], [1]), ([4, 5], [3, 4])), + ... ] + >>> found == answer + True + """ + by_len = defaultdict(list) + for item in items: + by_len[len(item)].append(item) + + yield from itertools.product( + *(itertools.permutations(by_len[l]) for l in sorted(by_len)) + ) + + @classmethod + def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False): + """ + Given a partition of nodes in graph, make the partitions smaller such + that all nodes in a partition have 1) the same color, and 2) the same + number of edges to specific other partitions. 
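+
+        This is essentially color refinement: cells are split
+        repeatedly until every cell is equitable with respect to edge
+        colors and neighboring cells.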
+ """ + + def equal_color(node1, node2): + return node_edge_colors[node1] == node_edge_colors[node2] + + node_partitions = list(node_partitions) + node_colors = partition_to_color(node_partitions) + node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors) + if all( + are_all_equal(node_edge_colors[node] for node in partition) + for partition in node_partitions + ): + yield node_partitions + return + + new_partitions = [] + output = [new_partitions] + for partition in node_partitions: + if not are_all_equal(node_edge_colors[node] for node in partition): + refined = make_partitions(partition, equal_color) + if ( + branch + and len(refined) != 1 + and len({len(r) for r in refined}) != len([len(r) for r in refined]) + ): + # This is where it breaks. There are multiple new cells + # in refined with the same length, and their order + # matters. + # So option 1) Hit it with a big hammer and simply make all + # orderings. + permutations = cls._get_permutations_by_length(refined) + new_output = [] + for n_p in output: + for permutation in permutations: + new_output.append(n_p + list(permutation[0])) + output = new_output + else: + for n_p in output: + n_p.extend(sorted(refined, key=len)) + else: + for n_p in output: + n_p.append(partition) + for n_p in output: + yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch) + + def _edges_of_same_color(self, sgn1, sgn2): + """ + Returns all edges in :attr:`graph` that have the same colour as the + edge between sgn1 and sgn2 in :attr:`subgraph`. + """ + if (sgn1, sgn2) in self._sge_colors: + # FIXME directed graphs + sge_color = self._sge_colors[sgn1, sgn2] + else: + sge_color = self._sge_colors[sgn2, sgn1] + if sge_color in self._edge_compatibility: + ge_color = self._edge_compatibility[sge_color] + g_edges = self._ge_partitions[ge_color] + else: + g_edges = [] + return g_edges + + def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None): + """ + Find all subgraph isomorphisms honoring constraints. + """ + if mapping is None: + mapping = {} + else: + mapping = mapping.copy() + if to_be_mapped is None: + to_be_mapped = set(self.subgraph.nodes) + + # Note, we modify candidates here. Doesn't seem to affect results, but + # remember this. + # candidates = candidates.copy() + sgn_candidates = intersect(candidates[sgn]) + candidates[sgn] = frozenset([sgn_candidates]) + for gn in sgn_candidates: + # We're going to try to map sgn to gn. + if gn in mapping.values() or sgn not in to_be_mapped: + # gn is already mapped to something + continue # pragma: no cover + + # REDUCTION and COMBINATION + mapping[sgn] = gn + # BASECASE + if to_be_mapped == set(mapping.keys()): + yield {v: k for k, v in mapping.items()} + continue + left_to_map = to_be_mapped - set(mapping.keys()) + + new_candidates = candidates.copy() + sgn_neighbours = set(self.subgraph[sgn]) + not_gn_neighbours = set(self.graph.nodes) - set(self.graph[gn]) + for sgn2 in left_to_map: + if sgn2 not in sgn_neighbours: + gn2_options = not_gn_neighbours + else: + # Get all edges to gn of the right color: + g_edges = self._edges_of_same_color(sgn, sgn2) + # FIXME directed graphs + # And all nodes involved in those which are connected to gn + gn2_options = {n for e in g_edges for n in e if gn in e} + # Node color compatibility should be taken care of by the + # initial candidate lists made by find_subgraphs + + # Add gn2_options to the right collection. 
Since new_candidates + # is a dict of frozensets of frozensets of node indices it's + # a bit clunky. We can't do .add, and + also doesn't work. We + # could do |, but I deem union to be clearer. + new_candidates[sgn2] = new_candidates[sgn2].union( + [frozenset(gn2_options)] + ) + + if (sgn, sgn2) in constraints: + gn2_options = {gn2 for gn2 in self.graph if gn2 > gn} + elif (sgn2, sgn) in constraints: + gn2_options = {gn2 for gn2 in self.graph if gn2 < gn} + else: + continue # pragma: no cover + new_candidates[sgn2] = new_candidates[sgn2].union( + [frozenset(gn2_options)] + ) + + # The next node is the one that is unmapped and has fewest + # candidates + # Pylint disables because it's a one-shot function. + next_sgn = min( + left_to_map, key=lambda n: min(new_candidates[n], key=len) + ) # pylint: disable=cell-var-from-loop + yield from self._map_nodes( + next_sgn, + new_candidates, + constraints, + mapping=mapping, + to_be_mapped=to_be_mapped, + ) + # Unmap sgn-gn. Strictly not necessary since it'd get overwritten + # when making a new mapping for sgn. + # del mapping[sgn] + + def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None): + """ + Find all largest common subgraphs honoring constraints. + """ + if to_be_mapped is None: + to_be_mapped = {frozenset(self.subgraph.nodes)} + + # The LCS problem is basically a repeated subgraph isomorphism problem + # with smaller and smaller subgraphs. We store the nodes that are + # "part of" the subgraph in to_be_mapped, and we make it a little + # smaller every iteration. + + # pylint disable becuase it's guarded against by default value + current_size = len( + next(iter(to_be_mapped), []) + ) # pylint: disable=stop-iteration-return + + found_iso = False + if current_size <= len(self.graph): + # There's no point in trying to find isomorphisms of + # graph >= subgraph if subgraph has more nodes than graph. + + # Try the isomorphism first with the nodes with lowest ID. So sort + # them. Those are more likely to be part of the final + # correspondence. This makes finding the first answer(s) faster. In + # theory. + for nodes in sorted(to_be_mapped, key=sorted): + # Find the isomorphism between subgraph[to_be_mapped] <= graph + next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len)) + isomorphs = self._map_nodes( + next_sgn, candidates, constraints, to_be_mapped=nodes + ) + + # This is effectively `yield from isomorphs`, except that we look + # whether an item was yielded. + try: + item = next(isomorphs) + except StopIteration: + pass + else: + yield item + yield from isomorphs + found_iso = True + + # BASECASE + if found_iso or current_size == 1: + # Shrinking has no point because either 1) we end up with a smaller + # common subgraph (and we want the largest), or 2) there'll be no + # more subgraph. + return + + left_to_be_mapped = set() + for nodes in to_be_mapped: + for sgn in nodes: + # We're going to remove sgn from to_be_mapped, but subject to + # symmetry constraints. We know that for every constraint we + # have those subgraph nodes are equal. So whenever we would + # remove the lower part of a constraint, remove the higher + # instead. This is all dealth with by _remove_node. And because + # left_to_be_mapped is a set, we don't do double work. + + # And finally, make the subgraph one node smaller. 
+ # REDUCTION + new_nodes = self._remove_node(sgn, nodes, constraints) + left_to_be_mapped.add(new_nodes) + # COMBINATION + yield from self._largest_common_subgraph( + candidates, constraints, to_be_mapped=left_to_be_mapped + ) + + @staticmethod + def _remove_node(node, nodes, constraints): + """ + Returns a new set where node has been removed from nodes, subject to + symmetry constraints. We know, that for every constraint we have + those subgraph nodes are equal. So whenever we would remove the + lower part of a constraint, remove the higher instead. + """ + while True: + for low, high in constraints: + if low == node and high in nodes: + node = high + break + else: # no break, couldn't find node in constraints + break + return frozenset(nodes - {node}) + + @staticmethod + def _find_permutations(top_partitions, bottom_partitions): + """ + Return the pairs of top/bottom partitions where the partitions are + different. Ensures that all partitions in both top and bottom + partitions have size 1. + """ + # Find permutations + permutations = set() + for top, bot in zip(top_partitions, bottom_partitions): + # top and bot have only one element + if len(top) != 1 or len(bot) != 1: + raise IndexError( + "Not all nodes are coupled. This is" + f" impossible: {top_partitions}, {bottom_partitions}" + ) + if top != bot: + permutations.add(frozenset((next(iter(top)), next(iter(bot))))) + return permutations + + @staticmethod + def _update_orbits(orbits, permutations): + """ + Update orbits based on permutations. Orbits is modified in place. + For every pair of items in permutations their respective orbits are + merged. + """ + for permutation in permutations: + node, node2 = permutation + # Find the orbits that contain node and node2, and replace the + # orbit containing node with the union + first = second = None + for idx, orbit in enumerate(orbits): + if first is not None and second is not None: + break + if node in orbit: + first = idx + if node2 in orbit: + second = idx + if first != second: + orbits[first].update(orbits[second]) + del orbits[second] + + def _couple_nodes( + self, + top_partitions, + bottom_partitions, + pair_idx, + t_node, + b_node, + graph, + edge_colors, + ): + """ + Generate new partitions from top and bottom_partitions where t_node is + coupled to b_node. pair_idx is the index of the partitions where t_ and + b_node can be found. + """ + t_partition = top_partitions[pair_idx] + b_partition = bottom_partitions[pair_idx] + assert t_node in t_partition and b_node in b_partition + # Couple node to node2. 
This means they get their own partition + new_top_partitions = [top.copy() for top in top_partitions] + new_bottom_partitions = [bot.copy() for bot in bottom_partitions] + new_t_groups = {t_node}, t_partition - {t_node} + new_b_groups = {b_node}, b_partition - {b_node} + # Replace the old partitions with the coupled ones + del new_top_partitions[pair_idx] + del new_bottom_partitions[pair_idx] + new_top_partitions[pair_idx:pair_idx] = new_t_groups + new_bottom_partitions[pair_idx:pair_idx] = new_b_groups + + new_top_partitions = self._refine_node_partitions( + graph, new_top_partitions, edge_colors + ) + new_bottom_partitions = self._refine_node_partitions( + graph, new_bottom_partitions, edge_colors, branch=True + ) + new_top_partitions = list(new_top_partitions) + assert len(new_top_partitions) == 1 + new_top_partitions = new_top_partitions[0] + for bot in new_bottom_partitions: + yield list(new_top_partitions), bot + + def _process_ordered_pair_partitions( + self, + graph, + top_partitions, + bottom_partitions, + edge_colors, + orbits=None, + cosets=None, + ): + """ + Processes ordered pair partitions as per the reference paper. Finds and + returns all permutations and cosets that leave the graph unchanged. + """ + if orbits is None: + orbits = [{node} for node in graph.nodes] + else: + # Note that we don't copy orbits when we are given one. This means + # we leak information between the recursive branches. This is + # intentional! + orbits = orbits + if cosets is None: + cosets = {} + else: + cosets = cosets.copy() + + assert all( + len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions) + ) + + # BASECASE + if all(len(top) == 1 for top in top_partitions): + # All nodes are mapped + permutations = self._find_permutations(top_partitions, bottom_partitions) + self._update_orbits(orbits, permutations) + if permutations: + return [permutations], cosets + else: + return [], cosets + + permutations = [] + unmapped_nodes = { + (node, idx) + for idx, t_partition in enumerate(top_partitions) + for node in t_partition + if len(t_partition) > 1 + } + node, pair_idx = min(unmapped_nodes) + b_partition = bottom_partitions[pair_idx] + + for node2 in sorted(b_partition): + if len(b_partition) == 1: + # Can never result in symmetry + continue + if node != node2 and any( + node in orbit and node2 in orbit for orbit in orbits + ): + # Orbit prune branch + continue + # REDUCTION + # Couple node to node2 + partitions = self._couple_nodes( + top_partitions, + bottom_partitions, + pair_idx, + node, + node2, + graph, + edge_colors, + ) + for opp in partitions: + new_top_partitions, new_bottom_partitions = opp + + new_perms, new_cosets = self._process_ordered_pair_partitions( + graph, + new_top_partitions, + new_bottom_partitions, + edge_colors, + orbits, + cosets, + ) + # COMBINATION + permutations += new_perms + cosets.update(new_cosets) + + mapped = { + k + for top, bottom in zip(top_partitions, bottom_partitions) + for k in top + if len(top) == 1 and top == bottom + } + ks = {k for k in graph.nodes if k < node} + # Have all nodes with ID < node been mapped? 
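+ # If so, node's current orbit is what we record as its coset: the
+ # nodes it can still be swapped with by symmetries that fix all of
+ # the smaller, already-mapped nodes.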
+ find_coset = ks <= mapped and node not in cosets + if find_coset: + # Find the orbit that contains node + for orbit in orbits: + if node in orbit: + cosets[node] = orbit.copy() + return permutations, cosets diff --git a/networkx/algorithms/isomorphism/isomorph.py b/networkx/algorithms/isomorphism/isomorph.py index 7c91b00..3f2b95d 100644 --- a/networkx/algorithms/isomorphism/isomorph.py +++ b/networkx/algorithms/isomorphism/isomorph.py @@ -3,19 +3,13 @@ """ import networkx as nx from networkx.exception import NetworkXError -__author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', - 'Pieter Swart (swart@lanl.gov)', - 'Christopher Ellison cellison@cse.ucdavis.edu)']) -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -__all__ = ['could_be_isomorphic', - 'fast_could_be_isomorphic', - 'faster_could_be_isomorphic', - 'is_isomorphic'] + +__all__ = [ + "could_be_isomorphic", + "fast_could_be_isomorphic", + "faster_could_be_isomorphic", + "is_isomorphic", +] def could_be_isomorphic(G1, G2): @@ -175,33 +169,33 @@ def is_isomorphic(G1, G2, node_match=None, edge_match=None): >>> G1 = nx.DiGraph() >>> G2 = nx.DiGraph() - >>> nx.add_path(G1, [1,2,3,4], weight=1) - >>> nx.add_path(G2, [10,20,30,40], weight=2) - >>> em = iso.numerical_edge_match('weight', 1) + >>> nx.add_path(G1, [1, 2, 3, 4], weight=1) + >>> nx.add_path(G2, [10, 20, 30, 40], weight=2) + >>> em = iso.numerical_edge_match("weight", 1) >>> nx.is_isomorphic(G1, G2) # no weights considered True - >>> nx.is_isomorphic(G1, G2, edge_match=em) # match weights + >>> nx.is_isomorphic(G1, G2, edge_match=em) # match weights False For multidigraphs G1 and G2, using 'fill' node attribute (default: '') >>> G1 = nx.MultiDiGraph() >>> G2 = nx.MultiDiGraph() - >>> G1.add_nodes_from([1,2,3], fill='red') - >>> G2.add_nodes_from([10,20,30,40], fill='red') - >>> nx.add_path(G1, [1,2,3,4], weight=3, linewidth=2.5) - >>> nx.add_path(G2, [10,20,30,40], weight=3) - >>> nm = iso.categorical_node_match('fill', 'red') + >>> G1.add_nodes_from([1, 2, 3], fill="red") + >>> G2.add_nodes_from([10, 20, 30, 40], fill="red") + >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5) + >>> nx.add_path(G2, [10, 20, 30, 40], weight=3) + >>> nm = iso.categorical_node_match("fill", "red") >>> nx.is_isomorphic(G1, G2, node_match=nm) True For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7) - >>> G1.add_edge(1,2, weight=7) + >>> G1.add_edge(1, 2, weight=7) 1 - >>> G2.add_edge(10,20) + >>> G2.add_edge(10, 20) 1 - >>> em = iso.numerical_multiedge_match('weight', 7, rtol=1e-6) + >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6) >>> nx.is_isomorphic(G1, G2, edge_match=em) True @@ -209,8 +203,8 @@ def is_isomorphic(G1, G2, node_match=None, edge_match=None): with default values 7 and 2.5. Also using 'fill' node attribute with default value 'red'. 
- >>> em = iso.numerical_multiedge_match(['weight', 'linewidth'], [7, 2.5]) - >>> nm = iso.categorical_node_match('fill', 'red') + >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5]) + >>> nm = iso.categorical_node_match("fill", "red") >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm) True diff --git a/networkx/algorithms/isomorphism/isomorphvf2.py b/networkx/algorithms/isomorphism/isomorphvf2.py index 349bf18..03eb648 100644 --- a/networkx/algorithms/isomorphism/isomorphvf2.py +++ b/networkx/algorithms/isomorphism/isomorphvf2.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ ************* VF2 Algorithm @@ -35,7 +34,7 @@ >>> from networkx.algorithms import isomorphism >>> G1 = nx.path_graph(4) >>> G2 = nx.path_graph(4) ->>> GM = isomorphism.GraphMatcher(G1,G2) +>>> GM = isomorphism.GraphMatcher(G1, G2) >>> GM.is_isomorphic() True @@ -50,7 +49,7 @@ >>> G1 = nx.path_graph(4, create_using=nx.DiGraph()) >>> G2 = nx.path_graph(4, create_using=nx.DiGraph()) ->>> DiGM = isomorphism.DiGraphMatcher(G1,G2) +>>> DiGM = isomorphism.DiGraphMatcher(G1, G2) >>> DiGM.is_isomorphic() True @@ -63,7 +62,7 @@ Subgraph Isomorphism -------------------- -Graph theory literature can be ambiguious about the meaning of the +Graph theory literature can be ambiguous about the meaning of the above statement, and we seek to clarify it now. In the VF2 literature, a mapping M is said to be a graph-subgraph @@ -81,8 +80,7 @@ subgraph isomorphisms are not directly supported, but one should be able to perform the check by making use of nx.line_graph(). For subgraphs which are not induced, the term 'monomorphism' is preferred -over 'isomorphism'. Currently, it is not possible to check for -monomorphisms. +over 'isomorphism'. Let G=(N,E) be a graph with a set of nodes N and set of edges E. @@ -98,6 +96,14 @@ N' is the subset of nodes in N related by edges in E' E' is a subset of E +If G'=(N',E') is a monomorphism, then: + N' is a subset of N + E' is a subset of the set of edges in E relating nodes in N' + +Note that if G' is a node-induced subgraph of G, then it is always a +subgraph monomorphism of G, but the opposite is not always true, as a +monomorphism can have fewer edges. + References ---------- [1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento, @@ -114,25 +120,13 @@ See Also -------- -syntactic_feasibliity(), semantic_feasibility() +syntactic_feasibility(), semantic_feasibility() Notes ----- The implementation handles both directed and undirected graphs as well -as multigraphs. However, it does require that nodes in the graph are -orderable (in addition to the general NetworkX requirement that nodes -are hashable). If the nodes in your graph are not orderable, you can -convert them to an orderable type (`int`, for example) by using the -:func:`networkx.relabel` function. You can store the dictionary of -old-to-new node labels to retrieve the original node labels after -running the isomorphism algorithm:: - - >>> G = nx.Graph() - >>> node1, node2 = object(), object() - >>> G.add_nodes_from([node1, node2]) - >>> mapping = {k: v for v, k in enumerate(G)} - >>> G = nx.relabel_nodes(G, mapping) +as multigraphs. In general, the subgraph isomorphism problem is NP-complete whereas the graph isomorphism problem is most likely not NP-complete (although no @@ -140,23 +134,17 @@ """ -# Copyright (C) 2007-2009 by the NetworkX maintainers -# All rights reserved. -# BSD license. 
- -# This work was originally coded by Christopher Ellison -# as part of the Computational Mechanics Python (CMPy) project. -# James P. Crutchfield, principal investigator. -# Complexity Sciences Center and Physics Department, UC Davis. +# This work was originally coded by Christopher Ellison +# as part of the Computational Mechanics Python (CMPy) project. +# James P. Crutchfield, principal investigator. +# Complexity Sciences Center and Physics Department, UC Davis. import sys -import networkx as nx -__all__ = ['GraphMatcher', - 'DiGraphMatcher'] +__all__ = ["GraphMatcher", "DiGraphMatcher"] -class GraphMatcher(object): +class GraphMatcher: """Implementation of VF2 algorithm for matching undirected graphs. Suitable for Graph and MultiGraph instances. @@ -168,7 +156,7 @@ def __init__(self, G1, G2): Parameters ---------- G1,G2: NetworkX Graph or MultiGraph instances. - The two graphs to check for isomorphism. + The two graphs to check for isomorphism or monomorphism. Examples -------- @@ -177,12 +165,13 @@ def __init__(self, G1, G2): >>> from networkx.algorithms import isomorphism >>> G1 = nx.path_graph(4) >>> G2 = nx.path_graph(4) - >>> GM = isomorphism.GraphMatcher(G1,G2) + >>> GM = isomorphism.GraphMatcher(G1, G2) """ self.G1 = G1 self.G2 = G2 self.G1_nodes = set(G1.nodes()) self.G2_nodes = set(G2.nodes()) + self.G2_node_order = {n: i for i, n in enumerate(G2)} # Set recursion limit. self.old_recursion_limit = sys.getrecursionlimit() @@ -192,7 +181,7 @@ def __init__(self, G1, G2): sys.setrecursionlimit(int(1.5 * expected_max_recursion_level)) # Declare that we will be searching for a graph-graph isomorphism. - self.test = 'graph' + self.test = "graph" # Initialize state self.initialize() @@ -217,24 +206,26 @@ def candidate_pairs_iter(self): G1_nodes = self.G1_nodes G2_nodes = self.G2_nodes + min_key = self.G2_node_order.__getitem__ # First we compute the inout-terminal sets. - T1_inout = [node for node in G1_nodes if (node in self.inout_1) and (node not in self.core_1)] - T2_inout = [node for node in G2_nodes if (node in self.inout_2) and (node not in self.core_2)] + T1_inout = [node for node in self.inout_1 if node not in self.core_1] + T2_inout = [node for node in self.inout_2 if node not in self.core_2] # If T1_inout and T2_inout are both nonempty. # P(s) = T1_inout x {min T2_inout} if T1_inout and T2_inout: - for node in T1_inout: - yield node, min(T2_inout) + node_2 = min(T2_inout, key=min_key) + for node_1 in T1_inout: + yield node_1, node_2 else: # If T1_inout and T2_inout were both empty.... # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} - # if not (T1_inout or T2_inout): # as suggested by [2], incorrect - if 1: # as inferred from [1], correct + # if not (T1_inout or T2_inout): # as suggested by [2], incorrect + if 1: # as inferred from [1], correct # First we determine the candidate node for G2 - other_node = min(G2_nodes - set(self.core_2)) + other_node = min(G2_nodes - set(self.core_2), key=min_key) for node in self.G1: if node not in self.core_1: yield node, other_node @@ -298,10 +289,9 @@ def is_isomorphic(self): def isomorphisms_iter(self): """Generator over isomorphisms between G1 and G2.""" # Declare that we are looking for a graph-graph isomorphism. - self.test = 'graph' + self.test = "graph" self.initialize() - for mapping in self.match(): - yield mapping + yield from self.match() def match(self): """Extends the isomorphism mapping. @@ -323,8 +313,7 @@ def match(self): if self.semantic_feasibility(G1_node, G2_node): # Recursive call, adding the feasible state. 
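+ # (Constructing the state object below updates the shared
+ # core/terminal bookkeeping in place; newstate.restore() afterwards
+ # undoes those updates so the search can backtrack cheaply.)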
newstate = self.state.__class__(self, G1_node, G2_node) - for mapping in self.match(): - yield mapping + yield from self.match() # restore data structures newstate.restore() @@ -359,6 +348,10 @@ def semantic_feasibility(self, G1_node, G2_node): Indicates that the graph matcher is looking for a subgraph-graph isomorphism such that a subgraph of G1 is isomorphic to G2. + test='mono' + Indicates that the graph matcher is looking for a subgraph-graph + monomorphism such that a subgraph of G1 is monomorphic to G2. + Any subclass which redefines semantic_feasibility() must maintain the above form to keep the match() method functional. Implementations should consider multigraphs. @@ -373,25 +366,39 @@ def subgraph_is_isomorphic(self): except StopIteration: return False -# subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) + def subgraph_is_monomorphic(self): + """Returns True if a subgraph of G1 is monomorphic to G2.""" + try: + x = next(self.subgraph_monomorphisms_iter()) + return True + except StopIteration: + return False + + # subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) def subgraph_isomorphisms_iter(self): """Generator over isomorphisms between a subgraph of G1 and G2.""" # Declare that we are looking for graph-subgraph isomorphism. - self.test = 'subgraph' + self.test = "subgraph" self.initialize() - for mapping in self.match(): - yield mapping + yield from self.match() -# subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) + def subgraph_monomorphisms_iter(self): + """Generator over monomorphisms between a subgraph of G1 and G2.""" + # Declare that we are looking for graph-subgraph monomorphism. + self.test = "mono" + self.initialize() + yield from self.match() + + # subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) def syntactic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is syntactically feasible. This function returns True if it is adding the candidate pair - to the current partial isomorphism mapping is allowable. The addition - is allowable if the inclusion of the candidate pair does not make it - impossible for an isomorphism to be found. + to the current partial isomorphism/monomorphism mapping is allowable. + The addition is allowable if the inclusion of the candidate pair does + not make it impossible for an isomorphism/monomorphism to be found. """ # The VF2 algorithm was designed to work with graphs having, at most, @@ -420,68 +427,90 @@ def syntactic_feasibility(self, G1_node, G2_node): # self-loops for G2_node. Without this check, we would fail on # R_neighbor at the next recursion level. But it is good to prune the # search tree now. - if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(G2_node, G2_node): - return False + + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): + return False + else: + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): + return False # R_neighbor # For each neighbor n' of n in the partial mapping, the corresponding # node m' is a neighbor of m, and vice versa. Also, the number of # edges must be equal. 
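+ # (For a monomorphism the neighbor check is one-sided: the loop
+ # over G1-side neighbors is skipped, and G1 only needs at least as
+ # many connecting edges as G2; hence the "mono" branches below.)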
- for neighbor in self.G1[G1_node]: - if neighbor in self.core_1: - if not (self.core_1[neighbor] in self.G2[G2_node]): - return False - elif self.G1.number_of_edges(neighbor, G1_node) != self.G2.number_of_edges(self.core_1[neighbor], G2_node): - return False + if self.test != "mono": + for neighbor in self.G1[G1_node]: + if neighbor in self.core_1: + if not (self.core_1[neighbor] in self.G2[G2_node]): + return False + elif self.G1.number_of_edges( + neighbor, G1_node + ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node): + return False + for neighbor in self.G2[G2_node]: if neighbor in self.core_2: if not (self.core_2[neighbor] in self.G1[G1_node]): return False - elif self.G1.number_of_edges(self.core_2[neighbor], G1_node) != self.G2.number_of_edges(neighbor, G2_node): + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[neighbor], G1_node + ) < self.G2.number_of_edges(neighbor, G2_node): + return False + else: + if self.G1.number_of_edges( + self.core_2[neighbor], G1_node + ) != self.G2.number_of_edges(neighbor, G2_node): + return False + + if self.test != "mono": + # Look ahead 1 + + # R_terminout + # The number of neighbors of n in T_1^{inout} is equal to the + # number of neighbors of m that are in T_2^{inout}, and vice versa. + num1 = 0 + for neighbor in self.G1[G1_node]: + if (neighbor in self.inout_1) and (neighbor not in self.core_1): + num1 += 1 + num2 = 0 + for neighbor in self.G2[G2_node]: + if (neighbor in self.inout_2) and (neighbor not in self.core_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): return False - # Look ahead 1 - - # R_terminout - # The number of neighbors of n that are in T_1^{inout} is equal to the - # number of neighbors of m that are in T_2^{inout}, and vice versa. - num1 = 0 - for neighbor in self.G1[G1_node]: - if (neighbor in self.inout_1) and (neighbor not in self.core_1): - num1 += 1 - num2 = 0 - for neighbor in self.G2[G2_node]: - if (neighbor in self.inout_2) and (neighbor not in self.core_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False - - # Look ahead 2 - - # R_new - - # The number of neighbors of n that are neither in the core_1 nor - # T_1^{inout} is equal to the number of neighbors of m - # that are neither in core_2 nor T_2^{inout}. - num1 = 0 - for neighbor in self.G1[G1_node]: - if neighbor not in self.inout_1: - num1 += 1 - num2 = 0 - for neighbor in self.G2[G2_node]: - if neighbor not in self.inout_2: - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False + # Look ahead 2 + + # R_new + + # The number of neighbors of n that are neither in the core_1 nor + # T_1^{inout} is equal to the number of neighbors of m + # that are neither in core_2 nor T_2^{inout}. + num1 = 0 + for neighbor in self.G1[G1_node]: + if neighbor not in self.inout_1: + num1 += 1 + num2 = 0 + for neighbor in self.G2[G2_node]: + if neighbor not in self.inout_2: + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False # Otherwise, this node pair is syntactically feasible! return True @@ -492,7 +521,6 @@ class DiGraphMatcher(GraphMatcher): Suitable for DiGraph and MultiDiGraph instances. 
""" -# __doc__ += "Notes\n%s-----" % (indent,) + sources.replace('\n','\n'+indent) def __init__(self, G1, G2): """Initialize DiGraphMatcher. @@ -506,9 +534,9 @@ def __init__(self, G1, G2): >>> from networkx.algorithms import isomorphism >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) - >>> DiGM = isomorphism.DiGraphMatcher(G1,G2) + >>> DiGM = isomorphism.DiGraphMatcher(G1, G2) """ - super(DiGraphMatcher, self).__init__(G1, G2) + super().__init__(G1, G2) def candidate_pairs_iter(self): """Iterator over candidate pairs of nodes in G1 and G2.""" @@ -517,15 +545,16 @@ def candidate_pairs_iter(self): G1_nodes = self.G1_nodes G2_nodes = self.G2_nodes + min_key = self.G2_node_order.__getitem__ # First we compute the out-terminal sets. - T1_out = [node for node in G1_nodes if (node in self.out_1) and (node not in self.core_1)] - T2_out = [node for node in G2_nodes if (node in self.out_2) and (node not in self.core_2)] + T1_out = [node for node in self.out_1 if node not in self.core_1] + T2_out = [node for node in self.out_2 if node not in self.core_2] # If T1_out and T2_out are both nonempty. # P(s) = T1_out x {min T2_out} if T1_out and T2_out: - node_2 = min(T2_out) + node_2 = min(T2_out, key=min_key) for node_1 in T1_out: yield node_1, node_2 @@ -533,14 +562,14 @@ def candidate_pairs_iter(self): # We compute the in-terminal sets. # elif not (T1_out or T2_out): # as suggested by [2], incorrect - else: # as suggested by [1], correct - T1_in = [node for node in G1_nodes if (node in self.in_1) and (node not in self.core_1)] - T2_in = [node for node in G2_nodes if (node in self.in_2) and (node not in self.core_2)] + else: # as suggested by [1], correct + T1_in = [node for node in self.in_1 if node not in self.core_1] + T2_in = [node for node in self.in_2 if node not in self.core_2] # If T1_in and T2_in are both nonempty. # P(s) = T1_out x {min T2_out} if T1_in and T2_in: - node_2 = min(T2_in) + node_2 = min(T2_in, key=min_key) for node_1 in T1_in: yield node_1, node_2 @@ -548,8 +577,8 @@ def candidate_pairs_iter(self): # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} # elif not (T1_in or T2_in): # as suggested by [2], incorrect - else: # as inferred from [1], correct - node_2 = min(G2_nodes - set(self.core_2)) + else: # as inferred from [1], correct + node_2 = min(G2_nodes - set(self.core_2), key=min_key) for node_1 in G1_nodes: if node_1 not in self.core_1: yield node_1, node_2 @@ -594,9 +623,9 @@ def syntactic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is syntactically feasible. This function returns True if it is adding the candidate pair - to the current partial isomorphism mapping is allowable. The addition - is allowable if the inclusion of the candidate pair does not make it - impossible for an isomorphism to be found. + to the current partial isomorphism/monomorphism mapping is allowable. + The addition is allowable if the inclusion of the candidate pair does + not make it impossible for an isomorphism/monomorphism to be found. """ # The VF2 algorithm was designed to work with graphs having, at most, @@ -624,166 +653,197 @@ def syntactic_feasibility(self, G1_node, G2_node): # The number of selfloops for G1_node must equal the number of # self-loops for G2_node. Without this check, we would fail on R_pred # at the next recursion level. This should prune the tree even further. 
- - if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(G2_node, G2_node): - return False + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): + return False + else: + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): + return False # R_pred # For each predecessor n' of n in the partial mapping, the # corresponding node m' is a predecessor of m, and vice versa. Also, # the number of edges must be equal - for predecessor in self.G1.pred[G1_node]: - if predecessor in self.core_1: - if not (self.core_1[predecessor] in self.G2.pred[G2_node]): - return False - elif self.G1.number_of_edges(predecessor, G1_node) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): - return False + if self.test != "mono": + for predecessor in self.G1.pred[G1_node]: + if predecessor in self.core_1: + if not (self.core_1[predecessor] in self.G2.pred[G2_node]): + return False + elif self.G1.number_of_edges( + predecessor, G1_node + ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): + return False for predecessor in self.G2.pred[G2_node]: if predecessor in self.core_2: if not (self.core_2[predecessor] in self.G1.pred[G1_node]): return False - elif self.G1.number_of_edges(self.core_2[predecessor], G1_node) != self.G2.number_of_edges(predecessor, G2_node): - return False + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) < self.G2.number_of_edges(predecessor, G2_node): + return False + else: + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) != self.G2.number_of_edges(predecessor, G2_node): + return False # R_succ # For each successor n' of n in the partial mapping, the corresponding # node m' is a successor of m, and vice versa. Also, the number of # edges must be equal. - for successor in self.G1[G1_node]: - if successor in self.core_1: - if not (self.core_1[successor] in self.G2[G2_node]): - return False - elif self.G1.number_of_edges(G1_node, successor) != self.G2.number_of_edges(G2_node, self.core_1[successor]): - return False + if self.test != "mono": + for successor in self.G1[G1_node]: + if successor in self.core_1: + if not (self.core_1[successor] in self.G2[G2_node]): + return False + elif self.G1.number_of_edges( + G1_node, successor + ) != self.G2.number_of_edges(G2_node, self.core_1[successor]): + return False for successor in self.G2[G2_node]: if successor in self.core_2: if not (self.core_2[successor] in self.G1[G1_node]): return False - elif self.G1.number_of_edges(G1_node, self.core_2[successor]) != self.G2.number_of_edges(G2_node, successor): + elif self.test == "mono": + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) < self.G2.number_of_edges(G2_node, successor): + return False + else: + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) != self.G2.number_of_edges(G2_node, successor): + return False + + if self.test != "mono": + + # Look ahead 1 + + # R_termin + # The number of predecessors of n that are in T_1^{in} is equal to the + # number of predecessors of m that are in T_2^{in}. 
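+ # (For the subgraph test the equality below is relaxed to
+ # num1 >= num2, since G1 may have extra neighbors outside the
+ # matched region.)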
+ num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor in self.in_1) and (predecessor not in self.core_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor in self.in_2) and (predecessor not in self.core_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): return False - # Look ahead 1 - - # R_termin - # The number of predecessors of n that are in T_1^{in} is equal to the - # number of predecessors of m that are in T_2^{in}. - num1 = 0 - for predecessor in self.G1.pred[G1_node]: - if (predecessor in self.in_1) and (predecessor not in self.core_1): - num1 += 1 - num2 = 0 - for predecessor in self.G2.pred[G2_node]: - if (predecessor in self.in_2) and (predecessor not in self.core_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False - - # The number of successors of n that are in T_1^{in} is equal to the - # number of successors of m that are in T_2^{in}. - num1 = 0 - for successor in self.G1[G1_node]: - if (successor in self.in_1) and (successor not in self.core_1): - num1 += 1 - num2 = 0 - for successor in self.G2[G2_node]: - if (successor in self.in_2) and (successor not in self.core_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False - - # R_termout - - # The number of predecessors of n that are in T_1^{out} is equal to the - # number of predecessors of m that are in T_2^{out}. - num1 = 0 - for predecessor in self.G1.pred[G1_node]: - if (predecessor in self.out_1) and (predecessor not in self.core_1): - num1 += 1 - num2 = 0 - for predecessor in self.G2.pred[G2_node]: - if (predecessor in self.out_2) and (predecessor not in self.core_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False - - # The number of successors of n that are in T_1^{out} is equal to the - # number of successors of m that are in T_2^{out}. - num1 = 0 - for successor in self.G1[G1_node]: - if (successor in self.out_1) and (successor not in self.core_1): - num1 += 1 - num2 = 0 - for successor in self.G2[G2_node]: - if (successor in self.out_2) and (successor not in self.core_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False + # The number of successors of n that are in T_1^{in} is equal to the + # number of successors of m that are in T_2^{in}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor in self.in_1) and (successor not in self.core_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor in self.in_2) and (successor not in self.core_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False - # Look ahead 2 + # R_termout + + # The number of predecessors of n that are in T_1^{out} is equal to the + # number of predecessors of m that are in T_2^{out}. 
+ num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor in self.out_1) and (predecessor not in self.core_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor in self.out_2) and (predecessor not in self.core_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False - # R_new + # The number of successors of n that are in T_1^{out} is equal to the + # number of successors of m that are in T_2^{out}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor in self.out_1) and (successor not in self.core_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor in self.out_2) and (successor not in self.core_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False - # The number of predecessors of n that are neither in the core_1 nor - # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m - # that are neither in core_2 nor T_2^{in} nor T_2^{out}. - num1 = 0 - for predecessor in self.G1.pred[G1_node]: - if (predecessor not in self.in_1) and (predecessor not in self.out_1): - num1 += 1 - num2 = 0 - for predecessor in self.G2.pred[G2_node]: - if (predecessor not in self.in_2) and (predecessor not in self.out_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False + # Look ahead 2 + + # R_new + + # The number of predecessors of n that are neither in the core_1 nor + # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m + # that are neither in core_2 nor T_2^{in} nor T_2^{out}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor not in self.in_1) and (predecessor not in self.out_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor not in self.in_2) and (predecessor not in self.out_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False - # The number of successors of n that are neither in the core_1 nor - # T_1^{in} nor T_1^{out} is equal to the number of successors of m - # that are neither in core_2 nor T_2^{in} nor T_2^{out}. - num1 = 0 - for successor in self.G1[G1_node]: - if (successor not in self.in_1) and (successor not in self.out_1): - num1 += 1 - num2 = 0 - for successor in self.G2[G2_node]: - if (successor not in self.in_2) and (successor not in self.out_2): - num2 += 1 - if self.test == 'graph': - if not (num1 == num2): - return False - else: # self.test == 'subgraph' - if not (num1 >= num2): - return False + # The number of successors of n that are neither in the core_1 nor + # T_1^{in} nor T_1^{out} is equal to the number of successors of m + # that are neither in core_2 nor T_2^{in} nor T_2^{out}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor not in self.in_1) and (successor not in self.out_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor not in self.in_2) and (successor not in self.out_2): + num2 += 1 + if self.test == "graph": + if not (num1 == num2): + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False # Otherwise, this node pair is syntactically feasible! 
return True -class GMState(object): +class GMState: """Internal representation of state for the GraphMatcher class. This class is used internally by the GraphMatcher class. It is used @@ -836,17 +896,21 @@ def __init__(self, GM, G1_node=None, G2_node=None): # Now we add every other node... # Updates for T_1^{inout} - new_nodes = set([]) + new_nodes = set() for node in GM.core_1: - new_nodes.update([neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1]) + new_nodes.update( + [neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1] + ) for node in new_nodes: if node not in GM.inout_1: GM.inout_1[node] = self.depth # Updates for T_2^{inout} - new_nodes = set([]) + new_nodes = set() for node in GM.core_2: - new_nodes.update([neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2]) + new_nodes.update( + [neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2] + ) for node in new_nodes: if node not in GM.inout_2: GM.inout_2[node] = self.depth @@ -867,7 +931,7 @@ def restore(self): del vector[node] -class DiGMState(object): +class DiGMState: """Internal representation of state for the DiGraphMatcher class. This class is used internally by the DiGraphMatcher class. It is used @@ -925,35 +989,57 @@ def __init__(self, GM, G1_node=None, G2_node=None): # Now we add every other node... # Updates for T_1^{in} - new_nodes = set([]) + new_nodes = set() for node in GM.core_1: - new_nodes.update([predecessor for predecessor in GM.G1.predecessors(node) - if predecessor not in GM.core_1]) + new_nodes.update( + [ + predecessor + for predecessor in GM.G1.predecessors(node) + if predecessor not in GM.core_1 + ] + ) for node in new_nodes: if node not in GM.in_1: GM.in_1[node] = self.depth # Updates for T_2^{in} - new_nodes = set([]) + new_nodes = set() for node in GM.core_2: - new_nodes.update([predecessor for predecessor in GM.G2.predecessors(node) - if predecessor not in GM.core_2]) + new_nodes.update( + [ + predecessor + for predecessor in GM.G2.predecessors(node) + if predecessor not in GM.core_2 + ] + ) for node in new_nodes: if node not in GM.in_2: GM.in_2[node] = self.depth # Updates for T_1^{out} - new_nodes = set([]) + new_nodes = set() for node in GM.core_1: - new_nodes.update([successor for successor in GM.G1.successors(node) if successor not in GM.core_1]) + new_nodes.update( + [ + successor + for successor in GM.G1.successors(node) + if successor not in GM.core_1 + ] + ) for node in new_nodes: if node not in GM.out_1: GM.out_1[node] = self.depth # Updates for T_2^{out} - new_nodes = set([]) + new_nodes = set() for node in GM.core_2: - new_nodes.update([successor for successor in GM.G2.successors(node) if successor not in GM.core_2]) + new_nodes.update( + [ + successor + for successor in GM.G2.successors(node) + if successor not in GM.core_2 + ] + ) for node in new_nodes: if node not in GM.out_2: GM.out_2[node] = self.depth diff --git a/networkx/algorithms/isomorphism/matchhelpers.py b/networkx/algorithms/isomorphism/matchhelpers.py index dc20a43..32f5490 100644 --- a/networkx/algorithms/isomorphism/matchhelpers.py +++ b/networkx/algorithms/isomorphism/matchhelpers.py @@ -3,32 +3,25 @@ """ from itertools import permutations import types -import networkx as nx -__all__ = ['categorical_node_match', - 'categorical_edge_match', - 'categorical_multiedge_match', - 'numerical_node_match', - 'numerical_edge_match', - 'numerical_multiedge_match', - 'generic_node_match', - 'generic_edge_match', - 'generic_multiedge_match', - ] +__all__ = [ + 
"categorical_node_match", + "categorical_edge_match", + "categorical_multiedge_match", + "numerical_node_match", + "numerical_edge_match", + "numerical_multiedge_match", + "generic_node_match", + "generic_edge_match", + "generic_multiedge_match", +] def copyfunc(f, name=None): """Returns a deepcopy of a function.""" - try: - # Python <3 - return types.FunctionType(f.func_code, f.func_globals, - name or f.__name__, f.func_defaults, - f.func_closure) - except AttributeError: - # Python >=3 - return types.FunctionType(f.__code__, f.__globals__, - name or f.__name__, f.__defaults__, - f.__closure__) + return types.FunctionType( + f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__ + ) def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08): @@ -88,26 +81,29 @@ def close(x, y, rtol=1.0000000000000001e-05, atol=1e-08): Examples -------- >>> import networkx.algorithms.isomorphism as iso ->>> nm = iso.categorical_node_match('size', 1) ->>> nm = iso.categorical_node_match(['color', 'size'], ['red', 2]) +>>> nm = iso.categorical_node_match("size", 1) +>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2]) """ def categorical_node_match(attr, default): - if nx.utils.is_string_like(attr): + if isinstance(attr, str): + def match(data1, data2): return data1.get(attr, default) == data2.get(attr, default) + else: attrs = list(zip(attr, default)) # Python 3 def match(data1, data2): return all(data1.get(attr, d) == data2.get(attr, d) for attr, d in attrs) + return match try: - categorical_edge_match = copyfunc(categorical_node_match, 'categorical_edge_match') + categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match") except NotImplementedError: # IronPython lacks support for types.FunctionType. # https://github.com/networkx/networkx/issues/949 @@ -117,32 +113,35 @@ def categorical_edge_match(*args, **kwargs): def categorical_multiedge_match(attr, default): - if nx.utils.is_string_like(attr): + if isinstance(attr, str): + def match(datasets1, datasets2): - values1 = set([data.get(attr, default) for data in datasets1.values()]) - values2 = set([data.get(attr, default) for data in datasets2.values()]) + values1 = {data.get(attr, default) for data in datasets1.values()} + values2 = {data.get(attr, default) for data in datasets2.values()} return values1 == values2 + else: attrs = list(zip(attr, default)) # Python 3 def match(datasets1, datasets2): - values1 = set([]) + values1 = set() for data1 in datasets1.values(): x = tuple(data1.get(attr, d) for attr, d in attrs) values1.add(x) - values2 = set([]) + values2 = set() for data2 in datasets2.values(): x = tuple(data2.get(attr, d) for attr, d in attrs) values2.add(x) return values1 == values2 + return match # Docstrings for categorical functions. 
categorical_node_match.__doc__ = categorical_doc -categorical_edge_match.__doc__ = categorical_doc.replace('node', 'edge') -tmpdoc = categorical_doc.replace('node', 'edge') -tmpdoc = tmpdoc.replace('categorical_edge_match', 'categorical_multiedge_match') +categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge") +tmpdoc = categorical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match") categorical_multiedge_match.__doc__ = tmpdoc @@ -174,18 +173,20 @@ def match(datasets1, datasets2): Examples -------- >>> import networkx.algorithms.isomorphism as iso ->>> nm = iso.numerical_node_match('weight', 1.0) ->>> nm = iso.numerical_node_match(['weight', 'linewidth'], [.25, .5]) +>>> nm = iso.numerical_node_match("weight", 1.0) +>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5]) """ def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): - if nx.utils.is_string_like(attr): + if isinstance(attr, str): + def match(data1, data2): - return close(data1.get(attr, default), - data2.get(attr, default), - rtol=rtol, atol=atol) + return close( + data1.get(attr, default), data2.get(attr, default), rtol=rtol, atol=atol + ) + else: attrs = list(zip(attr, default)) # Python 3 @@ -193,11 +194,12 @@ def match(data1, data2): values1 = [data1.get(attr, d) for attr, d in attrs] values2 = [data2.get(attr, d) for attr, d in attrs] return allclose(values1, values2, rtol=rtol, atol=atol) + return match try: - numerical_edge_match = copyfunc(numerical_node_match, 'numerical_edge_match') + numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match") except NotImplementedError: # IronPython lacks support for types.FunctionType. # https://github.com/networkx/networkx/issues/949 @@ -207,11 +209,13 @@ def numerical_edge_match(*args, **kwargs): def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): - if nx.utils.is_string_like(attr): + if isinstance(attr, str): + def match(datasets1, datasets2): values1 = sorted([data.get(attr, default) for data in datasets1.values()]) values2 = sorted([data.get(attr, default) for data in datasets2.values()]) return allclose(values1, values2, rtol=rtol, atol=atol) + else: attrs = list(zip(attr, default)) # Python 3 @@ -231,14 +235,15 @@ def match(datasets1, datasets2): return False else: return True + return match # Docstrings for numerical functions. 
numerical_node_match.__doc__ = numerical_doc -numerical_edge_match.__doc__ = numerical_doc.replace('node', 'edge') -tmpdoc = numerical_doc.replace('node', 'edge') -tmpdoc = tmpdoc.replace('numerical_edge_match', 'numerical_multiedge_match') +numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge") +tmpdoc = numerical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match") numerical_multiedge_match.__doc__ = tmpdoc @@ -271,17 +276,19 @@ def match(datasets1, datasets2): >>> from operator import eq >>> from networkx.algorithms.isomorphism.matchhelpers import close >>> from networkx.algorithms.isomorphism import generic_node_match ->>> nm = generic_node_match('weight', 1.0, close) ->>> nm = generic_node_match('color', 'red', eq) ->>> nm = generic_node_match(['weight', 'color'], [1.0, 'red'], [close, eq]) +>>> nm = generic_node_match("weight", 1.0, close) +>>> nm = generic_node_match("color", "red", eq) +>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [close, eq]) """ def generic_node_match(attr, default, op): - if nx.utils.is_string_like(attr): + if isinstance(attr, str): + def match(data1, data2): return op(data1.get(attr, default), data2.get(attr, default)) + else: attrs = list(zip(attr, default, op)) # Python 3 @@ -291,11 +298,12 @@ def match(data1, data2): return False else: return True + return match try: - generic_edge_match = copyfunc(generic_node_match, 'generic_edge_match') + generic_edge_match = copyfunc(generic_node_match, "generic_edge_match") except NotImplementedError: # IronPython lacks support for types.FunctionType. # https://github.com/networkx/networkx/issues/949 @@ -335,18 +343,16 @@ def generic_multiedge_match(attr, default, op): >>> from operator import eq >>> from networkx.algorithms.isomorphism.matchhelpers import close >>> from networkx.algorithms.isomorphism import generic_node_match - >>> nm = generic_node_match('weight', 1.0, close) - >>> nm = generic_node_match('color', 'red', eq) - >>> nm = generic_node_match(['weight', 'color'], - ... [1.0, 'red'], - ... [close, eq]) + >>> nm = generic_node_match("weight", 1.0, close) + >>> nm = generic_node_match("color", "red", eq) + >>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [close, eq]) ... """ # This is slow, but generic. # We must test every possible isomorphism between the edges. - if nx.utils.is_string_like(attr): + if isinstance(attr, str): attr = [attr] default = [default] op = [op] @@ -372,9 +378,10 @@ def match(datasets1, datasets2): else: # Then there are no isomorphisms between the multiedges. return False + return match # Docstrings for generic functions.
generic_node_match.__doc__ = generic_doc -generic_edge_match.__doc__ = generic_doc.replace('node', 'edge') +generic_edge_match.__doc__ = generic_doc.replace("node", "edge") diff --git a/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/networkx/algorithms/isomorphism/temporalisomorphvf2.py index 42039ca..50a823a 100644 --- a/networkx/algorithms/isomorphism/temporalisomorphvf2.py +++ b/networkx/algorithms/isomorphism/temporalisomorphvf2.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ ***************************** Time-respecting VF2 Algorithm @@ -66,17 +65,13 @@ """ -from __future__ import absolute_import import networkx as nx -from datetime import datetime, timedelta from .isomorphvf2 import GraphMatcher, DiGraphMatcher -__all__ = ['TimeRespectingGraphMatcher', - 'TimeRespectingDiGraphMatcher'] +__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"] class TimeRespectingGraphMatcher(GraphMatcher): - def __init__(self, G1, G2, temporal_attribute_name, delta): """Initialize TimeRespectingGraphMatcher. @@ -88,15 +83,18 @@ def __init__(self, G1, G2, temporal_attribute_name, delta): syntactic and semantic feasibility: >>> from networkx.algorithms import isomorphism + >>> from datetime import timedelta >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph())) >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph())) - >>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, 'date', timedelta(days=1)) + >>> GM = isomorphism.TimeRespectingGraphMatcher( + ... G1, G2, "date", timedelta(days=1) + ... ) """ self.temporal_attribute_name = temporal_attribute_name self.delta = delta - super(TimeRespectingGraphMatcher, self).__init__(G1, G2) + super().__init__(G1, G2) def one_hop(self, Gx, Gx_node, neighbors): """ @@ -105,20 +103,25 @@ def one_hop(self, Gx, Gx_node, neighbors): """ dates = [] for n in neighbors: - if type(Gx) == type(nx.Graph()): # Graph G[u][v] returns the data dictionary. + if isinstance(Gx, nx.Graph): # Graph G[u][v] returns the data dictionary. dates.append(Gx[Gx_node][n][self.temporal_attribute_name]) else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. - for edge in Gx[Gx_node][n].values(): # Iterates all edges between node pair. + for edge in Gx[Gx_node][ + n + ].values(): # Iterates all edges between node pair. dates.append(edge[self.temporal_attribute_name]) if any(x is None for x in dates): - raise ValueError('Datetime not supplied for at least one edge.') + raise ValueError("Datetime not supplied for at least one edge.") return not dates or max(dates) - min(dates) <= self.delta def two_hop(self, Gx, core_x, Gx_node, neighbors): """ Paths of length 2 from Gx_node should be time-respecting. """ - return all(self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) for v in neighbors) + return all( + self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) + for v in neighbors + ) def semantic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is semantically @@ -138,7 +141,6 @@ def semantic_feasibility(self, G1_node, G2_node): class TimeRespectingDiGraphMatcher(DiGraphMatcher): - def __init__(self, G1, G2, temporal_attribute_name, delta): """Initialize TimeRespectingDiGraphMatcher. 
@@ -150,27 +152,32 @@ def __init__(self, G1, G2, temporal_attribute_name, delta): syntactic and semantic feasibility: >>> from networkx.algorithms import isomorphism + >>> from datetime import timedelta >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) - >>> GM = isomorphism.TimeRespectingDiGraphMatcher(G1, G2, 'date', timedelta(days=1)) + >>> GM = isomorphism.TimeRespectingDiGraphMatcher( + ... G1, G2, "date", timedelta(days=1) + ... ) """ self.temporal_attribute_name = temporal_attribute_name self.delta = delta - super(TimeRespectingDiGraphMatcher, self).__init__(G1, G2) + super().__init__(G1, G2) def get_pred_dates(self, Gx, Gx_node, core_x, pred): """ Get the dates of edges from predecessors. """ pred_dates = [] - if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary. + if isinstance(Gx, nx.DiGraph): # Graph G[u][v] returns the data dictionary. for n in pred: pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name]) else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. for n in pred: - for edge in Gx[n][Gx_node].values(): # Iterates all edge data between node pair. + for edge in Gx[n][ + Gx_node + ].values(): # Iterates all edge data between node pair. pred_dates.append(edge[self.temporal_attribute_name]) return pred_dates @@ -179,12 +186,14 @@ def get_succ_dates(self, Gx, Gx_node, core_x, succ): Get the dates of edges to successors. """ succ_dates = [] - if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary. + if isinstance(Gx, nx.DiGraph): # Graph G[u][v] returns the data dictionary. for n in succ: succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name]) else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. for n in succ: - for edge in Gx[Gx_node][n].values(): # Iterates all edge data between node pair. + for edge in Gx[Gx_node][ + n + ].values(): # Iterates all edge data between node pair. succ_dates.append(edge[self.temporal_attribute_name]) return succ_dates @@ -194,19 +203,39 @@ def one_hop(self, Gx, Gx_node, core_x, pred, succ): """ pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred) succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ) - return self.test_one(pred_dates, succ_dates) and self.test_two(pred_dates, succ_dates) + return self.test_one(pred_dates, succ_dates) and self.test_two( + pred_dates, succ_dates + ) def two_hop_pred(self, Gx, Gx_node, core_x, pred): """ The predecessors of the ego node. """ - return all(self.one_hop(Gx, p, core_x, self.preds(Gx, core_x, p), self.succs(Gx, core_x, p, Gx_node)) for p in pred) + return all( + self.one_hop( + Gx, + p, + core_x, + self.preds(Gx, core_x, p), + self.succs(Gx, core_x, p, Gx_node), + ) + for p in pred + ) def two_hop_succ(self, Gx, Gx_node, core_x, succ): """ The successors of the ego node.
""" - return all(self.one_hop(Gx, s, core_x, self.preds(Gx, core_x, s, Gx_node), self.succs(Gx, core_x, s)) for s in succ) + return all( + self.one_hop( + Gx, + s, + core_x, + self.preds(Gx, core_x, s, Gx_node), + self.succs(Gx, core_x, s), + ) + for s in succ + ) def preds(self, Gx, core_x, v, Gx_node=None): pred = [n for n in Gx.predecessors(v) if n in core_x] @@ -230,7 +259,7 @@ def test_one(self, pred_dates, succ_dates): dates = pred_dates + succ_dates if any(x is None for x in dates): - raise ValueError('Date or datetime not supplied for at least one edge.') + raise ValueError("Date or datetime not supplied for at least one edge.") dates.sort() # Small to large. if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta): @@ -246,7 +275,11 @@ def test_two(self, pred_dates, succ_dates): pred_dates.sort() succ_dates.sort() # First out before last in; negative of the necessary condition for time-respect. - if 0 < len(succ_dates) and 0 < len(pred_dates) and succ_dates[0] < pred_dates[-1]: + if ( + 0 < len(succ_dates) + and 0 < len(pred_dates) + and succ_dates[0] < pred_dates[-1] + ): time_respecting = False return time_respecting @@ -258,9 +291,13 @@ def semantic_feasibility(self, G1_node, G2_node): maintain the self.tests if needed, to keep the match() method functional. Implementations should consider multigraphs. """ - pred, succ = [n for n in self.G1.predecessors(G1_node) if n in self.core_1], [ - n for n in self.G1.successors(G1_node) if n in self.core_1] - if not self.one_hop(self.G1, G1_node, self.core_1, pred, succ): # Fail fast on first node. + pred, succ = ( + [n for n in self.G1.predecessors(G1_node) if n in self.core_1], + [n for n in self.G1.successors(G1_node) if n in self.core_1], + ) + if not self.one_hop( + self.G1, G1_node, self.core_1, pred, succ + ): # Fail fast on first node. return False if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred): return False diff --git a/networkx/algorithms/isomorphism/tests/__init__.py b/networkx/algorithms/isomorphism/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/isomorphism/tests/test_ismags.py b/networkx/algorithms/isomorphism/tests/test_ismags.py new file mode 100644 index 0000000..e4b61f6 --- /dev/null +++ b/networkx/algorithms/isomorphism/tests/test_ismags.py @@ -0,0 +1,327 @@ +""" + Tests for ISMAGS isomorphism algorithm. +""" + +import pytest +import networkx as nx +from networkx.algorithms import isomorphism as iso + + +def _matches_to_sets(matches): + """ + Helper function to facilitate comparing collections of dictionaries in + which order does not matter. + """ + return set(map(lambda m: frozenset(m.items()), matches)) + + +class TestSelfIsomorphism: + data = [ + ( + [ + (0, dict(name="a")), + (1, dict(name="a")), + (2, dict(name="b")), + (3, dict(name="b")), + (4, dict(name="a")), + (5, dict(name="a")), + ], + [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)], + ), + (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]), + ( + [], + [ + (0, 1), + (1, 2), + (2, 3), + (3, 4), + (4, 5), + (5, 0), + (0, 6), + (6, 7), + (2, 8), + (8, 9), + (4, 10), + (10, 11), + ], + ), + ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]), + ] + + def test_self_isomorphism(self): + """ + For some small, symmetric graphs, make sure that 1) they are isomorphic + to themselves, and 2) that only the identity mapping is found. 
+ """ + for node_data, edge_data in self.data: + graph = nx.Graph() + graph.add_nodes_from(node_data) + graph.add_edges_from(edge_data) + + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) + assert ismags.is_isomorphic() + assert ismags.subgraph_is_isomorphic() + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] + + def test_edgecase_self_isomorphism(self): + """ + This edgecase is one of the cases in which it is hard to find all + symmetry elements. + """ + graph = nx.Graph() + nx.add_path(graph, range(5)) + graph.add_edges_from([(2, 5), (5, 6)]) + + ismags = iso.ISMAGS(graph, graph) + ismags_answer = list(ismags.find_isomorphisms(True)) + assert ismags_answer == [{n: n for n in graph.nodes}] + + graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5}) + ismags = iso.ISMAGS(graph, graph) + ismags_answer = list(ismags.find_isomorphisms(True)) + assert ismags_answer == [{n: n for n in graph.nodes}] + + @pytest.mark.skip() + def test_directed_self_isomorphism(self): + """ + For some small, directed, symmetric graphs, make sure that 1) they are + isomorphic to themselves, and 2) that only the identity mapping is + found. + """ + for node_data, edge_data in self.data: + graph = nx.Graph() + graph.add_nodes_from(node_data) + graph.add_edges_from(edge_data) + + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) + assert ismags.is_isomorphic() + assert ismags.subgraph_is_isomorphic() + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] + + +class TestSubgraphIsomorphism: + def test_isomorphism(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(4)) + + g2 = nx.Graph() + nx.add_cycle(g2, range(4)) + g2.add_edges_from([(n, m) for n, m in zip(g2, range(4, 8))]) + ismags = iso.ISMAGS(g2, g1) + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in g1.nodes} + ] + + def test_isomorphism2(self): + g1 = nx.Graph() + nx.add_path(g1, range(3)) + + g2 = g1.copy() + g2.add_edge(1, 3) + + ismags = iso.ISMAGS(g2, g1) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [ + {0: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 3: 2}, + {2: 0, 1: 1, 3: 2}, + ] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [ + {0: 2, 1: 1, 2: 0}, + {0: 2, 1: 1, 3: 0}, + {2: 2, 1: 1, 3: 0}, + ] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + def test_labeled_nodes(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(3)) + g1.nodes[1]["attr"] = True + + g2 = g1.copy() + g2.add_edge(1, 3) + ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [{0: 0, 1: 1, 2: 2}] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [{0: 2, 1: 1, 2: 0}] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + def test_labeled_edges(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(3)) + g1.edges[1, 2]["attr"] = True + + g2 = g1.copy() + g2.add_edge(1, 3) + ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + 
expected_symmetric = [{0: 0, 1: 1, 2: 2}] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [{1: 2, 0: 0, 2: 1}] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + +class TestWikipediaExample: + # Nodes 'a', 'b', 'c' and 'd' form a column. + # Nodes 'g', 'h', 'i' and 'j' form a column. + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] + + # Nodes 1,2,3,4 form the clockwise corners of a large square. + # Nodes 5,6,7,8 form the clockwise corners of a small square + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] + + def test_graph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + gm = iso.ISMAGS(g1, g2) + assert gm.is_isomorphic() + + +class TestLargestCommonSubgraph: + def test_mcis(self): + # Example graphs from DOI: 10.1002/spe.588 + graph1 = nx.Graph() + graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)]) + graph1.nodes[1]["color"] = 0 + + graph2 = nx.Graph() + graph2.add_edges_from( + [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)] + ) + graph2.nodes[1]["color"] = 1 + graph2.nodes[6]["color"] = 2 + graph2.nodes[7]["color"] = 2 + + ismags = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags.subgraph_isomorphisms_iter(True)) == [] + assert list(ismags.subgraph_isomorphisms_iter(False)) == [] + found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}] + ) + assert expected == found_mcis + + ismags = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags.subgraph_isomorphisms_iter(True)) == [] + assert list(ismags.subgraph_isomorphisms_iter(False)) == [] + found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) + # Same answer, but reversed. + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}] + ) + assert expected == found_mcis + + def test_symmetry_mcis(self): + graph1 = nx.Graph() + nx.add_path(graph1, range(4)) + + graph2 = nx.Graph() + nx.add_path(graph2, range(3)) + graph2.add_edge(1, 3) + + # Only the symmetry of graph2 is taken into account here. + ismags1 = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags1.subgraph_isomorphisms_iter(True)) == [] + found_mcis = _matches_to_sets(ismags1.largest_common_subgraph()) + expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}]) + assert expected == found_mcis + + # Only the symmetry of graph1 is taken into account here. 
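For reference, a minimal sketch of the symmetry handling these tests exercise, assuming only the `iso.ISMAGS` class imported above (the graph and printed mappings are illustrative):

    import networkx as nx
    from networkx.algorithms import isomorphism as iso

    graph = nx.path_graph(4)  # symmetric: reversing the path maps it onto itself
    ismags = iso.ISMAGS(graph, graph)
    # symmetry=True yields one representative per symmetry class:
    print(list(ismags.isomorphisms_iter(symmetry=True)))
    # [{0: 0, 1: 1, 2: 2, 3: 3}]
    # symmetry=False also yields the reversed mapping {0: 3, 1: 2, 2: 1, 3: 0}.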
+ ismags2 = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags2.subgraph_isomorphisms_iter(True)) == [] + found_mcis = _matches_to_sets(ismags2.largest_common_subgraph()) + expected = _matches_to_sets( + [ + {3: 2, 0: 0, 1: 1}, + {2: 0, 0: 2, 1: 1}, + {3: 0, 0: 2, 1: 1}, + {3: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + {2: 0, 3: 2, 1: 1}, + ] + ) + + assert expected == found_mcis + + found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False)) + found_mcis2 = ismags2.largest_common_subgraph(False) + found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2] + found_mcis2 = _matches_to_sets(found_mcis2) + + expected = _matches_to_sets( + [ + {3: 2, 1: 3, 2: 1}, + {2: 0, 0: 2, 1: 1}, + {1: 2, 3: 3, 2: 1}, + {3: 0, 1: 3, 2: 1}, + {0: 2, 2: 3, 1: 1}, + {3: 0, 1: 2, 2: 1}, + {2: 0, 0: 3, 1: 1}, + {0: 0, 2: 3, 1: 1}, + {1: 0, 3: 3, 2: 1}, + {1: 0, 3: 2, 2: 1}, + {0: 3, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + ] + ) + assert expected == found_mcis1 + assert expected == found_mcis2 diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/networkx/algorithms/isomorphism/tests/test_isomorphism.py index 5061129..c669040 100644 --- a/networkx/algorithms/isomorphism/tests/test_isomorphism.py +++ b/networkx/algorithms/isomorphism/tests/test_isomorphism.py @@ -1,33 +1,40 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx from networkx.algorithms import isomorphism as iso class TestIsomorph: - - def setUp(self): - self.G1 = nx.Graph() - self.G2 = nx.Graph() - self.G3 = nx.Graph() - self.G4 = nx.Graph() - self.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]]) - self.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]]) - self.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]]) - self.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]]) + @classmethod + def setup_class(cls): + cls.G1 = nx.Graph() + cls.G2 = nx.Graph() + cls.G3 = nx.Graph() + cls.G4 = nx.Graph() + cls.G5 = nx.Graph() + cls.G6 = nx.Graph() + cls.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]]) + cls.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]]) + cls.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]]) + cls.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]]) + cls.G5.add_edges_from([[1, 2], [1, 3]]) + cls.G6.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]]) def test_could_be_isomorphic(self): - assert_true(iso.could_be_isomorphic(self.G1, self.G2)) - assert_true(iso.could_be_isomorphic(self.G1, self.G3)) - assert_false(iso.could_be_isomorphic(self.G1, self.G4)) - assert_true(iso.could_be_isomorphic(self.G3, self.G2)) + assert iso.could_be_isomorphic(self.G1, self.G2) + assert iso.could_be_isomorphic(self.G1, self.G3) + assert not iso.could_be_isomorphic(self.G1, self.G4) + assert iso.could_be_isomorphic(self.G3, self.G2) + assert not iso.could_be_isomorphic(self.G1, self.G6) def test_fast_could_be_isomorphic(self): - assert_true(iso.fast_could_be_isomorphic(self.G3, self.G2)) + assert iso.fast_could_be_isomorphic(self.G3, self.G2) + assert not iso.fast_could_be_isomorphic(self.G3, self.G5) + assert not iso.fast_could_be_isomorphic(self.G1, self.G6) def test_faster_could_be_isomorphic(self): - assert_true(iso.faster_could_be_isomorphic(self.G3, self.G2)) + assert iso.faster_could_be_isomorphic(self.G3, self.G2) + assert not iso.faster_could_be_isomorphic(self.G3, self.G5) + assert not iso.faster_could_be_isomorphic(self.G1, self.G6) def test_is_isomorphic(self): - 
assert_true(iso.is_isomorphic(self.G1, self.G2)) - assert_false(iso.is_isomorphic(self.G1, self.G4)) + assert iso.is_isomorphic(self.G1, self.G2) + assert not iso.is_isomorphic(self.G1, self.G4) diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py index 37407c1..06d041e 100644 --- a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py +++ b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py @@ -6,27 +6,46 @@ import struct import random -from nose.tools import assert_true, assert_equal -from nose import SkipTest import networkx as nx from networkx.algorithms import isomorphism as iso -class TestWikipediaExample(object): +class TestWikipediaExample: # Source: https://en.wikipedia.org/wiki/Graph_isomorphism # Nodes 'a', 'b', 'c' and 'd' form a column. # Nodes 'g', 'h', 'i' and 'j' form a column. - g1edges = [['a', 'g'], ['a', 'h'], ['a', 'i'], - ['b', 'g'], ['b', 'h'], ['b', 'j'], - ['c', 'g'], ['c', 'i'], ['c', 'j'], - ['d', 'h'], ['d', 'i'], ['d', 'j']] + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] # Nodes 1,2,3,4 form the clockwise corners of a large square. # Nodes 5,6,7,8 form the clockwise corners of a small square - g2edges = [[1, 2], [2, 3], [3, 4], [4, 1], - [5, 6], [6, 7], [7, 8], [8, 5], - [1, 5], [2, 6], [3, 7], [4, 8]] + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] def test_graph(self): g1 = nx.Graph() @@ -34,14 +53,17 @@ def test_graph(self): g1.add_edges_from(self.g1edges) g2.add_edges_from(self.g2edges) gm = iso.GraphMatcher(g1, g2) - assert_true(gm.is_isomorphic()) + assert gm.is_isomorphic() + # Just testing some cases + assert gm.subgraph_is_monomorphic() mapping = sorted(gm.mapping.items()) -# this mapping is only one of the possibilies -# so this test needs to be reconsidered -# isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), -# ('g', 2), ('h', 5), ('i', 4), ('j', 7)] -# assert_equal(mapping, isomap) + + # this mapping is only one of the possibilies + # so this test needs to be reconsidered + # isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), + # ('g', 2), ('h', 5), ('i', 4), ('j', 7)] + # assert_equal(mapping, isomap) def test_subgraph(self): g1 = nx.Graph() @@ -50,10 +72,18 @@ def test_subgraph(self): g2.add_edges_from(self.g2edges) g3 = g2.subgraph([1, 2, 3, 4]) gm = iso.GraphMatcher(g1, g3) - assert_true(gm.subgraph_is_isomorphic()) + assert gm.subgraph_is_isomorphic() + def test_subgraph_mono(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from([[1, 2], [2, 3], [3, 4]]) + gm = iso.GraphMatcher(g1, g2) + assert gm.subgraph_is_monomorphic() -class TestVF2GraphDB(object): + +class TestVF2GraphDB: # http://amalfi.dis.unina.it/graph/db/ @staticmethod @@ -69,19 +99,19 @@ def create_graph(filename): # This says, expect the data in little-endian encoding # as an unsigned short int and unpack 2 bytes from the file. - fh = open(filename, mode='rb') + fh = open(filename, mode="rb") # Grab the number of nodes. # Node numeration is 0-based, so the first node has index 0. 
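A hedged sketch of the distinction the monomorphism test above relies on, assuming the `iso.GraphMatcher` API used there: an induced-subgraph isomorphism requires non-edges to match too, while a monomorphism only requires the pattern's edges to be present.

    import networkx as nx
    from networkx.algorithms import isomorphism as iso

    host = nx.complete_graph(4)   # K4
    pattern = nx.path_graph(4)    # P4: 4 nodes, 3 edges
    gm = iso.GraphMatcher(host, pattern)
    print(gm.subgraph_is_isomorphic())   # False: K4 has no induced P4
    print(gm.subgraph_is_monomorphic())  # True: P4 embeds once extra host edges are allowed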
- nodes = struct.unpack(' 0 + assert check_isomorphism(t1, t2, isomorphism) + + +# run positive_single_tree over all the +# non-isomorphic trees for k from 4 to maxk +# k = 4 is the first level that has more than 1 non-isomorphic tree +# k = 13 takes about 2.86 seconds to run on my laptop +# larger values run slow down significantly +# as the number of trees grows rapidly +def test_positive(maxk=14): + + print("positive test") + + for k in range(2, maxk + 1): + start_time = time.time() + trial = 0 + for t in nx.nonisomorphic_trees(k): + positive_single_tree(t) + trial += 1 + print(k, trial, time.time() - start_time) + + +# test the trivial case of a single node in each tree +# note that nonisomorphic_trees doesn't work for k = 1 +def test_trivial(): + + print("trivial test") + + # back to an undirected graph + t1 = nx.Graph() + t1.add_node("a") + root1 = "a" + + t2 = nx.Graph() + t2.add_node("n") + root2 = "n" + + isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2) + + assert isomorphism == [("a", "n")] + + assert check_isomorphism(t1, t2, isomorphism) + + +# test another trivial case where the two graphs have +# different numbers of nodes +def test_trivial_2(): + + print("trivial test 2") + + edges_1 = [("a", "b"), ("a", "c")] + + edges_2 = [("v", "y")] + + t1 = nx.Graph() + t1.add_edges_from(edges_1) + + t2 = nx.Graph() + t2.add_edges_from(edges_2) + + isomorphism = tree_isomorphism(t1, t2) + + # they cannot be isomorphic, + # since they have different numbers of nodes + assert isomorphism == [] + + +# the function nonisomorphic_trees generates all the non-isomorphic +# trees of a given size. Take each pair of these and verify that +# they are not isomorphic +# k = 4 is the first level that has more than 1 non-isomorphic tree +# k = 11 takes about 4.76 seconds to run on my laptop +# larger values run slow down significantly +# as the number of trees grows rapidly +def test_negative(maxk=11): + + print("negative test") + + for k in range(4, maxk + 1): + test_trees = list(nx.nonisomorphic_trees(k)) + start_time = time.time() + trial = 0 + for i in range(len(test_trees) - 1): + for j in range(i + 1, len(test_trees)): + trial += 1 + assert tree_isomorphism(test_trees[i], test_trees[j]) == [] + print(k, trial, time.time() - start_time) diff --git a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py index 97e0502..47dba07 100644 --- a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py +++ b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py @@ -2,7 +2,6 @@ Tests for VF2 isomorphism algorithm for weighted graphs. 
""" -from nose.tools import assert_true, assert_false from operator import eq import networkx as nx @@ -11,35 +10,31 @@ def test_simple(): # 16 simple tests - w = 'weight' + w = "weight" edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)] - for g1 in [nx.Graph(), - nx.DiGraph(), - nx.MultiGraph(), - nx.MultiDiGraph(), - ]: + for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]: g1.add_weighted_edges_from(edges) g2 = g1.subgraph(g1.nodes()) if g1.is_multigraph(): - em = iso.numerical_multiedge_match('weight', 1) + em = iso.numerical_multiedge_match("weight", 1) else: - em = iso.numerical_edge_match('weight', 1) - assert_true(nx.is_isomorphic(g1, g2, edge_match=em)) + em = iso.numerical_edge_match("weight", 1) + assert nx.is_isomorphic(g1, g2, edge_match=em) for mod1, mod2 in [(False, True), (True, False), (True, True)]: # mod1 tests a regular edge # mod2 tests a selfloop if g2.is_multigraph(): if mod1: - data1 = {0: {'weight': 10}} + data1 = {0: {"weight": 10}} if mod2: - data2 = {0: {'weight': 1}, 1: {'weight': 2.5}} + data2 = {0: {"weight": 1}, 1: {"weight": 2.5}} else: if mod1: - data1 = {'weight': 10} + data1 = {"weight": 10} if mod2: - data2 = {'weight': 2.5} + data2 = {"weight": 2.5} g2 = g1.subgraph(g1.nodes()).copy() if mod1: @@ -56,81 +51,79 @@ def test_simple(): g2._succ[0][0] = data2 g2._pred[0][0] = data2 - assert_false(nx.is_isomorphic(g1, g2, edge_match=em)) + assert not nx.is_isomorphic(g1, g2, edge_match=em) def test_weightkey(): g1 = nx.DiGraph() g2 = nx.DiGraph() - g1.add_edge('A', 'B', weight=1) - g2.add_edge('C', 'D', weight=0) + g1.add_edge("A", "B", weight=1) + g2.add_edge("C", "D", weight=0) - assert_true(nx.is_isomorphic(g1, g2)) - em = iso.numerical_edge_match('nonexistent attribute', 1) - assert_true(nx.is_isomorphic(g1, g2, edge_match=em)) - em = iso.numerical_edge_match('weight', 1) - assert_false(nx.is_isomorphic(g1, g2, edge_match=em)) + assert nx.is_isomorphic(g1, g2) + em = iso.numerical_edge_match("nonexistent attribute", 1) + assert nx.is_isomorphic(g1, g2, edge_match=em) + em = iso.numerical_edge_match("weight", 1) + assert not nx.is_isomorphic(g1, g2, edge_match=em) g2 = nx.DiGraph() - g2.add_edge('C', 'D') - assert_true(nx.is_isomorphic(g1, g2, edge_match=em)) + g2.add_edge("C", "D") + assert nx.is_isomorphic(g1, g2, edge_match=em) -class TestNodeMatch_Graph(object): - def setUp(self): +class TestNodeMatch_Graph: + def setup_method(self): self.g1 = nx.Graph() self.g2 = nx.Graph() self.build() def build(self): + self.nm = iso.categorical_node_match("color", "") + self.em = iso.numerical_edge_match("weight", 1) - self.nm = iso.categorical_node_match('color', '') - self.em = iso.numerical_edge_match('weight', 1) + self.g1.add_node("A", color="red") + self.g2.add_node("C", color="blue") - self.g1.add_node('A', color='red') - self.g2.add_node('C', color='blue') - - self.g1.add_edge('A', 'B', weight=1) - self.g2.add_edge('C', 'D', weight=1) + self.g1.add_edge("A", "B", weight=1) + self.g2.add_edge("C", "D", weight=1) def test_noweight_nocolor(self): - assert_true(nx.is_isomorphic(self.g1, self.g2)) + assert nx.is_isomorphic(self.g1, self.g2) def test_color1(self): - assert_false(nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)) + assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) def test_color2(self): - self.g1.nodes['A']['color'] = 'blue' - assert_true(nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)) + self.g1.nodes["A"]["color"] = "blue" + assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) def 
test_weight1(self): - assert_true(nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)) + assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) def test_weight2(self): - self.g1.add_edge('A', 'B', weight=2) - assert_false(nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)) + self.g1.add_edge("A", "B", weight=2) + assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) def test_colorsandweights1(self): - iso = nx.is_isomorphic(self.g1, self.g2, - node_match=self.nm, edge_match=self.em) - assert_false(iso) + iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) + assert not iso def test_colorsandweights2(self): - self.g1.nodes['A']['color'] = 'blue' - iso = nx.is_isomorphic(self.g1, self.g2, - node_match=self.nm, edge_match=self.em) - assert_true(iso) + self.g1.nodes["A"]["color"] = "blue" + iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) + assert iso def test_colorsandweights3(self): # make the weights disagree - self.g1.add_edge('A', 'B', weight=2) - assert_false(nx.is_isomorphic(self.g1, self.g2, - node_match=self.nm, edge_match=self.em)) + self.g1.add_edge("A", "B", weight=2) + assert not nx.is_isomorphic( + self.g1, self.g2, node_match=self.nm, edge_match=self.em + ) -class TestEdgeMatch_MultiGraph(object): - def setUp(self): +class TestEdgeMatch_MultiGraph: + def setup_method(self): self.g1 = nx.MultiGraph() self.g2 = nx.MultiGraph() self.GM = iso.MultiGraphMatcher @@ -141,58 +134,66 @@ def build(self): g2 = self.g2 # We will assume integer weights only. - g1.add_edge('A', 'B', color='green', weight=0, size=.5) - g1.add_edge('A', 'B', color='red', weight=1, size=.35) - g1.add_edge('A', 'B', color='red', weight=2, size=.65) + g1.add_edge("A", "B", color="green", weight=0, size=0.5) + g1.add_edge("A", "B", color="red", weight=1, size=0.35) + g1.add_edge("A", "B", color="red", weight=2, size=0.65) - g2.add_edge('C', 'D', color='green', weight=1, size=.5) - g2.add_edge('C', 'D', color='red', weight=0, size=.45) - g2.add_edge('C', 'D', color='red', weight=2, size=.65) + g2.add_edge("C", "D", color="green", weight=1, size=0.5) + g2.add_edge("C", "D", color="red", weight=0, size=0.45) + g2.add_edge("C", "D", color="red", weight=2, size=0.65) if g1.is_multigraph(): - self.em = iso.numerical_multiedge_match('weight', 1) - self.emc = iso.categorical_multiedge_match('color', '') - self.emcm = iso.categorical_multiedge_match(['color', 'weight'], ['', 1]) - self.emg1 = iso.generic_multiedge_match('color', 'red', eq) - self.emg2 = iso.generic_multiedge_match(['color', 'weight', 'size'], ['red', 1, .5], [ - eq, eq, iso.matchhelpers.close]) + self.em = iso.numerical_multiedge_match("weight", 1) + self.emc = iso.categorical_multiedge_match("color", "") + self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1]) + self.emg1 = iso.generic_multiedge_match("color", "red", eq) + self.emg2 = iso.generic_multiedge_match( + ["color", "weight", "size"], + ["red", 1, 0.5], + [eq, eq, iso.matchhelpers.close], + ) else: - self.em = iso.numerical_edge_match('weight', 1) - self.emc = iso.categorical_edge_match('color', '') - self.emcm = iso.categorical_edge_match(['color', 'weight'], ['', 1]) - self.emg1 = iso.generic_multiedge_match('color', 'red', eq) - self.emg2 = iso.generic_edge_match(['color', 'weight', 'size'], ['red', 1, .5], [ - eq, eq, iso.matchhelpers.close]) + self.em = iso.numerical_edge_match("weight", 1) + self.emc = iso.categorical_edge_match("color", "") + self.emcm = 
iso.categorical_edge_match(["color", "weight"], ["", 1]) + self.emg1 = iso.generic_multiedge_match("color", "red", eq) + self.emg2 = iso.generic_edge_match( + ["color", "weight", "size"], + ["red", 1, 0.5], + [eq, eq, iso.matchhelpers.close], + ) def test_weights_only(self): - assert_true(nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)) + assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) def test_colors_only(self): gm = self.GM(self.g1, self.g2, edge_match=self.emc) - assert_true(gm.is_isomorphic()) + assert gm.is_isomorphic() def test_colorsandweights(self): gm = self.GM(self.g1, self.g2, edge_match=self.emcm) - assert_false(gm.is_isomorphic()) + assert not gm.is_isomorphic() def test_generic1(self): gm = self.GM(self.g1, self.g2, edge_match=self.emg1) - assert_true(gm.is_isomorphic()) + assert gm.is_isomorphic() def test_generic2(self): gm = self.GM(self.g1, self.g2, edge_match=self.emg2) - assert_false(gm.is_isomorphic()) + assert not gm.is_isomorphic() class TestEdgeMatch_DiGraph(TestNodeMatch_Graph): - def setUp(self): + def setup_method(self): + TestNodeMatch_Graph.setup_method(self) self.g1 = nx.DiGraph() self.g2 = nx.DiGraph() self.build() class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph): - def setUp(self): + def setup_method(self): + TestEdgeMatch_MultiGraph.setup_method(self) self.g1 = nx.MultiDiGraph() self.g2 = nx.MultiDiGraph() self.GM = iso.MultiDiGraphMatcher diff --git a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py new file mode 100644 index 0000000..1d1a71f --- /dev/null +++ b/networkx/algorithms/isomorphism/tree_isomorphism.py @@ -0,0 +1,279 @@ +""" +An algorithm for finding if two undirected trees are isomorphic, +and if so returns an isomorphism between the two sets of nodes. + +This algorithm uses a routine to tell if two rooted trees (trees with a +specified root node) are isomorphic, which may be independently useful. + +This implements an algorithm from: +The Design and Analysis of Computer Algorithms +by Aho, Hopcroft, and Ullman +Addison-Wesley Publishing 1974 +Example 3.2 pp. 84-86. + +A more understandable version of this algorithm is described in: +Homework Assignment 5 +McGill University SOCS 308-250B, Winter 2002 +by Matthew Suderman +http://crypto.cs.mcgill.ca/~crepeau/CS250/2004/HW5+.pdf +""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = ["rooted_tree_isomorphism", "tree_isomorphism"] + + +def root_trees(t1, root1, t2, root2): + """ Create a single digraph dT of free trees t1 and t2 + # with roots root1 and root2 respectively + # rename the nodes with consecutive integers + # so that all nodes get a unique name between both trees + + # our new "fake" root node is 0 + # t1 is numbers from 1 ... 
n + # t2 is numbered from n+1 to 2n + """ + + dT = nx.DiGraph() + + newroot1 = 1 # left root will be 1 + newroot2 = nx.number_of_nodes(t1) + 1 # right will be n+1 + + # there may be overlap in node names here, so we need separate maps + # given the old name, what is the new + namemap1 = {root1: newroot1} + namemap2 = {root2: newroot2} + + # add an edge from our new root to root1 and root2 + dT.add_edge(0, namemap1[root1]) + dT.add_edge(0, namemap2[root2]) + + for i, (v1, v2) in enumerate(nx.bfs_edges(t1, root1)): + namemap1[v2] = i + namemap1[root1] + 1 + dT.add_edge(namemap1[v1], namemap1[v2]) + + for i, (v1, v2) in enumerate(nx.bfs_edges(t2, root2)): + namemap2[v2] = i + namemap2[root2] + 1 + dT.add_edge(namemap2[v1], namemap2[v2]) + + # now we really want the inverse of namemap1 and namemap2 + # giving the old name given the new + # since the values of namemap1 and namemap2 are unique + # there won't be collisions + namemap = {} + for old, new in namemap1.items(): + namemap[new] = old + for old, new in namemap2.items(): + namemap[new] = old + + return (dT, namemap, newroot1, newroot2) + + +# figure out the level of each node, with 0 at root +def assign_levels(G, root): + level = {} + level[root] = 0 + for (v1, v2) in nx.bfs_edges(G, root): + level[v2] = level[v1] + 1 + + return level + + +# now group the nodes at each level +def group_by_levels(levels): + L = {} + for (n, lev) in levels.items(): + if lev not in L: + L[lev] = [] + L[lev].append(n) + + return L + + +# now let's get the isomorphism by walking the ordered_children +def generate_isomorphism(v, w, M, ordered_children): + # make sure tree1 comes first + assert v < w + M.append((v, w)) + for i, (x, y) in enumerate(zip(ordered_children[v], ordered_children[w])): + generate_isomorphism(x, y, M, ordered_children) + + +def rooted_tree_isomorphism(t1, root1, t2, root2): + """ + Given two rooted trees `t1` and `t2`, + with roots `root1` and `root2` respectively, + this routine will determine if they are isomorphic. + + These trees may be either directed or undirected, + but if they are directed, all edges should flow from the root. + + It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes + of `t2`, such that the two trees are then identical. + + Note that two trees may have more than one isomorphism, and this + routine just returns one valid mapping. + + Parameters + ---------- + `t1` : NetworkX graph + One of the trees being compared + + `root1` : a node of `t1` which is the root of the tree + + `t2` : undirected NetworkX graph + The other tree being compared + + `root2` : a node of `t2` which is the root of the tree + + This is a subroutine used to implement `tree_isomorphism`, but it will + be somewhat faster if you already have rooted trees. + + Returns + ------- + isomorphism : list + A list of pairs in which the left element is a node in `t1` + and the right element is a node in `t2`. The pairs are in + arbitrary order. If the nodes in one tree are mapped to the names in + the other, the trees will be identical. Note that an isomorphism + will not necessarily be unique. + + If `t1` and `t2` are not isomorphic, then it returns the empty list.
+ """ + + assert nx.is_tree(t1) + assert nx.is_tree(t2) + + # get the rooted tree formed by combining them + # with unique names + (dT, namemap, newroot1, newroot2) = root_trees(t1, root1, t2, root2) + + # compute the distance from the root, with 0 for our + levels = assign_levels(dT, 0) + + # height + h = max(levels.values()) + + # collect nodes into a dict by level + L = group_by_levels(levels) + + # each node has a label, initially set to 0 + label = {v: 0 for v in dT} + # and also ordered_labels and ordered_children + # which will store ordered tuples + ordered_labels = {v: () for v in dT} + ordered_children = {v: () for v in dT} + + # nothing to do on last level so start on h-1 + # also nothing to do for our fake level 0, so skip that + for i in range(h - 1, 0, -1): + # update the ordered_labels and ordered_childen + # for any children + for v in L[i]: + # nothing to do if no children + if dT.out_degree(v) > 0: + # get all the pairs of labels and nodes of children + # and sort by labels + s = sorted([(label[u], u) for u in dT.successors(v)]) + + # invert to give a list of two tuples + # the sorted labels, and the corresponding children + ordered_labels[v], ordered_children[v] = list(zip(*s)) + + # now collect and sort the sorted ordered_labels + # for all nodes in L[i], carrying along the node + forlabel = sorted([(ordered_labels[v], v) for v in L[i]]) + + # now assign labels to these nodes, according to the sorted order + # starting from 0, where idential ordered_labels get the same label + current = 0 + for i, (ol, v) in enumerate(forlabel): + # advance to next label if not 0, and different from previous + if (i != 0) and (ol != forlabel[i - 1][0]): + current += 1 + label[v] = current + + # they are isomorphic if the labels of newroot1 and newroot2 are 0 + isomorphism = [] + if label[newroot1] == 0 and label[newroot2] == 0: + generate_isomorphism(newroot1, newroot2, isomorphism, ordered_children) + + # get the mapping back in terms of the old names + # return in sorted order for neatness + isomorphism = [(namemap[u], namemap[v]) for (u, v) in isomorphism] + + return isomorphism + + +@not_implemented_for("directed", "multigraph") +def tree_isomorphism(t1, t2): + """ + Given two undirected (or free) trees `t1` and `t2`, + this routine will determine if they are isomorphic. + It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes + of `t2`, such that two trees are then identical. + + Note that two trees may have more than one isomorphism, and this + routine just returns one valid mapping. + + Parameters + ---------- + t1 : undirected NetworkX graph + One of the trees being compared + + t2 : undirected NetworkX graph + The other tree being compared + + Returns + ------- + isomorphism : list + A list of pairs in which the left element is a node in `t1` + and the right element is a node in `t2`. The pairs are in + arbitrary order. If the nodes in one tree is mapped to the names in + the other, then trees will be identical. Note that an isomorphism + will not necessarily be unique. + + If `t1` and `t2` are not isomorphic, then it returns the empty list. + + Notes + ----- + This runs in O(n*log(n)) time for trees with n nodes. + """ + + assert nx.is_tree(t1) + assert nx.is_tree(t2) + + # To be isomrophic, t1 and t2 must have the same number of nodes. + if nx.number_of_nodes(t1) != nx.number_of_nodes(t2): + return [] + + # Another shortcut is that the sorted degree sequences need to be the same. 
+ # Another shortcut is that the sorted degree sequences need to be the same. + degree_sequence1 = sorted([d for (n, d) in t1.degree()]) + degree_sequence2 = sorted([d for (n, d) in t2.degree()]) + + if degree_sequence1 != degree_sequence2: + return [] + + # A tree can have either 1 or 2 centers. + # If the number doesn't match, then t1 and t2 are not isomorphic. + center1 = nx.center(t1) + center2 = nx.center(t2) + + if len(center1) != len(center2): + return [] + + # If there is only 1 center in each, then use it. + if len(center1) == 1: + return rooted_tree_isomorphism(t1, center1[0], t2, center2[0]) + + # If they both have 2 centers, then try pairing the first center of t1 + # with the first center of t2. + attempts = rooted_tree_isomorphism(t1, center1[0], t2, center2[0]) + + # If that worked, we're done. + if len(attempts) > 0: + return attempts + + # Otherwise, try center1[0] with center2[1] and see if that works. + return rooted_tree_isomorphism(t1, center1[0], t2, center2[1]) diff --git a/networkx/algorithms/isomorphism/vf2userfunc.py b/networkx/algorithms/isomorphism/vf2userfunc.py index 708880f..48b76c8 100644 --- a/networkx/algorithms/isomorphism/vf2userfunc.py +++ b/networkx/algorithms/isomorphism/vf2userfunc.py @@ -31,14 +31,9 @@ it must determine if there is an isomorphism between the two sets of edges. """ -import networkx as nx from . import isomorphvf2 as vf2 -__all__ = ['GraphMatcher', - 'DiGraphMatcher', - 'MultiGraphMatcher', - 'MultiDiGraphMatcher', - ] +__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"] def _semantic_feasibility(self, G1_node, G2_node): @@ -54,20 +49,23 @@ def _semantic_feasibility(self, G1_node, G2_node): if self.edge_match is not None: # Cached lookups - G1_adj = self.G1_adj - G2_adj = self.G2_adj + G1nbrs = self.G1_adj[G1_node] + G2nbrs = self.G2_adj[G2_node] core_1 = self.core_1 edge_match = self.edge_match - for neighbor in G1_adj[G1_node]: + for neighbor in G1nbrs: # G1_node is not in core_1, so we must handle R_self separately if neighbor == G1_node: - if not edge_match(G1_adj[G1_node][G1_node], - G2_adj[G2_node][G2_node]): + if G2_node in G2nbrs and not edge_match( + G1nbrs[G1_node], G2nbrs[G2_node] + ): return False elif neighbor in core_1: - if not edge_match(G1_adj[G1_node][neighbor], - G2_adj[G2_node][core_1[neighbor]]): + G2_nbr = core_1[neighbor] + if G2_nbr in G2nbrs and not edge_match( + G1nbrs[neighbor], G2nbrs[G2_nbr] + ): return False # syntactic check has already verified that neighbors are symmetric @@ -184,6 +182,7 @@ def semantic_feasibility(self, G1_node, G2_node): return feasible + # The "semantics" of edge_match are different for multi(di)graphs, but # the implementation is the same. So, technically we do not need to # provide "multi" versions, but we do so to match NetworkX's base classes. @@ -191,9 +190,11 @@ def semantic_feasibility(self, G1_node, G2_node): class MultiGraphMatcher(GraphMatcher): """VF2 isomorphism checker for undirected multigraphs. """ + pass class MultiDiGraphMatcher(DiGraphMatcher): """VF2 isomorphism checker for directed multigraphs. """ + pass diff --git a/networkx/algorithms/link_analysis/hits_alg.py b/networkx/algorithms/link_analysis/hits_alg.py index 5ecc539..f7b1174 100644 --- a/networkx/algorithms/link_analysis/hits_alg.py +++ b/networkx/algorithms/link_analysis/hits_alg.py @@ -1,19 +1,12 @@ """Hubs and authorities analysis of graph structure. """ -# Copyright (C) 2008-2012 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license.
-# NetworkX:http://networkx.github.io/ import networkx as nx -__author__ = """Aric Hagberg (hagberg@lanl.gov)""" -__all__ = ['hits', 'hits_numpy', 'hits_scipy', 'authority_matrix', 'hub_matrix'] + +__all__ = ["hits", "hits_numpy", "hits_scipy", "authority_matrix", "hub_matrix"] def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): - """Return HITS hubs and authorities values for nodes. + """Returns HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. @@ -51,8 +44,8 @@ def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): Examples -------- - >>> G=nx.path_graph(4) - >>> h,a=nx.hits(G) + >>> G = nx.path_graph(4) + >>> h, a = nx.hits(G) Notes ----- @@ -97,11 +90,11 @@ def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): # doing a left multiply a^T=hlast^T*G for n in h: for nbr in G[n]: - a[nbr] += hlast[n] * G[n][nbr].get('weight', 1) + a[nbr] += hlast[n] * G[n][nbr].get("weight", 1) # now multiply h=Ga for n in h: for nbr in G[n]: - h[n] += a[nbr] * G[n][nbr].get('weight', 1) + h[n] += a[nbr] * G[n][nbr].get("weight", 1) # normalize vector s = 1.0 / max(h.values()) for n in h: @@ -127,19 +120,19 @@ def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): def authority_matrix(G, nodelist=None): - """Return the HITS authority matrix.""" - M = nx.to_numpy_matrix(G, nodelist=nodelist) - return M.T * M + """Returns the HITS authority matrix.""" + M = nx.to_numpy_array(G, nodelist=nodelist) + return M.T @ M def hub_matrix(G, nodelist=None): - """Return the HITS hub matrix.""" - M = nx.to_numpy_matrix(G, nodelist=nodelist) - return M * M.T + """Returns the HITS hub matrix.""" + M = nx.to_numpy_array(G, nodelist=nodelist) + return M @ M.T def hits_numpy(G, normalized=True): - """Return HITS hubs and authorities values for nodes. + """Returns HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. @@ -161,8 +154,8 @@ def hits_numpy(G, normalized=True): Examples -------- - >>> G=nx.path_graph(4) - >>> h,a=nx.hits(G) + >>> G = nx.path_graph(4) + >>> h, a = nx.hits(G) Notes ----- @@ -185,9 +178,8 @@ def hits_numpy(G, normalized=True): """ try: import numpy as np - except ImportError: - raise ImportError( - "hits_numpy() requires NumPy: http://scipy.org/") + except ImportError as e: + raise ImportError("hits_numpy() requires NumPy: " "http://numpy.org/") from e if len(G) == 0: return {}, {} H = nx.hub_matrix(G, list(G)) @@ -210,7 +202,7 @@ def hits_numpy(G, normalized=True): def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True): - """Return HITS hubs and authorities values for nodes. + """Returns HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. @@ -241,8 +233,8 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True): Examples -------- - >>> G=nx.path_graph(4) - >>> h,a=nx.hits(G) + >>> G = nx.path_graph(4) + >>> h, a = nx.hits(G) Notes ----- @@ -276,17 +268,18 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True): http://www.cs.cornell.edu/home/kleinber/auth.pdf. 
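For orientation, a small sketch of the public entry point these variants share (the graph and the expected ranking are illustrative, not taken from the patch):

    import networkx as nx

    G = nx.DiGraph([(0, 2), (1, 2), (2, 3)])
    hubs, authorities = nx.hits(G, max_iter=100, tol=1.0e-8, normalized=True)
    # Nodes 0 and 1 come out as the strongest hubs (they point at the main
    # authority), while node 2 collects those links and tops the authority scores.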
""" try: - import scipy.sparse import numpy as np - except ImportError: + except ImportError as e: raise ImportError( - "hits_scipy() requires SciPy: http://scipy.org/") + "hits_scipy() requires SciPy and NumPy:" + "http://scipy.org/ http://numpy.org/" + ) from e if len(G) == 0: return {}, {} M = nx.to_scipy_sparse_matrix(G, nodelist=list(G)) (n, m) = M.shape # should be square A = M.T * M # authority matrix - x = scipy.ones((n, 1)) / n # initial guess + x = np.ones((n, 1)) / n # initial guess # power iteration on authority matrix i = 0 while True: @@ -294,7 +287,7 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True): x = A * x x = x / x.max() # check convergence, l1 norm - err = scipy.absolute(x - xlast).sum() + err = np.absolute(x - xlast).sum() if err < tol: break if i > max_iter: @@ -310,17 +303,3 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True): hubs = dict(zip(G, map(float, h))) authorities = dict(zip(G, map(float, a))) return hubs, authorities - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py index 8728399..935b2e4 100644 --- a/networkx/algorithms/link_analysis/pagerank_alg.py +++ b/networkx/algorithms/link_analysis/pagerank_alg.py @@ -1,23 +1,22 @@ """PageRank analysis of graph structure. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# NetworkX:http://networkx.github.io/ import networkx as nx from networkx.utils import not_implemented_for -__author__ = """\n""".join(["Aric Hagberg ", - "Brandon Liu >> import networkx as nx >>> G = nx.complete_graph(5) >>> preds = nx.resource_allocation_index(G, [(0, 1), (2, 3)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %.8f' % (u, v, p) - ... - '(0, 1) -> 0.75000000' - '(2, 3) -> 0.75000000' + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 1) -> 0.75000000 + (2, 3) -> 0.75000000 References ---------- @@ -87,13 +87,15 @@ def resource_allocation_index(G, ebunch=None): Eur. Phys. J. B 71 (2009) 623. https://arxiv.org/pdf/0901.0553.pdf """ + def predict(u, v): return sum(1 / G.degree(w) for w in nx.common_neighbors(G, u, v)) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def jaccard_coefficient(G, ebunch=None): r"""Compute the Jaccard coefficient of all node pairs in ebunch. @@ -125,14 +127,12 @@ def jaccard_coefficient(G, ebunch=None): Examples -------- - >>> import networkx as nx >>> G = nx.complete_graph(5) >>> preds = nx.jaccard_coefficient(G, [(0, 1), (2, 3)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %.8f' % (u, v, p) - ... - '(0, 1) -> 0.60000000' - '(2, 3) -> 0.60000000' + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 1) -> 0.60000000 + (2, 3) -> 0.60000000 References ---------- @@ -140,16 +140,18 @@ def jaccard_coefficient(G, ebunch=None): The Link Prediction Problem for Social Networks (2004). 
http://www.cs.cornell.edu/home/kleinber/link-pred.pdf """ + def predict(u, v): + union_size = len(set(G[u]) | set(G[v])) + if union_size == 0: + return 0 + return len(list(nx.common_neighbors(G, u, v))) / union_size + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def adamic_adar_index(G, ebunch=None): r"""Compute the Adamic-Adar index of all node pairs in ebunch. @@ -160,6 +162,8 @@ def adamic_adar_index(G, ebunch=None): \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{\log |\Gamma(w)|} where $\Gamma(u)$ denotes the set of neighbors of $u$. + This index leads to zero-division for nodes only connected via self-loops. + It is intended to be used when no self-loops are present. Parameters ---------- @@ -181,14 +185,12 @@ def adamic_adar_index(G, ebunch=None): Examples -------- - >>> import networkx as nx >>> G = nx.complete_graph(5) >>> preds = nx.adamic_adar_index(G, [(0, 1), (2, 3)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %.8f' % (u, v, p) - ... - '(0, 1) -> 2.16404256' - '(2, 3) -> 2.16404256' + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 1) -> 2.16404256 + (2, 3) -> 2.16404256 References ---------- @@ -196,13 +198,95 @@ def adamic_adar_index(G, ebunch=None): The Link Prediction Problem for Social Networks (2004). http://www.cs.cornell.edu/home/kleinber/link-pred.pdf """ + def predict(u, v): + return sum(1 / log(G.degree(w)) for w in nx.common_neighbors(G, u, v)) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def common_neighbor_centrality(G, ebunch=None, alpha=0.8): + r"""Return the CCPA score for each pair of nodes. + + Compute the Common Neighbor and Centrality based Parameterized Algorithm (CCPA) + score of all node pairs in ebunch. + + The CCPA score of `u` and `v` is defined as + + .. math:: + + \alpha \cdot |\Gamma(u) \cap \Gamma(v)| + (1 - \alpha) \cdot \frac{N}{d_{uv}} + + where $\Gamma(u)$ denotes the set of neighbors of $u$, $\Gamma(v)$ denotes the + set of neighbors of $v$, $\alpha$ is a parameter that varies between [0, 1], $N$ denotes + the total number of nodes in the graph and $d_{uv}$ denotes the shortest distance + between $u$ and $v$. + + This algorithm is based on two vital properties of nodes, namely the number + of common neighbors and their centrality. Common neighbors are the nodes + adjacent to both nodes of a pair. Centrality refers to the prestige that a node enjoys + in a network. + + .. seealso:: + + :func:`common_neighbors` + + Parameters + ---------- + G : graph + NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + The CCPA score will be computed for each pair of + nodes given in the iterable. The pairs must be given as + 2-tuples (u, v) where u and v are nodes in the graph. If ebunch + is None then all non-existent edges in the graph will be used. + Default value: None. + + alpha : float, optional (default = 0.8) + Parameter controlling the relative weight of the common-neighbor + and centrality terms. The default of 0.8 is the value at which + the authors reported the best performance across their datasets. + + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their Common Neighbor and Centrality based + Parameterized Algorithm (CCPA) score.
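As a quick check of the formula against the doctest below: on `K_5` the pair (0, 1) has 3 common neighbors, $N = 5$ and $d_{uv} = 1$, so the score is $0.8 \cdot 3 + 0.2 \cdot 5/1 = 3.4$.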
+ + Examples + -------- + >>> G = nx.complete_graph(5) + >>> preds = nx.common_neighbor_centrality(G, [(0, 1), (2, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p}") + (0, 1) -> 3.4000000000000004 + (2, 3) -> 3.4000000000000004 + + References + ---------- + .. [1] Ahmad, I., Akhtar, M.U., Noor, S. et al. + Missing Link Prediction using Common Neighbor and Centrality based Parameterized Algorithm. + Sci Rep 10, 364 (2020). + https://doi.org/10.1038/s41598-019-57304-y + """ + shortest_path = nx.shortest_path(G) + + def predict(u, v): + return alpha * len(list(nx.common_neighbors(G, u, v))) + (1 - alpha) * ( + G.number_of_nodes() / (len(shortest_path[u][v]) - 1) + ) + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") def preferential_attachment(G, ebunch=None): r"""Compute the preferential attachment score of all node pairs in ebunch. @@ -234,14 +318,12 @@ def preferential_attachment(G, ebunch=None): Examples -------- - >>> import networkx as nx >>> G = nx.complete_graph(5) >>> preds = nx.preferential_attachment(G, [(0, 1), (2, 3)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %d' % (u, v, p) - ... - '(0, 1) -> 16' - '(2, 3) -> 16' + ... print(f"({u}, {v}) -> {p}") + (0, 1) -> 16 + (2, 3) -> 16 References ---------- @@ -249,14 +331,16 @@ def preferential_attachment(G, ebunch=None): The Link Prediction Problem for Social Networks (2004). http://www.cs.cornell.edu/home/kleinber/link-pred.pdf """ + def predict(u, v): return G.degree(u) * G.degree(v) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def cn_soundarajan_hopcroft(G, ebunch=None, community='community'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Count the number of common neighbors of all node pairs in ebunch using community information. @@ -297,15 +381,14 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community='community'): Examples -------- - >>> import networkx as nx >>> G = nx.path_graph(3) - >>> G.nodes[0]['community'] = 0 - >>> G.nodes[1]['community'] = 0 - >>> G.nodes[2]['community'] = 0 + >>> G.nodes[0]["community"] = 0 + >>> G.nodes[1]["community"] = 0 + >>> G.nodes[2]["community"] = 0 >>> preds = nx.cn_soundarajan_hopcroft(G, [(0, 2)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %d' % (u, v, p) - '(0, 2) -> 2' + ... print(f"({u}, {v}) -> {p}") + (0, 2) -> 2 References ---------- @@ -316,19 +399,22 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community='community'): World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608. http://doi.acm.org/10.1145/2187980.2188150 """ + def predict(u, v): Cu = _community(G, u, community) Cv = _community(G, v, community) cnbors = list(nx.common_neighbors(G, u, v)) - neighbors = (sum(_community(G, w, community) == Cu for w in cnbors) - if Cu == Cv else 0) + neighbors = ( + sum(_community(G, w, community) == Cu for w in cnbors) if Cu == Cv else 0 + ) return len(cnbors) + neighbors + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def ra_index_soundarajan_hopcroft(G, ebunch=None, community='community'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Compute the resource allocation index of all node pairs in ebunch using community information. 
@@ -369,17 +455,16 @@ def ra_index_soundarajan_hopcroft(G, ebunch=None, community='community'): Examples -------- - >>> import networkx as nx >>> G = nx.Graph() >>> G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - >>> G.nodes[0]['community'] = 0 - >>> G.nodes[1]['community'] = 0 - >>> G.nodes[2]['community'] = 1 - >>> G.nodes[3]['community'] = 0 + >>> G.nodes[0]["community"] = 0 + >>> G.nodes[1]["community"] = 0 + >>> G.nodes[2]["community"] = 1 + >>> G.nodes[3]["community"] = 0 >>> preds = nx.ra_index_soundarajan_hopcroft(G, [(0, 3)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %.8f' % (u, v, p) - '(0, 3) -> 0.50000000' + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 3) -> 0.50000000 References ---------- @@ -390,20 +475,21 @@ def ra_index_soundarajan_hopcroft(G, ebunch=None, community='community'): World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608. http://doi.acm.org/10.1145/2187980.2188150 """ + def predict(u, v): Cu = _community(G, u, community) Cv = _community(G, v, community) if Cu != Cv: return 0 cnbors = nx.common_neighbors(G, u, v) - return sum(1 / G.degree(w) for w in cnbors - if _community(G, w, community) == Cu) + return sum(1 / G.degree(w) for w in cnbors if _community(G, w, community) == Cu) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): """Compute the ratio of within- and inter-cluster common neighbors of all node pairs in ebunch. @@ -444,24 +530,21 @@ def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'): Examples -------- - >>> import networkx as nx >>> G = nx.Graph() >>> G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 4), (2, 4), (3, 4)]) - >>> G.nodes[0]['community'] = 0 - >>> G.nodes[1]['community'] = 1 - >>> G.nodes[2]['community'] = 0 - >>> G.nodes[3]['community'] = 0 - >>> G.nodes[4]['community'] = 0 + >>> G.nodes[0]["community"] = 0 + >>> G.nodes[1]["community"] = 1 + >>> G.nodes[2]["community"] = 0 + >>> G.nodes[3]["community"] = 0 + >>> G.nodes[4]["community"] = 0 >>> preds = nx.within_inter_cluster(G, [(0, 4)]) >>> for u, v, p in preds: - ... '(%d, %d) -> %.8f' % (u, v, p) - ... - '(0, 4) -> 1.99800200' + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 4) -> 1.99800200 >>> preds = nx.within_inter_cluster(G, [(0, 4)], delta=0.5) >>> for u, v, p in preds: - ... '(%d, %d) -> %.8f' % (u, v, p) - ... - '(0, 4) -> 1.33333333' + ... 
print(f"({u}, {v}) -> {p:.8f}") + (0, 4) -> 1.33333333 References ---------- @@ -472,7 +555,7 @@ def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'): https://doi.org/10.1007/978-3-642-34459-6_10 """ if delta <= 0: - raise nx.NetworkXAlgorithmError('Delta must be greater than zero') + raise nx.NetworkXAlgorithmError("Delta must be greater than zero") def predict(u, v): Cu = _community(G, u, community) @@ -480,8 +563,7 @@ def predict(u, v): if Cu != Cv: return 0 cnbors = set(nx.common_neighbors(G, u, v)) - within = set(w for w in cnbors - if _community(G, w, community) == Cu) + within = {w for w in cnbors if _community(G, w, community) == Cu} inter = cnbors - within return len(within) / (len(inter) + delta) @@ -493,5 +575,5 @@ def _community(G, u, community): node_u = G.nodes[u] try: return node_u[community] - except KeyError: - raise nx.NetworkXAlgorithmError('No community information') + except KeyError as e: + raise nx.NetworkXAlgorithmError("No community information") from e diff --git a/networkx/algorithms/lowest_common_ancestors.py b/networkx/algorithms/lowest_common_ancestors.py index 4169a82..7961ced 100644 --- a/networkx/algorithms/lowest_common_ancestors.py +++ b/networkx/algorithms/lowest_common_ancestors.py @@ -1,25 +1,21 @@ -# Copyright (C) 2013 by -# Alex Roper -# Copyright (C) 2017 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# -# All rights reserved. -# BSD license. -# -# Author: Alex Roper """Algorithms for finding the lowest common ancestor of trees and DAGs.""" -from collections import defaultdict, Mapping, Set +from collections import defaultdict +from collections.abc import Mapping, Set from itertools import chain, count import networkx as nx -from networkx.utils import arbitrary_element, not_implemented_for, \ - UnionFind, generate_unique_node +from networkx.utils import ( + arbitrary_element, + not_implemented_for, + UnionFind, + generate_unique_node, +) -__all__ = ["all_pairs_lowest_common_ancestor", - "tree_all_pairs_lowest_common_ancestor", - "lowest_common_ancestor"] +__all__ = [ + "all_pairs_lowest_common_ancestor", + "tree_all_pairs_lowest_common_ancestor", + "lowest_common_ancestor", +] @not_implemented_for("undirected") @@ -74,7 +70,7 @@ def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): for u, v in pairs: for n in (u, v): if n not in G: - msg = "The node %s is not in the digraph." % str(n) + msg = f"The node {str(n)} is not in the digraph." raise nx.NodeNotFound(msg) pair_dict[u].add(v) pair_dict[v].add(u) @@ -97,7 +93,7 @@ def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): raise nx.NetworkXError("Graph contains a cycle.") # Iterative implementation of Tarjan's offline lca algorithm - # as described in CLRS on page 521. + # as described in CLRS on page 521 (2nd edition)/page 584 (3rd edition) uf = UnionFind() ancestors = {} for node in G: @@ -106,7 +102,7 @@ def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): colors = defaultdict(bool) for node in nx.dfs_postorder_nodes(G, root): colors[node] = True - for v in (pair_dict[node] if pairs is not None else G): + for v in pair_dict[node] if pairs is not None else G: if colors[v]: # If the user requested both directions of a pair, give it. # Otherwise, just give one. @@ -210,7 +206,7 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): # This will always produce correct results and avoid unnecessary # copies in many common cases. 
# - if (not isinstance(pairs, (Mapping, Set)) and pairs is not None): + if not isinstance(pairs, (Mapping, Set)) and pairs is not None: pairs = set(pairs) # Convert G into a dag with a single root by adding a node with edges to @@ -229,8 +225,11 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): # We will then use the tree lca algorithm on the spanning tree, and use # the DAG to figure out the set of tree queries necessary. spanning_tree = nx.dfs_tree(G, root) - dag = nx.DiGraph((u, v) for u, v in G.edges - if u not in spanning_tree or v not in spanning_tree[u]) + dag = nx.DiGraph( + (u, v) + for u, v in G.edges + if u not in spanning_tree or v not in spanning_tree[u] + ) # Ensure that both the dag and the spanning tree contains all nodes in G, # even nodes that are disconnected in the dag. @@ -261,7 +260,7 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): for n in pairset: if n not in G: - msg = "The node %s is not in the digraph." % str(n) + msg = f"The node {str(n)} is not in the digraph." raise nx.NodeNotFound(msg) # Generate the transitive closure over the dag (not G) of all nodes, and @@ -295,15 +294,16 @@ def get_next_in_merged_lists(indices): Index can be 0 or 1 (or None if exhausted). """ index1, index2 = indices - if (index1 >= len(ancestors[node1]) and - index2 >= len(ancestors[node2])): + if index1 >= len(ancestors[node1]) and index2 >= len(ancestors[node2]): return None elif index1 >= len(ancestors[node1]): return 1 elif index2 >= len(ancestors[node2]): return 0 - elif (euler_tour_pos[ancestors[node1][index1]] < - euler_tour_pos[ancestors[node2][index2]]): + elif ( + euler_tour_pos[ancestors[node1][index1]] + < euler_tour_pos[ancestors[node2][index2]] + ): return 0 else: return 1 @@ -331,8 +331,9 @@ def get_next_in_merged_lists(indices): ans = tree_lca[tree_node1, tree_node2] else: ans = tree_lca[tree_node2, tree_node1] - if not dry_run and (best is None or - root_distance[ans] > best_root_distance): + if not dry_run and ( + best is None or root_distance[ans] > best_root_distance + ): best_root_distance = root_distance[ans] best = ans @@ -345,8 +346,7 @@ def get_next_in_merged_lists(indices): # tree lca. if pairs is None: # We want all pairs so we'll need the entire tree. - tree_lca = dict(tree_all_pairs_lowest_common_ancestor(spanning_tree, - root)) + tree_lca = dict(tree_all_pairs_lowest_common_ancestor(spanning_tree, root)) else: # We only need the merged adjacent pairs by seeing which queries the # algorithm needs then generating them in a single pass. @@ -355,9 +355,9 @@ def get_next_in_merged_lists(indices): pass # Replace the bogus default tree values with the real ones. - for (pair, lca) in tree_all_pairs_lowest_common_ancestor(spanning_tree, - root, - tree_lca): + for (pair, lca) in tree_all_pairs_lowest_common_ancestor( + spanning_tree, root, tree_lca + ): tree_lca[pair] = lca # All precomputations complete. Now we just need to give the user the pairs diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py index 0e15002..c6e5fd1 100644 --- a/networkx/algorithms/matching.py +++ b/networkx/algorithms/matching.py @@ -1,23 +1,15 @@ -# Copyright 2016 NetworkX developers. -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# -# Copyright (C) 2008 by -# Joris van Rantwijk. -# -# Copyright (C) 2011 by -# Nicholas Mancuso -# -# All rights reserved. -# BSD license. 
"""Functions for computing and verifying matchings in a graph.""" +from collections import Counter from itertools import combinations from itertools import repeat -__all__ = ['is_matching', 'is_maximal_matching', 'max_weight_matching', - 'maximal_matching'] +__all__ = [ + "is_matching", + "is_maximal_matching", + "is_perfect_matching", + "max_weight_matching", + "maximal_matching", +] def maximal_matching(G): @@ -70,7 +62,7 @@ def matching_dict_to_set(matching): # only the (frozen)set `{u, v}` appears as an element in the # returned set. - return set((u, v) for (u, v) in set(map(frozenset, matching.items()))) + return {(u, v) for (u, v) in set(map(frozenset, matching.items()))} def is_matching(G, matching): @@ -101,8 +93,7 @@ def is_matching(G, matching): if isinstance(matching, dict): matching = matching_dict_to_set(matching) # TODO This is parallelizable. - return all(len(set(e1) & set(e2)) == 0 - for e1, e2 in combinations(matching, 2)) + return all(len(set(e1) & set(e2)) == 0 for e1, e2 in combinations(matching, 2)) def is_maximal_matching(G, matching): @@ -149,7 +140,43 @@ def is_maximal_matching(G, matching): return all(not is_matching(G, matching | {e}) for e in unmatched_edges) -def max_weight_matching(G, maxcardinality=False, weight='weight'): +def is_perfect_matching(G, matching): + """Decides whether the given set represents a valid perfect matching in + ``G``. + + A *perfect matching* in a graph is a matching in which exactly one edge + is incident upon each vertex. + + Parameters + ---------- + G : NetworkX graph + + matching : dict or set + A dictionary or set representing a matching. If a dictionary, it + must have ``matching[u] == v`` and ``matching[v] == u`` for each + edge ``(u, v)`` in the matching. If a set, it must have elements + of the form ``(u, v)``, where ``(u, v)`` is an edge in the + matching. + + Returns + ------- + bool + Whether the given set or dictionary represents a valid perfect + matching in the graph. + + """ + if isinstance(matching, dict): + matching = matching_dict_to_set(matching) + + if not is_matching(G, matching): + return False + + counts = Counter(sum(matching, ())) + + return all(counts[v] == 1 for v in G) + + +def max_weight_matching(G, maxcardinality=False, weight="weight"): """Compute a maximum-weighted matching of G. A matching is a subset of edges in which no node occurs more than once. @@ -216,12 +243,13 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): class NoNode: """Dummy value which is different from any node.""" + pass class Blossom: """Representation of a non-trivial blossom or sub-blossom.""" - __slots__ = ['childs', 'edges', 'mybestedges'] + __slots__ = ["childs", "edges", "mybestedges"] # b.childs is an ordered list of b's sub-blossoms, starting with # the base and going round the blossom. @@ -239,8 +267,7 @@ class Blossom: def leaves(self): for t in self.childs: if isinstance(t, Blossom): - for v in t.leaves(): - yield v + yield from t.leaves() else: yield t @@ -256,8 +283,7 @@ def leaves(self): wt = d.get(weight, 1) if i != j and wt > maxweight: maxweight = wt - allinteger = allinteger and (str(type(wt)).split("'")[1] - in ('int', 'long')) + allinteger = allinteger and (str(type(wt)).split("'")[1] in ("int", "long")) # If v is a matched vertex, mate[v] is its partner vertex. # If v is a single vertex, v does not occur as a key in mate. 
@@ -414,8 +440,9 @@ def addBlossom(base, v, w): blossomparent[bv] = b path.append(bv) edgs.append(labeledge[bv]) - assert label[bv] == 2 or (label[bv] == 1 and labeledge[ - bv][0] == mate[blossombase[bv]]) + assert label[bv] == 2 or ( + label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]] + ) # Trace one step back. v = labeledge[bv][0] bv = inblossom[v] @@ -429,8 +456,9 @@ def addBlossom(base, v, w): blossomparent[bw] = b path.append(bw) edgs.append((labeledge[bw][1], labeledge[bw][0])) - assert label[bw] == 2 or (label[bw] == 1 and labeledge[ - bw][0] == mate[blossombase[bw]]) + assert label[bw] == 2 or ( + label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]] + ) # Trace one step back. w = labeledge[bw][0] bw = inblossom[w] @@ -459,22 +487,21 @@ def addBlossom(base, v, w): else: # This subblossom does not have a list of least-slack # edges; get the information from the vertices. - nblist = [(v, w) - for v in bv.leaves() - for w in G.neighbors(v) - if v != w] + nblist = [ + (v, w) for v in bv.leaves() for w in G.neighbors(v) if v != w + ] else: - nblist = [(bv, w) - for w in G.neighbors(bv) - if bv != w] + nblist = [(bv, w) for w in G.neighbors(bv) if bv != w] for k in nblist: (i, j) = k if inblossom[j] == b: i, j = j, i bj = inblossom[j] - if (bj != b and label.get(bj) == 1 and - ((bj not in bestedgeto) or - slack(i, j) < slack(*bestedgeto[bj]))): + if ( + bj != b + and label.get(bj) == 1 + and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj])) + ): bestedgeto[bj] = k # Forget about least-slack edge of the subblossom. bestedge[bv] = None @@ -639,9 +666,9 @@ def augmentMatching(v, w): while 1: bs = inblossom[s] assert label[bs] == 1 - assert ( - labeledge[bs] is None and blossombase[bs] not in mate)\ - or (labeledge[bs][0] == mate[blossombase[bs]]) + assert (labeledge[bs] is None and blossombase[bs] not in mate) or ( + labeledge[bs][0] == mate[blossombase[bs]] + ) # Augment through the S-blossom from s to base. if isinstance(bs, Blossom): augmentBlossom(bs, s) @@ -801,15 +828,13 @@ def verifyOptimum(): elif label.get(bw) == 1: # keep track of the least-slack non-allowable edge to # a different S-blossom. - if bestedge.get(bv) is None or \ - kslack < slack(*bestedge[bv]): + if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]): bestedge[bv] = (v, w) elif label.get(w) is None: # w is a free vertex (or an unreached vertex inside # a T-blossom) but we can not reach it yet; # keep track of the least-slack edge that reaches w. - if bestedge.get(w) is None or \ - kslack < slack(*bestedge[w]): + if bestedge.get(w) is None or kslack < slack(*bestedge[w]): bestedge[w] = (v, w) if augmented: @@ -830,8 +855,7 @@ def verifyOptimum(): # Compute delta2: the minimum slack on any edge between # an S-vertex and a free vertex. for v in G.nodes(): - if label.get(inblossom[v]) is None and \ - bestedge.get(v) is not None: + if label.get(inblossom[v]) is None and bestedge.get(v) is not None: d = slack(*bestedge[v]) if deltatype == -1 or d < delta: delta = d @@ -841,8 +865,11 @@ def verifyOptimum(): # Compute delta3: half the minimum slack on any edge between # a pair of S-blossoms. for b in blossomparent: - if (blossomparent[b] is None and label.get(b) == 1 and - bestedge.get(b) is not None): + if ( + blossomparent[b] is None + and label.get(b) == 1 + and bestedge.get(b) is not None + ): kslack = slack(*bestedge[b]) if allinteger: assert (kslack % 2) == 0 @@ -856,8 +883,11 @@ def verifyOptimum(): # Compute delta4: minimum z variable of any T-blossom. 
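All of the blossom bookkeeping reformatted above serves `max_weight_matching`; a usage sketch (hand-checked: {(1, 2), (3, 4)} weighs 6 + 9 = 15, beating {(1, 3), (2, 4)} at 2 + 7 = 9; the orientation of the returned edge tuples is unspecified, hence the skip marker):

>>> G = nx.Graph()
>>> G.add_weighted_edges_from([(1, 2, 6), (1, 3, 2), (2, 4, 7), (3, 4, 9)])
>>> nx.max_weight_matching(G)  # doctest: +SKIP
{(1, 2), (3, 4)}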
for b in blossomdual: - if (blossomparent[b] is None and label.get(b) == 2 and - (deltatype == -1 or blossomdual[b] < delta)): + if ( + blossomparent[b] is None + and label.get(b) == 2 + and (deltatype == -1 or blossomdual[b] < delta) + ): delta = blossomdual[b] deltatype = 4 deltablossom = b @@ -921,8 +951,7 @@ def verifyOptimum(): for b in list(blossomdual.keys()): if b not in blossomdual: continue # already expanded - if (blossomparent[b] is None and label.get(b) == 1 and - blossomdual[b] == 0): + if blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0: expandBlossom(b, True) # Verify that we reached the optimum solution (only for integer weights). diff --git a/networkx/algorithms/minors.py b/networkx/algorithms/minors.py index 9cd3ee8..c4be5f2 100644 --- a/networkx/algorithms/minors.py +++ b/networkx/algorithms/minors.py @@ -1,13 +1,3 @@ -# minors.py - functions for computing minors of graphs -# -# Copyright 2015 Jeffrey Finkelstein . -# Copyright 2010 Drew Conway -# Copyright 2010 Aric Hagberg -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Provides functions for computing minors of a graph.""" from itertools import chain from itertools import combinations @@ -19,8 +9,7 @@ from networkx.exception import NetworkXException from networkx.utils import arbitrary_element -__all__ = ['contracted_edge', 'contracted_nodes', - 'identified_nodes', 'quotient_graph'] +__all__ = ["contracted_edge", "contracted_nodes", "identified_nodes", "quotient_graph"] chaini = chain.from_iterable @@ -60,8 +49,15 @@ def equivalence_classes(iterable, relation): return {frozenset(block) for block in blocks} -def quotient_graph(G, partition, edge_relation=None, node_data=None, - edge_data=None, relabel=False, create_using=None): +def quotient_graph( + G, + partition, + edge_relation=None, + node_data=None, + edge_data=None, + relabel=False, + create_using=None, +): """Returns the quotient graph of `G` under the specified equivalence relation on nodes. @@ -125,11 +121,8 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, :class:`frozenset` instances representing the blocks given in `partition`. - create_using : NetworkX graph - If specified, this must be an instance of a NetworkX graph - class. The nodes and edges of the quotient graph will be added - to this graph and returned. If not specified, the returned graph - will have the same type as the input graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -152,10 +145,10 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, neighbors" equivalence relation is `K_2`. Under this relation, two nodes are equivalent if they are not adjacent but have the same neighbor set:: - >>> import networkx as nx >>> G = nx.complete_bipartite_graph(2, 3) - >>> same_neighbors = lambda u, v: (u not in G[v] and v not in G[u] - ... and G[u] == G[v]) + >>> same_neighbors = lambda u, v: ( + ... u not in G[v] and v not in G[u] and G[u] == G[v] + ... ) >>> Q = nx.quotient_graph(G, same_neighbors) >>> K2 = nx.complete_graph(2) >>> nx.is_isomorphic(Q, K2) @@ -166,17 +159,30 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, :func:`condensation`). 
This example comes from the Wikipedia article *`Strongly connected component`_*:: - >>> import networkx as nx >>> G = nx.DiGraph() - >>> edges = ['ab', 'be', 'bf', 'bc', 'cg', 'cd', 'dc', 'dh', 'ea', - ... 'ef', 'fg', 'gf', 'hd', 'hf'] + >>> edges = [ + ... "ab", + ... "be", + ... "bf", + ... "bc", + ... "cg", + ... "cd", + ... "dc", + ... "dh", + ... "ea", + ... "ef", + ... "fg", + ... "gf", + ... "hd", + ... "hf", + ... ] >>> G.add_edges_from(tuple(x) for x in edges) >>> components = list(nx.strongly_connected_components(G)) >>> sorted(sorted(component) for component in components) [['a', 'b', 'e'], ['c', 'd', 'h'], ['f', 'g']] >>> >>> C = nx.condensation(G, components) - >>> component_of = C.graph['mapping'] + >>> component_of = C.graph["mapping"] >>> same_component = lambda u, v: component_of[u] == component_of[v] >>> Q = nx.quotient_graph(G, same_component) >>> nx.is_isomorphic(C, Q) @@ -186,7 +192,6 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, equivalence relation that places the two nodes in one block and each other node in its own singleton block:: - >>> import networkx as nx >>> K24 = nx.complete_bipartite_graph(2, 4) >>> K34 = nx.complete_bipartite_graph(3, 4) >>> C = nx.contracted_nodes(K34, 1, 2) @@ -222,8 +227,9 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, if callable(partition): # equivalence_classes always return partition of whole G. partition = equivalence_classes(G, partition) - return _quotient_graph(G, partition, edge_relation, node_data, - edge_data, relabel, create_using) + return _quotient_graph( + G, partition, edge_relation, node_data, edge_data, relabel, create_using + ) # If the user provided partition as a collection of sets. Then we # need to check if partition covers all of G nodes. If the answer @@ -231,23 +237,37 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, partition_nodes = set().union(*partition) if len(partition_nodes) != len(G): G = G.subgraph(partition_nodes) - return _quotient_graph(G, partition, edge_relation, node_data, - edge_data, relabel, create_using) - - -def _quotient_graph(G, partition, edge_relation=None, node_data=None, - edge_data=None, relabel=False, create_using=None): + return _quotient_graph( + G, partition, edge_relation, node_data, edge_data, relabel, create_using + ) + + +def _quotient_graph( + G, + partition, + edge_relation=None, + node_data=None, + edge_data=None, + relabel=False, + create_using=None, +): # Each node in the graph must be in exactly one block. if any(sum(1 for b in partition if v in b) != 1 for v in G): - raise NetworkXException('each node must be in exactly one block') - H = G.fresh_copy() if create_using is None else create_using.fresh_copy() + raise NetworkXException("each node must be in exactly one block") + if create_using is None: + H = G.__class__() + else: + H = nx.empty_graph(0, create_using) # By default set some basic information about the subgraph that each block # represents on the nodes in the quotient graph. if node_data is None: + def node_data(b): S = G.subgraph(b) - return dict(graph=S, nnodes=len(S), nedges=S.number_of_edges(), - density=density(S)) + return dict( + graph=S, nnodes=len(S), nedges=S.number_of_edges(), density=density(S) + ) + # Each block of the partition becomes a node in the quotient graph. 
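A compact sketch of `quotient_graph` with an explicit partition, complementing the callable-relation examples above (hand-checked: on the path 0-1-2-3-4-5, consecutive blocks {0, 1}, {2, 3}, {4, 5} are adjacent, and `relabel=True` numbers blocks in partition order):

>>> G = nx.path_graph(6)
>>> Q = nx.quotient_graph(G, [{0, 1}, {2, 3}, {4, 5}], relabel=True)
>>> sorted(Q.edges())
[(0, 1), (1, 2)]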
partition = [frozenset(b) for b in partition] H.add_nodes_from((b, node_data(b)) for b in partition) @@ -259,28 +279,42 @@ def node_data(b): # there are O(n^2) pairs to check and each check may require O(log n) time # (to check set membership). This can certainly be parallelized. if edge_relation is None: + def edge_relation(b, c): return any(v in G[u] for u, v in product(b, c)) + # By default, sum the weights of the edges joining pairs of nodes across # blocks to get the weight of the edge joining those two blocks. if edge_data is None: + def edge_data(b, c): - edgedata = (d for u, v, d in G.edges(b | c, data=True) - if (u in b and v in c) or (u in c and v in b)) - return {'weight': sum(d.get('weight', 1) for d in edgedata)} + edgedata = ( + d + for u, v, d in G.edges(b | c, data=True) + if (u in b and v in c) or (u in c and v in b) + ) + return {"weight": sum(d.get("weight", 1) for d in edgedata)} + block_pairs = permutations(H, 2) if H.is_directed() else combinations(H, 2) # In a multigraph, add one edge in the quotient graph for each edge # in the original graph. if H.is_multigraph(): - edges = chaini(((b, c, G.get_edge_data(u, v, default={})) - for u, v in product(b, c) if v in G[u]) - for b, c in block_pairs if edge_relation(b, c)) + edges = chaini( + ( + (b, c, G.get_edge_data(u, v, default={})) + for u, v in product(b, c) + if v in G[u] + ) + for b, c in block_pairs + if edge_relation(b, c) + ) # In a simple graph, apply the edge data function to each pair of # blocks to determine the edge data attributes to apply to each edge # in the quotient graph. else: - edges = ((b, c, edge_data(b, c)) for (b, c) in block_pairs - if edge_relation(b, c)) + edges = ( + (b, c, edge_data(b, c)) for (b, c) in block_pairs if edge_relation(b, c) + ) H.add_edges_from(edges) # If requested by the user, relabel the nodes to be integers, # numbered in increasing order from zero in the same order as the @@ -294,7 +328,7 @@ def edge_data(b, c): return H -def contracted_nodes(G, u, v, self_loops=True): +def contracted_nodes(G, u, v, self_loops=True, copy=True): """Returns the graph that results from contracting `u` and `v`. Node contraction identifies the two nodes as a single node incident to any @@ -312,13 +346,22 @@ def contracted_nodes(G, u, v, self_loops=True): If this is True, any edges joining `u` and `v` in `G` become self-loops on the new node in the returned graph. + copy : Boolean + If this is True (default True), make a copy of + `G` and return that instead of directly changing `G`. + Returns ------- Networkx graph + If Copy is True: A new graph object of the same type as `G` (leaving `G` unmodified) with `u` and `v` identified in a single node. The right node `v` will be merged into the node `u`, so only `u` will appear in the returned graph. + if Copy is False: + Modifies `G` with `u` and `v` identified in a single node. + The right node `v` will be merged into the node `u`, so + only `u` will appear in the returned graph. Notes ----- @@ -342,7 +385,7 @@ def contracted_nodes(G, u, v, self_loops=True): >>> M.edges MultiEdgeView([(0, 1, 0), (0, 1, 1)]) - >>> G = nx.Graph([(1,2), (2,2)]) + >>> G = nx.Graph([(1, 2), (2, 2)]) >>> H = nx.contracted_nodes(G, 1, 2, self_loops=False) >>> list(H.nodes()) [1] @@ -358,28 +401,45 @@ def contracted_nodes(G, u, v, self_loops=True): ----- This function is also available as `identified_nodes`. 
""" - H = G.copy() + # Copying has significant overhead and can be disabled if needed + if copy: + H = G.copy() + else: + H = G + # edge code uses G.edges(v) instead of G.adj[v] to handle multiedges if H.is_directed(): - in_edges = ((w if w != v else u, u, d) - for w, x, d in G.in_edges(v, data=True) - if self_loops or w != u) - out_edges = ((u, w if w != v else u, d) - for x, w, d in G.out_edges(v, data=True) - if self_loops or w != u) + in_edges = ( + (w if w != v else u, u, d) + for w, x, d in G.in_edges(v, data=True) + if self_loops or w != u + ) + out_edges = ( + (u, w if w != v else u, d) + for x, w, d in G.out_edges(v, data=True) + if self_loops or w != u + ) new_edges = chain(in_edges, out_edges) else: - new_edges = ((u, w if w != v else u, d) - for x, w, d in G.edges(v, data=True) - if self_loops or w != u) + new_edges = ( + (u, w if w != v else u, d) + for x, w, d in G.edges(v, data=True) + if self_loops or w != u + ) + + # If the H=G, the generators change as H changes + # This makes the new_edges independent of H + if not copy: + new_edges = list(new_edges) + v_data = H.nodes[v] H.remove_node(v) H.add_edges_from(new_edges) - if 'contraction' in H.nodes[u]: - H.nodes[u]['contraction'][v] = v_data + if "contraction" in H.nodes[u]: + H.nodes[u]["contraction"][v] = v_data else: - H.nodes[u]['contraction'] = {v: v_data} + H.nodes[u]["contraction"] = {v: v_data} return H @@ -424,7 +484,6 @@ def contracted_edge(G, edge, self_loops=True): -------- Attempting to contract two nonadjacent nodes yields an error:: - >>> import networkx as nx >>> G = nx.cycle_graph(4) >>> nx.contracted_edge(G, (1, 3)) Traceback (most recent call last): @@ -434,7 +493,6 @@ def contracted_edge(G, edge, self_loops=True): Contracting two adjacent nodes in the cycle graph on *n* nodes yields the cycle graph on *n - 1* nodes:: - >>> import networkx as nx >>> C5 = nx.cycle_graph(5) >>> C4 = nx.cycle_graph(4) >>> M = nx.contracted_edge(C5, (0, 1), self_loops=False) @@ -448,6 +506,5 @@ def contracted_edge(G, edge, self_loops=True): """ if not G.has_edge(*edge): - raise ValueError('Edge {0} does not exist in graph G; cannot contract' - ' it'.format(edge)) + raise ValueError(f"Edge {edge} does not exist in graph G; cannot contract it") return contracted_nodes(G, *edge, self_loops=self_loops) diff --git a/networkx/algorithms/mis.py b/networkx/algorithms/mis.py index 7811e74..83cc47f 100644 --- a/networkx/algorithms/mis.py +++ b/networkx/algorithms/mis.py @@ -1,28 +1,18 @@ -# -*- coding: utf-8 -*- -# $Id: maximalIndependentSet.py 576 2011-03-01 05:50:34Z lleeoo $ -# Leo Lopes -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Leo Lopes -# Loïc Séguin-C. """ Algorithm to find a maximal (not maximum) independent set. """ -import random import networkx as nx from networkx.utils import not_implemented_for +from networkx.utils import py_random_state -__all__ = ['maximal_independent_set'] +__all__ = ["maximal_independent_set"] -@not_implemented_for('directed') -def maximal_independent_set(G, nodes=None): - """Return a random maximal independent set guaranteed to contain +@py_random_state(2) +@not_implemented_for("directed") +def maximal_independent_set(G, nodes=None, seed=None): + """Returns a random maximal independent set guaranteed to contain a given set of nodes. An independent set is a set of nodes such that the subgraph @@ -38,6 +28,10 @@ def maximal_independent_set(G, nodes=None): Nodes that must be part of the independent set. This set of nodes must be independent. 
+ seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- indep_nodes : list @@ -55,9 +49,9 @@ def maximal_independent_set(G, nodes=None): Examples -------- >>> G = nx.path_graph(5) - >>> nx.maximal_independent_set(G) # doctest: +SKIP + >>> nx.maximal_independent_set(G) # doctest: +SKIP [4, 0, 2] - >>> nx.maximal_independent_set(G, [1]) # doctest: +SKIP + >>> nx.maximal_independent_set(G, [1]) # doctest: +SKIP [1, 3] Notes @@ -66,20 +60,18 @@ def maximal_independent_set(G, nodes=None): """ if not nodes: - nodes = set([random.choice(list(G))]) + nodes = {seed.choice(list(G))} else: nodes = set(nodes) if not nodes.issubset(G): - raise nx.NetworkXUnfeasible( - "%s is not a subset of the nodes of G" % nodes) + raise nx.NetworkXUnfeasible(f"{nodes} is not a subset of the nodes of G") neighbors = set.union(*[set(G.adj[v]) for v in nodes]) if set.intersection(neighbors, nodes): - raise nx.NetworkXUnfeasible( - "%s is not an independent set of G" % nodes) + raise nx.NetworkXUnfeasible(f"{nodes} is not an independent set of G") indep_nodes = list(nodes) available_nodes = set(G.nodes()).difference(neighbors.union(nodes)) while available_nodes: - node = random.choice(list(available_nodes)) + node = seed.choice(list(available_nodes)) indep_nodes.append(node) available_nodes.difference_update(list(G.adj[node]) + [node]) return indep_nodes diff --git a/networkx/algorithms/moral.py b/networkx/algorithms/moral.py new file mode 100644 index 0000000..c81e2cb --- /dev/null +++ b/networkx/algorithms/moral.py @@ -0,0 +1,47 @@ +r"""Function for computing the moral graph of a directed graph.""" + +from networkx.utils import not_implemented_for +import itertools + +__all__ = ["moral_graph"] + + +@not_implemented_for("undirected") +def moral_graph(G): + r"""Return the Moral Graph + + Returns the moralized graph of a given directed graph. + + Parameters + ---------- + G : NetworkX graph + Directed graph + + Returns + ------- + H : NetworkX graph + The undirected moralized graph of G + + Notes + ------ + A moral graph is an undirected graph H = (V, E) generated from a + directed Graph, where if a node has more than one parent node, edges + between these parent nodes are inserted and all directed edges become + undirected. + + https://en.wikipedia.org/wiki/Moral_graph + + References + ---------- + .. [1] Wray L. Buntine. 1995. Chain graphs for learning. + In Proceedings of the Eleventh conference on Uncertainty + in artificial intelligence (UAI'95) + """ + if G is None: + raise ValueError("Expected NetworkX graph!") + + H = G.to_undirected() + for preds in G.pred.values(): + predecessors_combinations = itertools.combinations(preds, r=2) + H.add_edges_from(predecessors_combinations) + return H diff --git a/networkx/algorithms/node_classification/__init__.py b/networkx/algorithms/node_classification/__init__.py index f1db37d..4aa4622 100644 --- a/networkx/algorithms/node_classification/__init__.py +++ b/networkx/algorithms/node_classification/__init__.py @@ -7,14 +7,13 @@ then accessing the functions as attributes of `node_classification`. 
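Two quick sketches for the changes above: the new `moral_graph` (hand-checked: node 3 has parents 1 and 2, so moralization marries them) and the `seed` argument now accepted by `maximal_independent_set` (the seeded result shown is illustrative only, hence the skip marker):

>>> G = nx.DiGraph([(1, 3), (2, 3), (3, 4)])
>>> sorted(sorted(e) for e in nx.moral_graph(G).edges())
[[1, 2], [1, 3], [2, 3], [3, 4]]
>>> nx.maximal_independent_set(nx.path_graph(5), seed=42)  # doctest: +SKIP
[0, 2, 4]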
For example: - >>> import networkx as nx >>> from networkx.algorithms import node_classification >>> G = nx.path_graph(4) >>> G.edges() EdgeView([(0, 1), (1, 2), (2, 3)]) - >>> G.node[0]['label'] = 'A' - >>> G.node[3]['label'] = 'B' - >>> node_classification.harmonic_function(G) # doctest: +SKIP + >>> G.nodes[0]["label"] = "A" + >>> G.nodes[3]["label"] = "B" + >>> node_classification.harmonic_function(G) ['A', 'A', 'B', 'B'] """ diff --git a/networkx/algorithms/node_classification/hmn.py b/networkx/algorithms/node_classification/hmn.py index 30da956..56297de 100644 --- a/networkx/algorithms/node_classification/hmn.py +++ b/networkx/algorithms/node_classification/hmn.py @@ -1,7 +1,3 @@ -# -*- coding: utf-8 -*- -# -# Author: Yuto Yamaguchi - """Function for computing Harmonic function algorithm by Zhu et al. References @@ -10,8 +6,6 @@ Semi-supervised learning using gaussian fields and harmonic functions. In ICML (Vol. 3, pp. 912-919). """ - - import networkx as nx from networkx.utils.decorators import not_implemented_for @@ -22,11 +16,11 @@ _predict, ) -__all__ = ['harmonic_function'] +__all__ = ["harmonic_function"] -@not_implemented_for('directed') -def harmonic_function(G, max_iter=30, label_name='label'): +@not_implemented_for("directed") +def harmonic_function(G, max_iter=30, label_name="label"): """Node classification by Harmonic function Parameters @@ -37,21 +31,22 @@ def harmonic_function(G, max_iter=30, label_name='label'): label_name : string name of target labels to predict - Raises - ---------- - `NetworkXError` if no nodes on `G` has `label_name`. - Returns ---------- predicted : array, shape = [n_samples] Array of predicted labels + Raises + ---------- + NetworkXError + If no nodes on `G` has `label_name`. + Examples -------- >>> from networkx.algorithms import node_classification >>> G = nx.path_graph(4) - >>> G.node[0]['label'] = 'A' - >>> G.node[3]['label'] = 'B' + >>> G.nodes[0]["label"] = "A" + >>> G.nodes[3]["label"] = "B" >>> G.nodes(data=True) NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}}) >>> G.edges() @@ -68,14 +63,16 @@ def harmonic_function(G, max_iter=30, label_name='label'): """ try: import numpy as np - except ImportError: + except ImportError as e: raise ImportError( - "harmonic_function() requires numpy: http://scipy.org/ ") + "harmonic_function() requires numpy: http://numpy.org/ " + ) from e try: from scipy import sparse - except ImportError: + except ImportError as e: raise ImportError( - "harmonic_function() requires scipy: http://scipy.org/ ") + "harmonic_function() requires scipy: http://scipy.org/ " + ) from e def _build_propagation_matrix(X, labels): """Build propagation matrix of Harmonic function @@ -127,7 +124,8 @@ def _build_base_matrix(X, labels, n_classes): if labels.shape[0] == 0: raise nx.NetworkXError( - "No node on the input graph is labeled by '" + label_name + "'.") + "No node on the input graph is labeled by '" + label_name + "'." 
+        )

     n_samples = X.shape[0]
     n_classes = label_dict.shape[0]
@@ -145,16 +143,3 @@ def _build_base_matrix(X, labels, n_classes):
     predicted = _predict(F, label_dict)

     return predicted
-
-
-def setup_module(module):
-    """Fixture for nose tests."""
-    from nose import SkipTest
-    try:
-        import numpy
-    except ImportError:
-        raise SkipTest("NumPy not available")
-    try:
-        import scipy
-    except ImportError:
-        raise SkipTest("SciPy not available")
diff --git a/networkx/algorithms/node_classification/lgc.py b/networkx/algorithms/node_classification/lgc.py
index 250f794..dd32c59 100644
--- a/networkx/algorithms/node_classification/lgc.py
+++ b/networkx/algorithms/node_classification/lgc.py
@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-#
-# Author: Yuto Yamaguchi
-
 """Function for computing Local and global consistency algorithm by Zhou et al.

 References
 ----------
 Learning with local and global consistency.
 Advances in neural information processing systems, 16(16), 321-328.
 """
-
 import networkx as nx

 from networkx.utils.decorators import not_implemented_for
@@ -21,13 +16,11 @@
     _predict,
 )

-__all__ = ['local_and_global_consistency']
+__all__ = ["local_and_global_consistency"]


-@not_implemented_for('directed')
-def local_and_global_consistency(G, alpha=0.99,
-                                 max_iter=30,
-                                 label_name='label'):
+@not_implemented_for("directed")
+def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"):
     """Node classification by Local and Global Consistency

     Parameters
@@ -40,21 +33,22 @@ def local_and_global_consistency(G, alpha=0.99,
     label_name : string
         Name of target labels to predict

-    Raises
-    ----------
-    `NetworkXError` if no nodes on `G` has `label_name`.
-
     Returns
     ----------
     predicted : array, shape = [n_samples]
         Array of predicted labels

+    Raises
+    ------
+    NetworkXError
+        If no node in `G` has `label_name`.
+
     Examples
     --------
     >>> from networkx.algorithms import node_classification
     >>> G = nx.path_graph(4)
-    >>> G.node[0]['label'] = 'A'
-    >>> G.node[3]['label'] = 'B'
+    >>> G.nodes[0]["label"] = "A"
+    >>> G.nodes[3]["label"] = "B"
     >>> G.nodes(data=True)
     NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
     >>> G.edges()
@@ -72,16 +66,16 @@ def local_and_global_consistency(G, alpha=0.99,
     """
     try:
         import numpy as np
-    except ImportError:
+    except ImportError as e:
         raise ImportError(
-            "local_and_global_consistency() requires numpy: ",
-            "http://scipy.org/ ")
+            "local_and_global_consistency() requires numpy: ", "http://numpy.org/ "
+        ) from e
     try:
         from scipy import sparse
-    except ImportError:
+    except ImportError as e:
         raise ImportError(
-            "local_and_global_consistensy() requires scipy: ",
-            "http://scipy.org/ ")
+            "local_and_global_consistency() requires scipy: ", "http://scipy.org/ "
+        ) from e

     def _build_propagation_matrix(X, labels, alpha):
         """Build propagation matrix of Local and global consistency
@@ -137,7 +131,8 @@ def _build_base_matrix(X, labels, alpha, n_classes):

     if labels.shape[0] == 0:
         raise nx.NetworkXError(
-            "No node on the input graph is labeled by '" + label_name + "'.")
+            "No node on the input graph is labeled by '" + label_name + "'."
+ ) n_samples = X.shape[0] n_classes = label_dict.shape[0] @@ -154,16 +149,3 @@ def _build_base_matrix(X, labels, alpha, n_classes): predicted = _predict(F, label_dict) return predicted - - -def setup_module(module): - """Fixture for nose tests.""" - from nose import SkipTest - try: - import numpy - except ImportError: - raise SkipTest("NumPy not available") - try: - import scipy - except ImportError: - raise SkipTest("SciPy not available") diff --git a/networkx/algorithms/node_classification/tests/__init__.py b/networkx/algorithms/node_classification/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/node_classification/tests/test_harmonic_function.py b/networkx/algorithms/node_classification/tests/test_harmonic_function.py index 84b6210..c837926 100644 --- a/networkx/algorithms/node_classification/tests/test_harmonic_function.py +++ b/networkx/algorithms/node_classification/tests/test_harmonic_function.py @@ -1,92 +1,76 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") + import networkx as nx from networkx.algorithms import node_classification class TestHarmonicFunction: - - @classmethod - def setupClass(cls): - global numpy - global scipy - try: - import numpy - except ImportError: - raise SkipTest('NumPy not available.') - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') - def test_path_graph(self): G = nx.path_graph(4) - label_name = 'label' - G.node[0][label_name] = 'A' - G.node[3][label_name] = 'B' - predicted = node_classification.harmonic_function( - G, label_name=label_name) - assert_equal(predicted[0], 'A') - assert_equal(predicted[1], 'A') - assert_equal(predicted[2], 'B') - assert_equal(predicted[3], 'B') + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" + predicted = node_classification.harmonic_function(G, label_name=label_name) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "B" + assert predicted[3] == "B" - @raises(nx.NetworkXError) def test_no_labels(self): - G = nx.path_graph(4) - node_classification.harmonic_function(G) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + node_classification.harmonic_function(G) - @raises(nx.NetworkXError) def test_no_nodes(self): - G = nx.Graph() - node_classification.harmonic_function(G) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + node_classification.harmonic_function(G) - @raises(nx.NetworkXError) def test_no_edges(self): - G = nx.Graph() - G.add_node(1) - G.add_node(2) - node_classification.harmonic_function(G) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + node_classification.harmonic_function(G) - @raises(nx.NetworkXNotImplemented) def test_digraph(self): - G = nx.DiGraph() - G.add_edge(0, 1) - G.add_edge(1, 2) - G.add_edge(2, 3) - label_name = 'label' - G.node[0][label_name] = 'A' - G.node[3][label_name] = 'B' - node_classification.harmonic_function(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + G.add_edge(0, 1) + G.add_edge(1, 2) + G.add_edge(2, 3) + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" + node_classification.harmonic_function(G) def test_one_labeled_node(self): G = nx.path_graph(4) - label_name = 'label' - G.node[0][label_name] = 'A' - predicted = node_classification.harmonic_function( - G, label_name=label_name) - 
assert_equal(predicted[0], 'A') - assert_equal(predicted[1], 'A') - assert_equal(predicted[2], 'A') - assert_equal(predicted[3], 'A') + label_name = "label" + G.nodes[0][label_name] = "A" + predicted = node_classification.harmonic_function(G, label_name=label_name) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "A" + assert predicted[3] == "A" def test_nodes_all_labeled(self): G = nx.karate_club_graph() - label_name = 'club' - predicted = node_classification.harmonic_function( - G, label_name=label_name) + label_name = "club" + predicted = node_classification.harmonic_function(G, label_name=label_name) for i in range(len(G)): - assert_equal(predicted[i], G.node[i][label_name]) + assert predicted[i] == G.nodes[i][label_name] def test_labeled_nodes_are_not_changed(self): G = nx.karate_club_graph() - label_name = 'club' - label_removed = set([0, 1, 2, 3, 4, 5, 6, 7]) + label_name = "club" + label_removed = {0, 1, 2, 3, 4, 5, 6, 7} for i in label_removed: - del G.node[i][label_name] - predicted = node_classification.harmonic_function( - G, label_name=label_name) + del G.nodes[i][label_name] + predicted = node_classification.harmonic_function(G, label_name=label_name) label_not_removed = set(list(range(len(G)))) - label_removed for i in label_not_removed: - assert_equal(predicted[i], G.node[i][label_name]) + assert predicted[i] == G.nodes[i][label_name] diff --git a/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py b/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py index 3a64fbe..163c021 100644 --- a/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py +++ b/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py @@ -1,80 +1,72 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") + + import networkx as nx from networkx.algorithms import node_classification class TestLocalAndGlobalConsistency: - - @classmethod - def setupClass(cls): - global numpy - global scipy - try: - import numpy - except ImportError: - raise SkipTest('NumPy not available.') - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') - def test_path_graph(self): G = nx.path_graph(4) - label_name = 'label' - G.node[0][label_name] = 'A' - G.node[3][label_name] = 'B' + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" predicted = node_classification.local_and_global_consistency( - G, label_name=label_name) - assert_equal(predicted[0], 'A') - assert_equal(predicted[1], 'A') - assert_equal(predicted[2], 'B') - assert_equal(predicted[3], 'B') + G, label_name=label_name + ) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "B" + assert predicted[3] == "B" - @raises(nx.NetworkXError) def test_no_labels(self): - G = nx.path_graph(4) - node_classification.local_and_global_consistency(G) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + node_classification.local_and_global_consistency(G) - @raises(nx.NetworkXError) def test_no_nodes(self): - G = nx.Graph() - node_classification.local_and_global_consistency(G) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + node_classification.local_and_global_consistency(G) - @raises(nx.NetworkXError) def test_no_edges(self): - G = nx.Graph() - G.add_node(1) - G.add_node(2) - 
node_classification.local_and_global_consistency(G) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + node_classification.local_and_global_consistency(G) - @raises(nx.NetworkXNotImplemented) def test_digraph(self): - G = nx.DiGraph() - G.add_edge(0, 1) - G.add_edge(1, 2) - G.add_edge(2, 3) - label_name = 'label' - G.node[0][label_name] = 'A' - G.node[3][label_name] = 'B' - node_classification.harmonic_function(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + G.add_edge(0, 1) + G.add_edge(1, 2) + G.add_edge(2, 3) + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" + node_classification.harmonic_function(G) def test_one_labeled_node(self): G = nx.path_graph(4) - label_name = 'label' - G.node[0][label_name] = 'A' + label_name = "label" + G.nodes[0][label_name] = "A" predicted = node_classification.local_and_global_consistency( - G, label_name=label_name) - assert_equal(predicted[0], 'A') - assert_equal(predicted[1], 'A') - assert_equal(predicted[2], 'A') - assert_equal(predicted[3], 'A') + G, label_name=label_name + ) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "A" + assert predicted[3] == "A" def test_nodes_all_labeled(self): G = nx.karate_club_graph() - label_name = 'club' + label_name = "club" predicted = node_classification.local_and_global_consistency( - G, alpha=0, label_name=label_name) + G, alpha=0, label_name=label_name + ) for i in range(len(G)): - assert_equal(predicted[i], G.node[i][label_name]) + assert predicted[i] == G.nodes[i][label_name] diff --git a/networkx/algorithms/node_classification/utils.py b/networkx/algorithms/node_classification/utils.py index 39d5d3c..4f80138 100644 --- a/networkx/algorithms/node_classification/utils.py +++ b/networkx/algorithms/node_classification/utils.py @@ -1,7 +1,3 @@ -# -*- coding: utf-8 -*- -# -# Author: Yuto Yamaguchi - def _propagate(P, F, B): """Propagate labels by one step @@ -53,8 +49,9 @@ def _get_label_info(G, label_name): lid += 1 labels.append([i, label_to_id[label]]) labels = np.array(labels) - label_dict = np.array([label for label, _ in sorted( - label_to_id.items(), key=lambda x:x[1])]) + label_dict = np.array( + [label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])] + ) return (labels, label_dict) diff --git a/networkx/algorithms/non_randomness.py b/networkx/algorithms/non_randomness.py new file mode 100644 index 0000000..cab2e90 --- /dev/null +++ b/networkx/algorithms/non_randomness.py @@ -0,0 +1,82 @@ +r""" Computation of graph non-randomness +""" + +import math +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["non_randomness"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def non_randomness(G, k=None): + """Compute the non-randomness of graph G. + + The first returned value nr is the sum of non-randomness values of all + edges within the graph (where the non-randomness of an edge tends to be + small when the two nodes linked by that edge are from two different + communities). + + The second computed value nr_rd is a relative measure that indicates + to what extent graph G is different from random graphs in terms + of probability. When it is close to 0, the graph tends to be more + likely generated by an Erdos Renyi model. + + Parameters + ---------- + G : NetworkX graph + Graph must be binary, symmetric, connected, and without self-loops. + + k : int + The number of communities in G. 
+ If k is not set, the function will use a default community + detection algorithm to set it. + + Returns + ------- + non-randomness : (float, float) tuple + Non-randomness, Relative non-randomness w.r.t. + Erdos Renyi random graphs. + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> nr, nr_rd = nx.non_randomness(G, 2) + + Notes + ----- + This computes Eq. (4.4) and (4.5) in Ref. [1]_. + + References + ---------- + .. [1] Xiaowei Ying and Xintao Wu, + On Randomness Measures for Social Networks, + SIAM International Conference on Data Mining. 2009 + """ + + if not nx.is_connected(G): + raise nx.NetworkXException("Non connected graph.") + if len(list(nx.selfloop_edges(G))) > 0: + raise nx.NetworkXError("Graph must not contain self-loops") + + if k is None: + k = len(tuple(nx.community.label_propagation_communities(G))) + + try: + import numpy as np + except ImportError as e: + msg = "non_randomness requires NumPy: http://numpy.org/" + raise ImportError(msg) from e + + # eq. 4.4 + nr = np.real(np.sum(np.linalg.eigvals(nx.to_numpy_array(G))[:k])) + + n = G.number_of_nodes() + m = G.number_of_edges() + p = (2 * k * m) / (n * (n - k)) + + # eq. 4.5 + nr_rd = (nr - ((n - 2 * k) * p + k)) / math.sqrt(2 * k * p * (1 - p)) + + return nr, nr_rd diff --git a/networkx/algorithms/operators/all.py b/networkx/algorithms/operators/all.py index 8e58e6d..a08f634 100644 --- a/networkx/algorithms/operators/all.py +++ b/networkx/algorithms/operators/all.py @@ -1,26 +1,13 @@ """Operations on many graphs. """ -# Copyright (C) 2013 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -try: - from itertools import izip_longest as zip_longest -except ImportError: # Python3 has zip_longest - from itertools import zip_longest +from itertools import zip_longest import networkx as nx -__author__ = """\n""".join(['Robert King ', - 'Aric Hagberg ']) - -__all__ = ['union_all', 'compose_all', 'disjoint_union_all', - 'intersection_all'] +__all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"] def union_all(graphs, rename=(None,)): - """Return the union of all graphs. + """Returns the union of all graphs. The graphs must be disjoint, otherwise an exception is raised. @@ -38,6 +25,11 @@ def union_all(graphs, rename=(None,)): ------- U : a graph with the same type as the first graph in list + Raises + ------ + ValueError + If `graphs` is an empty list. + Notes ----- To force a disjoint union with node relabeling, use @@ -52,6 +44,8 @@ def union_all(graphs, rename=(None,)): union disjoint_union_all """ + if not graphs: + raise ValueError("cannot apply union_all to an empty list") graphs_names = zip_longest(graphs, rename) U, gname = next(graphs_names) for H, hname in graphs_names: @@ -61,7 +55,7 @@ def union_all(graphs, rename=(None,)): def disjoint_union_all(graphs): - """Return the disjoint union of all graphs. + """Returns the disjoint union of all graphs. This operation forces distinct integer node labels starting with 0 for the first graph in the list and numbering consecutively. @@ -75,6 +69,11 @@ def disjoint_union_all(graphs): ------- U : A graph with the same type as the first graph in list + Raises + ------ + ValueError + If `graphs` is an empty list. + Notes ----- It is recommended that the graphs be either all directed or all undirected. @@ -83,6 +82,8 @@ def disjoint_union_all(graphs): If a graph attribute is present in multiple graphs, then the value from the last graph in the list with that attribute is used. 
""" + if not graphs: + raise ValueError("cannot apply disjoint_union_all to an empty list") graphs = iter(graphs) U = next(graphs) for H in graphs: @@ -91,7 +92,7 @@ def disjoint_union_all(graphs): def compose_all(graphs): - """Return the composition of all graphs. + """Returns the composition of all graphs. Composition is the simple union of the node sets and edge sets. The node sets of the supplied graphs need not be disjoint. @@ -105,6 +106,11 @@ def compose_all(graphs): ------- C : A graph with the same type as the first graph in list + Raises + ------ + ValueError + If `graphs` is an empty list. + Notes ----- It is recommended that the supplied graphs be either all directed or all @@ -114,6 +120,8 @@ def compose_all(graphs): If a graph attribute is present in multiple graphs, then the value from the last graph in the list with that attribute is used. """ + if not graphs: + raise ValueError("cannot apply compose_all to an empty list") graphs = iter(graphs) C = next(graphs) for H in graphs: @@ -122,7 +130,7 @@ def compose_all(graphs): def intersection_all(graphs): - """Return a new graph that contains only the edges that exist in + """Returns a new graph that contains only the edges that exist in all graphs. All supplied graphs must have the same node set. @@ -136,11 +144,18 @@ def intersection_all(graphs): ------- R : A new graph with the same type as the first graph in list + Raises + ------ + ValueError + If `graphs` is an empty list. + Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph. """ + if not graphs: + raise ValueError("cannot apply intersection_all to an empty list") graphs = iter(graphs) R = next(graphs) for H in graphs: diff --git a/networkx/algorithms/operators/binary.py b/networkx/algorithms/operators/binary.py index 3f40b04..3e38620 100644 --- a/networkx/algorithms/operators/binary.py +++ b/networkx/algorithms/operators/binary.py @@ -1,19 +1,17 @@ """ Operations on graphs including union, intersection, difference. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
import networkx as nx -from networkx.utils import is_string_like -__author__ = """\n""".join(['Aric Hagberg ', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult(dschult@colgate.edu)']) -__all__ = ['union', 'compose', 'disjoint_union', 'intersection', - 'difference', 'symmetric_difference'] + +__all__ = [ + "union", + "compose", + "disjoint_union", + "intersection", + "difference", + "symmetric_difference", + "full_join", +] def union(G, H, rename=(None, None), name=None): @@ -52,9 +50,9 @@ def union(G, H, rename=(None, None), name=None): disjoint_union """ if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") # Union is the same type as G - R = G.fresh_copy() + R = G.__class__() # add graph attributes, H attributes take precedent over G attributes R.graph.update(G.graph) R.graph.update(H.graph) @@ -65,18 +63,21 @@ def add_prefix(graph, prefix): return graph def label(x): - if is_string_like(x): + if isinstance(x, str): name = prefix + x else: name = prefix + repr(x) return name + return nx.relabel_nodes(graph, label) + G = add_prefix(G, rename[0]) H = add_prefix(H, rename[1]) if set(G) & set(H): - raise nx.NetworkXError('The node sets of G and H are not disjoint.', - 'Use appropriate rename=(Gprefix,Hprefix)' - 'or use disjoint_union(G,H).') + raise nx.NetworkXError( + "The node sets of G and H are not disjoint.", + "Use appropriate rename=(Gprefix,Hprefix)" "or use disjoint_union(G,H).", + ) if G.is_multigraph(): G_edges = G.edges(keys=True, data=True) else: @@ -88,9 +89,9 @@ def label(x): # add nodes R.add_nodes_from(G) - R.add_edges_from(G_edges) - # add edges R.add_nodes_from(H) + # add edges + R.add_edges_from(G_edges) R.add_edges_from(H_edges) # add node attributes for n in G: @@ -136,7 +137,7 @@ def disjoint_union(G, H): def intersection(G, H): - """Return a new graph that contains only the edges that exist in + """Returns a new graph that contains only the edges that exist in both G and H. The node sets of H and G must be the same. @@ -157,16 +158,16 @@ def intersection(G, H): with the attributes (including edge data) from G use remove_nodes_from() as follows - >>> G=nx.path_graph(3) - >>> H=nx.path_graph(5) - >>> R=G.copy() + >>> G = nx.path_graph(3) + >>> H = nx.path_graph(5) + >>> R = G.copy() >>> R.remove_nodes_from(n for n in G if n not in H) """ # create new graph R = nx.create_empty_copy(G) if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") if set(G) != set(H): raise nx.NetworkXError("Node sets of graphs are not equal") @@ -190,7 +191,7 @@ def intersection(G, H): def difference(G, H): - """Return a new graph that contains the edges that exist in G but not in H. + """Returns a new graph that contains the edges that exist in G but not in H. The node sets of H and G must be the same. @@ -217,7 +218,7 @@ def difference(G, H): """ # create new graph if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") R = nx.create_empty_copy(G) if set(G) != set(H): @@ -234,7 +235,7 @@ def difference(G, H): def symmetric_difference(G, H): - """Return new graph with edges that exist in either G or H but not both. + """Returns new graph with edges that exist in either G or H but not both. 
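Before the details, a hand-checked sketch of `symmetric_difference` (edge tuples are sorted so the output does not depend on internal edge orientation):

>>> G = nx.Graph([(1, 2), (2, 3)])
>>> H = nx.Graph([(2, 3), (1, 3)])
>>> sorted(sorted(e) for e in nx.symmetric_difference(G, H).edges())
[[1, 2], [1, 3]]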
The node sets of H and G must be the same.

@@ -254,7 +255,7 @@ def symmetric_difference(G, H):
     """
     # create new graph
     if not G.is_multigraph() == H.is_multigraph():
-        raise nx.NetworkXError('G and H must both be graphs or multigraphs.')
+        raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
     R = nx.create_empty_copy(G)

     if set(G) != set(H):
@@ -286,7 +287,7 @@ def symmetric_difference(G, H):


 def compose(G, H):
-    """Return a new graph of G composed with H.
+    """Returns a new graph of G composed with H.

     Composition is the simple union of the node sets and edge sets.
     The node sets of G and H do not need to be disjoint.
@@ -310,9 +311,9 @@ def compose(G, H):
     in two graphs) if you use MultiGraph without keeping track of edge keys.
     """
     if not G.is_multigraph() == H.is_multigraph():
-        raise nx.NetworkXError('G and H must both be graphs or multigraphs.')
+        raise nx.NetworkXError("G and H must both be graphs or multigraphs.")

-    R = G.fresh_copy()
+    R = G.__class__()
     # add graph attributes, H attributes take precedent over G attributes
     R.graph.update(G.graph)
     R.graph.update(H.graph)
@@ -329,3 +330,75 @@ def compose(G, H):
     else:
         R.add_edges_from(H.edges(data=True))
     return R
+
+
+def full_join(G, H, rename=(None, None)):
+    """Returns the full join of graphs G and H.
+
+    Full join is the union of G and H in which all edges between
+    G and H are added.
+    The node sets of G and H must be disjoint,
+    otherwise an exception is raised.
+
+    Parameters
+    ----------
+    G, H : graph
+       A NetworkX graph
+
+    rename : tuple, default=(None, None)
+       Node names of G and H can be changed by specifying the tuple
+       rename=('G-','H-') (for example). Node "u" in G is then renamed
+       "G-u" and "v" in H is renamed "H-v".
+
+    Returns
+    -------
+    U : The full join graph with the same type as G.
+
+    Notes
+    -----
+    It is recommended that G and H be either both directed or both undirected.
+
+    If G is directed, then edges from G to H are added as well as from H to G.
+
+    Note that full_join() does not produce parallel edges for MultiGraphs.
+
+    The full join operation of graphs G and H is the same as getting
+    their complement, performing a disjoint union, and finally getting
+    the complement of the resulting graph.
+
+    Graph, edge, and node attributes are propagated from G and H
+    to the union graph. If a graph attribute is present in both
+    G and H the value from H is used.
+
+    See Also
+    --------
+    union
+    disjoint_union
+    """
+    R = union(G, H, rename)
+
+    def add_prefix(graph, prefix):
+        if prefix is None:
+            return graph
+
+        def label(x):
+            if isinstance(x, str):
+                name = prefix + x
+            else:
+                name = prefix + repr(x)
+            return name
+
+        return nx.relabel_nodes(graph, label)
+
+    G = add_prefix(G, rename[0])
+    H = add_prefix(H, rename[1])
+
+    for i in G:
+        for j in H:
+            R.add_edge(i, j)
+    if R.is_directed():
+        for i in H:
+            for j in G:
+                R.add_edge(i, j)
+
+    return R
diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py
index 0b581ef..5b27d03 100644
--- a/networkx/algorithms/operators/product.py
+++ b/networkx/algorithms/operators/product.py
@@ -1,15 +1,3 @@
-# Copyright (C) 2011 by
-# Aric Hagberg
-# Dan Schult
-# Pieter Swart
-# All rights reserved.
-# BSD license.
-#
-# Authors:
-# Aric Hagberg
-# Pieter Swart
-# Dan Schult
-# Ben Edwards
 """
 Graph products.
""" @@ -18,13 +6,18 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['tensor_product', 'cartesian_product', - 'lexicographic_product', 'strong_product', 'power', - 'rooted_product'] +__all__ = [ + "tensor_product", + "cartesian_product", + "lexicographic_product", + "strong_product", + "power", + "rooted_product", +] def _dict_product(d1, d2): - return dict((k, (d1.get(k), d2.get(k))) for k in set(d1) | set(d2)) + return {k: (d1.get(k), d2.get(k)) for k in set(d1) | set(d2)} # Generators for producting graph products @@ -117,8 +110,8 @@ def _edges_cross_nodes_and_nodes(G, H): def _init_product_graph(G, H): if not G.is_directed() == H.is_directed(): - raise nx.NetworkXError("G and H must be both directed or", - "both undirected") + msg = "G and H must be both directed or both undirected" + raise nx.NetworkXError(msg) if G.is_multigraph() or H.is_multigraph(): GH = nx.MultiGraph() else: @@ -129,7 +122,7 @@ def _init_product_graph(G, H): def tensor_product(G, H): - r"""Return the tensor product of G and H. + r"""Returns the tensor product of G and H. The tensor product $P$ of the graphs $G$ and $H$ has a node set that is the tensor product of the node sets, $V(P)=V(G) \times V(H)$. @@ -167,7 +160,7 @@ def tensor_product(G, H): >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0, a1=True) - >>> H.add_node('a', a2='Spam') + >>> H.add_node("a", a2="Spam") >>> P = nx.tensor_product(G, H) >>> list(P) [(0, 'a')] @@ -184,7 +177,7 @@ def tensor_product(G, H): def cartesian_product(G, H): - r"""Return the Cartesian product of G and H. + r"""Returns the Cartesian product of G and H. The Cartesian product $P$ of the graphs $G$ and $H$ has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. @@ -219,7 +212,7 @@ def cartesian_product(G, H): >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0, a1=True) - >>> H.add_node('a', a2='Spam') + >>> H.add_node("a", a2="Spam") >>> P = nx.cartesian_product(G, H) >>> list(P) [(0, 'a')] @@ -227,9 +220,6 @@ def cartesian_product(G, H): Edge attributes and edge keys (for multigraphs) are also copied to the new product graph """ - if not G.is_directed() == H.is_directed(): - raise nx.NetworkXError("G and H must be both directed or", - "both undirected") GH = _init_product_graph(G, H) GH.add_nodes_from(_node_product(G, H)) GH.add_edges_from(_edges_cross_nodes(G, H)) @@ -238,7 +228,7 @@ def cartesian_product(G, H): def lexicographic_product(G, H): - r"""Return the lexicographic product of G and H. + r"""Returns the lexicographic product of G and H. The lexicographical product $P$ of the graphs $G$ and $H$ has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. @@ -272,7 +262,7 @@ def lexicographic_product(G, H): >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0, a1=True) - >>> H.add_node('a', a2='Spam') + >>> H.add_node("a", a2="Spam") >>> P = nx.lexicographic_product(G, H) >>> list(P) [(0, 'a')] @@ -290,7 +280,7 @@ def lexicographic_product(G, H): def strong_product(G, H): - r"""Return the strong product of G and H. + r"""Returns the strong product of G and H. The strong product $P$ of the graphs $G$ and $H$ has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. 
@@ -326,7 +316,7 @@ def strong_product(G, H): >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0, a1=True) - >>> H.add_node('a', a2='Spam') + >>> H.add_node("a", a2="Spam") >>> P = nx.strong_product(G, H) >>> list(P) [(0, 'a')] @@ -344,8 +334,8 @@ def strong_product(G, H): return GH -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def power(G, k): """Returns the specified power of a graph. @@ -409,22 +399,22 @@ def power(G, k): """ if k <= 0: - raise ValueError('k must be a positive integer') + raise ValueError("k must be a positive integer") H = nx.Graph() H.add_nodes_from(G) # update BFS code to ignore self loops. for n in G: - seen = {} # level (number of hops) when seen in BFS - level = 1 # the current level + seen = {} # level (number of hops) when seen in BFS + level = 1 # the current level nextlevel = G[n] while nextlevel: thislevel = nextlevel # advance to next level - nextlevel = {} # and start a new list (fringe) + nextlevel = {} # and start a new list (fringe) for v in thislevel: - if v == n: # avoid self loop + if v == n: # avoid self loop continue if v not in seen: - seen[v] = level # set the level of vertex v + seen[v] = level # set the level of vertex v nextlevel.update(G[v]) # add neighbors of v if k <= level: break @@ -433,7 +423,7 @@ def power(G, k): return H -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def rooted_product(G, H, root): """ Return the rooted product of graphs G and H rooted at root in H. @@ -460,7 +450,7 @@ def rooted_product(G, H, root): The nodes of G and H are not relabeled. """ if root not in H: - raise nx.NetworkXError('root must be a vertex in H') + raise nx.NetworkXError("root must be a vertex in H") R = nx.Graph() R.add_nodes_from(product(G, H)) diff --git a/networkx/algorithms/operators/tests/__init__.py b/networkx/algorithms/operators/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/operators/tests/test_all.py b/networkx/algorithms/operators/tests/test_all.py index d8ce797..8c96b08 100644 --- a/networkx/algorithms/operators/tests/test_all.py +++ b/networkx/algorithms/operators/tests/test_all.py @@ -1,6 +1,6 @@ -from nose.tools import * +import pytest import networkx as nx -from networkx.testing import * +from networkx.testing import assert_edges_equal def test_union_all_attributes(): @@ -8,26 +8,26 @@ def test_union_all_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 j = g.copy() - j.graph['name'] = 'j' - j.graph['attr'] = 'attr' - j.nodes[0]['x'] = 7 + j.graph["name"] = "j" + j.graph["attr"] = "attr" + j.nodes[0]["x"] = 7 - ghj = nx.union_all([g, h, j], rename=('g', 'h', 'j')) - assert_equal(set(ghj.nodes()), set(['h0', 'h1', 'g0', 'g1', 'j0', 'j1'])) + ghj = nx.union_all([g, h, j], rename=("g", "h", "j")) + assert set(ghj.nodes()) == {"h0", "h1", "g0", "g1", "j0", "j1"} for n in ghj: graph, node = n - assert_equal(ghj.nodes[n], eval(graph).nodes[int(node)]) + assert ghj.nodes[n] == eval(graph).nodes[int(node)] - assert_equal(ghj.graph['attr'], 'attr') - assert_equal(ghj.graph['name'], 'j') # j graph attributes take precendent + assert ghj.graph["attr"] == "attr" + assert ghj.graph["name"] == "j" # j graph attributes take precendent def 
test_intersection_all(): @@ -44,8 +44,8 @@ def test_intersection_all(): R.add_edge(2, 3) R.add_edge(4, 1) I = nx.intersection_all([G, H, R]) - assert_equal(set(I.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(I.edges()), [(2, 3)]) + assert set(I.nodes()) == {1, 2, 3, 4} + assert sorted(I.edges()) == [(2, 3)] def test_intersection_all_attributes(): @@ -53,20 +53,20 @@ def test_intersection_all_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 gh = nx.intersection_all([g, h]) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), sorted(g.edges())) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == sorted(g.edges()) h.remove_node(0) - assert_raises(nx.NetworkXError, nx.intersection, g, h) + pytest.raises(nx.NetworkXError, nx.intersection, g, h) def test_intersection_all_multigraph_attributes(): @@ -78,10 +78,10 @@ def test_intersection_all_multigraph_attributes(): h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.intersection_all([g, h]) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), [(0, 1)]) - assert_equal(sorted(gh.edges(keys=True)), [(0, 1, 0)]) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 0)] def test_union_all_and_compose_all(): @@ -89,60 +89,88 @@ def test_union_all_and_compose_all(): P3 = nx.path_graph(3) G1 = nx.DiGraph() - G1.add_edge('A', 'B') - G1.add_edge('A', 'C') - G1.add_edge('A', 'D') + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") G2 = nx.DiGraph() - G2.add_edge('1', '2') - G2.add_edge('1', '3') - G2.add_edge('1', '4') + G2.add_edge("1", "2") + G2.add_edge("1", "3") + G2.add_edge("1", "4") G = nx.union_all([G1, G2]) H = nx.compose_all([G1, G2]) assert_edges_equal(G.edges(), H.edges()) - assert_false(G.has_edge('A', '1')) - assert_raises(nx.NetworkXError, nx.union, K3, P3) - H1 = nx.union_all([H, G1], rename=('H', 'G1')) - assert_equal(sorted(H1.nodes()), - ['G1A', 'G1B', 'G1C', 'G1D', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) + assert not G.has_edge("A", "1") + pytest.raises(nx.NetworkXError, nx.union, K3, P3) + H1 = nx.union_all([H, G1], rename=("H", "G1")) + assert sorted(H1.nodes()) == [ + "G1A", + "G1B", + "G1C", + "G1D", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] H2 = nx.union_all([H, G2], rename=("H", "")) - assert_equal(sorted(H2.nodes()), - ['1', '2', '3', '4', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) - - assert_false(H1.has_edge('NB', 'NA')) + assert sorted(H2.nodes()) == [ + "1", + "2", + "3", + "4", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + assert not H1.has_edge("NB", "NA") G = nx.compose_all([G, G]) assert_edges_equal(G.edges(), H.edges()) - G2 = nx.union_all([G2, G2], rename=('', 'copy')) - assert_equal(sorted(G2.nodes()), - ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4']) - - assert_equal(sorted(G2.neighbors('copy4')), []) - assert_equal(sorted(G2.neighbors('copy1')), ['copy2', 'copy3', 'copy4']) - assert_equal(len(G), 8) - assert_equal(nx.number_of_edges(G), 6) + G2 
= nx.union_all([G2, G2], rename=("", "copy")) + assert sorted(G2.nodes()) == [ + "1", + "2", + "3", + "4", + "copy1", + "copy2", + "copy3", + "copy4", + ] + + assert sorted(G2.neighbors("copy4")) == [] + assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"] + assert len(G) == 8 + assert nx.number_of_edges(G) == 6 E = nx.disjoint_union_all([G, G]) - assert_equal(len(E), 16) - assert_equal(nx.number_of_edges(E), 12) + assert len(E) == 16 + assert nx.number_of_edges(E) == 12 E = nx.disjoint_union_all([G1, G2]) - assert_equal(sorted(E.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] G1 = nx.DiGraph() - G1.add_edge('A', 'B') + G1.add_edge("A", "B") G2 = nx.DiGraph() G2.add_edge(1, 2) G3 = nx.DiGraph() G3.add_edge(11, 22) G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3")) - assert_equal(sorted(G4.nodes()), - ['G1A', 'G1B', 'G21', 'G22', - 'G311', 'G322']) + assert sorted(G4.nodes()) == ["G1A", "G1B", "G21", "G22", "G311", "G322"] def test_union_all_multigraph(): @@ -153,49 +181,68 @@ def test_union_all_multigraph(): H.add_edge(3, 4, key=0) H.add_edge(3, 4, key=1) GH = nx.union_all([G, H]) - assert_equal(set(GH), set(G) | set(H)) - assert_equal(set(GH.edges(keys=True)), - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_input_output(): l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)])] U = nx.disjoint_union_all(l) - assert_equal(len(l), 2) + assert len(l) == 2 C = nx.compose_all(l) - assert_equal(len(l), 2) + assert len(l) == 2 l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])] R = nx.intersection_all(l) - assert_equal(len(l), 2) + assert len(l) == 2 -@raises(nx.NetworkXError) def test_mixed_type_union(): - G = nx.Graph() - H = nx.MultiGraph() - I = nx.Graph() - U = nx.union_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.union_all([G, H, I]) -@raises(nx.NetworkXError) def test_mixed_type_disjoint_union(): - G = nx.Graph() - H = nx.MultiGraph() - I = nx.Graph() - U = nx.disjoint_union_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.disjoint_union_all([G, H, I]) -@raises(nx.NetworkXError) def test_mixed_type_intersection(): - G = nx.Graph() - H = nx.MultiGraph() - I = nx.Graph() - U = nx.intersection_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.intersection_all([G, H, I]) -@raises(nx.NetworkXError) def test_mixed_type_compose(): - G = nx.Graph() - H = nx.MultiGraph() - I = nx.Graph() - U = nx.compose_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.compose_all([G, H, I]) + + +def test_empty_union(): + with pytest.raises(ValueError): + nx.union_all([]) + + +def test_empty_disjoint_union(): + with pytest.raises(ValueError): + nx.disjoint_union_all([]) + + +def test_empty_compose_all(): + with pytest.raises(ValueError): + nx.compose_all([]) + + +def test_empty_intersection_all(): + with pytest.raises(ValueError): + nx.intersection_all([]) diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py index d859074..bf88513 100644 --- a/networkx/algorithms/operators/tests/test_binary.py +++ b/networkx/algorithms/operators/tests/test_binary.py @@ -1,7 
+1,6 @@ -from nose.tools import * +import pytest import networkx as nx -from networkx import * -from networkx.testing import * +from networkx.testing import assert_edges_equal def test_union_attributes(): @@ -9,21 +8,21 @@ def test_union_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 - gh = nx.union(g, h, rename=('g', 'h')) - assert_equal(set(gh.nodes()), set(['h0', 'h1', 'g0', 'g1'])) + gh = nx.union(g, h, rename=("g", "h")) + assert set(gh.nodes()) == {"h0", "h1", "g0", "g1"} for n in gh: graph, node = n - assert_equal(gh.nodes[n], eval(graph).nodes[int(node)]) + assert gh.nodes[n] == eval(graph).nodes[int(node)] - assert_equal(gh.graph['attr'], 'attr') - assert_equal(gh.graph['name'], 'h') # h graph attributes take precendent + assert gh.graph["attr"] == "attr" + assert gh.graph["name"] == "h" # h graph attributes take precendent def test_intersection(): @@ -36,8 +35,8 @@ def test_intersection(): H.add_edge(2, 3) H.add_edge(3, 4) I = nx.intersection(G, H) - assert_equal(set(I.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(I.edges()), [(2, 3)]) + assert set(I.nodes()) == {1, 2, 3, 4} + assert sorted(I.edges()) == [(2, 3)] def test_intersection_attributes(): @@ -45,20 +44,20 @@ def test_intersection_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 gh = nx.intersection(g, h) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), sorted(g.edges())) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == sorted(g.edges()) h.remove_node(0) - assert_raises(nx.NetworkXError, nx.intersection, g, h) + pytest.raises(nx.NetworkXError, nx.intersection, g, h) def test_intersection_multigraph_attributes(): @@ -70,10 +69,10 @@ def test_intersection_multigraph_attributes(): h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.intersection(g, h) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), [(0, 1)]) - assert_equal(sorted(gh.edges(keys=True)), [(0, 1, 0)]) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 0)] def test_difference(): @@ -86,14 +85,14 @@ def test_difference(): H.add_edge(2, 3) H.add_edge(3, 4) D = nx.difference(G, H) - assert_equal(set(D.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(D.edges()), [(1, 2)]) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(1, 2)] D = nx.difference(H, G) - assert_equal(set(D.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(D.edges()), [(3, 4)]) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(3, 4)] D = nx.symmetric_difference(G, H) - assert_equal(set(D.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(D.edges()), [(1, 2), (3, 4)]) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(1, 2), (3, 4)] def test_difference2(): @@ -105,15 +104,15 @@ def test_difference2(): H.add_edge(1, 
2) G.add_edge(2, 3) D = nx.difference(G, H) - assert_equal(set(D.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(D.edges()), [(2, 3)]) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(2, 3)] D = nx.difference(H, G) - assert_equal(set(D.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(D.edges()), []) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [] H.add_edge(3, 4) D = nx.difference(H, G) - assert_equal(set(D.nodes()), set([1, 2, 3, 4])) - assert_equal(sorted(D.edges()), [(3, 4)]) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(3, 4)] def test_difference_attributes(): @@ -121,20 +120,20 @@ def test_difference_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 gh = nx.difference(g, h) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), []) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [] h.remove_node(0) - assert_raises(nx.NetworkXError, nx.intersection, g, h) + pytest.raises(nx.NetworkXError, nx.intersection, g, h) def test_difference_multigraph_attributes(): @@ -146,17 +145,17 @@ def test_difference_multigraph_attributes(): h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.difference(g, h) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), [(0, 1), (0, 1)]) - assert_equal(sorted(gh.edges(keys=True)), [(0, 1, 1), (0, 1, 2)]) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1), (0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 1), (0, 1, 2)] -@raises(nx.NetworkXError) def test_difference_raise(): G = nx.path_graph(4) H = nx.path_graph(3) - GH = nx.difference(G, H) + pytest.raises(nx.NetworkXError, nx.difference, G, H) + pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H) def test_symmetric_difference_multigraph(): @@ -168,75 +167,101 @@ def test_symmetric_difference_multigraph(): h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.symmetric_difference(g, h) - assert_equal(set(gh.nodes()), set(g.nodes())) - assert_equal(set(gh.nodes()), set(h.nodes())) - assert_equal(sorted(gh.edges()), 3 * [(0, 1)]) - assert_equal(sorted(sorted(e) for e in gh.edges(keys=True)), - [[0, 1, 1], [0, 1, 2], [0, 1, 3]]) - - -@raises(nx.NetworkXError) -def test_symmetric_difference_raise(): - G = nx.path_graph(4) - H = nx.path_graph(3) - GH = nx.symmetric_difference(G, H) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == 3 * [(0, 1)] + assert sorted(sorted(e) for e in gh.edges(keys=True)) == [ + [0, 1, 1], + [0, 1, 2], + [0, 1, 3], + ] def test_union_and_compose(): - K3 = complete_graph(3) - P3 = path_graph(3) + K3 = nx.complete_graph(3) + P3 = nx.path_graph(3) G1 = nx.DiGraph() - G1.add_edge('A', 'B') - G1.add_edge('A', 'C') - G1.add_edge('A', 'D') + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") G2 = nx.DiGraph() - G2.add_edge('1', '2') - G2.add_edge('1', '3') - G2.add_edge('1', '4') + G2.add_edge("1", "2") + G2.add_edge("1", "3") + G2.add_edge("1", "4") - G = union(G1, G2) - H = compose(G1, G2) + 
G = nx.union(G1, G2) + H = nx.compose(G1, G2) assert_edges_equal(G.edges(), H.edges()) - assert_false(G.has_edge('A', 1)) - assert_raises(nx.NetworkXError, nx.union, K3, P3) - H1 = union(H, G1, rename=('H', 'G1')) - assert_equal(sorted(H1.nodes()), - ['G1A', 'G1B', 'G1C', 'G1D', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) - - H2 = union(H, G2, rename=("H", "")) - assert_equal(sorted(H2.nodes()), - ['1', '2', '3', '4', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) - - assert_false(H1.has_edge('NB', 'NA')) - - G = compose(G, G) + assert not G.has_edge("A", 1) + pytest.raises(nx.NetworkXError, nx.union, K3, P3) + H1 = nx.union(H, G1, rename=("H", "G1")) + assert sorted(H1.nodes()) == [ + "G1A", + "G1B", + "G1C", + "G1D", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + H2 = nx.union(H, G2, rename=("H", "")) + assert sorted(H2.nodes()) == [ + "1", + "2", + "3", + "4", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + assert not H1.has_edge("NB", "NA") + + G = nx.compose(G, G) assert_edges_equal(G.edges(), H.edges()) - G2 = union(G2, G2, rename=('', 'copy')) - assert_equal(sorted(G2.nodes()), - ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4']) - - assert_equal(sorted(G2.neighbors('copy4')), []) - assert_equal(sorted(G2.neighbors('copy1')), ['copy2', 'copy3', 'copy4']) - assert_equal(len(G), 8) - assert_equal(number_of_edges(G), 6) - - E = disjoint_union(G, G) - assert_equal(len(E), 16) - assert_equal(number_of_edges(E), 12) - - E = disjoint_union(G1, G2) - assert_equal(sorted(E.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + G2 = nx.union(G2, G2, rename=("", "copy")) + assert sorted(G2.nodes()) == [ + "1", + "2", + "3", + "4", + "copy1", + "copy2", + "copy3", + "copy4", + ] + + assert sorted(G2.neighbors("copy4")) == [] + assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"] + assert len(G) == 8 + assert nx.number_of_edges(G) == 6 + + E = nx.disjoint_union(G, G) + assert len(E) == 16 + assert nx.number_of_edges(E) == 12 + + E = nx.disjoint_union(G1, G2) + assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] G = nx.Graph() H = nx.Graph() - G.add_nodes_from([(1, {'a1': 1})]) - H.add_nodes_from([(1, {'b1': 1})]) - R = compose(G, H) - assert_equal(R.nodes, {1: {'a1': 1, 'b1': 1}}) + G.add_nodes_from([(1, {"a1": 1})]) + H.add_nodes_from([(1, {"b1": 1})]) + R = nx.compose(G, H) + assert R.nodes == {1: {"a1": 1, "b1": 1}} def test_union_multigraph(): @@ -247,9 +272,8 @@ def test_union_multigraph(): H.add_edge(3, 4, key=0) H.add_edge(3, 4, key=1) GH = nx.union(G, H) - assert_equal(set(GH), set(G) | set(H)) - assert_equal(set(GH.edges(keys=True)), - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_disjoint_union_multigraph(): @@ -260,9 +284,8 @@ def test_disjoint_union_multigraph(): H.add_edge(2, 3, key=0) H.add_edge(2, 3, key=1) GH = nx.disjoint_union(G, H) - assert_equal(set(GH), set(G) | set(H)) - assert_equal(set(GH.edges(keys=True)), - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_compose_multigraph(): @@ -273,53 +296,108 @@ def test_compose_multigraph(): H.add_edge(3, 4, key=0) H.add_edge(3, 4, key=1) GH = nx.compose(G, H) - assert_equal(set(GH), set(G) | set(H)) - assert_equal(set(GH.edges(keys=True)), - set(G.edges(keys=True)) | 
set(H.edges(keys=True))) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) H.add_edge(1, 2, key=2) GH = nx.compose(G, H) - assert_equal(set(GH), set(G) | set(H)) - assert_equal(set(GH.edges(keys=True)), - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) -@raises(nx.NetworkXError) -def test_mixed_type_union(): +def test_full_join_graph(): + # Simple Graphs G = nx.Graph() - H = nx.MultiGraph() - U = nx.union(G, H) + G.add_node(0) + G.add_edge(1, 2) + H = nx.Graph() + H.add_edge(3, 4) + + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + # Rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) -@raises(nx.NetworkXError) -def test_mixed_type_disjoint_union(): + # Rename graphs with string-like nodes G = nx.Graph() - H = nx.MultiGraph() - U = nx.disjoint_union(G, H) + G.add_node("a") + G.add_edge("b", "c") + H = nx.Graph() + H.add_edge("d", "e") + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"ga", "gb", "gc", "hd", "he"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) -@raises(nx.NetworkXError) -def test_mixed_type_intersection(): - G = nx.Graph() - H = nx.MultiGraph() - U = nx.intersection(G, H) + # DiGraphs + G = nx.DiGraph() + G.add_node(0) + G.add_edge(1, 2) + H = nx.DiGraph() + H.add_edge(3, 4) + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 -@raises(nx.NetworkXError) -def test_mixed_type_difference(): - G = nx.Graph() - H = nx.MultiGraph() - U = nx.difference(G, H) + # DiGraphs Rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 -@raises(nx.NetworkXError) -def test_mixed_type_symmetric_difference(): - G = nx.Graph() +def test_full_join_multigraph(): + # MultiGraphs + G = nx.MultiGraph() + G.add_node(0) + G.add_edge(1, 2) H = nx.MultiGraph() - U = nx.symmetric_difference(G, H) + H.add_edge(3, 4) + + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + + # MultiGraphs rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + # MultiDiGraphs + G = nx.MultiDiGraph() + G.add_node(0) + G.add_edge(1, 2) + H = nx.MultiDiGraph() + H.add_edge(3, 4) -@raises(nx.NetworkXError) -def test_mixed_type_compose(): + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 + + # MultiDiGraphs rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + 
len(G) * len(H) * 2 + + +def test_mixed_type_union(): G = nx.Graph() H = nx.MultiGraph() - U = nx.compose(G, H) + pytest.raises(nx.NetworkXError, nx.union, G, H) + pytest.raises(nx.NetworkXError, nx.disjoint_union, G, H) + pytest.raises(nx.NetworkXError, nx.intersection, G, H) + pytest.raises(nx.NetworkXError, nx.difference, G, H) + pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H) + pytest.raises(nx.NetworkXError, nx.compose, G, H) diff --git a/networkx/algorithms/operators/tests/test_product.py b/networkx/algorithms/operators/tests/test_product.py index 6ab5b50..2737233 100644 --- a/networkx/algorithms/operators/tests/test_product.py +++ b/networkx/algorithms/operators/tests/test_product.py @@ -1,11 +1,11 @@ +import pytest import networkx as nx -from nose.tools import assert_true, assert_equal, raises from networkx.testing import assert_edges_equal -@raises(nx.NetworkXError) def test_tensor_product_raises(): - P = nx.tensor_product(nx.DiGraph(), nx.Graph()) + with pytest.raises(nx.NetworkXError): + P = nx.tensor_product(nx.DiGraph(), nx.Graph()) def test_tensor_product_null(): @@ -17,28 +17,28 @@ def test_tensor_product_null(): P10 = nx.path_graph(10) # null graph G = nx.tensor_product(null, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) # null_graph X anything = null_graph and v.v. G = nx.tensor_product(null, empty10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(null, K3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(null, K10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(null, P3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(null, P10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(empty10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(K3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(K10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(P3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.tensor_product(P10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) def test_tensor_product_size(): @@ -47,9 +47,9 @@ def test_tensor_product_size(): K5 = nx.complete_graph(5) G = nx.tensor_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.tensor_product(K3, K5) - assert_equal(nx.number_of_nodes(G), 3 * 5) + assert nx.number_of_nodes(G) == 3 * 5 def test_tensor_product_combinations(): @@ -57,44 +57,44 @@ def test_tensor_product_combinations(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G = nx.tensor_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.tensor_product(P5, nx.MultiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.tensor_product(nx.MultiGraph(P5), K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.tensor_product(nx.MultiGraph(P5), nx.MultiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.tensor_product(nx.DiGraph(P5), nx.DiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + 
assert nx.number_of_nodes(G) == 5 * 3 def test_tensor_product_classic_result(): K2 = nx.complete_graph(2) G = nx.petersen_graph() G = nx.tensor_product(G, K2) - assert_true(nx.is_isomorphic(G, nx.desargues_graph())) + assert nx.is_isomorphic(G, nx.desargues_graph()) G = nx.cycle_graph(5) G = nx.tensor_product(G, K2) - assert_true(nx.is_isomorphic(G, nx.cycle_graph(10))) + assert nx.is_isomorphic(G, nx.cycle_graph(10)) G = nx.tetrahedral_graph() G = nx.tensor_product(G, K2) - assert_true(nx.is_isomorphic(G, nx.cubical_graph())) + assert nx.is_isomorphic(G, nx.cubical_graph()) def test_tensor_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.tensor_product(G, H) for (u_G, u_H) in GH.nodes(): for (v_G, v_H) in GH.nodes(): if H.has_edge(u_H, v_H) and G.has_edge(u_G, v_G): - assert_true(GH.has_edge((u_G, u_H), (v_G, v_H))) + assert GH.has_edge((u_G, u_H), (v_G, v_H)) else: - assert_true(not GH.has_edge((u_G, u_H), (v_G, v_H))) + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) def test_cartesian_product_multigraph(): @@ -105,18 +105,25 @@ def test_cartesian_product_multigraph(): H.add_edge(3, 4, key=0) H.add_edge(3, 4, key=1) GH = nx.cartesian_product(G, H) - assert_equal(set(GH), {(1, 3), (2, 3), (2, 4), (1, 4)}) - assert_equal({(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)}, - {(frozenset([u, v]), k) for u, v, k in - [((1, 3), (2, 3), 0), ((1, 3), (2, 3), 1), - ((1, 3), (1, 4), 0), ((1, 3), (1, 4), 1), - ((2, 3), (2, 4), 0), ((2, 3), (2, 4), 1), - ((2, 4), (1, 4), 0), ((2, 4), (1, 4), 1)]}) + assert set(GH) == {(1, 3), (2, 3), (2, 4), (1, 4)} + assert {(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)} == { + (frozenset([u, v]), k) + for u, v, k in [ + ((1, 3), (2, 3), 0), + ((1, 3), (2, 3), 1), + ((1, 3), (1, 4), 0), + ((1, 3), (1, 4), 1), + ((2, 3), (2, 4), 0), + ((2, 3), (2, 4), 1), + ((2, 4), (1, 4), 0), + ((2, 4), (1, 4), 1), + ] + } -@raises(nx.NetworkXError) def test_cartesian_product_raises(): - P = nx.cartesian_product(nx.DiGraph(), nx.Graph()) + with pytest.raises(nx.NetworkXError): + P = nx.cartesian_product(nx.DiGraph(), nx.Graph()) def test_cartesian_product_null(): @@ -128,28 +135,28 @@ def test_cartesian_product_null(): P10 = nx.path_graph(10) # null graph G = nx.cartesian_product(null, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) # null_graph X anything = null_graph and v.v. 
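# A minimal sketch (illustrative aside, not part of the patch): the
# cartesian product connects (u, x) and (v, y) when u == v and x-y is an
# edge of H, or x == y and u-v is an edge of G. Two 2-node paths therefore
# compose into a 4-cycle.
import networkx as nx

P2 = nx.path_graph(2)
C = nx.cartesian_product(P2, P2)
assert sorted(C.nodes()) == [(0, 0), (0, 1), (1, 0), (1, 1)]
assert nx.is_isomorphic(C, nx.cycle_graph(4))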
G = nx.cartesian_product(null, empty10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(null, K3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(null, K10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(null, P3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(null, P10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(empty10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(K3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(K10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(P3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.cartesian_product(P10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) def test_cartesian_product_size(): @@ -158,15 +165,15 @@ def test_cartesian_product_size(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G = nx.cartesian_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) - assert_equal(nx.number_of_edges(G), - nx.number_of_edges(P5) * nx.number_of_nodes(K3) + - nx.number_of_edges(K3) * nx.number_of_nodes(P5)) + assert nx.number_of_nodes(G) == 5 * 3 + assert nx.number_of_edges(G) == nx.number_of_edges(P5) * nx.number_of_nodes( + K3 + ) + nx.number_of_edges(K3) * nx.number_of_nodes(P5) G = nx.cartesian_product(K3, K5) - assert_equal(nx.number_of_nodes(G), 3 * 5) - assert_equal(nx.number_of_edges(G), - nx.number_of_edges(K5) * nx.number_of_nodes(K3) + - nx.number_of_edges(K3) * nx.number_of_nodes(K5)) + assert nx.number_of_nodes(G) == 3 * 5 + assert nx.number_of_edges(G) == nx.number_of_edges(K5) * nx.number_of_nodes( + K3 + ) + nx.number_of_edges(K3) * nx.number_of_nodes(K5) def test_cartesian_product_classic(): @@ -176,30 +183,31 @@ def test_cartesian_product_classic(): # cube = 2-path X 2-path G = nx.cartesian_product(P2, P2) G = nx.cartesian_product(P2, G) - assert_true(nx.is_isomorphic(G, nx.cubical_graph())) + assert nx.is_isomorphic(G, nx.cubical_graph()) # 3x3 grid G = nx.cartesian_product(P3, P3) - assert_true(nx.is_isomorphic(G, nx.grid_2d_graph(3, 3))) + assert nx.is_isomorphic(G, nx.grid_2d_graph(3, 3)) def test_cartesian_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) 
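# A worked instance of the size identity asserted above (illustrative
# aside): P5 has 5 nodes and 4 edges, K3 has 3 nodes and 3 edges, so the
# cartesian product has 5 * 3 = 15 nodes and 4 * 3 + 3 * 5 = 27 edges.
import networkx as nx

G = nx.cartesian_product(nx.path_graph(5), nx.complete_graph(3))
assert nx.number_of_nodes(G) == 15
assert nx.number_of_edges(G) == 27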
+ G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.cartesian_product(G, H) for (u_G, u_H) in GH.nodes(): for (v_G, v_H) in GH.nodes(): - if (u_G == v_G and H.has_edge(u_H, v_H)) or \ - (u_H == v_H and G.has_edge(u_G, v_G)): - assert_true(GH.has_edge((u_G, u_H), (v_G, v_H))) + if (u_G == v_G and H.has_edge(u_H, v_H)) or ( + u_H == v_H and G.has_edge(u_G, v_G) + ): + assert GH.has_edge((u_G, u_H), (v_G, v_H)) else: - assert_true(not GH.has_edge((u_G, u_H), (v_G, v_H))) + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) -@raises(nx.NetworkXError) def test_lexicographic_product_raises(): - P = nx.lexicographic_product(nx.DiGraph(), nx.Graph()) + with pytest.raises(nx.NetworkXError): + P = nx.lexicographic_product(nx.DiGraph(), nx.Graph()) def test_lexicographic_product_null(): @@ -211,28 +219,28 @@ def test_lexicographic_product_null(): P10 = nx.path_graph(10) # null graph G = nx.lexicographic_product(null, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) # null_graph X anything = null_graph and v.v. G = nx.lexicographic_product(null, empty10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(null, K3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(null, K10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(null, P3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(null, P10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(empty10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(K3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(K10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(P3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.lexicographic_product(P10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) def test_lexicographic_product_size(): @@ -240,42 +248,42 @@ def test_lexicographic_product_size(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G = nx.lexicographic_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.lexicographic_product(K3, K5) - assert_equal(nx.number_of_nodes(G), 3 * 5) + assert nx.number_of_nodes(G) == 3 * 5 def test_lexicographic_product_combinations(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G = nx.lexicographic_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.lexicographic_product(nx.MultiGraph(P5), K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.lexicographic_product(P5, nx.MultiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.lexicographic_product(nx.MultiGraph(P5), nx.MultiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 # No classic easily found classic results for lexicographic product def test_lexicographic_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) 
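# The adjacency rule that the rewritten random test below checks,
# transcribed as a standalone predicate (illustrative aside; the helper
# name is ours, not part of the patch).
def lexicographic_edge(G, H, u, v):
    # Edge iff the first coordinates are adjacent in G, or they coincide
    # and the second coordinates are adjacent in H.
    (u_G, u_H), (v_G, v_H) = u, v
    return G.has_edge(u_G, v_G) or (u_G == v_G and H.has_edge(u_H, v_H))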
+ G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.lexicographic_product(G, H) for (u_G, u_H) in GH.nodes(): for (v_G, v_H) in GH.nodes(): if G.has_edge(u_G, v_G) or (u_G == v_G and H.has_edge(u_H, v_H)): - assert_true(GH.has_edge((u_G, u_H), (v_G, v_H))) + assert GH.has_edge((u_G, u_H), (v_G, v_H)) else: - assert_true(not GH.has_edge((u_G, u_H), (v_G, v_H))) + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) -@raises(nx.NetworkXError) def test_strong_product_raises(): - P = nx.strong_product(nx.DiGraph(), nx.Graph()) + with pytest.raises(nx.NetworkXError): + P = nx.strong_product(nx.DiGraph(), nx.Graph()) def test_strong_product_null(): @@ -287,28 +295,28 @@ def test_strong_product_null(): P10 = nx.path_graph(10) # null graph G = nx.strong_product(null, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) # null_graph X anything = null_graph and v.v. G = nx.strong_product(null, empty10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(null, K3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(null, K10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(null, P3) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(null, P10) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(empty10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(K3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(K10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(P3, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) G = nx.strong_product(P10, null) - assert_true(nx.is_isomorphic(G, null)) + assert nx.is_isomorphic(G, null) def test_strong_product_size(): @@ -316,44 +324,46 @@ def test_strong_product_size(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G = nx.strong_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.strong_product(K3, K5) - assert_equal(nx.number_of_nodes(G), 3 * 5) + assert nx.number_of_nodes(G) == 3 * 5 def test_strong_product_combinations(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G = nx.strong_product(P5, K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.strong_product(nx.MultiGraph(P5), K3) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.strong_product(P5, nx.MultiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 G = nx.strong_product(nx.MultiGraph(P5), nx.MultiGraph(K3)) - assert_equal(nx.number_of_nodes(G), 5 * 3) + assert nx.number_of_nodes(G) == 5 * 3 # No classic easily found classic results for strong product def test_strong_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) 
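# Likewise for the strong product (illustrative aside; the helper name is
# ours): its rule is the union of the cartesian and tensor conditions,
# exactly what the rewritten random test below asserts.
def strong_edge(G, H, u, v):
    (u_G, u_H), (v_G, v_H) = u, v
    return (
        (u_G == v_G and H.has_edge(u_H, v_H))
        or (u_H == v_H and G.has_edge(u_G, v_G))
        or (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H))
    )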
+ G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.strong_product(G, H) for (u_G, u_H) in GH.nodes(): for (v_G, v_H) in GH.nodes(): - if (u_G == v_G and H.has_edge(u_H, v_H)) or \ - (u_H == v_H and G.has_edge(u_G, v_G)) or \ - (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H)): - assert_true(GH.has_edge((u_G, u_H), (v_G, v_H))) + if ( + (u_G == v_G and H.has_edge(u_H, v_H)) + or (u_H == v_H and G.has_edge(u_G, v_G)) + or (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H)) + ): + assert GH.has_edge((u_G, u_H), (v_G, v_H)) else: - assert_true(not GH.has_edge((u_G, u_H), (v_G, v_H))) + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) -@raises(nx.NetworkXNotImplemented) def test_graph_power_raises(): - nx.power(nx.MultiDiGraph(), 2) + with pytest.raises(nx.NetworkXNotImplemented): + nx.power(nx.MultiDiGraph(), 2) def test_graph_power(): @@ -365,28 +375,52 @@ def test_graph_power(): G.add_edge(9, 2) H = nx.power(G, 2) - assert_edges_equal(list(H.edges()), - [(0, 1), (0, 2), (0, 5), (0, 6), (0, 7), (1, 9), - (1, 2), (1, 3), (1, 6), (2, 3), (2, 4), (2, 8), - (2, 9), (3, 4), (3, 5), (3, 9), (4, 5), (4, 6), - (5, 6), (5, 7), (6, 7), (6, 8), (7, 8), (7, 9), - (8, 9)]) + assert_edges_equal( + list(H.edges()), + [ + (0, 1), + (0, 2), + (0, 5), + (0, 6), + (0, 7), + (1, 9), + (1, 2), + (1, 3), + (1, 6), + (2, 3), + (2, 4), + (2, 8), + (2, 9), + (3, 4), + (3, 5), + (3, 9), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (6, 7), + (6, 8), + (7, 8), + (7, 9), + (8, 9), + ], + ) -@raises(ValueError) def test_graph_power_negative(): - nx.power(nx.Graph(), -1) + with pytest.raises(ValueError): + nx.power(nx.Graph(), -1) -@raises(nx.NetworkXError) def test_rooted_product_raises(): - nx.rooted_product(nx.Graph(), nx.path_graph(2), 10) + with pytest.raises(nx.NetworkXError): + nx.rooted_product(nx.Graph(), nx.path_graph(2), 10) def test_rooted_product(): G = nx.cycle_graph(5) H = nx.Graph() - H.add_edges_from([('a', 'b'), ('b', 'c'), ('b', 'd')]) - R = nx.rooted_product(G, H, 'a') - assert_equal(len(R), len(G) * len(H)) - assert_equal(R.size(), G.size() + len(G) * H.size()) + H.add_edges_from([("a", "b"), ("b", "c"), ("b", "d")]) + R = nx.rooted_product(G, H, "a") + assert len(R) == len(G) * len(H) + assert R.size() == G.size() + len(G) * H.size() diff --git a/networkx/algorithms/operators/tests/test_unary.py b/networkx/algorithms/operators/tests/test_unary.py index de76bc8..a04a349 100644 --- a/networkx/algorithms/operators/tests/test_unary.py +++ b/networkx/algorithms/operators/tests/test_unary.py @@ -1,48 +1,54 @@ -from nose.tools import * +import pytest import networkx as nx -from networkx import * def test_complement(): - null = null_graph() - empty1 = empty_graph(1) - empty10 = empty_graph(10) - K3 = complete_graph(3) - K5 = complete_graph(5) - K10 = complete_graph(10) - P2 = path_graph(2) - P3 = path_graph(3) - P5 = path_graph(5) - P10 = path_graph(10) + null = nx.null_graph() + empty1 = nx.empty_graph(1) + empty10 = nx.empty_graph(10) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + K10 = nx.complete_graph(10) + P2 = nx.path_graph(2) + P3 = nx.path_graph(3) + P5 = nx.path_graph(5) + P10 = nx.path_graph(10) # complement of the complete graph is empty - G = complement(K3) - assert_true(is_isomorphic(G, empty_graph(3))) - G = complement(K5) - assert_true(is_isomorphic(G, empty_graph(5))) + G = nx.complement(K3) + assert nx.is_isomorphic(G, nx.empty_graph(3)) + G = nx.complement(K5) + assert nx.is_isomorphic(G, nx.empty_graph(5)) # for any G, G=complement(complement(G)) - 
P3cc = complement(complement(P3)) - assert_true(is_isomorphic(P3, P3cc)) - nullcc = complement(complement(null)) - assert_true(is_isomorphic(null, nullcc)) - b = bull_graph() - bcc = complement(complement(b)) - assert_true(is_isomorphic(b, bcc)) + P3cc = nx.complement(nx.complement(P3)) + assert nx.is_isomorphic(P3, P3cc) + nullcc = nx.complement(nx.complement(null)) + assert nx.is_isomorphic(null, nullcc) + b = nx.bull_graph() + bcc = nx.complement(nx.complement(b)) + assert nx.is_isomorphic(b, bcc) def test_complement_2(): G1 = nx.DiGraph() - G1.add_edge('A', 'B') - G1.add_edge('A', 'C') - G1.add_edge('A', 'D') - G1C = complement(G1) - assert_equal(sorted(G1C.edges()), - [('B', 'A'), ('B', 'C'), - ('B', 'D'), ('C', 'A'), ('C', 'B'), - ('C', 'D'), ('D', 'A'), ('D', 'B'), ('D', 'C')]) + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") + G1C = nx.complement(G1) + assert sorted(G1C.edges()) == [ + ("B", "A"), + ("B", "C"), + ("B", "D"), + ("C", "A"), + ("C", "B"), + ("C", "D"), + ("D", "A"), + ("D", "B"), + ("D", "C"), + ] def test_reverse1(): # Other tests for reverse are done by the DiGraph and MultiDigraph. G1 = nx.Graph() - assert_raises(nx.NetworkXError, nx.reverse, G1) + pytest.raises(nx.NetworkXError, nx.reverse, G1) diff --git a/networkx/algorithms/operators/unary.py b/networkx/algorithms/operators/unary.py index 71a6303..a24bbc6 100644 --- a/networkx/algorithms/operators/unary.py +++ b/networkx/algorithms/operators/unary.py @@ -1,20 +1,11 @@ """Unary operations on graphs""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx -from networkx.utils import not_implemented_for -__author__ = """\n""".join(['Aric Hagberg ', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult(dschult@colgate.edu)']) -__all__ = ['complement', 'reverse'] + +__all__ = ["complement", "reverse"] def complement(G): - """Return the graph complement of G. + """Returns the graph complement of G. Parameters ---------- @@ -32,17 +23,16 @@ def complement(G): Graph, node, and edge data are not propagated to the new graph. """ - R = G.fresh_copy() + R = G.__class__() R.add_nodes_from(G) - R.add_edges_from(((n, n2) - for n, nbrs in G.adjacency() - for n2 in G if n2 not in nbrs - if n != n2)) + R.add_edges_from( + ((n, n2) for n, nbrs in G.adjacency() for n2 in G if n2 not in nbrs if n != n2) + ) return R def reverse(G, copy=True): - """Return the reverse directed graph of G. + """Returns the reverse directed graph of G. Parameters ---------- diff --git a/networkx/algorithms/planar_drawing.py b/networkx/algorithms/planar_drawing.py new file mode 100644 index 0000000..a129e10 --- /dev/null +++ b/networkx/algorithms/planar_drawing.py @@ -0,0 +1,464 @@ +import networkx as nx +from collections import defaultdict + + +__all__ = ["combinatorial_embedding_to_pos"] + + +def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): + """Assigns every node a (x, y) position based on the given embedding + + The algorithm iteratively inserts nodes of the input graph in a certain + order and rearranges previously inserted nodes so that the planar drawing + stays valid. This is done efficiently by only maintaining relative + positions during the node placements and calculating the absolute positions + at the end. For more information see [1]_. 
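+
+    A minimal usage sketch (illustrative only):
+
+        >>> import networkx as nx
+        >>> from networkx.algorithms.planar_drawing import (
+        ...     combinatorial_embedding_to_pos,
+        ... )
+        >>> is_planar, embedding = nx.check_planarity(nx.cycle_graph(5))
+        >>> pos = combinatorial_embedding_to_pos(embedding)
+        >>> sorted(pos) == [0, 1, 2, 3, 4]
+        True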
+ + Parameters + ---------- + embedding : nx.PlanarEmbedding + This defines the order of the edges + + fully_triangulate : bool + If set to True the algorithm adds edges to a copy of the input + embedding and makes it chordal. + + Returns + ------- + pos : dict + Maps each node to a tuple that defines the (x, y) position + + References + ---------- + .. [1] M. Chrobak and T.H. Payne: + A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989 + http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677 + + """ + if len(embedding.nodes()) < 4: + # Position the node in any triangle + default_positions = [(0, 0), (2, 0), (1, 1)] + pos = {} + for i, v in enumerate(embedding.nodes()): + pos[v] = default_positions[i] + return pos + + embedding, outer_face = triangulate_embedding(embedding, fully_triangulate) + + # The following dicts map a node to another node + # If a node is not in the key set it means that the node is not yet in G_k + # If a node maps to None then the corresponding subtree does not exist + left_t_child = {} + right_t_child = {} + + # The following dicts map a node to an integer + delta_x = {} + y_coordinate = {} + + node_list = get_canonical_ordering(embedding, outer_face) + + # 1. Phase: Compute relative positions + + # Initialization + v1, v2, v3 = node_list[0][0], node_list[1][0], node_list[2][0] + + delta_x[v1] = 0 + y_coordinate[v1] = 0 + right_t_child[v1] = v3 + left_t_child[v1] = None + + delta_x[v2] = 1 + y_coordinate[v2] = 0 + right_t_child[v2] = None + left_t_child[v2] = None + + delta_x[v3] = 1 + y_coordinate[v3] = 1 + right_t_child[v3] = v2 + left_t_child[v3] = None + + for k in range(3, len(node_list)): + vk, contour_neighbors = node_list[k] + wp = contour_neighbors[0] + wp1 = contour_neighbors[1] + wq = contour_neighbors[-1] + wq1 = contour_neighbors[-2] + adds_mult_tri = len(contour_neighbors) > 2 + + # Stretch gaps: + delta_x[wp1] += 1 + delta_x[wq] += 1 + + delta_x_wp_wq = sum(delta_x[x] for x in contour_neighbors[1:]) + + # Adjust offsets + delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + y_coordinate[vk] = (y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + delta_x[wq] = delta_x_wp_wq - delta_x[vk] + if adds_mult_tri: + delta_x[wp1] -= delta_x[vk] + + # Install v_k: + right_t_child[wp] = vk + right_t_child[vk] = wq + if adds_mult_tri: + left_t_child[vk] = wp1 + right_t_child[wq1] = None + else: + left_t_child[vk] = None + + # 2. 
Phase: Set absolute positions + pos = dict() + pos[v1] = (0, y_coordinate[v1]) + remaining_nodes = [v1] + while remaining_nodes: + parent_node = remaining_nodes.pop() + + # Calculate position for left child + set_position( + parent_node, left_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) + # Calculate position for right child + set_position( + parent_node, right_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) + return pos + + +def set_position(parent, tree, remaining_nodes, delta_x, y_coordinate, pos): + """Helper method to calculate the absolute position of nodes.""" + child = tree[parent] + parent_node_x = pos[parent][0] + if child is not None: + # Calculate pos of child + child_x = parent_node_x + delta_x[child] + pos[child] = (child_x, y_coordinate[child]) + # Remember to calculate pos of its children + remaining_nodes.append(child) + + +def get_canonical_ordering(embedding, outer_face): + """Returns a canonical ordering of the nodes + + The canonical ordering of nodes (v1, ..., vn) must fulfill the following + conditions: + (See Lemma 1 in [2]_) + + - For the subgraph G_k of the input graph induced by v1, ..., vk it holds: + - 2-connected + - internally triangulated + - the edge (v1, v2) is part of the outer face + - For a node v(k+1) the following holds: + - The node v(k+1) is part of the outer face of G_k + - It has at least two neighbors in G_k + - All neighbors of v(k+1) in G_k lie consecutively on the outer face of + G_k (excluding the edge (v1, v2)). + + The algorithm used here starts with G_n (containing all nodes). It first + selects the nodes v1 and v2. And then tries to find the order of the other + nodes by checking which node can be removed in order to fulfill the + conditions mentioned above. This is done by calculating the number of + chords of nodes on the outer face. For more information see [1]_. + + Parameters + ---------- + embedding : nx.PlanarEmbedding + The embedding must be triangulated + outer_face : list + The nodes on the outer face of the graph + + Returns + ------- + ordering : list + A list of tuples `(vk, wp_wq)`. Here `vk` is the node at this position + in the canonical ordering. The element `wp_wq` is a list of nodes that + make up the outer face of G_k. + + References + ---------- + .. [1] Steven Chaplick. + Canonical Orders of Planar Graphs and (some of) Their Applications 2015 + https://wuecampus2.uni-wuerzburg.de/moodle/pluginfile.php/545727/mod_resource/content/0/vg-ss15-vl03-canonical-orders-druckversion.pdf + .. [2] M. Chrobak and T.H. 
Payne:
+        A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989
+        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677
+
+    """
+    v1 = outer_face[0]
+    v2 = outer_face[1]
+    chords = defaultdict(int)  # Maps nodes to the number of their chords
+    marked_nodes = set()
+    ready_to_pick = set(outer_face)
+
+    # Initialize outer_face_ccw_nbr (do not include v1 -> v2)
+    outer_face_ccw_nbr = {}
+    prev_nbr = v2
+    for idx in range(2, len(outer_face)):
+        outer_face_ccw_nbr[prev_nbr] = outer_face[idx]
+        prev_nbr = outer_face[idx]
+    outer_face_ccw_nbr[prev_nbr] = v1
+
+    # Initialize outer_face_cw_nbr (do not include v2 -> v1)
+    outer_face_cw_nbr = {}
+    prev_nbr = v1
+    for idx in range(len(outer_face) - 1, 0, -1):
+        outer_face_cw_nbr[prev_nbr] = outer_face[idx]
+        prev_nbr = outer_face[idx]
+
+    def is_outer_face_nbr(x, y):
+        if x not in outer_face_ccw_nbr:
+            return outer_face_cw_nbr[x] == y
+        if x not in outer_face_cw_nbr:
+            return outer_face_ccw_nbr[x] == y
+        return outer_face_ccw_nbr[x] == y or outer_face_cw_nbr[x] == y
+
+    def is_on_outer_face(x):
+        return x not in marked_nodes and (x in outer_face_ccw_nbr.keys() or x == v1)
+
+    # Initialize number of chords
+    for v in outer_face:
+        for nbr in embedding.neighbors_cw_order(v):
+            if is_on_outer_face(nbr) and not is_outer_face_nbr(v, nbr):
+                chords[v] += 1
+                ready_to_pick.discard(v)
+
+    # Initialize canonical_ordering
+    canonical_ordering = [None] * len(embedding.nodes())  # type: list
+    canonical_ordering[0] = (v1, [])
+    canonical_ordering[1] = (v2, [])
+    ready_to_pick.discard(v1)
+    ready_to_pick.discard(v2)
+
+    for k in range(len(embedding.nodes()) - 1, 1, -1):
+        # 1. Pick v from ready_to_pick
+        v = ready_to_pick.pop()
+        marked_nodes.add(v)
+
+        # v has exactly two neighbors on the outer face (wp and wq)
+        wp = None
+        wq = None
+        # Iterate over neighbors of v to find wp and wq
+        nbr_iterator = iter(embedding.neighbors_cw_order(v))
+        while True:
+            nbr = next(nbr_iterator)
+            if nbr in marked_nodes:
+                # Only consider nodes that are not yet removed
+                continue
+            if is_on_outer_face(nbr):
+                # nbr is either wp or wq
+                if nbr == v1:
+                    wp = v1
+                elif nbr == v2:
+                    wq = v2
+                else:
+                    if outer_face_cw_nbr[nbr] == v:
+                        # nbr is wp
+                        wp = nbr
+                    else:
+                        # nbr is wq
+                        wq = nbr
+            if wp is not None and wq is not None:
+                # We don't need to iterate any further
+                break
+
+        # Obtain new nodes on outer face (neighbors of v from wp to wq)
+        wp_wq = [wp]
+        nbr = wp
+        while nbr != wq:
+            # Get the next neighbor (clockwise on the outer face)
+            next_nbr = embedding[v][nbr]["ccw"]
+            wp_wq.append(next_nbr)
+            # Update outer face
+            outer_face_cw_nbr[nbr] = next_nbr
+            outer_face_ccw_nbr[next_nbr] = nbr
+            # Move to next neighbor of v
+            nbr = next_nbr
+
+        if len(wp_wq) == 2:
+            # There was a chord between wp and wq, decrease number of chords
+            chords[wp] -= 1
+            if chords[wp] == 0:
+                ready_to_pick.add(wp)
+            chords[wq] -= 1
+            if chords[wq] == 0:
+                ready_to_pick.add(wq)
+        else:
+            # Update all chords involving w_(p+1) to w_(q-1)
+            new_face_nodes = set(wp_wq[1:-1])
+            for w in new_face_nodes:
+                # If we do not find a chord for w later we can pick it next
+                ready_to_pick.add(w)
+                for nbr in embedding.neighbors_cw_order(w):
+                    if is_on_outer_face(nbr) and not is_outer_face_nbr(w, nbr):
+                        # There is a chord involving w
+                        chords[w] += 1
+                        ready_to_pick.discard(w)
+                        if nbr not in new_face_nodes:
+                            # Also increase chord for the neighbor
+                            # We only iterate over new_face_nodes
+                            chords[nbr] += 1
+                            ready_to_pick.discard(nbr)
+        # Set the canonical ordering node and the list of contour neighbors
+        canonical_ordering[k] = (v, wp_wq)
+
+    return canonical_ordering
+
+
+def triangulate_face(embedding, v1, v2):
+    """Triangulates the face given by half edge (v1, v2)
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+    v1 : node
+        The half-edge (v1, v2) belongs to the face that gets triangulated
+    v2 : node
+    """
+    _, v3 = embedding.next_face_half_edge(v1, v2)
+    _, v4 = embedding.next_face_half_edge(v2, v3)
+    if v1 == v2 or v1 == v3:
+        # The component has fewer than 3 nodes
+        return
+    while v1 != v4:
+        # Add edge if not already present on other side
+        if embedding.has_edge(v1, v3):
+            # Cannot triangulate at this position
+            v1, v2, v3 = v2, v3, v4
+        else:
+            # Add edge for triangulation
+            embedding.add_half_edge_cw(v1, v3, v2)
+            embedding.add_half_edge_ccw(v3, v1, v2)
+            v1, v2, v3 = v1, v3, v4
+        # Get next node
+        _, v4 = embedding.next_face_half_edge(v2, v3)
+
+
+def triangulate_embedding(embedding, fully_triangulate=True):
+    """Triangulates the embedding.
+
+    Traverses faces of the embedding and adds edges to a copy of the
+    embedding to triangulate it.
+    The method also ensures that the resulting graph is 2-connected by adding
+    edges if the same vertex is contained twice on a path around a face.
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+        The input graph must contain at least 3 nodes.
+
+    fully_triangulate : bool
+        If set to False the face with the most nodes is chosen as the outer
+        face. This outer face does not get triangulated.
+
+    Returns
+    -------
+    (embedding, outer_face) : (nx.PlanarEmbedding, list) tuple
+        The element `embedding` is a new embedding containing all edges from
+        the input embedding and the additional edges to triangulate the graph.
+        The element `outer_face` is a list of nodes that lie on the outer face.
+        If the graph is fully triangulated these are three arbitrary connected
+        nodes.
+
+    """
+    if len(embedding.nodes) <= 1:
+        return embedding, list(embedding.nodes)
+    embedding = nx.PlanarEmbedding(embedding)
+
+    # Get a list with a node for each connected component
+    component_nodes = [next(iter(x)) for x in nx.connected_components(embedding)]
+
+    # 1. Make graph a single component (add edge between components)
+    for i in range(len(component_nodes) - 1):
+        v1 = component_nodes[i]
+        v2 = component_nodes[i + 1]
+        embedding.connect_components(v1, v2)
+
+    # 2. Calculate faces, ensure 2-connectedness and determine outer face
+    outer_face = []  # The face with the largest number of nodes
+    face_list = []
+    edges_visited = set()  # Used to keep track of already visited faces
+    for v in embedding.nodes():
+        for w in embedding.neighbors_cw_order(v):
+            new_face = make_bi_connected(embedding, v, w, edges_visited)
+            if new_face:
+                # Found a new face
+                face_list.append(new_face)
+                if len(new_face) > len(outer_face):
+                    # The face is a candidate to be the outer face
+                    outer_face = new_face
+
+    # 3. Triangulate (internal) faces
+    for face in face_list:
+        if face is not outer_face or fully_triangulate:
+            # Triangulate this face
+            triangulate_face(embedding, face[0], face[1])
+
+    if fully_triangulate:
+        v1 = outer_face[0]
+        v2 = outer_face[1]
+        v3 = embedding[v2][v1]["ccw"]
+        outer_face = [v1, v2, v3]
+
+    return embedding, outer_face
+
+
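# A sketch of the half-edge face walk that triangulate_face and
# make_bi_connected (below) are built on (illustrative aside, not part of
# the imported source): starting from a half edge and repeatedly applying
# next_face_half_edge visits each node on the border of the traversed face.
import networkx as nx

_, emb = nx.check_planarity(nx.cycle_graph(4))
start = (0, 1)
face = [start[0]]
half_edge = emb.next_face_half_edge(*start)
while half_edge != start:
    face.append(half_edge[0])
    half_edge = emb.next_face_half_edge(*half_edge)
assert sorted(face) == [0, 1, 2, 3]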
+def make_bi_connected(embedding, starting_node, outgoing_node, edges_counted):
+    """Triangulate a face and make it 2-connected
+
+    This method also adds all edges on the face to `edges_counted`.
+
+    Parameters
+    ----------
+    embedding: nx.PlanarEmbedding
+        The embedding that defines the faces
+    starting_node : node
+        A node on the face
+    outgoing_node : node
+        A node such that the half edge (starting_node, outgoing_node) belongs
+        to the face
+    edges_counted: set
+        Set of all half-edges that belong to a face that have been visited
+
+    Returns
+    -------
+    face_nodes: list
+        A list of all nodes at the border of this face
+    """
+
+    # Check if the face has already been calculated
+    if (starting_node, outgoing_node) in edges_counted:
+        # This face was already counted
+        return []
+    edges_counted.add((starting_node, outgoing_node))
+
+    # Add all edges to edges_counted which have this face to their left
+    v1 = starting_node
+    v2 = outgoing_node
+    face_list = [starting_node]  # List of nodes around the face
+    face_set = set(face_list)  # Set for faster queries
+    _, v3 = embedding.next_face_half_edge(v1, v2)
+
+    # Move the nodes v1, v2, v3 around the face:
+    while v2 != starting_node or v3 != outgoing_node:
+        if v1 == v2:
+            raise nx.NetworkXException("Invalid half-edge")
+        # cycle is not completed yet
+        if v2 in face_set:
+            # v2 encountered twice: Add edge to ensure 2-connectedness
+            embedding.add_half_edge_cw(v1, v3, v2)
+            embedding.add_half_edge_ccw(v3, v1, v2)
+            edges_counted.add((v2, v3))
+            edges_counted.add((v3, v1))
+            v2 = v1
+        else:
+            face_set.add(v2)
+            face_list.append(v2)
+
+        # set next edge
+        v1 = v2
+        v2, v3 = embedding.next_face_half_edge(v2, v3)
+
+        # remember that this edge has been counted
+        edges_counted.add((v1, v2))
+
+    return face_list
diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py
new file mode 100644
index 0000000..ed87b55
--- /dev/null
+++ b/networkx/algorithms/planarity.py
@@ -0,0 +1,1115 @@
+from collections import defaultdict
+import networkx as nx
+
+__all__ = ["check_planarity", "PlanarEmbedding"]
+
+
+def check_planarity(G, counterexample=False):
+    """Check if a graph is planar and return a counterexample or an embedding.
+
+    A graph is planar iff it can be drawn in a plane without
+    any edge intersections.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    counterexample : bool
+        A Kuratowski subgraph (to prove non-planarity) is only returned if set
+        to True.
+
+    Returns
+    -------
+    (is_planar, certificate) : (bool, NetworkX graph) tuple
+        is_planar is True if the graph is planar.
+        If the graph is planar `certificate` is a PlanarEmbedding
+        otherwise it is a Kuratowski subgraph.
+
+    Notes
+    -----
+    A (combinatorial) embedding consists of cyclic orderings of the incident
+    edges at each vertex. Given such an embedding there are multiple approaches
+    discussed in the literature for drawing the graph (subject to various
+    constraints, e.g. integer coordinates), see e.g. [2].
+
+    The planarity check algorithm and extraction of the combinatorial embedding
+    is based on the Left-Right Planarity Test [1].
+
+    A counterexample is only generated if the corresponding parameter is set,
+    because the complexity of the counterexample generation is higher.
+
+    References
+    ----------
+    .. [1] Ulrik Brandes:
+        The Left-Right Planarity Test
+        2009
+        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.217.9208
+    ..
[2] Takao Nishizeki, Md Saidur Rahman: + Planar graph drawing + Lecture Notes Series on Computing: Volume 12 + 2004 + """ + + planarity_state = LRPlanarity(G) + embedding = planarity_state.lr_planarity() + if embedding is None: + # graph is not planar + if counterexample: + return False, get_counterexample(G) + else: + return False, None + else: + # graph is planar + return True, embedding + + +def check_planarity_recursive(G, counterexample=False): + """Recursive version of :meth:`check_planarity`.""" + planarity_state = LRPlanarity(G) + embedding = planarity_state.lr_planarity_recursive() + if embedding is None: + # graph is not planar + if counterexample: + return False, get_counterexample_recursive(G) + else: + return False, None + else: + # graph is planar + return True, embedding + + +def get_counterexample(G): + """Obtains a Kuratowski subgraph. + + Raises nx.NetworkXException if G is planar. + + The function removes edges such that the graph is still not planar. + At some point the removal of any edge would make the graph planar. + This subgraph must be a Kuratowski subgraph. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + subgraph : NetworkX graph + A Kuratowski subgraph that proves that G is not planar. + + """ + # copy graph + G = nx.Graph(G) + + if check_planarity(G)[0]: + raise nx.NetworkXException("G is planar - no counter example.") + + # find Kuratowski subgraph + subgraph = nx.Graph() + for u in G: + nbrs = list(G[u]) + for v in nbrs: + G.remove_edge(u, v) + if check_planarity(G)[0]: + G.add_edge(u, v) + subgraph.add_edge(u, v) + + return subgraph + + +def get_counterexample_recursive(G): + """Recursive version of :meth:`get_counterexample`. + """ + + # copy graph + G = nx.Graph(G) + + if check_planarity_recursive(G)[0]: + raise nx.NetworkXException("G is planar - no counter example.") + + # find Kuratowski subgraph + subgraph = nx.Graph() + for u in G: + nbrs = list(G[u]) + for v in nbrs: + G.remove_edge(u, v) + if check_planarity_recursive(G)[0]: + G.add_edge(u, v) + subgraph.add_edge(u, v) + + return subgraph + + +class Interval: + """Represents a set of return edges. + + All return edges in an interval induce a same constraint on the contained + edges, which means that all edges must either have a left orientation or + all edges must have a right orientation. + """ + + def __init__(self, low=None, high=None): + self.low = low + self.high = high + + def empty(self): + """Check if the interval is empty""" + return self.low is None and self.high is None + + def copy(self): + """Returns a copy of this interval""" + return Interval(self.low, self.high) + + def conflicting(self, b, planarity_state): + """Returns True if interval I conflicts with edge b""" + return ( + not self.empty() + and planarity_state.lowpt[self.high] > planarity_state.lowpt[b] + ) + + +class ConflictPair: + """Represents a different constraint between two intervals. + + The edges in the left interval must have a different orientation than + the one in the right interval. 
+ """ + + def __init__(self, left=Interval(), right=Interval()): + self.left = left + self.right = right + + def swap(self): + """Swap left and right intervals""" + temp = self.left + self.left = self.right + self.right = temp + + def lowest(self, planarity_state): + """Returns the lowest lowpoint of a conflict pair""" + if self.left.empty(): + return planarity_state.lowpt[self.right.low] + if self.right.empty(): + return planarity_state.lowpt[self.left.low] + return min( + planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low] + ) + + +def top_of_stack(l): + """Returns the element on top of the stack.""" + if not l: + return None + return l[-1] + + +class LRPlanarity: + """A class to maintain the state during planarity check.""" + + __slots__ = [ + "G", + "roots", + "height", + "lowpt", + "lowpt2", + "nesting_depth", + "parent_edge", + "DG", + "adjs", + "ordered_adjs", + "ref", + "side", + "S", + "stack_bottom", + "lowpt_edge", + "left_ref", + "right_ref", + "embedding", + ] + + def __init__(self, G): + # copy G without adding self-loops + self.G = nx.Graph() + self.G.add_nodes_from(G.nodes) + for e in G.edges: + if e[0] != e[1]: + self.G.add_edge(e[0], e[1]) + + self.roots = [] + + # distance from tree root + self.height = defaultdict(lambda: None) + + self.lowpt = {} # height of lowest return point of an edge + self.lowpt2 = {} # height of second lowest return point + self.nesting_depth = {} # for nesting order + + # None -> missing edge + self.parent_edge = defaultdict(lambda: None) + + # oriented DFS graph + self.DG = nx.DiGraph() + self.DG.add_nodes_from(G.nodes) + + self.adjs = {} + self.ordered_adjs = {} + + self.ref = defaultdict(lambda: None) + self.side = defaultdict(lambda: 1) + + # stack of conflict pairs + self.S = [] + self.stack_bottom = {} + self.lowpt_edge = {} + + self.left_ref = {} + self.right_ref = {} + + self.embedding = PlanarEmbedding() + + def lr_planarity(self): + """Execute the LR planarity test. + + Returns + ------- + embedding : dict + If the graph is planar an embedding is returned. Otherwise None. 
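+
+        Examples
+        --------
+        A minimal sketch of direct use (the public entry point is
+        :func:`check_planarity`); the import below assumes this module
+        path:
+
+        >>> import networkx as nx
+        >>> from networkx.algorithms.planarity import LRPlanarity
+        >>> LRPlanarity(nx.complete_graph(4)).lr_planarity() is not None
+        True
+        >>> LRPlanarity(nx.complete_graph(5)).lr_planarity() is None
+        True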
+ """ + if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6: + # graph is not planar + return None + + # make adjacency lists for dfs + for v in self.G: + self.adjs[v] = list(self.G[v]) + + # orientation of the graph by depth first search traversal + for v in self.G: + if self.height[v] is None: + self.height[v] = 0 + self.roots.append(v) + self.dfs_orientation(v) + + # Free no longer used variables + self.G = None + self.lowpt2 = None + self.adjs = None + + # testing + for v in self.DG: # sort the adjacency lists by nesting depth + # note: this sorting leads to non linear time + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + for v in self.roots: + if not self.dfs_testing(v): + return None + + # Free no longer used variables + self.height = None + self.lowpt = None + self.S = None + self.stack_bottom = None + self.lowpt_edge = None + + for e in self.DG.edges: + self.nesting_depth[e] = self.sign(e) * self.nesting_depth[e] + + self.embedding.add_nodes_from(self.DG.nodes) + for v in self.DG: + # sort the adjacency lists again + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + # initialize the embedding + previous_node = None + for w in self.ordered_adjs[v]: + self.embedding.add_half_edge_cw(v, w, previous_node) + previous_node = w + + # Free no longer used variables + self.DG = None + self.nesting_depth = None + self.ref = None + + # compute the complete embedding + for v in self.roots: + self.dfs_embedding(v) + + # Free no longer used variables + self.roots = None + self.parent_edge = None + self.ordered_adjs = None + self.left_ref = None + self.right_ref = None + self.side = None + + return self.embedding + + def lr_planarity_recursive(self): + """Recursive version of :meth:`lr_planarity`.""" + if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6: + # graph is not planar + return None + + # orientation of the graph by depth first search traversal + for v in self.G: + if self.height[v] is None: + self.height[v] = 0 + self.roots.append(v) + self.dfs_orientation_recursive(v) + + # Free no longer used variable + self.G = None + + # testing + for v in self.DG: # sort the adjacency lists by nesting depth + # note: this sorting leads to non linear time + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + for v in self.roots: + if not self.dfs_testing_recursive(v): + return None + + for e in self.DG.edges: + self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e] + + self.embedding.add_nodes_from(self.DG.nodes) + for v in self.DG: + # sort the adjacency lists again + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + # initialize the embedding + previous_node = None + for w in self.ordered_adjs[v]: + self.embedding.add_half_edge_cw(v, w, previous_node) + previous_node = w + + # compute the complete embedding + for v in self.roots: + self.dfs_embedding_recursive(v) + + return self.embedding + + def dfs_orientation(self, v): + """Orient the graph by DFS, compute lowpoints and nesting order. 
+ """ + # the recursion stack + dfs_stack = [v] + # index of next edge to handle in adjacency list of each node + ind = defaultdict(lambda: 0) + # boolean to indicate whether to skip the initial work for an edge + skip_init = defaultdict(lambda: False) + + while dfs_stack: + v = dfs_stack.pop() + e = self.parent_edge[v] + + for w in self.adjs[v][ind[v] :]: + vw = (v, w) + + if not skip_init[vw]: + if (v, w) in self.DG.edges or (w, v) in self.DG.edges: + ind[v] += 1 + continue # the edge was already oriented + + self.DG.add_edge(v, w) # orient the edge + + self.lowpt[vw] = self.height[v] + self.lowpt2[vw] = self.height[v] + if self.height[w] is None: # (v, w) is a tree edge + self.parent_edge[w] = vw + self.height[w] = self.height[v] + 1 + + dfs_stack.append(v) # revisit v after finishing w + dfs_stack.append(w) # visit w next + skip_init[vw] = True # don't redo this block + break # handle next node in dfs_stack (i.e. w) + else: # (v, w) is a back edge + self.lowpt[vw] = self.height[w] + + # determine nesting graph + self.nesting_depth[vw] = 2 * self.lowpt[vw] + if self.lowpt2[vw] < self.height[v]: # chordal + self.nesting_depth[vw] += 1 + + # update lowpoints of parent edge e + if e is not None: + if self.lowpt[vw] < self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw]) + self.lowpt[e] = self.lowpt[vw] + elif self.lowpt[vw] > self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw]) + else: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw]) + + ind[v] += 1 + + def dfs_orientation_recursive(self, v): + """Recursive version of :meth:`dfs_orientation`.""" + e = self.parent_edge[v] + for w in self.G[v]: + if (v, w) in self.DG.edges or (w, v) in self.DG.edges: + continue # the edge was already oriented + vw = (v, w) + self.DG.add_edge(v, w) # orient the edge + + self.lowpt[vw] = self.height[v] + self.lowpt2[vw] = self.height[v] + if self.height[w] is None: # (v, w) is a tree edge + self.parent_edge[w] = vw + self.height[w] = self.height[v] + 1 + self.dfs_orientation_recursive(w) + else: # (v, w) is a back edge + self.lowpt[vw] = self.height[w] + + # determine nesting graph + self.nesting_depth[vw] = 2 * self.lowpt[vw] + if self.lowpt2[vw] < self.height[v]: # chordal + self.nesting_depth[vw] += 1 + + # update lowpoints of parent edge e + if e is not None: + if self.lowpt[vw] < self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw]) + self.lowpt[e] = self.lowpt[vw] + elif self.lowpt[vw] > self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw]) + else: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw]) + + def dfs_testing(self, v): + """Test for LR partition.""" + # the recursion stack + dfs_stack = [v] + # index of next edge to handle in adjacency list of each node + ind = defaultdict(lambda: 0) + # boolean to indicate whether to skip the initial work for an edge + skip_init = defaultdict(lambda: False) + + while dfs_stack: + v = dfs_stack.pop() + e = self.parent_edge[v] + # to indicate whether to skip the final block after the for loop + skip_final = False + + for w in self.ordered_adjs[v][ind[v] :]: + ei = (v, w) + + if not skip_init[ei]: + self.stack_bottom[ei] = top_of_stack(self.S) + + if ei == self.parent_edge[w]: # tree edge + dfs_stack.append(v) # revisit v after finishing w + dfs_stack.append(w) # visit w next + skip_init[ei] = True # don't redo this block + skip_final = True # skip final work after breaking + break # handle next node in dfs_stack (i.e. 
w) + else: # back edge + self.lowpt_edge[ei] = ei + self.S.append(ConflictPair(right=Interval(ei, ei))) + + # integrate new return edges + if self.lowpt[ei] < self.height[v]: + if w == self.ordered_adjs[v][0]: # e_i has return edge + self.lowpt_edge[e] = self.lowpt_edge[ei] + else: # add constraints of e_i + if not self.add_constraints(ei, e): + # graph is not planar + return False + + ind[v] += 1 + + if not skip_final: + # remove back edges returning to parent + if e is not None: # v isn't root + self.remove_back_edges(e) + + return True + + def dfs_testing_recursive(self, v): + """Recursive version of :meth:`dfs_testing`.""" + e = self.parent_edge[v] + for w in self.ordered_adjs[v]: + ei = (v, w) + self.stack_bottom[ei] = top_of_stack(self.S) + if ei == self.parent_edge[w]: # tree edge + if not self.dfs_testing_recursive(w): + return False + else: # back edge + self.lowpt_edge[ei] = ei + self.S.append(ConflictPair(right=Interval(ei, ei))) + + # integrate new return edges + if self.lowpt[ei] < self.height[v]: + if w == self.ordered_adjs[v][0]: # e_i has return edge + self.lowpt_edge[e] = self.lowpt_edge[ei] + else: # add constraints of e_i + if not self.add_constraints(ei, e): + # graph is not planar + return False + + # remove back edges returning to parent + if e is not None: # v isn't root + self.remove_back_edges(e) + return True + + def add_constraints(self, ei, e): + P = ConflictPair() + # merge return edges of e_i into P.right + while True: + Q = self.S.pop() + if not Q.left.empty(): + Q.swap() + if not Q.left.empty(): # not planar + return False + if self.lowpt[Q.right.low] > self.lowpt[e]: + # merge intervals + if P.right.empty(): # topmost interval + P.right = Q.right.copy() + else: + self.ref[P.right.low] = Q.right.high + P.right.low = Q.right.low + else: # align + self.ref[Q.right.low] = self.lowpt_edge[e] + if top_of_stack(self.S) == self.stack_bottom[ei]: + break + # merge conflicting return edges of e_1,...,e_i-1 into P.L + while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack( + self.S + ).right.conflicting(ei, self): + Q = self.S.pop() + if Q.right.conflicting(ei, self): + Q.swap() + if Q.right.conflicting(ei, self): # not planar + return False + # merge interval below lowpt(e_i) into P.R + self.ref[P.right.low] = Q.right.high + if Q.right.low is not None: + P.right.low = Q.right.low + + if P.left.empty(): # topmost interval + P.left = Q.left.copy() + else: + self.ref[P.left.low] = Q.left.high + P.left.low = Q.left.low + + if not (P.left.empty() and P.right.empty()): + self.S.append(P) + return True + + def remove_back_edges(self, e): + u = e[0] + # trim back edges ending at parent u + # drop entire conflict pairs + while self.S and top_of_stack(self.S).lowest(self) == self.height[u]: + P = self.S.pop() + if P.left.low is not None: + self.side[P.left.low] = -1 + + if self.S: # one more conflict pair to consider + P = self.S.pop() + # trim left interval + while P.left.high is not None and P.left.high[1] == u: + P.left.high = self.ref[P.left.high] + if P.left.high is None and P.left.low is not None: + # just emptied + self.ref[P.left.low] = P.right.low + self.side[P.left.low] = -1 + P.left.low = None + # trim right interval + while P.right.high is not None and P.right.high[1] == u: + P.right.high = self.ref[P.right.high] + if P.right.high is None and P.right.low is not None: + # just emptied + self.ref[P.right.low] = P.left.low + self.side[P.right.low] = -1 + P.right.low = None + self.S.append(P) + + # side of e is side of a highest return edge + if 
self.lowpt[e] < self.height[u]: # e has return edge + hl = top_of_stack(self.S).left.high + hr = top_of_stack(self.S).right.high + + if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]): + self.ref[e] = hl + else: + self.ref[e] = hr + + def dfs_embedding(self, v): + """Completes the embedding.""" + # the recursion stack + dfs_stack = [v] + # index of next edge to handle in adjacency list of each node + ind = defaultdict(lambda: 0) + + while dfs_stack: + v = dfs_stack.pop() + + for w in self.ordered_adjs[v][ind[v] :]: + ind[v] += 1 + ei = (v, w) + + if ei == self.parent_edge[w]: # tree edge + self.embedding.add_half_edge_first(w, v) + self.left_ref[v] = w + self.right_ref[v] = w + + dfs_stack.append(v) # revisit v after finishing w + dfs_stack.append(w) # visit w next + break # handle next node in dfs_stack (i.e. w) + else: # back edge + if self.side[ei] == 1: + self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) + else: + self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) + self.left_ref[w] = v + + def dfs_embedding_recursive(self, v): + """Recursive version of :meth:`dfs_embedding`.""" + for w in self.ordered_adjs[v]: + ei = (v, w) + if ei == self.parent_edge[w]: # tree edge + self.embedding.add_half_edge_first(w, v) + self.left_ref[v] = w + self.right_ref[v] = w + self.dfs_embedding_recursive(w) + else: # back edge + if self.side[ei] == 1: + # place v directly after right_ref[w] in embed. list of w + self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) + else: + # place v directly before left_ref[w] in embed. list of w + self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) + self.left_ref[w] = v + + def sign(self, e): + """Resolve the relative side of an edge to the absolute side.""" + # the recursion stack + dfs_stack = [e] + # dict to remember reference edges + old_ref = defaultdict(lambda: None) + + while dfs_stack: + e = dfs_stack.pop() + + if self.ref[e] is not None: + dfs_stack.append(e) # revisit e after finishing self.ref[e] + dfs_stack.append(self.ref[e]) # visit self.ref[e] next + old_ref[e] = self.ref[e] # remember value of self.ref[e] + self.ref[e] = None + else: + self.side[e] *= self.side[old_ref[e]] + + return self.side[e] + + def sign_recursive(self, e): + """Recursive version of :meth:`sign`.""" + if self.ref[e] is not None: + self.side[e] = self.side[e] * self.sign_recursive(self.ref[e]) + self.ref[e] = None + return self.side[e] + + +class PlanarEmbedding(nx.DiGraph): + """Represents a planar graph with its planar embedding. + + The planar embedding is given by a `combinatorial embedding + `_. + + **Neighbor ordering:** + + In comparison to a usual graph structure, the embedding also stores the + order of all neighbors for every vertex. + The order of the neighbors can be given in clockwise (cw) direction or + counterclockwise (ccw) direction. This order is stored as edge attributes + in the underlying directed graph. For the edge (u, v) the edge attribute + 'cw' is set to the neighbor of u that follows immediately after v in + clockwise direction. + + In order for a PlanarEmbedding to be valid it must fulfill multiple + conditions. It is possible to check if these conditions are fulfilled with + the method :meth:`check_structure`. + The conditions are: + + * Edges must go in both directions (because the edge attributes differ) + * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a + correct planar embedding. + * A node with non zero degree must have a node attribute 'first_nbr'. 
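+
+    For illustration (a sketch, not an API guarantee): if node 0 has the
+    clockwise neighbor order 1, 2, then ``G[0][1]["cw"]`` is ``2`` and
+    ``G[0][2]["ccw"]`` is ``1``.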
+
+    As long as a PlanarEmbedding is invalid only the following methods should
+    be called:
+
+    * :meth:`add_half_edge_ccw`
+    * :meth:`add_half_edge_cw`
+    * :meth:`connect_components`
+    * :meth:`add_half_edge_first`
+
+    Even though the graph is a subclass of nx.DiGraph, it can still be used
+    for algorithms that require undirected graphs, because the method
+    :meth:`is_directed` is overridden. This is possible, because a valid
+    PlanarEmbedding must have edges in both directions.
+
+    **Half edges:**
+
+    In methods like `add_half_edge_ccw` the term "half-edge" is used, a term
+    that comes from `doubly connected edge lists
+    <https://en.wikipedia.org/wiki/Doubly_connected_edge_list>`_. It is used
+    to emphasize that the edge is only in one direction and there exists
+    another half-edge in the opposite direction.
+    While conventional edges always have two faces (including outer face)
+    next to them, it is possible to assign each half-edge *exactly one* face.
+    For a half-edge (u, v) that is oriented such that u is below v, the face
+    that belongs to (u, v) is to the right of this half-edge.
+
+    Examples
+    --------
+
+    Create an embedding of a star graph (compare `nx.star_graph(3)`):
+
+    >>> G = nx.PlanarEmbedding()
+    >>> G.add_half_edge_cw(0, 1, None)
+    >>> G.add_half_edge_cw(0, 2, 1)
+    >>> G.add_half_edge_cw(0, 3, 2)
+    >>> G.add_half_edge_cw(1, 0, None)
+    >>> G.add_half_edge_cw(2, 0, None)
+    >>> G.add_half_edge_cw(3, 0, None)
+
+    Alternatively the same embedding can also be defined in counterclockwise
+    orientation. The following results in exactly the same PlanarEmbedding:
+
+    >>> G = nx.PlanarEmbedding()
+    >>> G.add_half_edge_ccw(0, 1, None)
+    >>> G.add_half_edge_ccw(0, 3, 1)
+    >>> G.add_half_edge_ccw(0, 2, 3)
+    >>> G.add_half_edge_ccw(1, 0, None)
+    >>> G.add_half_edge_ccw(2, 0, None)
+    >>> G.add_half_edge_ccw(3, 0, None)
+
+    After creating a graph, it is possible to validate that the
+    PlanarEmbedding object is correct:
+
+    >>> G.check_structure()
+
+    """
+
+    def get_data(self):
+        """Converts the adjacency structure into a better readable structure.
+
+        Returns
+        -------
+        embedding : dict
+            A dict mapping all nodes to a list of neighbors sorted in
+            clockwise order.
+
+        See Also
+        --------
+        set_data
+
+        """
+        embedding = dict()
+        for v in self:
+            embedding[v] = list(self.neighbors_cw_order(v))
+        return embedding
+
+    def set_data(self, data):
+        """Inserts edges according to given sorted neighbor list.
+
+        The input format is the same as the output format of get_data().
+
+        Parameters
+        ----------
+        data : dict
+            A dict mapping all nodes to a list of neighbors sorted in
+            clockwise order.
+
+        See Also
+        --------
+        get_data
+
+        """
+        for v in data:
+            for w in reversed(data[v]):
+                self.add_half_edge_first(v, w)
+
+    def neighbors_cw_order(self, v):
+        """Generator for the neighbors of v in clockwise order.
+
+        Parameters
+        ----------
+        v : node
+
+        Yields
+        ------
+        node
+
+        """
+        if len(self[v]) == 0:
+            # v has no neighbors
+            return
+        start_node = self.nodes[v]["first_nbr"]
+        yield start_node
+        current_node = self[v][start_node]["cw"]
+        while start_node != current_node:
+            yield current_node
+            current_node = self[v][current_node]["cw"]
+
+    def check_structure(self):
+        """Runs without exceptions if this object is valid.
+
+        Checks that the following properties are fulfilled:
+
+        * Edges go in both directions (because the edge attributes differ).
+        * Every edge has a 'cw' and 'ccw' attribute which corresponds to a
+          correct planar embedding.
+        * A node with a degree larger than 0 has a node attribute
+          'first_nbr'.
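+
+        For example (a sketch): an edge added through the plain
+        ``DiGraph.add_edge`` API has no 'cw'/'ccw' attributes, so this
+        method raises an exception for such a graph.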
+ + Running this method verifies that the underlying Graph must be planar. + + Raises + ------ + NetworkXException + This exception is raised with a short explanation if the + PlanarEmbedding is invalid. + """ + # Check fundamental structure + for v in self: + try: + sorted_nbrs = set(self.neighbors_cw_order(v)) + except KeyError as e: + msg = f"Bad embedding. Missing orientation for a neighbor of {v}" + raise nx.NetworkXException(msg) from e + + unsorted_nbrs = set(self[v]) + if sorted_nbrs != unsorted_nbrs: + msg = "Bad embedding. Edge orientations not set correctly." + raise nx.NetworkXException(msg) + for w in self[v]: + # Check if opposite half-edge exists + if not self.has_edge(w, v): + msg = "Bad embedding. Opposite half-edge is missing." + raise nx.NetworkXException(msg) + + # Check planarity + counted_half_edges = set() + for component in nx.connected_components(self): + if len(component) == 1: + # Don't need to check single node component + continue + num_nodes = len(component) + num_half_edges = 0 + num_faces = 0 + for v in component: + for w in self.neighbors_cw_order(v): + num_half_edges += 1 + if (v, w) not in counted_half_edges: + # We encountered a new face + num_faces += 1 + # Mark all half-edges belonging to this face + self.traverse_face(v, w, counted_half_edges) + num_edges = num_half_edges // 2 # num_half_edges is even + if num_nodes - num_edges + num_faces != 2: + # The result does not match Euler's formula + msg = "Bad embedding. The graph does not match Euler's formula" + raise nx.NetworkXException(msg) + + def add_half_edge_ccw(self, start_node, end_node, reference_neighbor): + """Adds a half-edge from start_node to end_node. + + The half-edge is added counter clockwise next to the existing half-edge + (start_node, reference_neighbor). + + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + reference_neighbor: node + End node of reference edge. + + Raises + ------ + NetworkXException + If the reference_neighbor does not exist. + + See Also + -------- + add_half_edge_cw + connect_components + add_half_edge_first + + """ + if reference_neighbor is None: + # The start node has no neighbors + self.add_edge(start_node, end_node) # Add edge to graph + self[start_node][end_node]["cw"] = end_node + self[start_node][end_node]["ccw"] = end_node + self.nodes[start_node]["first_nbr"] = end_node + else: + ccw_reference = self[start_node][reference_neighbor]["ccw"] + self.add_half_edge_cw(start_node, end_node, ccw_reference) + + if reference_neighbor == self.nodes[start_node].get("first_nbr", None): + # Update first neighbor + self.nodes[start_node]["first_nbr"] = end_node + + def add_half_edge_cw(self, start_node, end_node, reference_neighbor): + """Adds a half-edge from start_node to end_node. + + The half-edge is added clockwise next to the existing half-edge + (start_node, reference_neighbor). + + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + reference_neighbor: node + End node of reference edge. + + Raises + ------ + NetworkXException + If the reference_neighbor does not exist. 
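+
+        Examples
+        --------
+        A minimal sketch building an embedded path 0-1-2:
+
+        >>> G = nx.PlanarEmbedding()
+        >>> G.add_half_edge_cw(0, 1, None)
+        >>> G.add_half_edge_cw(1, 0, None)
+        >>> G.add_half_edge_cw(1, 2, 0)
+        >>> G.add_half_edge_cw(2, 1, None)
+        >>> G.check_structure()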
+ + See Also + -------- + add_half_edge_ccw + connect_components + add_half_edge_first + """ + self.add_edge(start_node, end_node) # Add edge to graph + + if reference_neighbor is None: + # The start node has no neighbors + self[start_node][end_node]["cw"] = end_node + self[start_node][end_node]["ccw"] = end_node + self.nodes[start_node]["first_nbr"] = end_node + return + + if reference_neighbor not in self[start_node]: + raise nx.NetworkXException( + "Cannot add edge. Reference neighbor does not exist" + ) + + # Get half-edge at the other side + cw_reference = self[start_node][reference_neighbor]["cw"] + # Alter half-edge data structures + self[start_node][reference_neighbor]["cw"] = end_node + self[start_node][end_node]["cw"] = cw_reference + self[start_node][cw_reference]["ccw"] = end_node + self[start_node][end_node]["ccw"] = reference_neighbor + + def connect_components(self, v, w): + """Adds half-edges for (v, w) and (w, v) at some position. + + This method should only be called if v and w are in different + components, or it might break the embedding. + This especially means that if `connect_components(v, w)` + is called it is not allowed to call `connect_components(w, v)` + afterwards. The neighbor orientations in both directions are + all set correctly after the first call. + + Parameters + ---------- + v : node + w : node + + See Also + -------- + add_half_edge_ccw + add_half_edge_cw + add_half_edge_first + """ + self.add_half_edge_first(v, w) + self.add_half_edge_first(w, v) + + def add_half_edge_first(self, start_node, end_node): + """The added half-edge is inserted at the first position in the order. + + Parameters + ---------- + start_node : node + end_node : node + + See Also + -------- + add_half_edge_ccw + add_half_edge_cw + connect_components + """ + if start_node in self and "first_nbr" in self.nodes[start_node]: + reference = self.nodes[start_node]["first_nbr"] + else: + reference = None + self.add_half_edge_ccw(start_node, end_node, reference) + + def next_face_half_edge(self, v, w): + """Returns the following half-edge left of a face. + + Parameters + ---------- + v : node + w : node + + Returns + ------- + half-edge : tuple + """ + new_node = self[w][v]["ccw"] + return w, new_node + + def traverse_face(self, v, w, mark_half_edges=None): + """Returns nodes on the face that belong to the half-edge (v, w). + + The face that is traversed lies to the right of the half-edge (in an + orientation where v is below w). + + Optionally it is possible to pass a set to which all encountered half + edges are added. Before calling this method, this set must not include + any half-edges that belong to the face. + + Parameters + ---------- + v : node + Start node of half-edge. + w : node + End node of half-edge. + mark_half_edges: set, optional + Set to which all encountered half-edges are added. + + Returns + ------- + face : list + A list of nodes that lie on this face. + """ + if mark_half_edges is None: + mark_half_edges = set() + + face_nodes = [v] + mark_half_edges.add((v, w)) + prev_node = v + cur_node = w + # Last half-edge is (incoming_node, v) + incoming_node = self[v][w]["cw"] + + while cur_node != v or prev_node != incoming_node: + face_nodes.append(cur_node) + prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node) + if (prev_node, cur_node) in mark_half_edges: + raise nx.NetworkXException("Bad planar embedding. 
Impossible face.") + mark_half_edges.add((prev_node, cur_node)) + + return face_nodes + + def is_directed(self): + """A valid PlanarEmbedding is undirected. + + All reverse edges are contained, i.e. for every existing + half-edge (v, w) the half-edge in the opposite direction (w, v) is also + contained. + """ + return False diff --git a/networkx/algorithms/reciprocity.py b/networkx/algorithms/reciprocity.py index 9eb5f1f..89df1d3 100644 --- a/networkx/algorithms/reciprocity.py +++ b/networkx/algorithms/reciprocity.py @@ -1,22 +1,13 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2015-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Haochen Wu (wuhaochen42@gmail.com) """Algorithms to calculate reciprocity in a directed graph.""" from networkx import NetworkXError from ..utils import not_implemented_for -__all__ = ['reciprocity', 'overall_reciprocity'] +__all__ = ["reciprocity", "overall_reciprocity"] -@not_implemented_for('undirected', 'multigraph') +@not_implemented_for("undirected", "multigraph") def reciprocity(G, nodes=None): - """Compute the reciprocity in a directed graph. + r"""Compute the reciprocity in a directed graph. The reciprocity of a directed graph is defined as the ratio of the number of edges pointing in both directions to the total @@ -54,7 +45,7 @@ def reciprocity(G, nodes=None): if nodes in G: reciprocity = next(_reciprocity_iter(G, nodes))[1] if reciprocity is None: - raise NetworkXError('Not defined for isolated nodes.') + raise NetworkXError("Not defined for isolated nodes.") else: return reciprocity @@ -82,7 +73,7 @@ def _reciprocity_iter(G, nodes): yield (node, reciprocity) -@not_implemented_for('undirected', 'multigraph') +@not_implemented_for("undirected", "multigraph") def overall_reciprocity(G): """Compute the reciprocity for the whole graph. diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py new file mode 100644 index 0000000..5b302e2 --- /dev/null +++ b/networkx/algorithms/regular.py @@ -0,0 +1,191 @@ +"""Functions for computing and verifying regular graphs.""" +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["is_regular", "is_k_regular", "k_factor"] + + +def is_regular(G): + """Determines whether the graph ``G`` is a regular graph. + + A regular graph is a graph where each vertex has the same degree. A + regular digraph is a graph where the indegree and outdegree of each + vertex are equal. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + bool + Whether the given graph or digraph is regular. + + """ + n1 = nx.utils.arbitrary_element(G) + if not G.is_directed(): + d1 = G.degree(n1) + return all(d1 == d for _, d in G.degree) + else: + d_in = G.in_degree(n1) + in_regular = all(d_in == d for _, d in G.in_degree) + d_out = G.out_degree(n1) + out_regular = all(d_out == d for _, d in G.out_degree) + return in_regular and out_regular + + +@not_implemented_for("directed") +def is_k_regular(G, k): + """Determines whether the graph ``G`` is a k-regular graph. + + A k-regular graph is a graph where each vertex has degree k. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + bool + Whether the given graph is k-regular. + + """ + return all(d == k for n, d in G.degree) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def k_factor(G, k, matching_weight="weight"): + """Compute a k-factor of G + + A k-factor of a graph is a spanning k-regular subgraph. 
+    A spanning k-regular subgraph of G is a subgraph that contains
+    each vertex of G and a subset of the edges of G such that each
+    vertex has degree k.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    k : int
+        The degree of each vertex in the k-factor.
+
+    matching_weight: string, optional (default='weight')
+        Edge data key corresponding to the edge weight.
+        Used for finding the max-weighted perfect matching.
+        If key not found, uses 1 as weight.
+
+    Returns
+    -------
+    G2 : NetworkX graph
+        A k-factor of G
+
+    References
+    ----------
+    .. [1] "An algorithm for computing simple k-factors.",
+       Meijer, Henk, Yurai Núñez-Rodríguez, and David Rappaport,
+       Information processing letters, 2009.
+    """
+
+    from networkx.algorithms.matching import max_weight_matching
+    from networkx.algorithms.matching import is_perfect_matching
+
+    class LargeKGadget:
+        def __init__(self, k, degree, node, g):
+            self.original = node
+            self.g = g
+            self.k = k
+            self.degree = degree
+
+            self.outer_vertices = [(node, x) for x in range(degree)]
+            self.core_vertices = [(node, x + degree) for x in range(degree - k)]
+
+        def replace_node(self):
+            adj_view = self.g[self.original]
+            neighbors = list(adj_view.keys())
+            edge_attrs = list(adj_view.values())
+            for (outer, neighbor, edge_attr) in zip(
+                self.outer_vertices, neighbors, edge_attrs
+            ):
+                self.g.add_edge(outer, neighbor, **edge_attr)
+            for core in self.core_vertices:
+                for outer in self.outer_vertices:
+                    self.g.add_edge(core, outer)
+            self.g.remove_node(self.original)
+
+        def restore_node(self):
+            self.g.add_node(self.original)
+            for outer in self.outer_vertices:
+                adj_view = self.g[outer]
+                for neighbor, edge_attrs in list(adj_view.items()):
+                    if neighbor not in self.core_vertices:
+                        self.g.add_edge(self.original, neighbor, **edge_attrs)
+                        break
+            self.g.remove_nodes_from(self.outer_vertices)
+            self.g.remove_nodes_from(self.core_vertices)
+
+    class SmallKGadget:
+        def __init__(self, k, degree, node, g):
+            self.original = node
+            self.k = k
+            self.degree = degree
+            self.g = g
+
+            self.outer_vertices = [(node, x) for x in range(degree)]
+            self.inner_vertices = [(node, x + degree) for x in range(degree)]
+            self.core_vertices = [(node, x + 2 * degree) for x in range(k)]
+
+        def replace_node(self):
+            adj_view = self.g[self.original]
+            for (outer, inner, (neighbor, edge_attrs)) in zip(
+                self.outer_vertices, self.inner_vertices, list(adj_view.items())
+            ):
+                self.g.add_edge(outer, inner)
+                self.g.add_edge(outer, neighbor, **edge_attrs)
+            for core in self.core_vertices:
+                for inner in self.inner_vertices:
+                    self.g.add_edge(core, inner)
+            self.g.remove_node(self.original)
+
+        def restore_node(self):
+            self.g.add_node(self.original)
+            for outer in self.outer_vertices:
+                adj_view = self.g[outer]
+                for neighbor, edge_attrs in adj_view.items():
+                    if neighbor not in self.core_vertices:
+                        self.g.add_edge(self.original, neighbor, **edge_attrs)
+                        break
+            self.g.remove_nodes_from(self.outer_vertices)
+            self.g.remove_nodes_from(self.inner_vertices)
+            self.g.remove_nodes_from(self.core_vertices)
+
+    # Step 1
+    if any(d < k for _, d in G.degree):
+        raise nx.NetworkXUnfeasible("Graph contains a vertex with degree less than k")
+    g = G.copy()
+
+    # Step 2
+    gadgets = []
+    for node, degree in list(g.degree):
+        if k < degree / 2.0:
+            gadget = SmallKGadget(k, degree, node, g)
+        else:
+            gadget = LargeKGadget(k, degree, node, g)
+        gadget.replace_node()
+        gadgets.append(gadget)
+
+    # Step 3
+    matching = max_weight_matching(g, maxcardinality=True, weight=matching_weight)
+
+    # Step 4
+    if not is_perfect_matching(g, matching):
+        raise
nx.NetworkXUnfeasible( + "Cannot find k-factor because no perfect matching exists" + ) + + for edge in g.edges(): + if edge not in matching and (edge[1], edge[0]) not in matching: + g.remove_edge(edge[0], edge[1]) + + for gadget in gadgets: + gadget.restore_node() + + return g diff --git a/networkx/algorithms/richclub.py b/networkx/algorithms/richclub.py index eb23907..b9e48cb 100644 --- a/networkx/algorithms/richclub.py +++ b/networkx/algorithms/richclub.py @@ -1,26 +1,15 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Ben Edwards (bedwards@cs.unm.edu) -# Aric Hagberg (hagberg@lanl.gov) """Functions for computing rich-club coefficients.""" -from __future__ import division import networkx as nx -from networkx.utils import accumulate +from itertools import accumulate from networkx.utils import not_implemented_for -__all__ = ['rich_club_coefficient'] +__all__ = ["rich_club_coefficient"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def rich_club_coefficient(G, normalized=True, Q=100): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def rich_club_coefficient(G, normalized=True, Q=100, seed=None): r"""Returns the rich-club coefficient of the graph `G`. For each degree *k*, the *rich-club coefficient* is the ratio of the @@ -44,6 +33,9 @@ def rich_club_coefficient(G, normalized=True, Q=100): If `normalized` is True, perform `Q * m` double-edge swaps, where `m` is the number of edges in `G`, to use as a null-model for normalization. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -53,8 +45,8 @@ def rich_club_coefficient(G, normalized=True, Q=100): Examples -------- >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) - >>> rc = nx.rich_club_coefficient(G, normalized=False) - >>> rc[0] # doctest: +SKIP + >>> rc = nx.rich_club_coefficient(G, normalized=False, seed=42) + >>> rc[0] 0.4 Notes @@ -77,15 +69,16 @@ def rich_club_coefficient(G, normalized=True, Q=100): sequences", 2006. https://arxiv.org/abs/cond-mat/0312028 """ if nx.number_of_selfloops(G) > 0: - raise Exception('rich_club_coefficient is not implemented for ' - 'graphs with self loops.') + raise Exception( + "rich_club_coefficient is not implemented for " "graphs with self loops." + ) rc = _compute_rc(G) if normalized: # make R a copy of G, randomize with Q*|E| double edge swaps # and use rich_club coefficient of R to normalize R = G.copy() E = R.number_of_edges() - nx.double_edge_swap(R, Q * E, max_tries=Q * E * 10) + nx.double_edge_swap(R, Q * E, max_tries=Q * E * 10, seed=seed) rcran = _compute_rc(R) rc = {k: v / rcran[k] for k, v in rc.items()} return rc @@ -111,8 +104,7 @@ def _compute_rc(G): # The list is sorted in reverse order so that we can pop from the # right side of the list later, instead of popping from the left # side of the list, which would have a linear time cost. 
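     # (Each pop from the right is amortized constant time, so the overall
     # pass over the edges stays linear.)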
- edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), - reverse=True) + edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True) ek = G.number_of_edges() k1, k2 = edge_degrees.pop() rc = {} diff --git a/networkx/algorithms/shortest_paths/astar.py b/networkx/algorithms/shortest_paths/astar.py index f7cb02f..7833044 100644 --- a/networkx/algorithms/shortest_paths/astar.py +++ b/networkx/algorithms/shortest_paths/astar.py @@ -1,27 +1,16 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Salim Fadhley -# Matteo Dell'Amico """Shortest paths and path lengths using the A* ("A star") algorithm. """ from heapq import heappush, heappop from itertools import count import networkx as nx -from networkx.utils import not_implemented_for +from networkx.algorithms.shortest_paths.weighted import _weight_function -__all__ = ['astar_path', 'astar_path_length'] +__all__ = ["astar_path", "astar_path_length"] -@not_implemented_for('multigraph') -def astar_path(G, source, target, heuristic=None, weight='weight'): - """Return a list of nodes in a shortest path between source and target +def astar_path(G, source, target, heuristic=None, weight="weight"): + """Returns a list of nodes in a shortest path between source and target using the A* ("A-star") algorithm. There may be more than one shortest path. This returns only one. @@ -41,8 +30,17 @@ def astar_path(G, source, target, heuristic=None, weight='weight'): from the a node to the target. The function takes two nodes arguments and must return a number. - weight: string, optional (default='weight') - Edge data key corresponding to the edge weight. + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. Raises ------ @@ -55,12 +53,12 @@ def astar_path(G, source, target, heuristic=None, weight='weight'): >>> print(nx.astar_path(G, 0, 4)) [0, 1, 2, 3, 4] >>> G = nx.grid_graph(dim=[3, 3]) # nodes are two-tuples (x,y) - >>> nx.set_edge_attributes(G, {e: e[1][0]*2 for e in G.edges()}, 'cost') + >>> nx.set_edge_attributes(G, {e: e[1][0] * 2 for e in G.edges()}, "cost") >>> def dist(a, b): - ... (x1, y1) = a - ... (x2, y2) = b - ... return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5 - >>> print(nx.astar_path(G, (0, 0), (2, 2), heuristic=dist, weight='cost')) + ... (x1, y1) = a + ... (x2, y2) = b + ... 
return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5 + >>> print(nx.astar_path(G, (0, 0), (2, 2), heuristic=dist, weight="cost")) [(0, 0), (0, 1), (0, 2), (1, 2), (2, 2)] @@ -70,8 +68,8 @@ def astar_path(G, source, target, heuristic=None, weight='weight'): """ if source not in G or target not in G: - msg = 'Either source {} or target {} is not in G' - raise nx.NodeNotFound(msg.format(source, target)) + msg = f"Either source {source} or target {target} is not in G" + raise nx.NodeNotFound(msg) if heuristic is None: # The default heuristic is h=0 - same as Dijkstra's algorithm @@ -80,6 +78,7 @@ def heuristic(u, v): push = heappush pop = heappop + weight = _weight_function(G, weight) # The queue stores priority, node, cost to reach, and parent. # Uses Python heapq to keep in priority order. @@ -110,20 +109,25 @@ def heuristic(u, v): return path if curnode in explored: - continue + # Do not override the parent of starting node + if explored[curnode] is None: + continue + + # Skip bad paths that were enqueued before finding a better one + qcost, h = enqueued[curnode] + if qcost < dist: + continue explored[curnode] = parent for neighbor, w in G[curnode].items(): - if neighbor in explored: - continue - ncost = dist + w.get(weight, 1) + ncost = dist + weight(curnode, neighbor, w) if neighbor in enqueued: qcost, h = enqueued[neighbor] - # if qcost < ncost, a longer path to neighbor remains - # enqueued. Removing it would need to filter the whole - # queue, it's better just to leave it there and ignore - # it when we visit the node a second time. + # if qcost <= ncost, a less costly path from the + # neighbor to the source was already determined. + # Therefore, we won't attempt to push this neighbor + # to the queue if qcost <= ncost: continue else: @@ -131,11 +135,11 @@ def heuristic(u, v): enqueued[neighbor] = ncost, h push(queue, (ncost + h, next(c), neighbor, ncost, curnode)) - raise nx.NetworkXNoPath("Node %s not reachable from %s" % (source, target)) + raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") -def astar_path_length(G, source, target, heuristic=None, weight='weight'): - """Return the length of the shortest path between source and target using +def astar_path_length(G, source, target, heuristic=None, weight="weight"): + """Returns the length of the shortest path between source and target using the A* ("A-star") algorithm. Parameters @@ -164,8 +168,9 @@ def astar_path_length(G, source, target, heuristic=None, weight='weight'): """ if source not in G or target not in G: - msg = 'Either source {} or target {} is not in G' - raise nx.NodeNotFound(msg.format(source, target)) + msg = f"Either source {source} or target {target} is not in G" + raise nx.NodeNotFound(msg) + weight = _weight_function(G, weight) path = astar_path(G, source, target, heuristic, weight) - return sum(G[u][v].get(weight, 1) for u, v in zip(path[:-1], path[1:])) + return sum(weight(u, v, G[u][v]) for u, v in zip(path[:-1], path[1:])) diff --git a/networkx/algorithms/shortest_paths/dense.py b/networkx/algorithms/shortest_paths/dense.py index 55c27b0..148a718 100644 --- a/networkx/algorithms/shortest_paths/dense.py +++ b/networkx/algorithms/shortest_paths/dense.py @@ -1,24 +1,16 @@ -# -*- coding: utf-8 -*- """Floyd-Warshall algorithm for shortest paths. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Authors: Aric Hagberg -# Miguel Sozinho Ramalho import networkx as nx -__all__ = ['floyd_warshall', - 'floyd_warshall_predecessor_and_distance', - 'reconstruct_path', - 'floyd_warshall_numpy'] +__all__ = [ + "floyd_warshall", + "floyd_warshall_predecessor_and_distance", + "reconstruct_path", + "floyd_warshall_numpy", +] -def floyd_warshall_numpy(G, nodelist=None, weight='weight'): +def floyd_warshall_numpy(G, nodelist=None, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters @@ -43,28 +35,28 @@ def floyd_warshall_numpy(G, nodelist=None, weight='weight'): ------ Floyd's algorithm is appropriate for finding shortest paths in dense graphs or graphs with negative weights when Dijkstra's - algorithm fails. This algorithm can still fail if there are - negative cycles. It has running time $O(n^3)$ with running space of $O(n^2)$. + algorithm fails. This algorithm can still fail if there are negative + cycles. It has running time $O(n^3)$ with running space of $O(n^2)$. """ try: import numpy as np - except ImportError: - raise ImportError( - "to_numpy_matrix() requires numpy: http://scipy.org/ ") + except ImportError as e: + raise ImportError("to_numpy_array() requires numpy: http://numpy.org/ ") from e # To handle cases when an edge has weight=0, we must make sure that # nonedges are not given the value 0 as well. - A = nx.to_numpy_matrix(G, nodelist=nodelist, multigraph_weight=min, - weight=weight, nonedge=np.inf) + A = nx.to_numpy_array( + G, nodelist=nodelist, multigraph_weight=min, weight=weight, nonedge=np.inf + ) n, m = A.shape - I = np.identity(n) - A[I == 1] = 0 # diagonal elements should be zero + np.fill_diagonal(A, 0) # diagonal elements should be zero for i in range(n): - A = np.minimum(A, A[i, :] + A[:, i]) + # The second term has the same shape as A due to broadcasting + A = np.minimum(A, A[i, :][np.newaxis, :] + A[:, i][:, np.newaxis]) return A -def floyd_warshall_predecessor_and_distance(G, weight='weight'): +def floyd_warshall_predecessor_and_distance(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters @@ -83,11 +75,22 @@ def floyd_warshall_predecessor_and_distance(G, weight='weight'): Examples -------- >>> G = nx.DiGraph() - >>> G.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ... ('u', 'v', 1), ('u', 'x', 2), ('v', 'y', 1), ('x', 'u', 3), - ... ('x', 'v', 5), ('x', 'y', 2), ('y', 's', 7), ('y', 'v', 6)]) + >>> G.add_weighted_edges_from( + ... [ + ... ("s", "u", 10), + ... ("s", "x", 5), + ... ("u", "v", 1), + ... ("u", "x", 2), + ... ("v", "y", 1), + ... ("x", "u", 3), + ... ("x", "v", 5), + ... ("x", "y", 2), + ... ("y", "s", 7), + ... ("y", "v", 6), + ... ] + ... 
) >>> predecessors, _ = nx.floyd_warshall_predecessor_and_distance(G) - >>> print(reconstruct_path('s', 'v', predecessors)) + >>> print(nx.reconstruct_path("s", "v", predecessors)) ['s', 'x', 'u', 'v'] Notes @@ -105,10 +108,11 @@ def floyd_warshall_predecessor_and_distance(G, weight='weight'): all_pairs_shortest_path_length """ from collections import defaultdict + # dictionary-of-dictionaries representation for dist and pred # use some defaultdict magick here # for dist the default is the floating point inf value - dist = defaultdict(lambda: defaultdict(lambda: float('inf'))) + dist = defaultdict(lambda: defaultdict(lambda: float("inf"))) for u in G: dist[u][u] = 0 pred = defaultdict(dict) @@ -123,10 +127,13 @@ def floyd_warshall_predecessor_and_distance(G, weight='weight'): dist[v][u] = min(e_weight, dist[v][u]) pred[v][u] = v for w in G: + dist_w = dist[w] # save recomputation for u in G: + dist_u = dist[u] # save recomputation for v in G: - if dist[u][v] > dist[u][w] + dist[w][v]: - dist[u][v] = dist[u][w] + dist[w][v] + d = dist_u[w] + dist_w[v] + if dist_u[v] > d: + dist_u[v] = d pred[u][v] = pred[w][v] return dict(pred), dict(dist) @@ -174,7 +181,7 @@ def reconstruct_path(source, target, predecessors): return list(reversed(path)) -def floyd_warshall(G, weight='weight'): +def floyd_warshall(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters @@ -207,13 +214,3 @@ def floyd_warshall(G, weight='weight'): """ # could make this its own function to reduce memory costs return floyd_warshall_predecessor_and_distance(G, weight=weight)[1] - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index 0c62bed..97d3a53 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -1,30 +1,23 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg -# Sérgio Nery Simões """ Compute the shortest paths and path lengths between nodes in the graph. These algorithms work with undirected and directed graphs. """ -from __future__ import division import networkx as nx -__all__ = ['shortest_path', 'all_shortest_paths', - 'shortest_path_length', 'average_shortest_path_length', - 'has_path'] +__all__ = [ + "shortest_path", + "all_shortest_paths", + "shortest_path_length", + "average_shortest_path_length", + "has_path", +] def has_path(G, source, target): - """Return *True* if *G* has a path from *source* to *target*. + """Returns *True* if *G* has a path from *source* to *target*. Parameters ---------- @@ -37,13 +30,13 @@ def has_path(G, source, target): Ending node for path """ try: - sp = nx.shortest_path(G, source, target) + nx.shortest_path(G, source, target) except nx.NetworkXNoPath: return False return True -def shortest_path(G, source=None, target=None, weight=None): +def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest paths in the graph. Parameters @@ -63,6 +56,13 @@ def shortest_path(G, source=None, target=None, weight=None): If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path. 
+ Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + Returns ------- path: list or dictionary @@ -82,18 +82,26 @@ def shortest_path(G, source=None, target=None, weight=None): If neither the source nor target are specified return a dictionary of dictionaries with path[source][target]=[list of nodes in path]. + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + ValueError + If `method` is not among the supported options. + Examples -------- >>> G = nx.path_graph(5) >>> print(nx.shortest_path(G, source=0, target=4)) [0, 1, 2, 3, 4] - >>> p = nx.shortest_path(G, source=0) # target not specified + >>> p = nx.shortest_path(G, source=0) # target not specified >>> p[4] [0, 1, 2, 3, 4] - >>> p = nx.shortest_path(G, target=4) # source not specified + >>> p = nx.shortest_path(G, target=4) # source not specified >>> p[0] [0, 1, 2, 3, 4] - >>> p = nx.shortest_path(G) # source, target not specified + >>> p = nx.shortest_path(G) # source, target not specified >>> p[0][4] [0, 1, 2, 3, 4] @@ -106,47 +114,58 @@ def shortest_path(G, source=None, target=None, weight=None): -------- all_pairs_shortest_path() all_pairs_dijkstra_path() + all_pairs_bellman_ford_path() single_source_shortest_path() single_source_dijkstra_path() + single_source_bellman_ford_path() """ + if method not in ("dijkstra", "bellman-ford"): + # so we don't need to check in each branch later + raise ValueError(f"method not supported: {method}") + method = "unweighted" if weight is None else method if source is None: if target is None: # Find paths between all pairs. - if weight is None: + if method == "unweighted": paths = dict(nx.all_pairs_shortest_path(G)) - else: + elif method == "dijkstra": paths = dict(nx.all_pairs_dijkstra_path(G, weight=weight)) + else: # method == 'bellman-ford': + paths = dict(nx.all_pairs_bellman_ford_path(G, weight=weight)) else: # Find paths from all nodes co-accessible to the target. - with nx.utils.reversed(G): - if weight is None: - paths = nx.single_source_shortest_path(G, target) - else: - paths = nx.single_source_dijkstra_path(G, target, - weight=weight) - # Now flip the paths so they go from a source to the target. - for target in paths: - paths[target] = list(reversed(paths[target])) - + if G.is_directed(): + G = G.reverse(copy=False) + if method == "unweighted": + paths = nx.single_source_shortest_path(G, target) + elif method == "dijkstra": + paths = nx.single_source_dijkstra_path(G, target, weight=weight) + else: # method == 'bellman-ford': + paths = nx.single_source_bellman_ford_path(G, target, weight=weight) + # Now flip the paths so they go from a source to the target. + for target in paths: + paths[target] = list(reversed(paths[target])) else: if target is None: # Find paths to all nodes accessible from the source. - if weight is None: + if method == "unweighted": paths = nx.single_source_shortest_path(G, source) - else: - paths = nx.single_source_dijkstra_path(G, source, - weight=weight) + elif method == "dijkstra": + paths = nx.single_source_dijkstra_path(G, source, weight=weight) + else: # method == 'bellman-ford': + paths = nx.single_source_bellman_ford_path(G, source, weight=weight) else: # Find shortest source-target path. 
- if weight is None: + if method == "unweighted": paths = nx.bidirectional_shortest_path(G, source, target) - else: + elif method == "dijkstra": paths = nx.dijkstra_path(G, source, target, weight) - + else: # method == 'bellman-ford': + paths = nx.bellman_ford_path(G, source, target, weight) return paths -def shortest_path_length(G, source=None, target=None, weight=None): +def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest path lengths in the graph. Parameters @@ -168,6 +187,13 @@ def shortest_path_length(G, source=None, target=None, weight=None): If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path length. + Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + Returns ------- length: int or iterator @@ -186,21 +212,27 @@ def shortest_path_length(G, source=None, target=None, weight=None): Raises ------ + NodeNotFound + If `source` is not in `G`. + NetworkXNoPath If no path exists between source and target. + ValueError + If `method` is not among the supported options. + Examples -------- >>> G = nx.path_graph(5) >>> nx.shortest_path_length(G, source=0, target=4) 4 - >>> p = nx.shortest_path_length(G, source=0) # target not specified + >>> p = nx.shortest_path_length(G, source=0) # target not specified >>> p[4] 4 - >>> p = nx.shortest_path_length(G, target=4) # source not specified + >>> p = nx.shortest_path_length(G, target=4) # source not specified >>> p[0] 4 - >>> p = dict(nx.shortest_path_length(G)) # source,target not specified + >>> p = dict(nx.shortest_path_length(G)) # source,target not specified >>> p[0][4] 4 @@ -217,51 +249,62 @@ def shortest_path_length(G, source=None, target=None, weight=None): -------- all_pairs_shortest_path_length() all_pairs_dijkstra_path_length() + all_pairs_bellman_ford_path_length() single_source_shortest_path_length() single_source_dijkstra_path_length() - + single_source_bellman_ford_path_length() """ + if method not in ("dijkstra", "bellman-ford"): + # so we don't need to check in each branch later + raise ValueError(f"method not supported: {method}") + method = "unweighted" if weight is None else method if source is None: if target is None: # Find paths between all pairs. - if weight is None: + if method == "unweighted": paths = nx.all_pairs_shortest_path_length(G) - else: + elif method == "dijkstra": paths = nx.all_pairs_dijkstra_path_length(G, weight=weight) + else: # method == 'bellman-ford': + paths = nx.all_pairs_bellman_ford_path_length(G, weight=weight) else: # Find paths from all nodes co-accessible to the target. - with nx.utils.reversed(G): - if weight is None: - # We need to exhaust the iterator as Graph needs - # to be reversed. 
- path_length = nx.single_source_shortest_path_length - paths = path_length(G, target) - else: - path_length = nx.single_source_dijkstra_path_length - paths = path_length(G, target, weight=weight) + if G.is_directed(): + G = G.reverse(copy=False) + if method == "unweighted": + path_length = nx.single_source_shortest_path_length + paths = path_length(G, target) + elif method == "dijkstra": + path_length = nx.single_source_dijkstra_path_length + paths = path_length(G, target, weight=weight) + else: # method == 'bellman-ford': + path_length = nx.single_source_bellman_ford_path_length + paths = path_length(G, target, weight=weight) else: - if source not in G: - raise nx.NodeNotFound("Source {} not in G".format(source)) - if target is None: # Find paths to all nodes accessible from the source. - if weight is None: + if method == "unweighted": paths = nx.single_source_shortest_path_length(G, source) - else: - paths = nx.single_source_dijkstra_path_length(G, source, - weight=weight) + elif method == "dijkstra": + path_length = nx.single_source_dijkstra_path_length + paths = path_length(G, source, weight=weight) + else: # method == 'bellman-ford': + path_length = nx.single_source_bellman_ford_path_length + paths = path_length(G, source, weight=weight) else: # Find shortest source-target path. - if weight is None: + if method == "unweighted": p = nx.bidirectional_shortest_path(G, source, target) paths = len(p) - 1 - else: + elif method == "dijkstra": paths = nx.dijkstra_path_length(G, source, target, weight) + else: # method == 'bellman-ford': + paths = nx.bellman_ford_path_length(G, source, target, weight) return paths -def average_shortest_path_length(G, weight=None): - r"""Return the average shortest path length. +def average_shortest_path_length(G, weight=None, method=None): + r"""Returns the average shortest path length. The average shortest path length is @@ -282,6 +325,14 @@ def average_shortest_path_length(G, weight=None): If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. + method : string, optional (default = 'unweighted' or 'djikstra') + The algorithm to use to compute the path lengths. + Supported options are 'unweighted', 'dijkstra', 'bellman-ford', + 'floyd-warshall' and 'floyd-warshall-numpy'. + Other method values produce a ValueError. + The default method is 'unweighted' if `weight` is None, + otherwise the default method is 'dijkstra'. + Raises ------ NetworkXPointlessConcept @@ -291,6 +342,9 @@ def average_shortest_path_length(G, weight=None): If `G` is not connected (or not weakly connected, in the case of a directed graph). + ValueError + If `method` is not among the supported options. + Examples -------- >>> G = nx.path_graph(5) @@ -301,18 +355,29 @@ def average_shortest_path_length(G, weight=None): length for each component >>> G = nx.Graph([(1, 2), (3, 4)]) - >>> for C in nx.connected_component_subgraphs(G): + >>> for C in (G.subgraph(c).copy() for c in nx.connected_components(G)): ... print(nx.average_shortest_path_length(C)) 1.0 1.0 """ + single_source_methods = ["unweighted", "dijkstra", "bellman-ford"] + all_pairs_methods = ["floyd-warshall", "floyd-warshall-numpy"] + supported_methods = single_source_methods + all_pairs_methods + + if method is None: + method = "unweighted" if weight is None else "dijkstra" + if method not in supported_methods: + raise ValueError(f"method not supported: {method}") + n = len(G) # For the special case of the null graph, raise an exception, since # there are no paths in the null graph. 
     if n == 0:
-        msg = ('the null graph has no paths, thus there is no average'
-               'shortest path length')
+        msg = (
+            "the null graph has no paths, thus there is no average "
+            "shortest path length"
+        )
         raise nx.NetworkXPointlessConcept(msg)
     # For the special case of the trivial graph, return zero immediately.
     if n == 1:
@@ -322,20 +387,30 @@ def average_shortest_path_length(G, weight=None):
         raise nx.NetworkXError("Graph is not weakly connected.")
     if not G.is_directed() and not nx.is_connected(G):
         raise nx.NetworkXError("Graph is not connected.")
+
     # Compute all-pairs shortest paths.
-    if weight is None:
-        def path_length(v): return nx.single_source_shortest_path_length(G, v)
+    def path_length(v):
+        if method == "unweighted":
+            return nx.single_source_shortest_path_length(G, v)
+        elif method == "dijkstra":
+            return nx.single_source_dijkstra_path_length(G, v, weight=weight)
+        elif method == "bellman-ford":
+            return nx.single_source_bellman_ford_path_length(G, v, weight=weight)
+
+    if method in single_source_methods:
+        # Sum the distances for each (ordered) pair of source and target node.
+        s = sum(l for u in G for l in path_length(u).values())
     else:
-        ssdpl = nx.single_source_dijkstra_path_length
-
-        def path_length(v): return ssdpl(G, v, weight=weight)
-    # Sum the distances for each (ordered) pair of source and target node.
-    s = sum(l for u in G for l in path_length(u).values())
+        if method == "floyd-warshall":
+            all_pairs = nx.floyd_warshall(G, weight=weight)
+            s = sum([sum(t.values()) for t in all_pairs.values()])
+        elif method == "floyd-warshall-numpy":
+            s = nx.floyd_warshall_numpy(G, weight=weight).sum()
     return s / (n * (n - 1))


-def all_shortest_paths(G, source, target, weight=None):
-    """Compute all shortest paths in the graph.
+def all_shortest_paths(G, source, target, weight=None, method="dijkstra"):
+    """Compute all shortest simple paths in the graph.

     Parameters
     ----------
@@ -352,11 +427,26 @@ def all_shortest_paths(G, source, target, weight=None):
         If a string, use this edge attribute as the edge weight.
         Any edge attribute not present defaults to 1.

+    method : string, optional (default = 'dijkstra')
+        The algorithm to use to compute the path lengths.
+        Supported options: 'dijkstra', 'bellman-ford'.
+        Other inputs produce a ValueError.
+        If `weight` is None, unweighted graph methods are used, and this
+        suggestion is ignored.
+
     Returns
     -------
     paths : generator of lists
         A generator of all paths between source and target.

+    Raises
+    ------
+    ValueError
+        If `method` is not among the supported options.
+
+    NetworkXNoPath
+        If `target` cannot be reached from `source`.
+
     Examples
     --------
     >>> G = nx.Graph()
@@ -367,7 +457,10 @@ def all_shortest_paths(G, source, target, weight=None):

     Notes
     -----
-    There may be many shortest paths between the source and target.
+    There may be many shortest paths between the source and target. If G
+    contains zero-weight cycles, this function will not produce all shortest
+    paths because doing so would produce infinitely many paths of unbounded
+    length -- instead, we only produce the shortest simple paths.
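A minimal runnable sketch of that zero-weight caveat (graph and weights chosen here to mirror the zero-weight-edge test added further below in this patch):

import networkx as nx

g = nx.Graph()
nx.add_path(g, [0, 1, 3])        # edges (0, 1) and (1, 3), default weight 1
nx.add_path(g, [0, 1, 2, 3])     # adds edges (1, 2) and (2, 3)
g.edges[1, 2]["weight"] = 0      # the detour 1 -> 2 -> 3 now also costs 1

# Both routes have total weight 2, so both simple paths are yielded.
assert sorted(nx.all_shortest_paths(g, 0, 3, weight="weight")) == [
    [0, 1, 2, 3],
    [0, 1, 3],
]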
     See Also
     --------
     shortest_path()
     single_source_shortest_path()
     all_pairs_shortest_path()
     """
-    if weight is not None:
-        pred, dist = nx.dijkstra_predecessor_and_distance(G, source,
-                                                          weight=weight)
-    else:
+    method = "unweighted" if weight is None else method
+    if method == "unweighted":
         pred = nx.predecessor(G, source)
+    elif method == "dijkstra":
+        pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)
+    elif method == "bellman-ford":
+        pred, dist = nx.bellman_ford_predecessor_and_distance(G, source, weight=weight)
+    else:
+        raise ValueError(f"method not supported: {method}")
+
+    return _build_paths_from_predecessors({source}, target, pred)
+
+
+def _build_paths_from_predecessors(sources, target, pred):
+    """Compute all simple paths to target, given the predecessors found in
+    pred, terminating when any source in sources is found.
+
+    Parameters
+    ----------
+    sources : set
+        Starting nodes for path.
+
+    target : node
+        Ending node for path.
+
+    pred : dict
+        A dictionary of predecessor lists, keyed by node
+
+    Returns
+    -------
+    paths : generator of lists
+        A generator of all paths between source and target.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If `target` cannot be reached from `source`.
-    if source not in G:
-        raise nx.NodeNotFound('Source {} is not in G'.format(source))

+    Notes
+    -----
+    There may be many paths between the sources and target.  If there are
+    cycles among the predecessors, this function will not produce all
+    possible paths because doing so would produce infinitely many paths
+    of unbounded length -- instead, we only produce simple paths.
+
+    See Also
+    --------
+    shortest_path()
+    single_source_shortest_path()
+    all_pairs_shortest_path()
+    all_shortest_paths()
+    bellman_ford_path()
+    """
     if target not in pred:
-        raise nx.NetworkXNoPath()
+        raise nx.NetworkXNoPath(
+            f"Target {target} cannot be reached from given sources"
+        )
+
+    seen = {target}
     stack = [[target, 0]]
     top = 0
     while top >= 0:
         node, i = stack[top]
-        if node == source:
-            yield [p for p, n in reversed(stack[:top + 1])]
+        if node in sources:
+            yield [p for p, n in reversed(stack[: top + 1])]
         if len(pred[node]) > i:
+            stack[top][1] = i + 1
+            next = pred[node][i]
+            if next in seen:
+                continue
+            else:
+                seen.add(next)
             top += 1
             if top == len(stack):
-                stack.append([pred[node][i], 0])
+                stack.append([next, 0])
             else:
-                stack[top] = [pred[node][i], 0]
+                stack[top][:] = [next, 0]
         else:
-            stack[top - 1][1] += 1
+            seen.discard(node)
             top -= 1
diff --git a/networkx/algorithms/shortest_paths/tests/__init__.py b/networkx/algorithms/shortest_paths/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/networkx/algorithms/shortest_paths/tests/test_astar.py b/networkx/algorithms/shortest_paths/tests/test_astar.py
index 975601e..9c5d2d2 100644
--- a/networkx/algorithms/shortest_paths/tests/test_astar.py
+++ b/networkx/algorithms/shortest_paths/tests/test_astar.py
@@ -1,113 +1,166 @@
-from nose.tools import assert_equal
-from nose.tools import assert_raises
-from nose.tools import raises
-
-from math import sqrt
-from random import random, choice
+import pytest

 import networkx as nx
 from networkx.utils import pairwise


-def dist(a, b):
-    """Returns the Euclidean distance between points `a` and `b`."""
-    return sqrt(sum((x1 - x2) ** 2 for x1, x2 in zip(a, b)))
-
-
 class TestAStar:
-
-    def setUp(self):
-        edges = [('s', 'u', 10), ('s', 'x', 5), ('u', 'v', 1), ('u', 'x', 2),
-                 ('v', 'y', 1), ('x', 'u', 3), ('x', 'v', 5), ('x', 
'y', 2), - ('y', 's', 7), ('y', 'v', 6)] - self.XG = nx.DiGraph() - self.XG.add_weighted_edges_from(edges) - - def test_random_graph(self): - """Tests that the A* shortest path agrees with Dijkstra's - shortest path for a random graph. - - """ - - G = nx.Graph() - - points = [(random(), random()) for _ in range(100)] - - # Build a path from points[0] to points[-1] to be sure it exists - for p1, p2 in pairwise(points): - G.add_edge(p1, p2, weight=dist(p1, p2)) - - # Add other random edges - for _ in range(100): - p1, p2 = choice(points), choice(points) - G.add_edge(p1, p2, weight=dist(p1, p2)) - - path = nx.astar_path(G, points[0], points[-1], dist) - assert_equal(path, nx.dijkstra_path(G, points[0], points[-1])) + @classmethod + def setup_class(cls): + edges = [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + cls.XG = nx.DiGraph() + cls.XG.add_weighted_edges_from(edges) + + def test_multiple_optimal_paths(self): + """Tests that A* algorithm finds any of multiple optimal paths""" + heuristic_values = {"a": 1.35, "b": 1.18, "c": 0.67, "d": 0} + + def h(u, v): + return heuristic_values[u] + + graph = nx.Graph() + points = ["a", "b", "c", "d"] + edges = [("a", "b", 0.18), ("a", "c", 0.68), ("b", "c", 0.50), ("c", "d", 0.67)] + + graph.add_nodes_from(points) + graph.add_weighted_edges_from(edges) + + path1 = ["a", "c", "d"] + path2 = ["a", "b", "c", "d"] + assert nx.astar_path(graph, "a", "d", h) in (path1, path2) def test_astar_directed(self): - assert_equal(nx.astar_path(self.XG, 's', 'v'), ['s', 'x', 'u', 'v']) - assert_equal(nx.astar_path_length(self.XG, 's', 'v'), 9) + assert nx.astar_path(self.XG, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(self.XG, "s", "v") == 9 def test_astar_multigraph(self): G = nx.MultiDiGraph(self.XG) - assert_raises(nx.NetworkXNotImplemented, nx.astar_path, G, 's', 'v') - assert_raises(nx.NetworkXNotImplemented, nx.astar_path_length, - G, 's', 'v') + G.add_weighted_edges_from((u, v, 1000) for (u, v) in list(G.edges())) + assert nx.astar_path(G, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(G, "s", "v") == 9 def test_astar_undirected(self): GG = self.XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 - GG['y']['v']['weight'] = 2 - assert_equal(nx.astar_path(GG, 's', 'v'), ['s', 'x', 'u', 'v']) - assert_equal(nx.astar_path_length(GG, 's', 'v'), 8) + GG["u"]["x"]["weight"] = 2 + GG["y"]["v"]["weight"] = 2 + assert nx.astar_path(GG, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(GG, "s", "v") == 8 def test_astar_directed2(self): XG2 = nx.DiGraph() - edges = [(1, 4, 1), (4, 5, 1), (5, 6, 1), (6, 3, 1), (1, 3, 50), - (1, 2, 100), (2, 3, 100)] + edges = [ + (1, 4, 1), + (4, 5, 1), + (5, 6, 1), + (6, 3, 1), + (1, 3, 50), + (1, 2, 100), + (2, 3, 100), + ] XG2.add_weighted_edges_from(edges) - assert_equal(nx.astar_path(XG2, 1, 3), [1, 4, 5, 6, 3]) + assert nx.astar_path(XG2, 1, 3) == [1, 4, 5, 6, 3] def test_astar_undirected2(self): XG3 = nx.Graph() - edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), - (5, 0, 10)] + edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), (5, 0, 10)] XG3.add_weighted_edges_from(edges) - assert_equal(nx.astar_path(XG3, 0, 3), [0, 1, 2, 3]) - assert_equal(nx.astar_path_length(XG3, 0, 3), 15) + assert nx.astar_path(XG3, 0, 3) == [0, 1, 2, 3] + 
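The deleted test_random_graph checked the same property nondeterministically: with an admissible straight-line heuristic on Euclidean edge weights, A* must agree with Dijkstra. A deterministic miniature of that invariant (the four points below are illustrative, not taken from the test suite):

import math

import networkx as nx
from networkx.utils import pairwise

def dist(a, b):
    # Straight-line distance never overestimates, so it is admissible.
    return math.hypot(a[0] - b[0], a[1] - b[1])

pts = [(0, 0), (1, 0), (1, 1), (2, 1)]
G = nx.Graph()
G.add_edges_from((p, q, {"weight": dist(p, q)}) for p, q in pairwise(pts))
G.add_edge((0, 0), (1, 1), weight=dist((0, 0), (1, 1)))  # diagonal shortcut

# The diagonal route costs sqrt(2) + 1 < 3, and both algorithms find it.
assert nx.astar_path(G, (0, 0), (2, 1), heuristic=dist) == nx.dijkstra_path(
    G, (0, 0), (2, 1)
)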
         assert nx.astar_path_length(XG3, 0, 3) == 15

     def test_astar_undirected3(self):
         XG4 = nx.Graph()
-        edges = [(0, 1, 2), (1, 2, 2), (2, 3, 1), (3, 4, 1), (4, 5, 1),
-                 (5, 6, 1), (6, 7, 1), (7, 0, 1)]
+        edges = [
+            (0, 1, 2),
+            (1, 2, 2),
+            (2, 3, 1),
+            (3, 4, 1),
+            (4, 5, 1),
+            (5, 6, 1),
+            (6, 7, 1),
+            (7, 0, 1),
+        ]
         XG4.add_weighted_edges_from(edges)
-        assert_equal(nx.astar_path(XG4, 0, 2), [0, 1, 2])
-        assert_equal(nx.astar_path_length(XG4, 0, 2), 4)
-
-# >>> MXG4=NX.MultiGraph(XG4)
-# >>> MXG4.add_edge(0,1,3)
-# >>> NX.dijkstra_path(MXG4,0,2)
-# [0, 1, 2]
+        assert nx.astar_path(XG4, 0, 2) == [0, 1, 2]
+        assert nx.astar_path_length(XG4, 0, 2) == 4
+
+    def test_astar_directed3(self):
+        """Tests that A* finds the correct path when multiple paths exist
+        and the best one is not expanded first (GH issue #3464).
+        """
+        heuristic_values = {"n5": 36, "n2": 4, "n1": 0, "n0": 0}
+
+        def h(u, v):
+            return heuristic_values[u]
+
+        edges = [("n5", "n1", 11), ("n5", "n2", 9), ("n2", "n1", 1), ("n1", "n0", 32)]
+        graph = nx.DiGraph()
+        graph.add_weighted_edges_from(edges)
+        answer = ["n5", "n2", "n1", "n0"]
+        assert nx.astar_path(graph, "n5", "n0", h) == answer
+
+    def test_astar_directed4(self):
+        """Tests that the parent is not wrongly overridden when a node
+        is re-explored multiple times.
+        """
+        edges = [
+            ("a", "b", 1),
+            ("a", "c", 1),
+            ("b", "d", 2),
+            ("c", "d", 1),
+            ("d", "e", 1),
+        ]
+        graph = nx.DiGraph()
+        graph.add_weighted_edges_from(edges)
+        assert nx.astar_path(graph, "a", "e") == ["a", "c", "d", "e"]
+
+    # >>> MXG4=NX.MultiGraph(XG4)
+    # >>> MXG4.add_edge(0,1,3)
+    # >>> NX.dijkstra_path(MXG4,0,2)
+    # [0, 1, 2]

     def test_astar_w1(self):
         G = nx.DiGraph()
-        G.add_edges_from([('s', 'u'), ('s', 'x'), ('u', 'v'), ('u', 'x'),
-                          ('v', 'y'), ('x', 'u'), ('x', 'w'), ('w', 'v'),
-                          ('x', 'y'), ('y', 's'), ('y', 'v')])
-        assert_equal(nx.astar_path(G, 's', 'v'), ['s', 'u', 'v'])
-        assert_equal(nx.astar_path_length(G, 's', 'v'), 2)
+        G.add_edges_from(
+            [
+                ("s", "u"),
+                ("s", "x"),
+                ("u", "v"),
+                ("u", "x"),
+                ("v", "y"),
+                ("x", "u"),
+                ("x", "w"),
+                ("w", "v"),
+                ("x", "y"),
+                ("y", "s"),
+                ("y", "v"),
+            ]
+        )
+        assert nx.astar_path(G, "s", "v") == ["s", "u", "v"]
+        assert nx.astar_path_length(G, "s", "v") == 2

-    @raises(nx.NodeNotFound)
     def test_astar_nopath(self):
-        nx.astar_path(self.XG, 's', 'moon')
+        with pytest.raises(nx.NodeNotFound):
+            nx.astar_path(self.XG, "s", "moon")

     def test_cycle(self):
         C = nx.cycle_graph(7)
-        assert_equal(nx.astar_path(C, 0, 3), [0, 1, 2, 3])
-        assert_equal(nx.dijkstra_path(C, 0, 4), [0, 6, 5, 4])
+        assert nx.astar_path(C, 0, 3) == [0, 1, 2, 3]
+        assert nx.dijkstra_path(C, 0, 4) == [0, 6, 5, 4]

     def test_unorderable_nodes(self):
         """Tests that A* accommodates nodes that are not orderable.

@@ -115,24 +168,10 @@ def test_unorderable_nodes(self):
         For more information, see issue #554.

         """
-        # TODO In Python 3, instances of the `object` class are
-        # unorderable by default, so we wouldn't need to define our own
-        # class here, we could just instantiate an instance of the
-        # `object` class. However, we still support Python 2; when
-        # support for Python 2 is dropped, this test can be simplified
-        # by replacing `Unorderable()` by `object()`.
-        class Unorderable(object):
-
-            def __le__(self):
-                raise NotImplemented
-
-            def __ge__(self):
-                raise NotImplemented
-
         # Create the cycle graph on four nodes, with nodes represented
         # as (unorderable) Python objects.
-        nodes = [Unorderable() for n in range(4)]
+        nodes = [object() for n in range(4)]
         G = nx.Graph()
         G.add_edges_from(pairwise(nodes, cyclic=True))
         path = nx.astar_path(G, nodes[0], nodes[2])
-        assert_equal(len(path), 3)
+        assert len(path) == 3
diff --git a/networkx/algorithms/shortest_paths/tests/test_dense.py b/networkx/algorithms/shortest_paths/tests/test_dense.py
index 865c76f..1a0c5bc 100644
--- a/networkx/algorithms/shortest_paths/tests/test_dense.py
+++ b/networkx/algorithms/shortest_paths/tests/test_dense.py
@@ -1,123 +1,197 @@
-#!/usr/bin/env python
-from nose.tools import *
+import pytest

 import networkx as nx


 class TestFloyd:
-    def setUp(self):
+    @classmethod
+    def setup_class(cls):
         pass

     def test_floyd_warshall_predecessor_and_distance(self):
         XG = nx.DiGraph()
-        XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
-                                    ('u', 'v', 1), ('u', 'x', 2),
-                                    ('v', 'y', 1), ('x', 'u', 3),
-                                    ('x', 'v', 5), ('x', 'y', 2),
-                                    ('y', 's', 7), ('y', 'v', 6)])
+        XG.add_weighted_edges_from(
+            [
+                ("s", "u", 10),
+                ("s", "x", 5),
+                ("u", "v", 1),
+                ("u", "x", 2),
+                ("v", "y", 1),
+                ("x", "u", 3),
+                ("x", "v", 5),
+                ("x", "y", 2),
+                ("y", "s", 7),
+                ("y", "v", 6),
+            ]
+        )
         path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
-        assert_equal(dist['s']['v'], 9)
-        assert_equal(path['s']['v'], 'u')
-        assert_equal(dist,
-                     {'y': {'y': 0, 'x': 12, 's': 7, 'u': 15, 'v': 6},
-                      'x': {'y': 2, 'x': 0, 's': 9, 'u': 3, 'v': 4},
-                      's': {'y': 7, 'x': 5, 's': 0, 'u': 8, 'v': 9},
-                      'u': {'y': 2, 'x': 2, 's': 9, 'u': 0, 'v': 1},
-                      'v': {'y': 1, 'x': 13, 's': 8, 'u': 16, 'v': 0}})
+        assert dist["s"]["v"] == 9
+        assert path["s"]["v"] == "u"
+        assert dist == {
+            "y": {"y": 0, "x": 12, "s": 7, "u": 15, "v": 6},
+            "x": {"y": 2, "x": 0, "s": 9, "u": 3, "v": 4},
+            "s": {"y": 7, "x": 5, "s": 0, "u": 8, "v": 9},
+            "u": {"y": 2, "x": 2, "s": 9, "u": 0, "v": 1},
+            "v": {"y": 1, "x": 13, "s": 8, "u": 16, "v": 0},
+        }

         GG = XG.to_undirected()
         # make sure we get lower weight
         # to_undirected might choose either edge with weight 2 or weight 3
-        GG['u']['x']['weight'] = 2
+        GG["u"]["x"]["weight"] = 2
         path, dist = nx.floyd_warshall_predecessor_and_distance(GG)
-        assert_equal(dist['s']['v'], 8)
+        assert dist["s"]["v"] == 8
         # skip this test, could be alternate path s-u-v
-#        assert_equal(path['s']['v'],'y')
+        # assert_equal(path['s']['v'],'y')

         G = nx.DiGraph()  # no weights
-        G.add_edges_from([('s', 'u'), ('s', 'x'),
-                          ('u', 'v'), ('u', 'x'),
-                          ('v', 'y'), ('x', 'u'),
-                          ('x', 'v'), ('x', 'y'),
-                          ('y', 's'), ('y', 'v')])
+        G.add_edges_from(
+            [
+                ("s", "u"),
+                ("s", "x"),
+                ("u", "v"),
+                ("u", "x"),
+                ("v", "y"),
+                ("x", "u"),
+                ("x", "v"),
+                ("x", "y"),
+                ("y", "s"),
+                ("y", "v"),
+            ]
+        )
         path, dist = nx.floyd_warshall_predecessor_and_distance(G)
-        assert_equal(dist['s']['v'], 2)
+        assert dist["s"]["v"] == 2
         # skip this test, could be alternate path s-u-v
-        # assert_equal(path['s']['v'],'x')
+        # assert_equal(path['s']['v'],'x')

         # alternate interface
         dist = nx.floyd_warshall(G)
-        assert_equal(dist['s']['v'], 2)
+        assert dist["s"]["v"] == 2

-    @raises(KeyError)
-    def test_reconstruct_path(self):
+        # floyd_warshall_predecessor_and_distance returns
+        # dicts-of-defaultdicts
+        # make sure we don't get empty dictionary
         XG = nx.DiGraph()
-        XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
-                                    ('u', 'v', 1), ('u', 'x', 2),
-                                    ('v', 'y', 1), ('x', 'u', 3),
-                                    ('x', 'v', 5), ('x', 'y', 2),
-                                    ('y', 's', 7), ('y', 'v', 6)])
-        predecessors, _ = nx.floyd_warshall_predecessor_and_distance(XG)
-
-        path = nx.reconstruct_path('s', 'v', predecessors)
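For orientation, a compact standalone sketch of the predecessor-table workflow these rewritten Floyd-Warshall tests exercise (a 7-cycle keeps the shortest paths unambiguous):

import networkx as nx

C = nx.cycle_graph(7)
# All-pairs distances and predecessors in one O(n^3) pass; reconstruct_path
# unwinds the predecessor table into an explicit node list.
pred, dist = nx.floyd_warshall_predecessor_and_distance(C)
assert dist[0][3] == 3 and pred[0][3] == 2
assert nx.reconstruct_path(0, 3, pred) == [0, 1, 2, 3]
assert nx.reconstruct_path(0, 0, pred) == []  # source == target gives []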
- assert_equal(path, ['s', 'x', 'u', 'v']) - - path = nx.reconstruct_path('s', 's', predecessors) - assert_equal(path, []) + XG.add_weighted_edges_from( + [("v", "x", 5.0), ("y", "x", 5.0), ("v", "y", 6.0), ("x", "u", 2.0)] + ) + path, dist = nx.floyd_warshall_predecessor_and_distance(XG) + inf = float("inf") + assert dist == { + "v": {"v": 0, "x": 5.0, "y": 6.0, "u": 7.0}, + "x": {"x": 0, "u": 2.0, "v": inf, "y": inf}, + "y": {"y": 0, "x": 5.0, "v": inf, "u": 7.0}, + "u": {"u": 0, "v": inf, "x": inf, "y": inf}, + } + assert path == { + "v": {"x": "v", "y": "v", "u": "x"}, + "x": {"u": "x"}, + "y": {"x": "y", "u": "x"}, + } - # this part raises the keyError - nx.reconstruct_path('1', '2', predecessors) + def test_reconstruct_path(self): + with pytest.raises(KeyError): + XG = nx.DiGraph() + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + predecessors, _ = nx.floyd_warshall_predecessor_and_distance(XG) + + path = nx.reconstruct_path("s", "v", predecessors) + assert path == ["s", "x", "u", "v"] + + path = nx.reconstruct_path("s", "s", predecessors) + assert path == [] + + # this part raises the keyError + nx.reconstruct_path("1", "2", predecessors) def test_cycle(self): - path, dist = nx.floyd_warshall_predecessor_and_distance( - nx.cycle_graph(7)) - assert_equal(dist[0][3], 3) - assert_equal(path[0][3], 2) - assert_equal(dist[0][4], 3) + path, dist = nx.floyd_warshall_predecessor_and_distance(nx.cycle_graph(7)) + assert dist[0][3] == 3 + assert path[0][3] == 2 + assert dist[0][4] == 3 def test_weighted(self): XG3 = nx.Graph() - XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], [2, 3, 1], - [3, 4, 5], [4, 5, 1], [5, 0, 10]]) + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG3) - assert_equal(dist[0][3], 15) - assert_equal(path[0][3], 2) + assert dist[0][3] == 15 + assert path[0][3] == 2 def test_weighted2(self): XG4 = nx.Graph() - XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], [2, 3, 1], - [3, 4, 1], [4, 5, 1], [5, 6, 1], - [6, 7, 1], [7, 0, 1]]) + XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG4) - assert_equal(dist[0][2], 4) - assert_equal(path[0][2], 1) + assert dist[0][2] == 4 + assert path[0][2] == 1 def test_weight_parameter(self): XG4 = nx.Graph() - XG4.add_edges_from([(0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}), - (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}), - (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}), - (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1})]) - path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, - weight='heavy') - assert_equal(dist[0][2], 4) - assert_equal(path[0][2], 1) + XG4.add_edges_from( + [ + (0, 1, {"heavy": 2}), + (1, 2, {"heavy": 2}), + (2, 3, {"heavy": 1}), + (3, 4, {"heavy": 1}), + (4, 5, {"heavy": 1}), + (5, 6, {"heavy": 1}), + (6, 7, {"heavy": 1}), + (7, 0, {"heavy": 1}), + ] + ) + path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, weight="heavy") + assert dist[0][2] == 4 + assert path[0][2] == 1 def test_zero_distance(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), 
('y', 'v', 6)]) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG) for u in XG: - assert_equal(dist[u][u], 0) + assert dist[u][u] == 0 GG = XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 + GG["u"]["x"]["weight"] = 2 path, dist = nx.floyd_warshall_predecessor_and_distance(GG) for u in GG: @@ -125,14 +199,13 @@ def test_zero_distance(self): def test_zero_weight(self): G = nx.DiGraph() - edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), - (5, 4, 0), (4, 3, -5), (2, 5, -7)] + edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)] G.add_weighted_edges_from(edges) dist = nx.floyd_warshall(G) - assert_equal(dist[1][3], -14) + assert dist[1][3] == -14 G = nx.MultiDiGraph() edges.append((2, 5, -7)) G.add_weighted_edges_from(edges) dist = nx.floyd_warshall(G) - assert_equal(dist[1][3], -14) + assert dist[1][3] == -14 diff --git a/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py b/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py index a3e66ec..bee0696 100644 --- a/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py +++ b/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py @@ -1,68 +1,76 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest -import networkx as nx +import pytest + +numpy = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") -class TestFloydNumpy(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +import networkx as nx - @classmethod - def setupClass(cls): - global numpy - global assert_equal - global assert_almost_equal - try: - import numpy - from numpy.testing import assert_equal, assert_almost_equal - except ImportError: - raise SkipTest('NumPy not available.') +class TestFloydNumpy: def test_cycle_numpy(self): dist = nx.floyd_warshall_numpy(nx.cycle_graph(7)) - assert_equal(dist[0, 3], 3) - assert_equal(dist[0, 4], 3) + assert dist[0, 3] == 3 + assert dist[0, 4] == 3 def test_weighted_numpy_three_edges(self): XG3 = nx.Graph() - XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], [2, 3, 1], - [3, 4, 5], [4, 5, 1], [5, 0, 10]]) + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) dist = nx.floyd_warshall_numpy(XG3) - assert_equal(dist[0, 3], 15) + assert dist[0, 3] == 15 def test_weighted_numpy_two_edges(self): XG4 = nx.Graph() - XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], [2, 3, 1], - [3, 4, 1], [4, 5, 1], [5, 6, 1], - [6, 7, 1], [7, 0, 1]]) + XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) dist = nx.floyd_warshall_numpy(XG4) - assert_equal(dist[0, 2], 4) + assert dist[0, 2] == 4 def test_weight_parameter_numpy(self): XG4 = nx.Graph() - XG4.add_edges_from([(0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}), - (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}), - (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}), - (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1})]) - dist = nx.floyd_warshall_numpy(XG4, weight='heavy') - assert_equal(dist[0, 2], 4) + XG4.add_edges_from( + [ + (0, 1, {"heavy": 2}), + (1, 2, {"heavy": 2}), + (2, 3, {"heavy": 1}), + (3, 4, {"heavy": 1}), + (4, 5, {"heavy": 1}), + (5, 6, 
{"heavy": 1}), + (6, 7, {"heavy": 1}), + (7, 0, {"heavy": 1}), + ] + ) + dist = nx.floyd_warshall_numpy(XG4, weight="heavy") + assert dist[0, 2] == 4 def test_directed_cycle_numpy(self): G = nx.DiGraph() nx.add_cycle(G, [0, 1, 2, 3]) pred, dist = nx.floyd_warshall_predecessor_and_distance(G) D = nx.utils.dict_to_numpy_array(dist) - assert_equal(nx.floyd_warshall_numpy(G), D) + npt.assert_equal(nx.floyd_warshall_numpy(G), D) def test_zero_weight(self): G = nx.DiGraph() edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)] G.add_weighted_edges_from(edges) dist = nx.floyd_warshall_numpy(G) - assert_equal(int(numpy.min(dist)), -14) + assert int(numpy.min(dist)) == -14 G = nx.MultiDiGraph() edges.append((2, 5, -7)) G.add_weighted_edges_from(edges) dist = nx.floyd_warshall_numpy(G) - assert_equal(int(numpy.min(dist)), -14) + assert int(numpy.min(dist)) == -14 diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py index e8f3612..251db31 100644 --- a/networkx/algorithms/shortest_paths/tests/test_generic.py +++ b/networkx/algorithms/shortest_paths/tests/test_generic.py @@ -1,171 +1,338 @@ -from __future__ import division +import pytest -from nose.tools import assert_almost_equal -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true -from nose.tools import assert_raises -from nose.tools import ok_ -from nose.tools import raises import networkx as nx +from networkx.testing import almost_equal def validate_grid_path(r, c, s, t, p): - ok_(isinstance(p, list)) - assert_equal(p[0], s) - assert_equal(p[-1], t) + assert isinstance(p, list) + assert p[0] == s + assert p[-1] == t s = ((s - 1) // c, (s - 1) % c) t = ((t - 1) // c, (t - 1) % c) - assert_equal(len(p), abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1) + assert len(p) == abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1 p = [((u - 1) // c, (u - 1) % c) for u in p] for u in p: - ok_(0 <= u[0] < r) - ok_(0 <= u[1] < c) + assert 0 <= u[0] < r + assert 0 <= u[1] < c for u, v in zip(p[:-1], p[1:]): - ok_((abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)]) + assert (abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)] class TestGenericPath: - - def setUp(self): + @classmethod + def setup_class(cls): from networkx import convert_node_labels_to_integers as cnlti - self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") - self.cycle = nx.cycle_graph(7) - self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + cls.neg_weights = nx.DiGraph() + cls.neg_weights.add_edge(0, 1, weight=1) + cls.neg_weights.add_edge(0, 2, weight=3) + cls.neg_weights.add_edge(1, 3, weight=1) + cls.neg_weights.add_edge(2, 3, weight=-2) def test_shortest_path(self): - assert_equal(nx.shortest_path(self.cycle, 0, 3), [0, 1, 2, 3]) - assert_equal(nx.shortest_path(self.cycle, 0, 4), [0, 6, 5, 4]) + assert nx.shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3] + assert nx.shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4] validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12)) - assert_equal(nx.shortest_path(self.directed_cycle, 0, 3), [0, 1, 2, 3]) + assert nx.shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3] # now with weights - assert_equal(nx.shortest_path(self.cycle, 0, 3, weight='weight'), [0, 1, 2, 3]) - 
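The neg_weights fixture above is the motivating case for the new method argument: with a negative edge, Dijkstra's greedy settling of nodes is unsound, while Bellman-Ford still returns the optimum. A standalone rendering of that fixture:

import networkx as nx

G = nx.DiGraph()
G.add_edge(0, 1, weight=1)
G.add_edge(0, 2, weight=3)
G.add_edge(1, 3, weight=1)
G.add_edge(2, 3, weight=-2)  # 0 -> 2 -> 3 costs 1, beating 0 -> 1 -> 3 at 2
assert nx.shortest_path(G, 0, 3, weight="weight", method="bellman-ford") == [0, 2, 3]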
assert_equal(nx.shortest_path(self.cycle, 0, 4, weight='weight'), [0, 6, 5, 4]) - validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, weight='weight')) - assert_equal(nx.shortest_path(self.directed_cycle, 0, 3, weight='weight'), - [0, 1, 2, 3]) + assert nx.shortest_path(self.cycle, 0, 3, weight="weight") == [0, 1, 2, 3] + assert nx.shortest_path(self.cycle, 0, 4, weight="weight") == [0, 6, 5, 4] + validate_grid_path( + 4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, weight="weight") + ) + assert nx.shortest_path(self.directed_cycle, 0, 3, weight="weight") == [ + 0, + 1, + 2, + 3, + ] + # weights and method specified + assert nx.shortest_path( + self.directed_cycle, 0, 3, weight="weight", method="dijkstra" + ) == [0, 1, 2, 3] + assert nx.shortest_path( + self.directed_cycle, 0, 3, weight="weight", method="bellman-ford" + ) == [0, 1, 2, 3] + # when Dijkstra's will probably (depending on precise implementation) + # incorrectly return [0, 1, 3] instead + assert nx.shortest_path( + self.neg_weights, 0, 3, weight="weight", method="bellman-ford" + ) == [0, 2, 3] + # confirm bad method rejection + pytest.raises(ValueError, nx.shortest_path, self.cycle, method="SPAM") + # confirm absent source rejection + pytest.raises(nx.NodeNotFound, nx.shortest_path, self.cycle, 8) def test_shortest_path_target(self): + answer = {0: [0, 1], 1: [1], 2: [2, 1]} sp = nx.shortest_path(nx.path_graph(3), target=1) - assert_equal(sp, {0: [0, 1], 1: [1], 2: [2, 1]}) + assert sp == answer + # with weights + sp = nx.shortest_path(nx.path_graph(3), target=1, weight="weight") + assert sp == answer + # weights and method specified + sp = nx.shortest_path( + nx.path_graph(3), target=1, weight="weight", method="dijkstra" + ) + assert sp == answer + sp = nx.shortest_path( + nx.path_graph(3), target=1, weight="weight", method="bellman-ford" + ) + assert sp == answer def test_shortest_path_length(self): - assert_equal(nx.shortest_path_length(self.cycle, 0, 3), 3) - assert_equal(nx.shortest_path_length(self.grid, 1, 12), 5) - assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4), 4) + assert nx.shortest_path_length(self.cycle, 0, 3) == 3 + assert nx.shortest_path_length(self.grid, 1, 12) == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4 # now with weights - assert_equal(nx.shortest_path_length(self.cycle, 0, 3, weight='weight'), 3) - assert_equal(nx.shortest_path_length(self.grid, 1, 12, weight='weight'), 5) - assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4, weight='weight'), 4) + assert nx.shortest_path_length(self.cycle, 0, 3, weight="weight") == 3 + assert nx.shortest_path_length(self.grid, 1, 12, weight="weight") == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight="weight") == 4 + # weights and method specified + assert ( + nx.shortest_path_length( + self.cycle, 0, 3, weight="weight", method="dijkstra" + ) + == 3 + ) + assert ( + nx.shortest_path_length( + self.cycle, 0, 3, weight="weight", method="bellman-ford" + ) + == 3 + ) + # confirm bad method rejection + pytest.raises(ValueError, nx.shortest_path_length, self.cycle, method="SPAM") + # confirm absent source rejection + pytest.raises(nx.NodeNotFound, nx.shortest_path_length, self.cycle, 8) def test_shortest_path_length_target(self): + answer = {0: 1, 1: 0, 2: 1} sp = dict(nx.shortest_path_length(nx.path_graph(3), target=1)) - assert_equal(sp[0], 1) - assert_equal(sp[1], 0) - assert_equal(sp[2], 1) + assert sp == answer + # with weights + sp = nx.shortest_path_length(nx.path_graph(3), target=1, 
weight="weight") + assert sp == answer + # weights and method specified + sp = nx.shortest_path_length( + nx.path_graph(3), target=1, weight="weight", method="dijkstra" + ) + assert sp == answer + sp = nx.shortest_path_length( + nx.path_graph(3), target=1, weight="weight", method="bellman-ford" + ) + assert sp == answer def test_single_source_shortest_path(self): p = nx.shortest_path(self.cycle, 0) - assert_equal(p[3], [0, 1, 2, 3]) - assert_equal(p, nx.single_source_shortest_path(self.cycle, 0)) + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_shortest_path(self.cycle, 0) p = nx.shortest_path(self.grid, 1) validate_grid_path(4, 4, 1, 12, p[12]) # now with weights - p = nx.shortest_path(self.cycle, 0, weight='weight') - assert_equal(p[3], [0, 1, 2, 3]) - assert_equal(p, nx.single_source_dijkstra_path(self.cycle, 0)) - p = nx.shortest_path(self.grid, 1, weight='weight') + p = nx.shortest_path(self.cycle, 0, weight="weight") + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_dijkstra_path(self.cycle, 0) + p = nx.shortest_path(self.grid, 1, weight="weight") validate_grid_path(4, 4, 1, 12, p[12]) + # weights and method specified + p = nx.shortest_path(self.cycle, 0, method="dijkstra", weight="weight") + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_shortest_path(self.cycle, 0) + p = nx.shortest_path(self.cycle, 0, method="bellman-ford", weight="weight") + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_shortest_path(self.cycle, 0) def test_single_source_shortest_path_length(self): - l = dict(nx.shortest_path_length(self.cycle, 0)) - assert_equal(l, {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) - assert_equal(l, dict(nx.single_source_shortest_path_length(self.cycle, 0))) - l = dict(nx.shortest_path_length(self.grid, 1)) - assert_equal(l[16], 6) + ans = dict(nx.shortest_path_length(self.cycle, 0)) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.single_source_shortest_path_length(self.cycle, 0)) + ans = dict(nx.shortest_path_length(self.grid, 1)) + assert ans[16] == 6 # now with weights - l = dict(nx.shortest_path_length(self.cycle, 0, weight='weight')) - assert_equal(l, {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) - assert_equal(l, dict(nx.single_source_dijkstra_path_length( - self.cycle, 0))) - l = dict(nx.shortest_path_length(self.grid, 1, weight='weight')) - assert_equal(l[16], 6) + ans = dict(nx.shortest_path_length(self.cycle, 0, weight="weight")) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.single_source_dijkstra_path_length(self.cycle, 0)) + ans = dict(nx.shortest_path_length(self.grid, 1, weight="weight")) + assert ans[16] == 6 + # weights and method specified + ans = dict( + nx.shortest_path_length(self.cycle, 0, weight="weight", method="dijkstra") + ) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.single_source_dijkstra_path_length(self.cycle, 0)) + ans = dict( + nx.shortest_path_length( + self.cycle, 0, weight="weight", method="bellman-ford" + ) + ) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.single_source_bellman_ford_path_length(self.cycle, 0)) def test_all_pairs_shortest_path(self): p = nx.shortest_path(self.cycle) - assert_equal(p[0][3], [0, 1, 2, 3]) - assert_equal(p, dict(nx.all_pairs_shortest_path(self.cycle))) + assert p[0][3] == [0, 1, 2, 3] + assert p == dict(nx.all_pairs_shortest_path(self.cycle)) p = nx.shortest_path(self.grid) validate_grid_path(4, 4, 1, 12, p[1][12]) # now with 
weights - p = nx.shortest_path(self.cycle, weight='weight') - assert_equal(p[0][3], [0, 1, 2, 3]) - assert_equal(p, dict(nx.all_pairs_dijkstra_path(self.cycle))) - p = nx.shortest_path(self.grid, weight='weight') + p = nx.shortest_path(self.cycle, weight="weight") + assert p[0][3] == [0, 1, 2, 3] + assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) + p = nx.shortest_path(self.grid, weight="weight") validate_grid_path(4, 4, 1, 12, p[1][12]) + # weights and method specified + p = nx.shortest_path(self.cycle, weight="weight", method="dijkstra") + assert p[0][3] == [0, 1, 2, 3] + assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) + p = nx.shortest_path(self.cycle, weight="weight", method="bellman-ford") + assert p[0][3] == [0, 1, 2, 3] + assert p == dict(nx.all_pairs_bellman_ford_path(self.cycle)) def test_all_pairs_shortest_path_length(self): - l = dict(nx.shortest_path_length(self.cycle)) - assert_equal(l[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) - assert_equal(l, dict(nx.all_pairs_shortest_path_length(self.cycle))) - l = dict(nx.shortest_path_length(self.grid)) - assert_equal(l[1][16], 6) + ans = dict(nx.shortest_path_length(self.cycle)) + assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.all_pairs_shortest_path_length(self.cycle)) + ans = dict(nx.shortest_path_length(self.grid)) + assert ans[1][16] == 6 # now with weights - l = dict(nx.shortest_path_length(self.cycle, weight='weight')) - assert_equal(l[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) - assert_equal(l, dict(nx.all_pairs_dijkstra_path_length(self.cycle))) - l = dict(nx.shortest_path_length(self.grid, weight='weight')) - assert_equal(l[1][16], 6) + ans = dict(nx.shortest_path_length(self.cycle, weight="weight")) + assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle)) + ans = dict(nx.shortest_path_length(self.grid, weight="weight")) + assert ans[1][16] == 6 + # weights and method specified + ans = dict( + nx.shortest_path_length(self.cycle, weight="weight", method="dijkstra") + ) + assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle)) + ans = dict( + nx.shortest_path_length(self.cycle, weight="weight", method="bellman-ford") + ) + assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == dict(nx.all_pairs_bellman_ford_path_length(self.cycle)) def test_has_path(self): G = nx.Graph() nx.add_path(G, range(3)) nx.add_path(G, range(3, 5)) - assert_true(nx.has_path(G, 0, 2)) - assert_false(nx.has_path(G, 0, 4)) + assert nx.has_path(G, 0, 2) + assert not nx.has_path(G, 0, 4) def test_all_shortest_paths(self): G = nx.Graph() nx.add_path(G, [0, 1, 2, 3]) nx.add_path(G, [0, 10, 20, 3]) - assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]], - sorted(nx.all_shortest_paths(G, 0, 3))) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(nx.all_shortest_paths(G, 0, 3)) + # with weights + G = nx.Graph() + nx.add_path(G, [0, 1, 2, 3]) + nx.add_path(G, [0, 10, 20, 3]) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + nx.all_shortest_paths(G, 0, 3, weight="weight") + ) + # weights and method specified + G = nx.Graph() + nx.add_path(G, [0, 1, 2, 3]) + nx.add_path(G, [0, 10, 20, 3]) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + nx.all_shortest_paths(G, 0, 3, weight="weight", method="dijkstra") + ) + G = nx.Graph() + nx.add_path(G, [0, 1, 2, 3]) + nx.add_path(G, [0, 10, 20, 3]) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + 
nx.all_shortest_paths(G, 0, 3, weight="weight", method="bellman-ford") + ) - @raises(nx.NetworkXNoPath) def test_all_shortest_paths_raise(self): - G = nx.path_graph(4) - G.add_node(4) - paths = list(nx.all_shortest_paths(G, 0, 4)) + with pytest.raises(nx.NetworkXNoPath): + G = nx.path_graph(4) + G.add_node(4) + list(nx.all_shortest_paths(G, 0, 4)) + def test_bad_method(self): + with pytest.raises(ValueError): + G = nx.path_graph(2) + list(nx.all_shortest_paths(G, 0, 1, weight="weight", method="SPAM")) -class TestAverageShortestPathLength(object): + def test_all_shortest_paths_zero_weight_edge(self): + g = nx.Graph() + nx.add_path(g, [0, 1, 3]) + nx.add_path(g, [0, 1, 2, 3]) + g.edges[1, 2]["weight"] = 0 + paths30d = list( + nx.all_shortest_paths(g, 3, 0, weight="weight", method="dijkstra") + ) + paths03d = list( + nx.all_shortest_paths(g, 0, 3, weight="weight", method="dijkstra") + ) + paths30b = list( + nx.all_shortest_paths(g, 3, 0, weight="weight", method="bellman-ford") + ) + paths03b = list( + nx.all_shortest_paths(g, 0, 3, weight="weight", method="bellman-ford") + ) + assert sorted(paths03d) == sorted(p[::-1] for p in paths30d) + assert sorted(paths03d) == sorted(p[::-1] for p in paths30b) + assert sorted(paths03b) == sorted(p[::-1] for p in paths30b) + +class TestAverageShortestPathLength: def test_cycle_graph(self): - l = nx.average_shortest_path_length(nx.cycle_graph(7)) - assert_almost_equal(l, 2) + ans = nx.average_shortest_path_length(nx.cycle_graph(7)) + assert almost_equal(ans, 2) def test_path_graph(self): - l = nx.average_shortest_path_length(nx.path_graph(5)) - assert_almost_equal(l, 2) + ans = nx.average_shortest_path_length(nx.path_graph(5)) + assert almost_equal(ans, 2) def test_weighted(self): G = nx.Graph() nx.add_cycle(G, range(7), weight=2) - l = nx.average_shortest_path_length(G, weight='weight') - assert_almost_equal(l, 4) + ans = nx.average_shortest_path_length(G, weight="weight") + assert almost_equal(ans, 4) G = nx.Graph() nx.add_path(G, range(5), weight=2) - l = nx.average_shortest_path_length(G, weight='weight') - assert_almost_equal(l, 4) + ans = nx.average_shortest_path_length(G, weight="weight") + assert almost_equal(ans, 4) + + def test_specified_methods(self): + G = nx.Graph() + nx.add_cycle(G, range(7), weight=2) + ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") + assert almost_equal(ans, 4) + ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") + assert almost_equal(ans, 4) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall" + ) + assert almost_equal(ans, 4) + + G = nx.Graph() + nx.add_path(G, range(5), weight=2) + ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") + assert almost_equal(ans, 4) + ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") + assert almost_equal(ans, 4) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall" + ) + assert almost_equal(ans, 4) def test_disconnected(self): g = nx.Graph() g.add_nodes_from(range(3)) g.add_edge(0, 1) - assert_raises(nx.NetworkXError, nx.average_shortest_path_length, g) + pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g) g = g.to_directed() - assert_raises(nx.NetworkXError, nx.average_shortest_path_length, g) + pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g) def test_trivial_graph(self): """Tests that the trivial graph has average path length zero, @@ -176,8 +343,39 @@ def 
test_trivial_graph(self): """ G = nx.trivial_graph() - assert_equal(nx.average_shortest_path_length(G), 0) + assert nx.average_shortest_path_length(G) == 0 - @raises(nx.NetworkXPointlessConcept) def test_null_graph(self): - nx.average_shortest_path_length(nx.null_graph()) + with pytest.raises(nx.NetworkXPointlessConcept): + nx.average_shortest_path_length(nx.null_graph()) + + def test_bad_method(self): + with pytest.raises(ValueError): + G = nx.path_graph(2) + nx.average_shortest_path_length(G, weight="weight", method="SPAM") + + +class TestAverageShortestPathLengthNumpy: + @classmethod + def setup_class(cls): + global numpy + global npt + import pytest + + numpy = pytest.importorskip("numpy") + npt = pytest.importorskip("numpy.testing") + + def test_specified_methods_numpy(self): + G = nx.Graph() + nx.add_cycle(G, range(7), weight=2) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall-numpy" + ) + npt.assert_almost_equal(ans, 4) + + G = nx.Graph() + nx.add_path(G, range(5), weight=2) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall-numpy" + ) + npt.assert_almost_equal(ans, 4) diff --git a/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/networkx/algorithms/shortest_paths/tests/test_unweighted.py index bef300e..96708f0 100644 --- a/networkx/algorithms/shortest_paths/tests/test_unweighted.py +++ b/networkx/algorithms/shortest_paths/tests/test_unweighted.py @@ -1,118 +1,116 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx def validate_grid_path(r, c, s, t, p): - ok_(isinstance(p, list)) - assert_equal(p[0], s) - assert_equal(p[-1], t) + assert isinstance(p, list) + assert p[0] == s + assert p[-1] == t s = ((s - 1) // c, (s - 1) % c) t = ((t - 1) // c, (t - 1) % c) - assert_equal(len(p), abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1) + assert len(p) == abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1 p = [((u - 1) // c, (u - 1) % c) for u in p] for u in p: - ok_(0 <= u[0] < r) - ok_(0 <= u[1] < c) + assert 0 <= u[0] < r + assert 0 <= u[1] < c for u, v in zip(p[:-1], p[1:]): - ok_((abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)]) + assert (abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)] class TestUnweightedPath: - - def setUp(self): + @classmethod + def setup_class(cls): from networkx import convert_node_labels_to_integers as cnlti - self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") - self.cycle = nx.cycle_graph(7) - self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) def test_bidirectional_shortest_path(self): - assert_equal(nx.bidirectional_shortest_path(self.cycle, 0, 3), - [0, 1, 2, 3]) - assert_equal(nx.bidirectional_shortest_path(self.cycle, 0, 4), - [0, 6, 5, 4]) - validate_grid_path(4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12)) - assert_equal(nx.bidirectional_shortest_path(self.directed_cycle, 0, 3), - [0, 1, 2, 3]) + assert nx.bidirectional_shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3] + assert nx.bidirectional_shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4] + validate_grid_path( + 4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12) + ) + assert nx.bidirectional_shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3] def test_shortest_path_length(self): - assert_equal(nx.shortest_path_length(self.cycle, 0, 3), 3) - 
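Several of the unweighted tests below hinge on edge direction; a small sketch of the single-target variants, where paths *to* the target must follow the arrows:

import networkx as nx

DG = nx.cycle_graph(7, create_using=nx.DiGraph())  # edges 0->1->...->6->0
# Reaching node 0 from node 3 means going the long way round the cycle.
assert nx.single_target_shortest_path(DG, 0)[3] == [3, 4, 5, 6, 0]
assert dict(nx.single_target_shortest_path_length(DG, 0))[3] == 4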
assert_equal(nx.shortest_path_length(self.grid, 1, 12), 5) - assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4), 4) + assert nx.shortest_path_length(self.cycle, 0, 3) == 3 + assert nx.shortest_path_length(self.grid, 1, 12) == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4 # now with weights - assert_equal(nx.shortest_path_length(self.cycle, 0, 3, weight=True), 3) - assert_equal(nx.shortest_path_length(self.grid, 1, 12, weight=True), 5) - assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4, weight=True), 4) + assert nx.shortest_path_length(self.cycle, 0, 3, weight=True) == 3 + assert nx.shortest_path_length(self.grid, 1, 12, weight=True) == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight=True) == 4 def test_single_source_shortest_path(self): p = nx.single_source_shortest_path(self.directed_cycle, 3) - assert_equal(p[0], [3, 4, 5, 6, 0]) + assert p[0] == [3, 4, 5, 6, 0] p = nx.single_source_shortest_path(self.cycle, 0) - assert_equal(p[3], [0, 1, 2, 3]) + assert p[3] == [0, 1, 2, 3] p = nx.single_source_shortest_path(self.cycle, 0, cutoff=0) - assert_equal(p, {0: [0]}) + assert p == {0: [0]} def test_single_source_shortest_path_length(self): pl = nx.single_source_shortest_path_length lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert_equal(dict(pl(self.cycle, 0)), lengths) + assert dict(pl(self.cycle, 0)) == lengths lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6} - assert_equal(dict(pl(self.directed_cycle, 0)), lengths) + assert dict(pl(self.directed_cycle, 0)) == lengths def test_single_target_shortest_path(self): p = nx.single_target_shortest_path(self.directed_cycle, 0) - assert_equal(p[3], [3, 4, 5, 6, 0]) + assert p[3] == [3, 4, 5, 6, 0] p = nx.single_target_shortest_path(self.cycle, 0) - assert_equal(p[3], [3, 2, 1, 0]) + assert p[3] == [3, 2, 1, 0] p = nx.single_target_shortest_path(self.cycle, 0, cutoff=0) - assert_equal(p, {0: [0]}) + assert p == {0: [0]} def test_single_target_shortest_path_length(self): pl = nx.single_target_shortest_path_length lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert_equal(dict(pl(self.cycle, 0)), lengths) + assert dict(pl(self.cycle, 0)) == lengths lengths = {0: 0, 1: 6, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} - assert_equal(dict(pl(self.directed_cycle, 0)), lengths) + assert dict(pl(self.directed_cycle, 0)) == lengths def test_all_pairs_shortest_path(self): p = dict(nx.all_pairs_shortest_path(self.cycle)) - assert_equal(p[0][3], [0, 1, 2, 3]) + assert p[0][3] == [0, 1, 2, 3] p = dict(nx.all_pairs_shortest_path(self.grid)) validate_grid_path(4, 4, 1, 12, p[1][12]) def test_all_pairs_shortest_path_length(self): l = dict(nx.all_pairs_shortest_path_length(self.cycle)) - assert_equal(l[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) + assert l[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} l = dict(nx.all_pairs_shortest_path_length(self.grid)) - assert_equal(l[1][16], 6) + assert l[1][16] == 6 def test_predecessor_path(self): G = nx.path_graph(4) - assert_equal(nx.predecessor(G, 0), {0: [], 1: [0], 2: [1], 3: [2]}) - assert_equal(nx.predecessor(G, 0, 3), [2]) + assert nx.predecessor(G, 0) == {0: [], 1: [0], 2: [1], 3: [2]} + assert nx.predecessor(G, 0, 3) == [2] def test_predecessor_cycle(self): G = nx.cycle_graph(4) pred = nx.predecessor(G, 0) - assert_equal(pred[0], []) - assert_equal(pred[1], [0]) - assert_true(pred[2] in [[1, 3], [3, 1]]) - assert_equal(pred[3], [0]) + assert pred[0] == [] + assert pred[1] == [0] + assert pred[2] in [[1, 3], [3, 1]] + assert pred[3] == 
[0] def test_predecessor_cutoff(self): G = nx.path_graph(4) p = nx.predecessor(G, 0, 3) - assert_false(4 in p) + assert 4 not in p def test_predecessor_target(self): G = nx.path_graph(4) p = nx.predecessor(G, 0, 3) - assert_equal(p, [2]) + assert p == [2] p = nx.predecessor(G, 0, 3, cutoff=2) - assert_equal(p, []) + assert p == [] p, s = nx.predecessor(G, 0, 3, return_seen=True) - assert_equal(p, [2]) - assert_equal(s, 3) + assert p == [2] + assert s == 3 p, s = nx.predecessor(G, 0, 3, cutoff=2, return_seen=True) - assert_equal(p, []) - assert_equal(s, -1) + assert p == [] + assert s == -1 diff --git a/networkx/algorithms/shortest_paths/tests/test_weighted.py b/networkx/algorithms/shortest_paths/tests/test_weighted.py index b4f732c..b234618 100644 --- a/networkx/algorithms/shortest_paths/tests/test_weighted.py +++ b/networkx/algorithms/shortest_paths/tests/test_weighted.py @@ -1,33 +1,36 @@ -from __future__ import division - -from nose.tools import assert_equal -from nose.tools import assert_true -from nose.tools import assert_false -from nose.tools import assert_raises -from nose.tools import raises +import pytest import networkx as nx from networkx.utils import pairwise -def validate_path(G, s, t, soln_len, path): - assert_equal(path[0], s) - assert_equal(path[-1], t) - if not G.is_multigraph(): - computed = sum(G[u][v].get('weight', 1) for u, v in pairwise(path)) - assert_equal(soln_len, computed) +def validate_path(G, s, t, soln_len, path, weight="weight"): + assert path[0] == s + assert path[-1] == t + + if callable(weight): + weight_f = weight else: - computed = sum(min(e.get('weight', 1) for e in G[u][v].values()) - for u, v in pairwise(path)) - assert_equal(soln_len, computed) + if G.is_multigraph(): + + def weight_f(u, v, d): + return min(e.get(weight, 1) for e in d.values()) + + else: + + def weight_f(u, v, d): + return d.get(weight, 1) + + computed = sum(weight_f(u, v, G[u][v]) for u, v in pairwise(path)) + assert soln_len == computed -def validate_length_path(G, s, t, soln_len, length, path): - assert_equal(soln_len, length) - validate_path(G, s, t, length, path) +def validate_length_path(G, s, t, soln_len, length, path, weight="weight"): + assert soln_len == length + validate_path(G, s, t, length, path, weight=weight) -class WeightedTestBase(object): +class WeightedTestBase: """Base class for test classes that test functions for computing shortest paths in weighted graphs. 
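The predecessor tests above reduce to one idea: nx.predecessor runs a BFS from the source and records, for each node, the neighbours that immediately precede it on some shortest path. A self-contained sketch:

import networkx as nx

G = nx.path_graph(4)  # 0 - 1 - 2 - 3
assert nx.predecessor(G, 0) == {0: [], 1: [0], 2: [1], 3: [2]}
assert nx.predecessor(G, 0, 3) == [2]           # predecessors of the target only
assert nx.predecessor(G, 0, 3, cutoff=2) == []  # target lies beyond the cutoff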
@@ -36,183 +39,304 @@ class WeightedTestBase(object): def setup(self): """Creates some graphs for use in the unit tests.""" cnlti = nx.convert_node_labels_to_integers - self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, - ordering="sorted") + self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") self.cycle = nx.cycle_graph(7) self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) self.XG = nx.DiGraph() - self.XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + self.XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) self.MXG = nx.MultiDiGraph(self.XG) - self.MXG.add_edge('s', 'u', weight=15) + self.MXG.add_edge("s", "u", weight=15) self.XG2 = nx.DiGraph() - self.XG2.add_weighted_edges_from([[1, 4, 1], [4, 5, 1], - [5, 6, 1], [6, 3, 1], - [1, 3, 50], [1, 2, 100], - [2, 3, 100]]) + self.XG2.add_weighted_edges_from( + [ + [1, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 3, 1], + [1, 3, 50], + [1, 2, 100], + [2, 3, 100], + ] + ) self.XG3 = nx.Graph() - self.XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], - [2, 3, 1], [3, 4, 5], - [4, 5, 1], [5, 0, 10]]) + self.XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) self.XG4 = nx.Graph() - self.XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], - [2, 3, 1], [3, 4, 1], - [4, 5, 1], [5, 6, 1], - [6, 7, 1], [7, 0, 1]]) + self.XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) self.MXG4 = nx.MultiGraph(self.XG4) self.MXG4.add_edge(0, 1, weight=3) self.G = nx.DiGraph() # no weights - self.G.add_edges_from([('s', 'u'), ('s', 'x'), - ('u', 'v'), ('u', 'x'), - ('v', 'y'), ('x', 'u'), - ('x', 'v'), ('x', 'y'), - ('y', 's'), ('y', 'v')]) + self.G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", "u"), + ("x", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) class TestWeightedPath(WeightedTestBase): - def test_dijkstra(self): - (D, P) = nx.single_source_dijkstra(self.XG, 's') - validate_path(self.XG, 's', 'v', 9, P['v']) - assert_equal(D['v'], 9) + (D, P) = nx.single_source_dijkstra(self.XG, "s") + validate_path(self.XG, "s", "v", 9, P["v"]) + assert D["v"] == 9 validate_path( - self.XG, 's', 'v', 9, nx.single_source_dijkstra_path(self.XG, 's')['v']) - assert_equal(dict( - nx.single_source_dijkstra_path_length(self.XG, 's'))['v'], 9) + self.XG, "s", "v", 9, nx.single_source_dijkstra_path(self.XG, "s")["v"] + ) + assert dict(nx.single_source_dijkstra_path_length(self.XG, "s"))["v"] == 9 validate_path( - self.XG, 's', 'v', 9, nx.single_source_dijkstra(self.XG, 's')[1]['v']) + self.XG, "s", "v", 9, nx.single_source_dijkstra(self.XG, "s")[1]["v"] + ) validate_path( - self.MXG, 's', 'v', 9, nx.single_source_dijkstra_path(self.MXG, 's')['v']) + self.MXG, "s", "v", 9, nx.single_source_dijkstra_path(self.MXG, "s")["v"] + ) GG = self.XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 - (D, P) = nx.single_source_dijkstra(GG, 's') - validate_path(GG, 's', 'v', 8, P['v']) - assert_equal(D['v'], 8) # uses lower weight of 2 on u<->x edge - 
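bidirectional_dijkstra, exercised a few tests below, grows a search from each endpoint and stitches the frontiers together, returning a (length, path) pair; a tiny sketch on a 7-cycle, where the optimum is unique in each direction:

import networkx as nx

C = nx.cycle_graph(7)
assert nx.bidirectional_dijkstra(C, 0, 3) == (3, [0, 1, 2, 3])
assert nx.bidirectional_dijkstra(C, 0, 4) == (3, [0, 6, 5, 4])  # wraps around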
validate_path(GG, 's', 'v', 8, nx.dijkstra_path(GG, 's', 'v')) - assert_equal(nx.dijkstra_path_length(GG, 's', 'v'), 8) + GG["u"]["x"]["weight"] = 2 + (D, P) = nx.single_source_dijkstra(GG, "s") + validate_path(GG, "s", "v", 8, P["v"]) + assert D["v"] == 8 # uses lower weight of 2 on u<->x edge + validate_path(GG, "s", "v", 8, nx.dijkstra_path(GG, "s", "v")) + assert nx.dijkstra_path_length(GG, "s", "v") == 8 validate_path(self.XG2, 1, 3, 4, nx.dijkstra_path(self.XG2, 1, 3)) validate_path(self.XG3, 0, 3, 15, nx.dijkstra_path(self.XG3, 0, 3)) - assert_equal(nx.dijkstra_path_length(self.XG3, 0, 3), 15) + assert nx.dijkstra_path_length(self.XG3, 0, 3) == 15 validate_path(self.XG4, 0, 2, 4, nx.dijkstra_path(self.XG4, 0, 2)) - assert_equal(nx.dijkstra_path_length(self.XG4, 0, 2), 4) + assert nx.dijkstra_path_length(self.XG4, 0, 2) == 4 validate_path(self.MXG4, 0, 2, 4, nx.dijkstra_path(self.MXG4, 0, 2)) validate_path( - self.G, 's', 'v', 2, nx.single_source_dijkstra(self.G, 's', 'v')[1]) + self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s", "v")[1] + ) validate_path( - self.G, 's', 'v', 2, nx.single_source_dijkstra(self.G, 's')[1]['v']) + self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s")[1]["v"] + ) - validate_path(self.G, 's', 'v', 2, nx.dijkstra_path(self.G, 's', 'v')) - assert_equal(nx.dijkstra_path_length(self.G, 's', 'v'), 2) + validate_path(self.G, "s", "v", 2, nx.dijkstra_path(self.G, "s", "v")) + assert nx.dijkstra_path_length(self.G, "s", "v") == 2 # NetworkXError: node s not reachable from moon - assert_raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, 's', 'moon') - assert_raises( - nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, 's', 'moon') + pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, "s", "moon") + pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, "s", "moon") validate_path(self.cycle, 0, 3, 3, nx.dijkstra_path(self.cycle, 0, 3)) validate_path(self.cycle, 0, 4, 3, nx.dijkstra_path(self.cycle, 0, 4)) - assert_equal(nx.single_source_dijkstra(self.cycle, 0, 0), (0, [0])) + assert nx.single_source_dijkstra(self.cycle, 0, 0) == (0, [0]) def test_bidirectional_dijkstra(self): validate_length_path( - self.XG, 's', 'v', 9, *nx.bidirectional_dijkstra(self.XG, 's', 'v')) + self.XG, "s", "v", 9, *nx.bidirectional_dijkstra(self.XG, "s", "v") + ) validate_length_path( - self.G, 's', 'v', 2, *nx.bidirectional_dijkstra(self.G, 's', 'v')) + self.G, "s", "v", 2, *nx.bidirectional_dijkstra(self.G, "s", "v") + ) validate_length_path( - self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3)) + self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3) + ) validate_length_path( - self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4)) + self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4) + ) validate_length_path( - self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3)) + self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3) + ) validate_length_path( - self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2)) + self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2) + ) # need more tests here - P = nx.single_source_dijkstra_path(self.XG, 's')['v'] - validate_path(self.XG, 's', 'v', sum(self.XG[u][v]['weight'] for u, v in zip( - P[:-1], P[1:])), nx.dijkstra_path(self.XG, 's', 'v')) + P = nx.single_source_dijkstra_path(self.XG, "s")["v"] + validate_path( + self.XG, + "s", + "v", + sum(self.XG[u][v]["weight"] for u, v in zip(P[:-1], P[1:])), + nx.dijkstra_path(self.XG, "s", "v"), 
+ ) + + # check absent source + G = nx.path_graph(2) + pytest.raises(nx.NodeNotFound, nx.bidirectional_dijkstra, G, 3, 0) + + def test_weight_functions(self): + def heuristic(*z): + return hash(z) + + def getpath(pred, v, s): + return [v] if v == s else getpath(pred, pred[v], s) + [v] + + def goldberg_radzik(g, s, t, weight="weight"): + pred, dist = nx.goldberg_radzik(g, s, weight=weight) + dist = dist[t] + return dist, getpath(pred, t, s) + + def astar(g, s, t, weight="weight"): + path = nx.astar_path(g, s, t, heuristic, weight=weight) + dist = nx.astar_path_length(g, s, t, heuristic, weight=weight) + return dist, path + + def vlp(G, s, t, l, F, w): + res = F(G, s, t, weight=w) + validate_length_path(G, s, t, l, *res, weight=w) + + G = self.cycle + s = 6 + t = 4 + path = [6] + list(range(t + 1)) + + def weight(u, v, _): + return 1 + v ** 2 + + length = sum(weight(u, v, None) for u, v in pairwise(path)) + vlp(G, s, t, length, nx.bidirectional_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_bellman_ford, weight) + vlp(G, s, t, length, goldberg_radzik, weight) + vlp(G, s, t, length, astar, weight) + + def weight(u, v, _): + return 2 ** (u * v) + + length = sum(weight(u, v, None) for u, v in pairwise(path)) + vlp(G, s, t, length, nx.bidirectional_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_bellman_ford, weight) + vlp(G, s, t, length, goldberg_radzik, weight) + vlp(G, s, t, length, astar, weight) - @raises(nx.NetworkXNoPath) def test_bidirectional_dijkstra_no_path(self): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - nx.add_path(G, [4, 5, 6]) - path = nx.bidirectional_dijkstra(G, 1, 6) + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5, 6]) + path = nx.bidirectional_dijkstra(G, 1, 6) + + def test_absent_source(self): + # the check is in _dijkstra_multisource, but this will provide + # regression testing against later changes to any of the "client" + # Dijkstra or Bellman-Ford functions + G = nx.path_graph(2) + for fn in ( + nx.dijkstra_path, + nx.dijkstra_path_length, + nx.single_source_dijkstra_path, + nx.single_source_dijkstra_path_length, + nx.single_source_dijkstra, + nx.dijkstra_predecessor_and_distance, + ): + pytest.raises(nx.NodeNotFound, fn, G, 3, 0) def test_dijkstra_predecessor1(self): G = nx.path_graph(4) - assert_equal(nx.dijkstra_predecessor_and_distance(G, 0), - ({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3})) + assert nx.dijkstra_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2]}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) def test_dijkstra_predecessor2(self): # 4-cycle G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)]) pred, dist = nx.dijkstra_predecessor_and_distance(G, (0)) - assert_equal(pred[0], []) - assert_equal(pred[1], [0]) - assert_true(pred[2] in [[1, 3], [3, 1]]) - assert_equal(pred[3], [0]) - assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1}) + assert pred[0] == [] + assert pred[1] == [0] + assert pred[2] in [[1, 3], [3, 1]] + assert pred[3] == [0] + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} def test_dijkstra_predecessor3(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) - (P, D) = nx.dijkstra_predecessor_and_distance(XG, 's') - assert_equal(P['v'], ['u']) - assert_equal(D['v'], 9) - (P, D) = 
nx.dijkstra_predecessor_and_distance(XG, 's', cutoff=8) - assert_false('v' in D) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s", cutoff=8) + assert "v" not in D def test_single_source_dijkstra_path_length(self): pl = nx.single_source_dijkstra_path_length - assert_equal(dict(pl(self.MXG4, 0))[2], 4) + assert dict(pl(self.MXG4, 0))[2] == 4 spl = pl(self.MXG4, 0, cutoff=2) - assert_false(2 in spl) + assert 2 not in spl def test_bidirectional_dijkstra_multigraph(self): G = nx.MultiGraph() - G.add_edge('a', 'b', weight=10) - G.add_edge('a', 'b', weight=100) - dp = nx.bidirectional_dijkstra(G, 'a', 'b') - assert_equal(dp, (10, ['a', 'b'])) + G.add_edge("a", "b", weight=10) + G.add_edge("a", "b", weight=100) + dp = nx.bidirectional_dijkstra(G, "a", "b") + assert dp == (10, ["a", "b"]) def test_dijkstra_pred_distance_multigraph(self): G = nx.MultiGraph() - G.add_edge('a', 'b', key='short', foo=5, weight=100) - G.add_edge('a', 'b', key='long', bar=1, weight=110) - p, d = nx.dijkstra_predecessor_and_distance(G, 'a') - assert_equal(p, {'a': [], 'b': ['a']}) - assert_equal(d, {'a': 0, 'b': 100}) + G.add_edge("a", "b", key="short", foo=5, weight=100) + G.add_edge("a", "b", key="long", bar=1, weight=110) + p, d = nx.dijkstra_predecessor_and_distance(G, "a") + assert p == {"a": [], "b": ["a"]} + assert d == {"a": 0, "b": 100} def test_negative_edge_cycle(self): G = nx.cycle_graph(5, create_using=nx.DiGraph()) - assert_equal(nx.negative_edge_cycle(G), False) + assert not nx.negative_edge_cycle(G) G.add_edge(8, 9, weight=-7) G.add_edge(9, 8, weight=3) graph_size = len(G) - assert_equal(nx.negative_edge_cycle(G), True) - assert_equal(graph_size, len(G)) - assert_raises(ValueError, nx.single_source_dijkstra_path_length, G, 8) - assert_raises(ValueError, nx.single_source_dijkstra, G, 8) - assert_raises(ValueError, nx.dijkstra_predecessor_and_distance, G, 8) + assert nx.negative_edge_cycle(G) + assert graph_size == len(G) + pytest.raises(ValueError, nx.single_source_dijkstra_path_length, G, 8) + pytest.raises(ValueError, nx.single_source_dijkstra, G, 8) + pytest.raises(ValueError, nx.dijkstra_predecessor_and_distance, G, 8) G.add_edge(9, 10) - assert_raises(ValueError, nx.bidirectional_dijkstra, G, 8, 10) + pytest.raises(ValueError, nx.bidirectional_dijkstra, G, 8, 10) def test_weight_function(self): """Tests that a callable weight is interpreted as a weight @@ -222,56 +346,58 @@ def test_weight_function(self): # Create a triangle in which the edge from node 0 to node 2 has # a large weight and the other two edges have a small weight. G = nx.complete_graph(3) - G.adj[0][2]['weight'] = 10 - G.adj[0][1]['weight'] = 1 - G.adj[1][2]['weight'] = 1 + G.adj[0][2]["weight"] = 10 + G.adj[0][1]["weight"] = 1 + G.adj[1][2]["weight"] = 1 # The weight function will take the multiplicative inverse of # the weights on the edges. This way, weights that were large # before now become small and vice versa. - def weight(u, v, d): return 1 / d['weight'] + def weight(u, v, d): + return 1 / d["weight"] + # The shortest path from 0 to 2 using the actual weights on the # edges should be [0, 1, 2]. 
distance, path = nx.single_source_dijkstra(G, 0, 2) - assert_equal(distance, 2) - assert_equal(path, [0, 1, 2]) + assert distance == 2 + assert path == [0, 1, 2] # However, with the above weight function, the shortest path # should be [0, 2], since that has a very small weight. distance, path = nx.single_source_dijkstra(G, 0, 2, weight=weight) - assert_equal(distance, 1 / 10) - assert_equal(path, [0, 2]) + assert distance == 1 / 10 + assert path == [0, 2] def test_all_pairs_dijkstra_path(self): cycle = nx.cycle_graph(7) p = dict(nx.all_pairs_dijkstra_path(cycle)) - assert_equal(p[0][3], [0, 1, 2, 3]) + assert p[0][3] == [0, 1, 2, 3] - cycle[1][2]['weight'] = 10 + cycle[1][2]["weight"] = 10 p = dict(nx.all_pairs_dijkstra_path(cycle)) - assert_equal(p[0][3], [0, 6, 5, 4, 3]) + assert p[0][3] == [0, 6, 5, 4, 3] def test_all_pairs_dijkstra_path_length(self): cycle = nx.cycle_graph(7) pl = dict(nx.all_pairs_dijkstra_path_length(cycle)) - assert_equal(pl[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) + assert pl[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - cycle[1][2]['weight'] = 10 + cycle[1][2]["weight"] = 10 pl = dict(nx.all_pairs_dijkstra_path_length(cycle)) - assert_equal(pl[0], {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1}) + assert pl[0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} def test_all_pairs_dijkstra(self): cycle = nx.cycle_graph(7) out = dict(nx.all_pairs_dijkstra(cycle)) - assert_equal(out[0][0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}) - assert_equal(out[0][1][3], [0, 1, 2, 3]) + assert out[0][0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert out[0][1][3] == [0, 1, 2, 3] - cycle[1][2]['weight'] = 10 + cycle[1][2]["weight"] = 10 out = dict(nx.all_pairs_dijkstra(cycle)) - assert_equal(out[0][0], {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1}) - assert_equal(out[0][1][3], [0, 6, 5, 4, 3]) + assert out[0][0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} + assert out[0][1][3] == [0, 6, 5, 4, 3] -class TestDijkstraPathLength(object): +class TestDijkstraPathLength: """Unit tests for the :func:`networkx.dijkstra_path_length` function. @@ -285,39 +411,50 @@ def test_weight_function(self): # Create a triangle in which the edge from node 0 to node 2 has # a large weight and the other two edges have a small weight. G = nx.complete_graph(3) - G.adj[0][2]['weight'] = 10 - G.adj[0][1]['weight'] = 1 - G.adj[1][2]['weight'] = 1 + G.adj[0][2]["weight"] = 10 + G.adj[0][1]["weight"] = 1 + G.adj[1][2]["weight"] = 1 # The weight function will take the multiplicative inverse of # the weights on the edges. This way, weights that were large # before now become small and vice versa. - def weight(u, v, d): return 1 / d['weight'] + def weight(u, v, d): + return 1 / d["weight"] + # The shortest path from 0 to 2 using the actual weights on the # edges should be [0, 1, 2]. However, with the above weight # function, the shortest path should be [0, 2], since that has a # very small weight. length = nx.dijkstra_path_length(G, 0, 2, weight=weight) - assert_equal(length, 1 / 10) + assert length == 1 / 10 -class TestMultiSourceDijkstra(object): +class TestMultiSourceDijkstra: """Unit tests for the multi-source dialect of Dijkstra's shortest path algorithms. 
""" - @raises(ValueError) def test_no_sources(self): - nx.multi_source_dijkstra(nx.Graph(), {}) + with pytest.raises(ValueError): + nx.multi_source_dijkstra(nx.Graph(), {}) - @raises(ValueError) def test_path_no_sources(self): - nx.multi_source_dijkstra_path(nx.Graph(), {}) + with pytest.raises(ValueError): + nx.multi_source_dijkstra_path(nx.Graph(), {}) - @raises(ValueError) def test_path_length_no_sources(self): - nx.multi_source_dijkstra_path_length(nx.Graph(), {}) + with pytest.raises(ValueError): + nx.multi_source_dijkstra_path_length(nx.Graph(), {}) + + def test_absent_source(self): + G = nx.path_graph(2) + for fn in ( + nx.multi_source_dijkstra_path, + nx.multi_source_dijkstra_path_length, + nx.multi_source_dijkstra, + ): + pytest.raises(nx.NodeNotFound, fn, G, [3], 0) def test_two_sources(self): edges = [(0, 1, 1), (1, 2, 1), (2, 3, 10), (3, 4, 1)] @@ -327,239 +464,424 @@ def test_two_sources(self): distances, paths = nx.multi_source_dijkstra(G, sources) expected_distances = {0: 0, 1: 1, 2: 2, 3: 1, 4: 0} expected_paths = {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [4, 3], 4: [4]} - assert_equal(distances, expected_distances) - assert_equal(paths, expected_paths) + assert distances == expected_distances + assert paths == expected_paths def test_simple_paths(self): G = nx.path_graph(4) lengths = nx.multi_source_dijkstra_path_length(G, [0]) - assert_equal(lengths, {n: n for n in G}) + assert lengths == {n: n for n in G} paths = nx.multi_source_dijkstra_path(G, [0]) - assert_equal(paths, {n: list(range(n + 1)) for n in G}) + assert paths == {n: list(range(n + 1)) for n in G} class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): - def test_single_node_graph(self): G = nx.DiGraph() G.add_node(0) - assert_equal(nx.single_source_bellman_ford_path(G, 0), {0: [0]}) - assert_equal(nx.single_source_bellman_ford_path_length(G, 0), {0: 0}) - assert_equal(nx.single_source_bellman_ford(G, 0), ({0: 0}, {0: [0]})) - assert_equal(nx.bellman_ford_predecessor_and_distance(G, 0), ({0: [None]}, {0: 0})) - assert_equal(nx.goldberg_radzik(G, 0), ({0: None}, {0: 0})) - assert_raises(nx.NodeNotFound, nx.bellman_ford_predecessor_and_distance, G, 1) - assert_raises(nx.NodeNotFound, nx.goldberg_radzik, G, 1) + assert nx.single_source_bellman_ford_path(G, 0) == {0: [0]} + assert nx.single_source_bellman_ford_path_length(G, 0) == {0: 0} + assert nx.single_source_bellman_ford(G, 0) == ({0: 0}, {0: [0]}) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ({0: []}, {0: 0}) + assert nx.goldberg_radzik(G, 0) == ({0: None}, {0: 0}) + + def test_absent_source_bellman_ford(self): + # the check is in _bellman_ford; this provides regression testing + # against later changes to "client" Bellman-Ford functions + G = nx.path_graph(2) + for fn in ( + nx.bellman_ford_predecessor_and_distance, + nx.bellman_ford_path, + nx.bellman_ford_path_length, + nx.single_source_bellman_ford_path, + nx.single_source_bellman_ford_path_length, + nx.single_source_bellman_ford, + ): + pytest.raises(nx.NodeNotFound, fn, G, 3, 0) + + def test_absent_source_goldberg_radzik(self): + with pytest.raises(nx.NodeNotFound): + G = nx.path_graph(2) + nx.goldberg_radzik(G, 3, 0) + + def test_negative_weight_cycle_heuristic(self): + G = nx.DiGraph() + G.add_edge(0, 1, weight=-1) + G.add_edge(1, 2, weight=-1) + G.add_edge(2, 3, weight=-1) + G.add_edge(3, 0, weight=3) + assert not nx.negative_edge_cycle(G, heuristic=True) + G.add_edge(2, 0, weight=1.999) + assert nx.negative_edge_cycle(G, heuristic=True) + G.edges[2, 0]["weight"] = 2 + assert not 
nx.negative_edge_cycle(G, heuristic=True) + + def test_negative_weight_cycle_consistency(self): + import random + + unif = random.uniform + for random_seed in range(2): # range(20): + random.seed(random_seed) + for density in [0.1, 0.9]: # .3, .7, .9]: + for N in [1, 10, 20]: # range(1, 60 - int(30 * density)): + for max_cost in [1, 90]: # [1, 10, 40, 90]: + G = nx.binomial_graph(N, density, seed=4, directed=True) + edges = ((u, v, unif(-1, max_cost)) for u, v in G.edges) + G.add_weighted_edges_from(edges) + + no_heuristic = nx.negative_edge_cycle(G, heuristic=False) + with_heuristic = nx.negative_edge_cycle(G, heuristic=True) + assert no_heuristic == with_heuristic def test_negative_weight_cycle(self): G = nx.cycle_graph(5, create_using=nx.DiGraph()) G.add_edge(1, 2, weight=-7) for i in range(5): - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) - assert_raises(nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i) - assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i + ) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) G = nx.cycle_graph(5) # undirected Graph G.add_edge(1, 2, weight=-3) for i in range(5): - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) - assert_raises(nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i) - assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) - G = nx.DiGraph([(1, 1, {'weight': -1})]) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1) - assert_raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1) - assert_raises(nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1) - assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i + ) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) + G = nx.DiGraph([(1, 1, {"weight": -1})]) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1) # no negative cycle but negative weight G = nx.cycle_graph(5, create_using=nx.DiGraph()) G.add_edge(1, 2, weight=-3) - assert_equal(nx.single_source_bellman_ford_path(G, 
0), - {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]}) - assert_equal(nx.single_source_bellman_ford_path_length(G, 0), - {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}) - assert_equal(nx.single_source_bellman_ford(G, 0), - ({0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, - {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]})) - assert_equal(nx.bellman_ford_predecessor_and_distance(G, 0), - ({0: [None], 1: [0], 2: [1], 3: [2], 4: [3]}, - {0: 0, 1: 1, 2: -2, 3: -1, 4: 0})) - assert_equal(nx.goldberg_radzik(G, 0), - ({0: None, 1: 0, 2: 1, 3: 2, 4: 3}, - {0: 0, 1: 1, 2: -2, 3: -1, 4: 0})) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 1, 2], + 3: [0, 1, 2, 3], + 4: [0, 1, 2, 3, 4], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: -2, + 3: -1, + 4: 0, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2], 4: [3]}, + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 1, 3: 2, 4: 3}, + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + ) def test_not_connected(self): G = nx.complete_graph(6) G.add_edge(10, 11) G.add_edge(10, 12) - assert_equal(nx.single_source_bellman_ford_path(G, 0), - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}) - assert_equal(nx.single_source_bellman_ford_path_length(G, 0), - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}) - assert_equal(nx.single_source_bellman_ford(G, 0), - ({0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]})) - assert_equal(nx.bellman_ford_predecessor_and_distance(G, 0), - ({0: [None], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) - assert_equal(nx.goldberg_radzik(G, 0), - ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 2], + 3: [0, 3], + 4: [0, 4], + 5: [0, 5], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: 1, + 3: 1, + 4: 1, + 5: 1, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) # not connected, with a component not containing the source that # contains a negative cost cycle. 
G = nx.complete_graph(6) - G.add_edges_from([('A', 'B', {'load': 3}), - ('B', 'C', {'load': -10}), - ('C', 'A', {'load': 2})]) - assert_equal(nx.single_source_bellman_ford_path(G, 0, weight='load'), - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}) - assert_equal(nx.single_source_bellman_ford_path_length(G, 0, weight='load'), - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}) - assert_equal(nx.single_source_bellman_ford(G, 0, weight='load'), - ({0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]})) - assert_equal(nx.bellman_ford_predecessor_and_distance(G, 0, weight='load'), - ({0: [None], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) - assert_equal(nx.goldberg_radzik(G, 0, weight='load'), - ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) + G.add_edges_from( + [ + ("A", "B", {"load": 3}), + ("B", "C", {"load": -10}), + ("C", "A", {"load": 2}), + ] + ) + assert nx.single_source_bellman_ford_path(G, 0, weight="load") == { + 0: [0], + 1: [0, 1], + 2: [0, 2], + 3: [0, 3], + 4: [0, 4], + 5: [0, 5], + } + assert nx.single_source_bellman_ford_path_length(G, 0, weight="load") == { + 0: 0, + 1: 1, + 2: 1, + 3: 1, + 4: 1, + 5: 1, + } + assert nx.single_source_bellman_ford(G, 0, weight="load") == ( + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0, weight="load") == ( + {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + assert nx.goldberg_radzik(G, 0, weight="load") == ( + {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) def test_multigraph(self): - assert_equal(nx.bellman_ford_path(self.MXG, 's', 'v'), ['s', 'x', 'u', 'v']) - assert_equal(nx.bellman_ford_path_length(self.MXG, 's', 'v'), 9) - assert_equal(nx.single_source_bellman_ford_path(self.MXG, 's')['v'], ['s', 'x', 'u', 'v']) - assert_equal(nx.single_source_bellman_ford_path_length(self.MXG, 's')['v'], 9) - D, P = nx.single_source_bellman_ford(self.MXG, 's', target='v') - assert_equal(D, 9) - assert_equal(P, ['s', 'x', 'u', 'v']) - P, D = nx.bellman_ford_predecessor_and_distance(self.MXG, 's') - assert_equal(P['v'], ['u']) - assert_equal(D['v'], 9) - P, D = nx.goldberg_radzik(self.MXG, 's') - assert_equal(P['v'], 'u') - assert_equal(D['v'], 9) - assert_equal(nx.bellman_ford_path(self.MXG4, 0, 2), [0, 1, 2]) - assert_equal(nx.bellman_ford_path_length(self.MXG4, 0, 2), 4) - assert_equal(nx.single_source_bellman_ford_path(self.MXG4, 0)[2], [0, 1, 2]) - assert_equal(nx.single_source_bellman_ford_path_length(self.MXG4, 0)[2], 4) + assert nx.bellman_ford_path(self.MXG, "s", "v") == ["s", "x", "u", "v"] + assert nx.bellman_ford_path_length(self.MXG, "s", "v") == 9 + assert nx.single_source_bellman_ford_path(self.MXG, "s")["v"] == [ + "s", + "x", + "u", + "v", + ] + assert nx.single_source_bellman_ford_path_length(self.MXG, "s")["v"] == 9 + D, P = nx.single_source_bellman_ford(self.MXG, "s", target="v") + assert D == 9 + assert P == ["s", "x", "u", "v"] + P, D = nx.bellman_ford_predecessor_and_distance(self.MXG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + P, D = nx.goldberg_radzik(self.MXG, "s") + assert P["v"] == "u" + assert D["v"] == 9 + assert nx.bellman_ford_path(self.MXG4, 0, 2) == [0, 1, 2] + assert nx.bellman_ford_path_length(self.MXG4, 0, 2) == 4 + assert nx.single_source_bellman_ford_path(self.MXG4, 0)[2] == [0, 1, 
2] + assert nx.single_source_bellman_ford_path_length(self.MXG4, 0)[2] == 4 D, P = nx.single_source_bellman_ford(self.MXG4, 0, target=2) - assert_equal(D, 4) - assert_equal(P, [0, 1, 2]) + assert D == 4 + assert P == [0, 1, 2] P, D = nx.bellman_ford_predecessor_and_distance(self.MXG4, 0) - assert_equal(P[2], [1]) - assert_equal(D[2], 4) + assert P[2] == [1] + assert D[2] == 4 P, D = nx.goldberg_radzik(self.MXG4, 0) - assert_equal(P[2], 1) - assert_equal(D[2], 4) + assert P[2] == 1 + assert D[2] == 4 def test_others(self): - assert_equal(nx.bellman_ford_path(self.XG, 's', 'v'), ['s', 'x', 'u', 'v']) - assert_equal(nx.bellman_ford_path_length(self.XG, 's', 'v'), 9) - assert_equal(nx.single_source_bellman_ford_path(self.XG, 's')['v'], ['s', 'x', 'u', 'v']) - assert_equal(nx.single_source_bellman_ford_path_length(self.XG, 's')['v'], 9) - D, P = nx.single_source_bellman_ford(self.XG, 's', target='v') - assert_equal(D, 9) - assert_equal(P, ['s', 'x', 'u', 'v']) - (P, D) = nx.bellman_ford_predecessor_and_distance(self.XG, 's') - assert_equal(P['v'], ['u']) - assert_equal(D['v'], 9) - (P, D) = nx.goldberg_radzik(self.XG, 's') - assert_equal(P['v'], 'u') - assert_equal(D['v'], 9) + assert nx.bellman_ford_path(self.XG, "s", "v") == ["s", "x", "u", "v"] + assert nx.bellman_ford_path_length(self.XG, "s", "v") == 9 + assert nx.single_source_bellman_ford_path(self.XG, "s")["v"] == [ + "s", + "x", + "u", + "v", + ] + assert nx.single_source_bellman_ford_path_length(self.XG, "s")["v"] == 9 + D, P = nx.single_source_bellman_ford(self.XG, "s", target="v") + assert D == 9 + assert P == ["s", "x", "u", "v"] + (P, D) = nx.bellman_ford_predecessor_and_distance(self.XG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + (P, D) = nx.goldberg_radzik(self.XG, "s") + assert P["v"] == "u" + assert D["v"] == 9 def test_path_graph(self): G = nx.path_graph(4) - assert_equal(nx.single_source_bellman_ford_path(G, 0), - {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]}) - assert_equal(nx.single_source_bellman_ford_path_length(G, 0), - {0: 0, 1: 1, 2: 2, 3: 3}) - assert_equal(nx.single_source_bellman_ford(G, 0), - ({0: 0, 1: 1, 2: 2, 3: 3}, {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]})) - assert_equal(nx.bellman_ford_predecessor_and_distance(G, 0), - ({0: [None], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3})) - assert_equal(nx.goldberg_radzik(G, 0), - ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3})) - assert_equal(nx.single_source_bellman_ford_path(G, 3), - {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]}) - assert_equal(nx.single_source_bellman_ford_path_length(G, 3), - {0: 3, 1: 2, 2: 1, 3: 0}) - assert_equal(nx.single_source_bellman_ford(G, 3), - ({0: 3, 1: 2, 2: 1, 3: 0}, {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]})) - assert_equal(nx.bellman_ford_predecessor_and_distance(G, 3), - ({0: [1], 1: [2], 2: [3], 3: [None]}, {0: 3, 1: 2, 2: 1, 3: 0})) - assert_equal(nx.goldberg_radzik(G, 3), - ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0})) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 1, 2], + 3: [0, 1, 2, 3], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: 2, + 3: 3, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: 2, 3: 3}, + {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2]}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 1, 3: 2}, + {0: 0, 
1: 1, 2: 2, 3: 3}, + ) + assert nx.single_source_bellman_ford_path(G, 3) == { + 0: [3, 2, 1, 0], + 1: [3, 2, 1], + 2: [3, 2], + 3: [3], + } + assert nx.single_source_bellman_ford_path_length(G, 3) == { + 0: 3, + 1: 2, + 2: 1, + 3: 0, + } + assert nx.single_source_bellman_ford(G, 3) == ( + {0: 3, 1: 2, 2: 1, 3: 0}, + {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 3) == ( + {0: [1], 1: [2], 2: [3], 3: []}, + {0: 3, 1: 2, 2: 1, 3: 0}, + ) + assert nx.goldberg_radzik(G, 3) == ( + {0: 1, 1: 2, 2: 3, 3: None}, + {0: 3, 1: 2, 2: 1, 3: 0}, + ) def test_4_cycle(self): # 4-cycle G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)]) dist, path = nx.single_source_bellman_ford(G, 0) - assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1}) - assert_equal(path[0], [0]) - assert_equal(path[1], [0, 1]) - assert_true(path[2] in [[0, 1, 2], [0, 3, 2]]) - assert_equal(path[3], [0, 3]) + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} + assert path[0] == [0] + assert path[1] == [0, 1] + assert path[2] in [[0, 1, 2], [0, 3, 2]] + assert path[3] == [0, 3] pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0) - assert_equal(pred[0], [None]) - assert_equal(pred[1], [0]) - assert_true(pred[2] in [[1, 3], [3, 1]]) - assert_equal(pred[3], [0]) - assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1}) + assert pred[0] == [] + assert pred[1] == [0] + assert pred[2] in [[1, 3], [3, 1]] + assert pred[3] == [0] + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} pred, dist = nx.goldberg_radzik(G, 0) - assert_equal(pred[0], None) - assert_equal(pred[1], 0) - assert_true(pred[2] in [1, 3]) - assert_equal(pred[3], 0) - assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1}) + assert pred[0] is None + assert pred[1] == 0 + assert pred[2] in [1, 3] + assert pred[3] == 0 + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} + def test_negative_weight(self): + G = nx.DiGraph() + G.add_nodes_from("abcd") + G.add_edge("a", "d", weight=0) + G.add_edge("a", "b", weight=1) + G.add_edge("b", "c", weight=-3) + G.add_edge("c", "d", weight=1) -class TestJohnsonAlgorithm(WeightedTestBase): + assert nx.bellman_ford_path(G, "a", "d") == ["a", "b", "c", "d"] + assert nx.bellman_ford_path_length(G, "a", "d") == -1 + + def test_zero_cycle_smoke(self): + D = nx.DiGraph() + D.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1), (3, 1, -2)]) - @raises(nx.NetworkXError) + nx.bellman_ford_path(D, 1, 3) + nx.dijkstra_path(D, 1, 3) + nx.bidirectional_dijkstra(D, 1, 3) + # FIXME nx.goldberg_radzik(D, 1) + + +class TestJohnsonAlgorithm(WeightedTestBase): def test_single_node_graph(self): - G = nx.DiGraph() - G.add_node(0) - nx.johnson(G) + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + G.add_node(0) + nx.johnson(G) def test_negative_cycle(self): G = nx.DiGraph() - G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5), - ('0', '2', 2), ('1', '2', 4), - ('2', '3', 1)]) - assert_raises(nx.NetworkXUnbounded, nx.johnson, G) + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) + pytest.raises(nx.NetworkXUnbounded, nx.johnson, G) G = nx.Graph() - G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5), - ('0', '2', 2), ('1', '2', 4), - ('2', '3', 1)]) - assert_raises(nx.NetworkXUnbounded, nx.johnson, G) + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) + pytest.raises(nx.NetworkXUnbounded, nx.johnson, G) def 
test_negative_weights(self): G = nx.DiGraph() - G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), - ('0', '2', 2), ('1', '2', 4), - ('2', '3', 1)]) + G.add_weighted_edges_from( + [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)] + ) paths = nx.johnson(G) - assert_equal(paths, {'1': {'1': ['1'], '3': ['1', '2', '3'], - '2': ['1', '2']}, '0': {'1': ['0', '1'], - '0': ['0'], '3': ['0', '1', '2', '3'], - '2': ['0', '1', '2']}, '3': {'3': ['3']}, - '2': {'3': ['2', '3'], '2': ['2']}}) + assert paths == { + "1": {"1": ["1"], "3": ["1", "2", "3"], "2": ["1", "2"]}, + "0": { + "1": ["0", "1"], + "0": ["0"], + "3": ["0", "1", "2", "3"], + "2": ["0", "1", "2"], + }, + "3": {"3": ["3"]}, + "2": {"3": ["2", "3"], "2": ["2"]}, + } - @raises(nx.NetworkXError) def test_unweighted_graph(self): - G = nx.path_graph(5) - nx.johnson(G) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(5) + nx.johnson(G) def test_graphs(self): - validate_path(self.XG, 's', 'v', 9, nx.johnson(self.XG)['s']['v']) - validate_path(self.MXG, 's', 'v', 9, nx.johnson(self.MXG)['s']['v']) + validate_path(self.XG, "s", "v", 9, nx.johnson(self.XG)["s"]["v"]) + validate_path(self.MXG, "s", "v", 9, nx.johnson(self.MXG)["s"]["v"]) validate_path(self.XG2, 1, 3, 4, nx.johnson(self.XG2)[1][3]) validate_path(self.XG3, 0, 3, 15, nx.johnson(self.XG3)[0][3]) validate_path(self.XG4, 0, 2, 4, nx.johnson(self.XG4)[0][2]) diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py index 4eaf03b..a6e45fa 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -1,25 +1,18 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg """ Shortest path algorithms for unweighted graphs. """ import networkx as nx -__all__ = ['bidirectional_shortest_path', - 'single_source_shortest_path', - 'single_source_shortest_path_length', - 'single_target_shortest_path', - 'single_target_shortest_path_length', - 'all_pairs_shortest_path', - 'all_pairs_shortest_path_length', - 'predecessor'] +__all__ = [ + "bidirectional_shortest_path", + "single_source_shortest_path", + "single_source_shortest_path_length", + "single_target_shortest_path", + "single_target_shortest_path_length", + "all_pairs_shortest_path", + "all_pairs_shortest_path_length", + "predecessor", +] def single_source_shortest_path_length(G, source, cutoff=None): @@ -47,7 +40,7 @@ def single_source_shortest_path_length(G, source, cutoff=None): >>> length[4] 4 >>> for node in length: - ... print('{}: {}'.format(node, length[node])) + ... 
print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -59,9 +52,9 @@ def single_source_shortest_path_length(G, source, cutoff=None): shortest_path_length """ if source not in G: - raise nx.NodeNotFound('Source {} is not in G'.format(source)) + raise nx.NodeNotFound(f"Source {source} is not in G") if cutoff is None: - cutoff = float('inf') + cutoff = float("inf") nextlevel = {source: 1} return dict(_single_shortest_path_length(G.adj, nextlevel, cutoff)) @@ -79,18 +72,23 @@ def _single_shortest_path_length(adj, firstlevel, cutoff): cutoff : int or float level at which we stop the process """ - seen = {} # level (number of hops) when seen in BFS - level = 0 # the current level - nextlevel = firstlevel # dict of nodes to check at next level - + seen = {} # level (number of hops) when seen in BFS + level = 0 # the current level + nextlevel = set(firstlevel) # set of nodes to check at next level + n = len(adj) while nextlevel and cutoff >= level: thislevel = nextlevel # advance to next level - nextlevel = {} # and start a new list (fringe) + nextlevel = set() # and start a new set (fringe) + found = [] for v in thislevel: if v not in seen: seen[v] = level # set the level of vertex v - nextlevel.update(adj[v]) # add neighbors of v + found.append(v) yield (v, level) + if len(seen) == n: + return + for v in found: + nextlevel.update(adj[v]) level += 1 del seen @@ -120,7 +118,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): >>> length[0] 4 >>> for node in range(5): - ... print('{}: {}'.format(node, length[node])) + ... print(f"{node}: {length[node]}") 0: 4 1: 3 2: 2 @@ -132,10 +130,10 @@ def single_target_shortest_path_length(G, target, cutoff=None): single_source_shortest_path_length, shortest_path_length """ if target not in G: - raise nx.NodeNotFound('Target {} is not in G'.format(source)) + raise nx.NodeNotFound(f"Target {target} is not in G") if cutoff is None: - cutoff = float('inf') + cutoff = float("inf") # handle either directed or undirected adj = G.pred if G.is_directed() else G.adj nextlevel = {target: 1} @@ -168,7 +166,7 @@ def all_pairs_shortest_path_length(G, cutoff=None): >>> G = nx.path_graph(5) >>> length = dict(nx.all_pairs_shortest_path_length(G)) >>> for node in [0, 1, 2, 3, 4]: - ... print('1 - {}: {}'.format(node, length[1][node])) + ... print(f"1 - {node}: {length[1][node]}") 1 - 0: 1 1 - 1: 0 1 - 2: 1 @@ -187,7 +185,7 @@ def all_pairs_shortest_path_length(G, cutoff=None): def bidirectional_shortest_path(G, source, target): - """Return a list of nodes in a shortest path between source and target. + """Returns a list of nodes in a shortest path between source and target. Parameters ---------- @@ -219,8 +217,8 @@ def bidirectional_shortest_path(G, source, target): """ if source not in G or target not in G: - msg = 'Either source {} or target {} is not in G' - raise nx.NodeNotFound(msg.format(source, target)) + msg = f"Either source {source} or target {target} is not in G" + raise nx.NodeNotFound(msg) # call helper to do the real work results = _bidirectional_pred_succ(G, source, target) @@ -291,7 +289,7 @@ def _bidirectional_pred_succ(G, source, target): if w in pred: # found path return pred, succ, w - raise nx.NetworkXNoPath("No path between %s and %s." 
% (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") def single_source_shortest_path(G, source, cutoff=None): @@ -332,13 +330,14 @@ def single_source_shortest_path(G, source, cutoff=None): shortest_path """ if source not in G: - raise nx.NodeNotFound("Source {} not in G".format(source)) + raise nx.NodeNotFound(f"Source {source} not in G") def join(p1, p2): return p1 + p2 + if cutoff is None: - cutoff = float('inf') - nextlevel = {source: 1} # list of nodes to check at next level + cutoff = float("inf") + nextlevel = {source: 1} # list of nodes to check at next level paths = {source: [source]} # paths dictionary (paths to key from source) return dict(_single_shortest_path(G.adj, nextlevel, paths, cutoff, join)) @@ -362,7 +361,7 @@ def _single_shortest_path(adj, firstlevel, paths, cutoff, join): list inputs `p1` and `p2`, and returns a list. Usually returns `p1 + p2` (forward from source) or `p2 + p1` (backward from target) """ - level = 0 # the current level + level = 0 # the current level nextlevel = firstlevel while nextlevel and cutoff > level: thislevel = nextlevel @@ -413,15 +412,16 @@ def single_target_shortest_path(G, target, cutoff=None): shortest_path, single_source_shortest_path """ if target not in G: - raise nx.NodeNotFound("Target {} not in G".format(source)) + raise nx.NodeNotFound(f"Target {target} not in G") def join(p1, p2): return p2 + p1 + # handle undirected graphs adj = G.pred if G.is_directed() else G.adj if cutoff is None: - cutoff = float('inf') - nextlevel = {target: 1} # list of nodes to check at next level + cutoff = float("inf") + nextlevel = {target: 1} # list of nodes to check at next level paths = {target: [target]} # paths dictionary (paths to key from source) return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join)) @@ -493,12 +493,12 @@ def predecessor(G, source, target=None, cutoff=None, return_seen=None): """ if source not in G: - raise nx.NodeNotFound("Source {} not in G".format(source)) + raise nx.NodeNotFound(f"Source {source} not in G") - level = 0 # the current level - nextlevel = [source] # list of nodes to check at next level - seen = {source: level} # level (number of hops) when seen in BFS - pred = {source: []} # predecessor dictionary + level = 0 # the current level + nextlevel = [source] # list of nodes to check at next level + seen = {source: level} # level (number of hops) when seen in BFS + pred = {source: []} # predecessor dictionary while nextlevel: level = level + 1 thislevel = nextlevel @@ -509,9 +509,9 @@ def predecessor(G, source, target=None, cutoff=None, return_seen=None): pred[w] = [v] seen[w] = level nextlevel.append(w) - elif (seen[w] == level): # add v to predecessor list if it - pred[w].append(v) # is at the correct level - if (cutoff and cutoff <= level): + elif seen[w] == level: # add v to predecessor list if it + pred[w].append(v) # is at the correct level + if cutoff and cutoff <= level: break if target is not None: diff --git a/networkx/algorithms/shortest_paths/weighted.py b/networkx/algorithms/shortest_paths/weighted.py index f943bc5..b1cc15c 100644 --- a/networkx/algorithms/shortest_paths/weighted.py +++ b/networkx/algorithms/shortest_paths/weighted.py @@ -1,15 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg -# Loïc Séguin-C. -# Dan Schult -# Niels van Adrichem """ Shortest path algorithms for weighed graphs. 
""" @@ -19,32 +7,35 @@ from itertools import count import networkx as nx from networkx.utils import generate_unique_node - - -__all__ = ['dijkstra_path', - 'dijkstra_path_length', - 'bidirectional_dijkstra', - 'single_source_dijkstra', - 'single_source_dijkstra_path', - 'single_source_dijkstra_path_length', - 'multi_source_dijkstra', - 'multi_source_dijkstra_path', - 'multi_source_dijkstra_path_length', - 'all_pairs_dijkstra', - 'all_pairs_dijkstra_path', - 'all_pairs_dijkstra_path_length', - 'dijkstra_predecessor_and_distance', - 'bellman_ford_path', - 'bellman_ford_path_length', - 'single_source_bellman_ford', - 'single_source_bellman_ford_path', - 'single_source_bellman_ford_path_length', - 'all_pairs_bellman_ford_path', - 'all_pairs_bellman_ford_path_length', - 'bellman_ford_predecessor_and_distance', - 'negative_edge_cycle', - 'goldberg_radzik', - 'johnson'] +from networkx.algorithms.shortest_paths.generic import _build_paths_from_predecessors + + +__all__ = [ + "dijkstra_path", + "dijkstra_path_length", + "bidirectional_dijkstra", + "single_source_dijkstra", + "single_source_dijkstra_path", + "single_source_dijkstra_path_length", + "multi_source_dijkstra", + "multi_source_dijkstra_path", + "multi_source_dijkstra_path_length", + "all_pairs_dijkstra", + "all_pairs_dijkstra_path", + "all_pairs_dijkstra_path_length", + "dijkstra_predecessor_and_distance", + "bellman_ford_path", + "bellman_ford_path_length", + "single_source_bellman_ford", + "single_source_bellman_ford_path", + "single_source_bellman_ford_path_length", + "all_pairs_bellman_ford_path", + "all_pairs_bellman_ford_path_length", + "bellman_ford_predecessor_and_distance", + "negative_edge_cycle", + "goldberg_radzik", + "johnson", +] def _weight_function(G, weight): @@ -87,7 +78,7 @@ def _weight_function(G, weight): return lambda u, v, data: data.get(weight, 1) -def dijkstra_path(G, source, target, weight='weight'): +def dijkstra_path(G, source, target, weight="weight"): """Returns the shortest weighted path from source to target in G. Uses Dijkstra's Method to compute the shortest weighted path @@ -123,13 +114,16 @@ def dijkstra_path(G, source, target, weight='weight'): Raises ------ + NodeNotFound + If `source` is not in `G`. + NetworkXNoPath If no path exists between source and target. Examples -------- - >>> G=nx.path_graph(5) - >>> print(nx.dijkstra_path(G,0,4)) + >>> G = nx.path_graph(5) + >>> print(nx.dijkstra_path(G, 0, 4)) [0, 1, 2, 3, 4] Notes @@ -144,24 +138,27 @@ def dijkstra_path(G, source, target, weight='weight'): The weight function can be used to include node weights. >>> def func(u, v, d): - ... node_u_wt = G.nodes[u].get('node_weight', 1) - ... node_v_wt = G.nodes[v].get('node_weight', 1) - ... edge_wt = d.get('weight', 1) - ... return node_u_wt/2 + node_v_wt/2 + edge_wt + ... node_u_wt = G.nodes[u].get("node_weight", 1) + ... node_v_wt = G.nodes[v].get("node_weight", 1) + ... edge_wt = d.get("weight", 1) + ... return node_u_wt / 2 + node_v_wt / 2 + edge_wt In this example we take the average of start and end node weights of an edge and add it to the weight of the edge. + The function :func:`single_source_dijkstra` computes both + path and length-of-path if you need both, use that. 
+ See Also -------- bidirectional_dijkstra(), bellman_ford_path() + single_source_dijkstra() """ - (length, path) = single_source_dijkstra(G, source, target=target, - weight=weight) + (length, path) = single_source_dijkstra(G, source, target=target, weight=weight) return path -def dijkstra_path_length(G, source, target, weight='weight'): +def dijkstra_path_length(G, source, target, weight="weight"): """Returns the shortest weighted path length in G from source to target. Uses Dijkstra's Method to compute the shortest weighted path length @@ -197,13 +194,16 @@ def dijkstra_path_length(G, source, target, weight='weight'): Raises ------ + NodeNotFound + If `source` is not in `G`. + NetworkXNoPath If no path exists between source and target. Examples -------- - >>> G=nx.path_graph(5) - >>> print(nx.dijkstra_path_length(G,0,4)) + >>> G = nx.path_graph(5) + >>> print(nx.dijkstra_path_length(G, 0, 4)) 4 Notes @@ -215,9 +215,13 @@ def dijkstra_path_length(G, source, target, weight='weight'): So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` will find the shortest red path. + The function :func:`single_source_dijkstra` computes both + path and length-of-path if you need both, use that. + See Also -------- bidirectional_dijkstra(), bellman_ford_path_length() + single_source_dijkstra() """ if source == target: @@ -226,12 +230,11 @@ def dijkstra_path_length(G, source, target, weight='weight'): length = _dijkstra(G, source, weight, target=target) try: return length[target] - except KeyError: - raise nx.NetworkXNoPath( - "Node %s not reachable from %s" % (target, source)) + except KeyError as e: + raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") from e -def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): +def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a source node. Compute shortest path between source and all other reachable @@ -265,10 +268,15 @@ def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): paths : dictionary Dictionary of shortest path lengths keyed by target. + Raises + ------ + NodeNotFound + If `source` is not in `G`. + Examples -------- - >>> G=nx.path_graph(5) - >>> path=nx.single_source_dijkstra_path(G,0) + >>> G = nx.path_graph(5) + >>> path = nx.single_source_dijkstra_path(G, 0) >>> path[4] [0, 1, 2, 3, 4] @@ -286,12 +294,10 @@ def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): single_source_dijkstra(), single_source_bellman_ford() """ - return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, - weight=weight) + return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight) -def single_source_dijkstra_path_length(G, source, cutoff=None, - weight='weight'): +def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a source node. Compute the shortest path length between source and all other @@ -325,6 +331,11 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, length : dict Dict keyed by node to shortest path length from source. + Raises + ------ + NodeNotFound + If `source` is not in `G`. + Examples -------- >>> G = nx.path_graph(5) @@ -332,7 +343,7 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, >>> length[4] 4 >>> for node in [0, 1, 2, 3, 4]: - ... print('{}: {}'.format(node, length[node])) + ... 
print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -353,12 +364,10 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, single_source_dijkstra(), single_source_bellman_ford_path_length() """ - return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, - weight=weight) + return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, weight=weight) -def single_source_dijkstra(G, source, target=None, cutoff=None, - weight='weight'): +def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a source node. Compute the shortest path length between source and all other @@ -404,6 +413,10 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, distance is the distance from source to target and path is a list representing the path from source to target. + Raises + ------ + NodeNotFound + If `source` is not in `G`. Examples -------- @@ -412,7 +425,7 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, >>> print(length[4]) 4 >>> for node in [0, 1, 2, 3, 4]: - ... print('{}: {}'.format(node, length[node])) + ... print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -448,11 +461,12 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, single_source_dijkstra_path_length() single_source_bellman_ford() """ - return multi_source_dijkstra(G, {source}, cutoff=cutoff, target=target, - weight=weight) + return multi_source_dijkstra( + G, {source}, cutoff=cutoff, target=target, weight=weight + ) -def multi_source_dijkstra_path(G, sources, cutoff=None, weight='weight'): +def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a given set of source nodes. @@ -512,19 +526,19 @@ def multi_source_dijkstra_path(G, sources, cutoff=None, weight='weight'): ------ ValueError If `sources` is empty. + NodeNotFound + If any of `sources` is not in `G`. See Also -------- multi_source_dijkstra(), multi_source_bellman_ford() """ - length, path = multi_source_dijkstra(G, sources, cutoff=cutoff, - weight=weight) + length, path = multi_source_dijkstra(G, sources, cutoff=cutoff, weight=weight) return path -def multi_source_dijkstra_path_length(G, sources, cutoff=None, - weight='weight'): +def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a given set of source nodes. @@ -567,7 +581,7 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, >>> G = nx.path_graph(5) >>> length = nx.multi_source_dijkstra_path_length(G, {0, 4}) >>> for node in [0, 1, 2, 3, 4]: - ... print('{}: {}'.format(node, length[node])) + ... print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -587,6 +601,8 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, ------ ValueError If `sources` is empty. + NodeNotFound + If any of `sources` is not in `G`. See Also -------- @@ -594,13 +610,12 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, """ if not sources: - raise ValueError('sources must not be empty') + raise ValueError("sources must not be empty") weight = _weight_function(G, weight) return _dijkstra_multisource(G, sources, weight, cutoff=cutoff) -def multi_source_dijkstra(G, sources, target=None, cutoff=None, - weight='weight'): +def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a given set of source nodes. 
@@ -652,7 +667,7 @@ def multi_source_dijkstra(G, sources, target=None, cutoff=None, >>> G = nx.path_graph(5) >>> length, path = nx.multi_source_dijkstra(G, {0, 4}) >>> for node in [0, 1, 2, 3, 4]: - ... print('{}: {}'.format(node, length[node])) + ... print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -689,6 +704,8 @@ def multi_source_dijkstra(G, sources, target=None, cutoff=None, ------ ValueError If `sources` is empty. + NodeNotFound + If any of `sources` is not in `G`. See Also -------- @@ -697,23 +714,23 @@ def multi_source_dijkstra(G, sources, target=None, cutoff=None, """ if not sources: - raise ValueError('sources must not be empty') + raise ValueError("sources must not be empty") if target in sources: return (0, [target]) weight = _weight_function(G, weight) paths = {source: [source] for source in sources} # dictionary of paths - dist = _dijkstra_multisource(G, sources, weight, paths=paths, - cutoff=cutoff, target=target) + dist = _dijkstra_multisource( + G, sources, weight, paths=paths, cutoff=cutoff, target=target + ) if target is None: return (dist, paths) try: return (dist[target], paths[target]) - except KeyError: - raise nx.NetworkXNoPath("No path to {}.".format(target)) + except KeyError as e: + raise nx.NetworkXNoPath(f"No path to {target}.") from e -def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, - target=None): +def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, target=None): """Uses Dijkstra's algorithm to find shortest weighted paths from a single source. @@ -722,12 +739,14 @@ def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, `sources` set to ``[source]``. """ - return _dijkstra_multisource(G, [source], weight, pred=pred, paths=paths, - cutoff=cutoff, target=target) + return _dijkstra_multisource( + G, [source], weight, pred=pred, paths=paths, cutoff=cutoff, target=target + ) -def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, - cutoff=None, target=None): +def _dijkstra_multisource( + G, sources, weight, pred=None, paths=None, cutoff=None, target=None +): """Uses Dijkstra's algorithm to find shortest weighted paths Parameters @@ -764,6 +783,11 @@ def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, A mapping from node to shortest distance to that node from one of the source nodes. + Raises + ------ + NodeNotFound + If any of `sources` is not in `G`. 
+ Notes ----- The optional predecessor and path dictionaries can be accessed by @@ -782,6 +806,8 @@ def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, c = count() fringe = [] for source in sources: + if source not in G: + raise nx.NodeNotFound(f"Source {source} not in G") seen[source] = 0 push(fringe, (0, next(c), source)) while fringe: @@ -800,9 +826,11 @@ def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, if vu_dist > cutoff: continue if u in dist: - if vu_dist < dist[u]: - raise ValueError('Contradictory paths found:', - 'negative weights?') + u_dist = dist[u] + if vu_dist < u_dist: + raise ValueError("Contradictory paths found:", "negative weights?") + elif pred is not None and vu_dist == u_dist: + pred[u].append(v) elif u not in seen or vu_dist < seen[u]: seen[u] = vu_dist push(fringe, (vu_dist, next(c), u)) @@ -819,7 +847,7 @@ def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, return dist -def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): +def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): """Compute weighted shortest path length and predecessors. Uses Dijkstra's Method to obtain the shortest weighted paths @@ -857,6 +885,11 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): Warning: If target is specified, the dicts are incomplete as they only contain information for the nodes along a path to target. + Raises + ------ + NodeNotFound + If `source` is not in `G`. + Notes ----- Edge weight attributes must be numerical. @@ -867,8 +900,7 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): Examples -------- - >>> import networkx as nx - >>> G = nx.path_graph(5, create_using = nx.DiGraph()) + >>> G = nx.path_graph(5, create_using=nx.DiGraph()) >>> pred, dist = nx.dijkstra_predecessor_and_distance(G, 0) >>> sorted(pred.items()) [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])] @@ -887,7 +919,7 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): return (pred, _dijkstra(G, source, weight, pred=pred, cutoff=cutoff)) -def all_pairs_dijkstra(G, cutoff=None, weight='weight'): +def all_pairs_dijkstra(G, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths between all nodes. Parameters @@ -926,7 +958,7 @@ def all_pairs_dijkstra(G, cutoff=None, weight='weight'): >>> print(len_path[3][0][1]) 2 >>> for node in [0, 1, 2, 3, 4]: - ... print('3 - {}: {}'.format(node, len_path[3][0][node])) + ... print(f"3 - {node}: {len_path[3][0][node]}") 3 - 0: 3 3 - 1: 2 3 - 2: 1 @@ -954,7 +986,7 @@ def all_pairs_dijkstra(G, cutoff=None, weight='weight'): yield (n, (dist, path)) -def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): +def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"): """Compute shortest path lengths between all nodes in a weighted graph. Parameters @@ -988,7 +1020,7 @@ def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): >>> G = nx.path_graph(5) >>> length = dict(nx.all_pairs_dijkstra_path_length(G)) >>> for node in [0, 1, 2, 3, 4]: - ... print('1 - {}: {}'.format(node, length[1][node])) + ... 
print(f"1 - {node}: {length[1][node]}") 1 - 0: 1 1 - 1: 0 1 - 2: 1 @@ -1011,7 +1043,7 @@ def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): yield (n, length(G, n, cutoff=cutoff, weight=weight)) -def all_pairs_dijkstra_path(G, cutoff=None, weight='weight'): +def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): """Compute shortest paths between all nodes in a weighted graph. Parameters @@ -1062,8 +1094,9 @@ def all_pairs_dijkstra_path(G, cutoff=None, weight='weight'): yield (n, path(G, n, cutoff=cutoff, weight=weight)) -def bellman_ford_predecessor_and_distance(G, source, target=None, - cutoff=None, weight='weight'): +def bellman_ford_predecessor_and_distance( + G, source, target=None, weight="weight", heuristic=False +): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. @@ -1093,16 +1126,21 @@ def bellman_ford_predecessor_and_distance(G, source, target=None, dictionary of edge attributes for that edge. The function must return a number. + heuristic : bool + Determines whether to use a heuristic to early detect negative + cycles at a hopefully negligible cost. + Returns ------- pred, dist : dictionaries Returns two dictionaries keyed by node to predecessor in the path and to the distance from the source respectively. - Warning: If target is specified, the dicts are incomplete as they - only contain information for the nodes along a path to target. Raises ------ + NodeNotFound + If `source` is not in `G`. + NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the @@ -1111,25 +1149,25 @@ def bellman_ford_predecessor_and_distance(G, source, target=None, Examples -------- - >>> import networkx as nx - >>> G = nx.path_graph(5, create_using = nx.DiGraph()) + >>> G = nx.path_graph(5, create_using=nx.DiGraph()) >>> pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0) >>> sorted(pred.items()) - [(0, [None]), (1, [0]), (2, [1]), (3, [2]), (4, [3])] + [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])] >>> sorted(dist.items()) [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] >>> pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0, 1) >>> sorted(pred.items()) - [(0, [None]), (1, [0])] + [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])] >>> sorted(dist.items()) - [(0, 0), (1, 1)] + [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] - >>> from nose.tools import assert_raises - >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) - >>> G[1][2]['weight'] = -7 - >>> assert_raises(nx.NetworkXUnbounded, \ - nx.bellman_ford_predecessor_and_distance, G, 0) + >>> G = nx.cycle_graph(5, create_using=nx.DiGraph()) + >>> G[1][2]["weight"] = -7 + >>> nx.bellman_ford_predecessor_and_distance(G, 0) + Traceback (most recent call last): + ... + networkx.exception.NetworkXUnbounded: Negative cost cycle detected. Notes ----- @@ -1143,42 +1181,50 @@ def bellman_ford_predecessor_and_distance(G, source, target=None, not containing the source contains a negative cost (di)cycle, it will not be detected. + In NetworkX v2.1 and prior, the source node had predecessor `[None]`. 
+ In NetworkX v2.2 this changed to the source node having predecessor `[]` """ if source not in G: - raise nx.NodeNotFound("Node %s is not found in the graph" % source) + raise nx.NodeNotFound(f"Node {source} is not found in the graph") weight = _weight_function(G, weight) if any(weight(u, v, d) < 0 for u, v, d in nx.selfloop_edges(G, data=True)): raise nx.NetworkXUnbounded("Negative cost cycle detected.") dist = {source: 0} - pred = {source: [None]} + pred = {source: []} if len(G) == 1: return pred, dist weight = _weight_function(G, weight) - dist = _bellman_ford(G, [source], weight, pred=pred, dist=dist, - cutoff=cutoff, target=target) + dist = _bellman_ford( + G, [source], weight, pred=pred, dist=dist, target=target, heuristic=heuristic + ) return (pred, dist) -def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, - cutoff=None, target=None): - """Relaxation loop for Bellman–Ford algorithm +def _bellman_ford( + G, source, weight, pred=None, paths=None, dist=None, target=None, heuristic=True +): + """Relaxation loop for Bellman–Ford algorithm. + + This is an implementation of the SPFA variant. + See https://en.wikipedia.org/wiki/Shortest_Path_Faster_Algorithm Parameters ---------- G : NetworkX graph source: list - List of source nodes + List of source nodes. The shortest path from any of the source + nodes will be found if multiple sources are provided. weight : function - The weight of an edge is the value returned by the function. The - function must accept exactly three positional arguments: the two - endpoints of an edge and the dictionary of edge attributes for - that edge. The function must return a number. + The weight of an edge is the value returned by the function. The + function must accept exactly three positional arguments: the two + endpoints of an edge and the dictionary of edge attributes for + that edge. The function must return a number. pred: dict of lists, optional (default=None) dict to store a list of predecessors keyed by that node @@ -1193,13 +1239,14 @@ def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, If None, returned dist dict contents default to 0 for every node in the source list - cutoff: integer or float, optional - Depth to stop the search. Only paths of length <= cutoff are returned - target: node label, optional Ending node for path. Path lengths to other destinations may (and probably will) be incorrect. + heuristic : bool + Determines whether to use a heuristic to early detect negative + cycles at a hopefully negligible cost. + Returns ------- Returns a dict keyed by node to the distance from the source. @@ -1207,21 +1254,32 @@ def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, Raises ------ + NodeNotFound + If any of `source` is not in `G`. + NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the negative cost (di)cycle. Note: any negative weight edge in an undirected graph is a negative cost cycle """ + for s in source: + if s not in G: + raise nx.NodeNotFound(f"Source {s} not in G") if pred is None: - pred = {v: [None] for v in source} + pred = {v: [] for v in source} if dist is None: dist = {v: 0 for v in source} + # Heuristic Storage setup. 
Note: use None because nodes cannot be None + nonexistent_edge = (None, None) + pred_edge = {v: None for v in source} + recent_update = {v: nonexistent_edge for v in source} + G_succ = G.succ if G.is_directed() else G.adj - inf = float('inf') + inf = float("inf") n = len(G) count = {} @@ -1235,49 +1293,52 @@ def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, if all(pred_u not in in_q for pred_u in pred[u]): dist_u = dist[u] for v, e in G_succ[u].items(): - dist_v = dist_u + weight(v, u, e) - - if cutoff is not None: - if dist_v > cutoff: - continue - - if target is not None: - if dist_v > dist.get(target, inf): - continue + dist_v = dist_u + weight(u, v, e) if dist_v < dist.get(v, inf): + # In this conditional branch we are updating the path with v. + # If it happens that some earlier update also added node v + # that implies the existence of a negative cycle since + # after the update node v would lie on the update path twice. + # The update path is stored up to one of the source nodes, + # therefore u is always in the dict recent_update + if heuristic: + if v in recent_update[u]: + raise nx.NetworkXUnbounded("Negative cost cycle detected.") + # Transfer the recent update info from u to v if the + # same source node is the head of the update path. + # If the source node is responsible for the cost update, + # then clear the history and use it instead. + if v in pred_edge and pred_edge[v] == u: + recent_update[v] = recent_update[u] + else: + recent_update[v] = (u, v) + if v not in in_q: q.append(v) in_q.add(v) count_v = count.get(v, 0) + 1 if count_v == n: - raise nx.NetworkXUnbounded( - "Negative cost cycle detected.") + raise nx.NetworkXUnbounded("Negative cost cycle detected.") count[v] = count_v dist[v] = dist_v pred[v] = [u] + pred_edge[v] = u elif dist.get(v) is not None and dist_v == dist.get(v): pred[v].append(u) if paths is not None: + sources = set(source) dsts = [target] if target is not None else pred for dst in dsts: - - path = [dst] - cur = dst - - while pred[cur][0] is not None: - cur = pred[cur][0] - path.append(cur) - - path.reverse() - paths[dst] = path + gen = _build_paths_from_predecessors(sources, dst, pred) + paths[dst] = next(gen) return dist -def bellman_ford_path(G, source, target, weight='weight'): +def bellman_ford_path(G, source, target, weight="weight"): """Returns the shortest path from source to target in a weighted graph G. Parameters @@ -1300,12 +1361,15 @@ def bellman_ford_path(G, source, target, weight='weight'): Raises ------ + NodeNotFound + If `source` is not in `G`. + NetworkXNoPath If no path exists between source and target. Examples -------- - >>> G=nx.path_graph(5) + >>> G = nx.path_graph(5) >>> print(nx.bellman_ford_path(G, 0, 4)) [0, 1, 2, 3, 4] @@ -1318,12 +1382,11 @@ def bellman_ford_path(G, source, target, weight='weight'): -------- dijkstra_path(), bellman_ford_path_length() """ - length, path = single_source_bellman_ford(G, source, - target=target, weight=weight) + length, path = single_source_bellman_ford(G, source, target=target, weight=weight) return path -def bellman_ford_path_length(G, source, target, weight='weight'): +def bellman_ford_path_length(G, source, target, weight="weight"): """Returns the shortest path length from source to target in a weighted graph. @@ -1347,13 +1410,16 @@ def bellman_ford_path_length(G, source, target, weight='weight'): Raises ------ + NodeNotFound + If `source` is not in `G`. + NetworkXNoPath If no path exists between source and target. 
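Because these wrappers are backed by the SPFA-style ``_bellman_ford`` above, they
handle negative edge weights so long as no negative cycle is reachable from the
source. A minimal sketch of that behaviour (the three-node graph below is
illustrative only, not part of this patch):

    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from([("a", "b", 4), ("a", "c", 2), ("c", "b", -1)])
    >>> nx.bellman_ford_path(G, "a", "b")  # takes the negative edge via "c"
    ['a', 'c', 'b']
    >>> nx.bellman_ford_path_length(G, "a", "b")  # 2 + (-1)
    1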
Examples -------- - >>> G=nx.path_graph(5) - >>> print(nx.bellman_ford_path_length(G,0,4)) + >>> G = nx.path_graph(5) + >>> print(nx.bellman_ford_path_length(G, 0, 4)) 4 Notes @@ -1374,12 +1440,11 @@ def bellman_ford_path_length(G, source, target, weight='weight'): try: return length[target] - except KeyError: - raise nx.NetworkXNoPath( - "node %s not reachable from %s" % (source, target)) + except KeyError as e: + raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from e -def single_source_bellman_ford_path(G, source, cutoff=None, weight='weight'): +def single_source_bellman_ford_path(G, source, weight="weight"): """Compute shortest path between source and all other reachable nodes for a weighted graph. @@ -1393,18 +1458,20 @@ def single_source_bellman_ford_path(G, source, cutoff=None, weight='weight'): weight: string, optional (default='weight') Edge data key corresponding to the edge weight - cutoff : integer or float, optional - Depth to stop the search. Only paths of length <= cutoff are returned. - Returns ------- paths : dictionary Dictionary of shortest path lengths keyed by target. + Raises + ------ + NodeNotFound + If `source` is not in `G`. + Examples -------- - >>> G=nx.path_graph(5) - >>> path=nx.single_source_bellman_ford_path(G,0) + >>> G = nx.path_graph(5) + >>> path = nx.single_source_bellman_ford_path(G, 0) >>> path[4] [0, 1, 2, 3, 4] @@ -1418,13 +1485,11 @@ def single_source_bellman_ford_path(G, source, cutoff=None, weight='weight'): single_source_dijkstra(), single_source_bellman_ford() """ - (length, path) = single_source_bellman_ford( - G, source, cutoff=cutoff, weight=weight) + (length, path) = single_source_bellman_ford(G, source, weight=weight) return path -def single_source_bellman_ford_path_length(G, source, - cutoff=None, weight='weight'): +def single_source_bellman_ford_path_length(G, source, weight="weight"): """Compute the shortest path length between source and all other reachable nodes for a weighted graph. @@ -1438,14 +1503,16 @@ def single_source_bellman_ford_path_length(G, source, weight: string, optional (default='weight') Edge data key corresponding to the edge weight. - cutoff : integer or float, optional - Depth to stop the search. Only paths of length <= cutoff are returned. - Returns ------- length : iterator (target, shortest path length) iterator + Raises + ------ + NodeNotFound + If `source` is not in `G`. + Examples -------- >>> G = nx.path_graph(5) @@ -1453,7 +1520,7 @@ def single_source_bellman_ford_path_length(G, source, >>> length[4] 4 >>> for node in [0, 1, 2, 3, 4]: - ... print('{}: {}'.format(node, length[node])) + ... print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -1471,11 +1538,10 @@ def single_source_bellman_ford_path_length(G, source, """ weight = _weight_function(G, weight) - return _bellman_ford(G, [source], weight, cutoff=cutoff) + return _bellman_ford(G, [source], weight) -def single_source_bellman_ford(G, source, - target=None, cutoff=None, weight='weight'): +def single_source_bellman_ford(G, source, target=None, weight="weight"): """Compute shortest paths and lengths in a weighted graph G. Uses Bellman-Ford algorithm for shortest paths. @@ -1490,9 +1556,6 @@ def single_source_bellman_ford(G, source, target : node label, optional Ending node for path - cutoff : integer or float, optional - Depth to stop the search. Only paths of length <= cutoff are returned. 
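Note that this patch removes the ``cutoff`` argument from the whole Bellman-Ford
family, so callers should no longer pass a depth limit. A minimal sketch of the
updated call pattern, mirroring the doctests above:

    >>> G = nx.path_graph(5)
    >>> length = dict(nx.single_source_bellman_ford_path_length(G, 0))
    >>> length[4]  # no cutoff keyword; the full search is always run
    4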
- Returns ------- distance, path : pair of dictionaries, or numeric and list @@ -1503,6 +1566,10 @@ def single_source_bellman_ford(G, source, distance is the distance from source to target and path is a list representing the path from source to target. + Raises + ------ + NodeNotFound + If `source` is not in `G`. Examples -------- @@ -1511,7 +1578,7 @@ def single_source_bellman_ford(G, source, >>> print(length[4]) 4 >>> for node in [0, 1, 2, 3, 4]: - ... print('{}: {}'.format(node, length[node])) + ... print(f"{node}: {length[node]}") 0: 0 1: 1 2: 2 @@ -1542,18 +1609,17 @@ def single_source_bellman_ford(G, source, weight = _weight_function(G, weight) paths = {source: [source]} # dictionary of paths - dist = _bellman_ford(G, [source], weight, paths=paths, cutoff=cutoff, - target=target) + dist = _bellman_ford(G, [source], weight, paths=paths, target=target) if target is None: return (dist, paths) try: return (dist[target], paths[target]) - except KeyError: - msg = "Node %s not reachable from %s" % (source, target) - raise nx.NetworkXNoPath(msg) + except KeyError as e: + msg = f"Node {target} not reachable from {source}" + raise nx.NetworkXNoPath(msg) from e -def all_pairs_bellman_ford_path_length(G, cutoff=None, weight='weight'): +def all_pairs_bellman_ford_path_length(G, weight="weight"): """ Compute shortest path lengths between all nodes in a weighted graph. Parameters @@ -1563,9 +1629,6 @@ def all_pairs_bellman_ford_path_length(G, cutoff=None, weight='weight'): weight: string, optional (default='weight') Edge data key corresponding to the edge weight - cutoff : integer or float, optional - Depth to stop the search. Only paths of length <= cutoff are returned. - Returns ------- distance : iterator @@ -1577,7 +1640,7 @@ def all_pairs_bellman_ford_path_length(G, cutoff=None, weight='weight'): >>> G = nx.path_graph(5) >>> length = dict(nx.all_pairs_bellman_ford_path_length(G)) >>> for node in [0, 1, 2, 3, 4]: - ... print('1 - {}: {}'.format(node, length[1][node])) + ... print(f"1 - {node}: {length[1][node]}") 1 - 0: 1 1 - 1: 0 1 - 2: 1 @@ -1597,10 +1660,10 @@ def all_pairs_bellman_ford_path_length(G, cutoff=None, weight='weight'): """ length = single_source_bellman_ford_path_length for n in G: - yield (n, dict(length(G, n, cutoff=cutoff, weight=weight))) + yield (n, dict(length(G, n, weight=weight))) -def all_pairs_bellman_ford_path(G, cutoff=None, weight='weight'): +def all_pairs_bellman_ford_path(G, weight="weight"): """ Compute shortest paths between all nodes in a weighted graph. Parameters @@ -1610,9 +1673,6 @@ def all_pairs_bellman_ford_path(G, cutoff=None, weight='weight'): weight: string, optional (default='weight') Edge data key corresponding to the edge weight - cutoff : integer or float, optional - Depth to stop the search. Only paths of length <= cutoff are returned. - Returns ------- distance : dictionary @@ -1638,10 +1698,10 @@ def all_pairs_bellman_ford_path(G, cutoff=None, weight='weight'): path = single_source_bellman_ford_path # TODO This can be trivially parallelized. for n in G: - yield (n, path(G, n, cutoff=cutoff, weight=weight)) + yield (n, path(G, n, weight=weight)) -def goldberg_radzik(G, source, weight='weight'): +def goldberg_radzik(G, source, weight="weight"): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. @@ -1679,6 +1739,9 @@ def goldberg_radzik(G, source, weight='weight'): Raises ------ + NodeNotFound + If `source` is not in `G`. 
+ NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the @@ -1687,18 +1750,19 @@ def goldberg_radzik(G, source, weight='weight'): Examples -------- - >>> import networkx as nx - >>> G = nx.path_graph(5, create_using = nx.DiGraph()) + >>> G = nx.path_graph(5, create_using=nx.DiGraph()) >>> pred, dist = nx.goldberg_radzik(G, 0) >>> sorted(pred.items()) [(0, None), (1, 0), (2, 1), (3, 2), (4, 3)] >>> sorted(dist.items()) [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] - >>> from nose.tools import assert_raises - >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) - >>> G[1][2]['weight'] = -7 - >>> assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 0) + >>> G = nx.cycle_graph(5, create_using=nx.DiGraph()) + >>> G[1][2]["weight"] = -7 + >>> nx.goldberg_radzik(G, 0) + Traceback (most recent call last): + ... + networkx.exception.NetworkXUnbounded: Negative cost cycle detected. Notes ----- @@ -1714,7 +1778,7 @@ def goldberg_radzik(G, source, weight='weight'): """ if source not in G: - raise nx.NodeNotFound("Node %s is not found in the graph" % source) + raise nx.NodeNotFound(f"Node {source} is not found in the graph") weight = _weight_function(G, weight) if any(weight(u, v, d) < 0 for u, v, d in nx.selfloop_edges(G, data=True)): raise nx.NetworkXUnbounded("Negative cost cycle detected.") @@ -1727,7 +1791,7 @@ def goldberg_radzik(G, source, weight='weight'): else: G_succ = G.adj - inf = float('inf') + inf = float("inf") d = {u: inf for u in G} d[source] = 0 pred = {source: None} @@ -1752,14 +1816,13 @@ def topo_sort(relabeled): continue d_u = d[u] # Skip nodes without out-edges of negative reduced costs. - if all(d_u + weight(u, v, e) >= d[v] - for v, e in G_succ[u].items()): + if all(d_u + weight(u, v, e) >= d[v] for v, e in G_succ[u].items()): continue # Nonrecursive DFS that inserts nodes reachable from u via edges of # nonpositive reduced costs into to_scan in (reverse) topological # order. stack = [(u, iter(G_succ[u].items()))] - in_stack = set([u]) + in_stack = {u} neg_count[u] = 0 while stack: u, it = stack[-1] @@ -1780,14 +1843,12 @@ def topo_sort(relabeled): neg_count[v] = neg_count[u] + int(is_neg) stack.append((v, iter(G_succ[v].items()))) in_stack.add(v) - elif (v in in_stack and - neg_count[u] + int(is_neg) > neg_count[v]): + elif v in in_stack and neg_count[u] + int(is_neg) > neg_count[v]: # (u, v) is a back edge, and the cycle formed by the # path v to u and (u, v) contains at least one edge of # negative reduced cost. The cycle must be of negative # cost. - raise nx.NetworkXUnbounded( - 'Negative cost cycle detected.') + raise nx.NetworkXUnbounded("Negative cost cycle detected.") to_scan.reverse() return to_scan @@ -1809,7 +1870,7 @@ def relax(to_scan): # Set of nodes relabled in the last round of scan operations. Denoted by B # in Goldberg and Radzik's paper. - relabeled = set([source]) + relabeled = {source} while relabeled: to_scan = topo_sort(relabeled) @@ -1819,8 +1880,8 @@ def relax(to_scan): return pred, d -def negative_edge_cycle(G, weight='weight'): - """Return True if there exists a negative edge cycle anywhere in G. +def negative_edge_cycle(G, weight="weight", heuristic=True): + """Returns True if there exists a negative edge cycle anywhere in G. Parameters ---------- @@ -1839,6 +1900,11 @@ def negative_edge_cycle(G, weight='weight'): dictionary of edge attributes for that edge. The function must return a number. 
+ heuristic : bool + Determines whether to use a heuristic to early detect negative + cycles at a negligible cost. In case of graphs with a negative cycle, + the performance of detection increases by at least an order of magnitude. + Returns ------- negative_cycle : bool @@ -1846,11 +1912,10 @@ def negative_edge_cycle(G, weight='weight'): Examples -------- - >>> import networkx as nx - >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) + >>> G = nx.cycle_graph(5, create_using=nx.DiGraph()) >>> print(nx.negative_edge_cycle(G)) False - >>> G[1][2]['weight'] = -7 + >>> G[1][2]["weight"] = -7 >>> print(nx.negative_edge_cycle(G)) True @@ -1868,7 +1933,7 @@ def negative_edge_cycle(G, weight='weight'): G.add_edges_from([(newnode, n) for n in G]) try: - bellman_ford_predecessor_and_distance(G, newnode, weight) + bellman_ford_predecessor_and_distance(G, newnode, weight, heuristic=heuristic) except nx.NetworkXUnbounded: return True finally: @@ -1876,8 +1941,8 @@ def negative_edge_cycle(G, weight='weight'): return False -def bidirectional_dijkstra(G, source, target, weight='weight'): - """Dijkstra's algorithm for shortest paths using bidirectional search. +def bidirectional_dijkstra(G, source, target, weight="weight"): + r"""Dijkstra's algorithm for shortest paths using bidirectional search. Parameters ---------- @@ -1910,6 +1975,9 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): Raises ------ + NodeNotFound + If either `source` or `target` is not in `G`. + NetworkXNoPath If no path exists between source and target. @@ -1947,15 +2015,17 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): shortest_path_length """ if source not in G or target not in G: - msg = 'Either source {} or target {} is not in G' - raise nx.NodeNotFound(msg.format(source, target)) + msg = f"Either source {source} or target {target} is not in G" + raise nx.NodeNotFound(msg) if source == target: return (0, [source]) + + weight = _weight_function(G, weight) push = heappush pop = heappop # Init: [Forward, Backward] - dists = [{}, {}] # dictionary of final distances + dists = [{}, {}] # dictionary of final distances paths = [{source: [source]}, {target: [target]}] # dictionary of paths fringe = [[], []] # heap of (distance, node) for choosing node to expand seen = [{source: 0}, {target: 0}] # dict of distances to seen nodes @@ -1965,9 +2035,9 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): push(fringe[1], (0, next(c), target)) # neighs for extracting correct neighbor information if G.is_directed(): - neighs = [G.successors, G.predecessors] + neighs = [G._succ, G._pred] else: - neighs = [G.neighbors, G.neighbors] + neighs = [G._adj, G._adj] # variables to hold shortest discovered path # finaldist = 1e30000 finalpath = [] @@ -1988,26 +2058,14 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): # we have now discovered the shortest path return (finaldist, finalpath) - for w in neighs[dir](v): - if(dir == 0): # forward - if G.is_multigraph(): - minweight = min((dd.get(weight, 1) - for k, dd in G[v][w].items())) - else: - minweight = G[v][w].get(weight, 1) - vwLength = dists[dir][v] + minweight # G[v][w].get(weight,1) + for w, d in neighs[dir][v].items(): + if dir == 0: # forward + vwLength = dists[dir][v] + weight(v, w, d) else: # back, must remember to change v,w->w,v - if G.is_multigraph(): - minweight = min((dd.get(weight, 1) - for k, dd in G[w][v].items())) - else: - minweight = G[w][v].get(weight, 1) - vwLength = dists[dir][v] + minweight # G[w][v].get(weight,1) 
- + vwLength = dists[dir][v] + weight(w, v, d) if w in dists[dir]: if vwLength < dists[dir][w]: - raise ValueError( - "Contradictory paths found: negative weights?") + raise ValueError("Contradictory paths found: negative weights?") elif w not in seen[dir] or vwLength < seen[dir][w]: # relaxing seen[dir][w] = vwLength @@ -2022,10 +2080,10 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): revpath = paths[1][w][:] revpath.reverse() finalpath = paths[0][w] + revpath[1:] - raise nx.NetworkXNoPath("No path between %s and %s." % (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -def johnson(G, weight='weight'): +def johnson(G, weight="weight"): r"""Uses Johnson's Algorithm to compute shortest paths. Johnson's Algorithm finds a shortest path between each pair of @@ -2060,12 +2118,12 @@ def johnson(G, weight='weight'): Examples -------- - >>> import networkx as nx >>> graph = nx.DiGraph() - >>> graph.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), - ... ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)]) - >>> paths = nx.johnson(graph, weight='weight') - >>> paths['0']['2'] + >>> graph.add_weighted_edges_from( + ... [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)] + ... ) + >>> paths = nx.johnson(graph, weight="weight") + >>> paths["0"]["2"] ['0', '1', '2'] Notes @@ -2093,10 +2151,10 @@ def johnson(G, weight='weight'): """ if not nx.is_weighted(G, weight=weight): - raise nx.NetworkXError('Graph is not weighted.') + raise nx.NetworkXError("Graph is not weighted.") dist = {v: 0 for v in G} - pred = {v: [None] for v in G} + pred = {v: [] for v in G} weight = _weight_function(G, weight) # Calculate distance of shortest paths diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 49aecf9..9a129c6 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -1,17 +1,28 @@ -# -*- coding: utf-8 -*- -from __future__ import print_function -import math +""" Functions measuring similarity using graph edit distance. + +The graph edit distance is the number of edge/node changes needed +to make two graphs isomorphic. + +The default algorithm/implementation is sub-optimal for some graphs. +The problem of finding the exact Graph Edit Distance (GED) is NP-hard +so it is often slow. If the simple interface `graph_edit_distance` +takes too long for your graph, try `optimize_graph_edit_distance` +and/or `optimize_edit_paths`. + +At the same time, I encourage capable people to investigate +alternative GED algorithms, in order to improve the choices available. 
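A minimal illustration of that trade-off, with two small graphs chosen only for
this example:

    >>> G1, G2 = nx.cycle_graph(4), nx.path_graph(4)
    >>> nx.graph_edit_distance(G1, G2)  # exact GED: one edge deletion
    1.0
    >>> approx = next(nx.optimize_graph_edit_distance(G1, G2))  # first upper bound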
+""" +import time +from itertools import product import networkx as nx -from operator import * -import sys - -__author__ = 'Andrey Paramonov ' __all__ = [ - 'graph_edit_distance', - 'optimal_edit_paths', - 'optimize_graph_edit_distance', - 'optimize_edit_paths' + "graph_edit_distance", + "optimal_edit_paths", + "optimize_graph_edit_distance", + "optimize_edit_paths", + "simrank_similarity", + "simrank_similarity_numpy", ] @@ -19,10 +30,21 @@ def debug_print(*args, **kwargs): print(*args, **kwargs) -def graph_edit_distance(G1, G2, node_match=None, edge_match=None, - node_subst_cost=None, node_del_cost=None, node_ins_cost=None, - edge_subst_cost=None, edge_del_cost=None, edge_ins_cost=None, - upper_bound=None): +def graph_edit_distance( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + roots=None, + upper_bound=None, + timeout=None, +): """Returns GED (graph edit distance) between graphs G1 and G2. Graph edit distance is a graph similarity measure analogous to @@ -112,10 +134,20 @@ def graph_edit_distance(G1, G2, node_match=None, edge_match=None, cost of 1 is used. If edge_ins_cost is not specified then default edge insertion cost of 1 is used. + roots : 2-tuple + Tuple where first element is a node in G1 and the second + is a node in G2. + These nodes are forced to be matched in the comparison to + allow comparison between rooted graphs. + upper_bound : numeric Maximum edit distance to consider. Return None if no edit distance under or equal to upper_bound exists. + timeout : numeric + Maximum number of seconds to execute. + After timeout is met, the current best GED is returned. + Examples -------- >>> G1 = nx.cycle_graph(6) @@ -123,6 +155,13 @@ def graph_edit_distance(G1, G2, node_match=None, edge_match=None, >>> nx.graph_edit_distance(G1, G2) 7.0 + >>> G1 = nx.star_graph(5) + >>> G2 = nx.star_graph(5) + >>> nx.graph_edit_distance(G1, G2, roots=(0, 0)) + 0.0 + >>> nx.graph_edit_distance(G1, G2, roots=(1, 0)) + 8.0 + See Also -------- optimal_edit_paths, optimize_graph_edit_distance, @@ -141,20 +180,40 @@ def graph_edit_distance(G1, G2, node_match=None, edge_match=None, """ bestcost = None - for vertex_path, edge_path, cost in \ - optimize_edit_paths(G1, G2, node_match, edge_match, - node_subst_cost, node_del_cost, node_ins_cost, - edge_subst_cost, edge_del_cost, edge_ins_cost, - upper_bound, True): - #assert bestcost is None or cost < bestcost + for vertex_path, edge_path, cost in optimize_edit_paths( + G1, + G2, + node_match, + edge_match, + node_subst_cost, + node_del_cost, + node_ins_cost, + edge_subst_cost, + edge_del_cost, + edge_ins_cost, + upper_bound, + True, + roots, + timeout, + ): + # assert bestcost is None or cost < bestcost bestcost = cost return bestcost -def optimal_edit_paths(G1, G2, node_match=None, edge_match=None, - node_subst_cost=None, node_del_cost=None, node_ins_cost=None, - edge_subst_cost=None, edge_del_cost=None, edge_ins_cost=None, - upper_bound=None): +def optimal_edit_paths( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + upper_bound=None, +): """Returns all minimum-cost edit paths transforming G1 to G2. 
Graph edit path is a sequence of node and edge edit operations @@ -257,13 +316,13 @@ def optimal_edit_paths(G1, G2, node_match=None, edge_match=None, Examples -------- - >>> G1 = nx.cycle_graph(6) - >>> G2 = nx.wheel_graph(7) + >>> G1 = nx.cycle_graph(4) + >>> G2 = nx.wheel_graph(5) >>> paths, cost = nx.optimal_edit_paths(G1, G2) >>> len(paths) - 84 + 40 >>> cost - 7.0 + 5.0 See Also -------- @@ -282,12 +341,21 @@ def optimal_edit_paths(G1, G2, node_match=None, edge_match=None, """ paths = list() bestcost = None - for vertex_path, edge_path, cost in \ - optimize_edit_paths(G1, G2, node_match, edge_match, - node_subst_cost, node_del_cost, node_ins_cost, - edge_subst_cost, edge_del_cost, edge_ins_cost, - upper_bound, False): - #assert bestcost is None or cost <= bestcost + for vertex_path, edge_path, cost in optimize_edit_paths( + G1, + G2, + node_match, + edge_match, + node_subst_cost, + node_del_cost, + node_ins_cost, + edge_subst_cost, + edge_del_cost, + edge_ins_cost, + upper_bound, + False, + ): + # assert bestcost is None or cost <= bestcost if bestcost is not None and cost < bestcost: paths = list() paths.append((vertex_path, edge_path)) @@ -295,10 +363,19 @@ def optimal_edit_paths(G1, G2, node_match=None, edge_match=None, return paths, bestcost -def optimize_graph_edit_distance(G1, G2, node_match=None, edge_match=None, - node_subst_cost=None, node_del_cost=None, node_ins_cost=None, - edge_subst_cost=None, edge_del_cost=None, edge_ins_cost=None, - upper_bound=None): +def optimize_graph_edit_distance( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + upper_bound=None, +): """Returns consecutive approximations of GED (graph edit distance) between graphs G1 and G2. @@ -419,18 +496,39 @@ def optimize_graph_edit_distance(G1, G2, node_match=None, edge_match=None, <10.5220/0005209202710278>. https://hal.archives-ouvertes.fr/hal-01168816 """ - for vertex_path, edge_path, cost in \ - optimize_edit_paths(G1, G2, node_match, edge_match, - node_subst_cost, node_del_cost, node_ins_cost, - edge_subst_cost, edge_del_cost, edge_ins_cost, - upper_bound, True): + for vertex_path, edge_path, cost in optimize_edit_paths( + G1, + G2, + node_match, + edge_match, + node_subst_cost, + node_del_cost, + node_ins_cost, + edge_subst_cost, + edge_del_cost, + edge_ins_cost, + upper_bound, + True, + ): yield cost -def optimize_edit_paths(G1, G2, node_match=None, edge_match=None, - node_subst_cost=None, node_del_cost=None, node_ins_cost=None, - edge_subst_cost=None, edge_del_cost=None, edge_ins_cost=None, - upper_bound=None, strictly_decreasing=True): +def optimize_edit_paths( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + upper_bound=None, + strictly_decreasing=True, + roots=None, + timeout=None, +): """GED (graph edit distance) calculation: advanced interface. Graph edit path is a sequence of node and edge edit operations @@ -529,6 +627,16 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None, decreasing cost. Otherwise, return all edit paths of cost less than or equal to the previous minimum cost. + roots : 2-tuple + Tuple where first element is a node in G1 and the second + is a node in G2. + These nodes are forced to be matched in the comparison to + allow comparison between rooted graphs. 
+ + timeout : numeric + Maximum number of seconds to execute. + After timeout is met, the current best GED is returned. + Returns ------- Generator of tuples (node_edit_path, edge_edit_path, cost) @@ -558,49 +666,51 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None, class CostMatrix: def __init__(self, C, lsa_row_ind, lsa_col_ind, ls): - #assert C.shape[0] == len(lsa_row_ind) - #assert C.shape[1] == len(lsa_col_ind) - #assert len(lsa_row_ind) == len(lsa_col_ind) - #assert set(lsa_row_ind) == set(range(len(lsa_row_ind))) - #assert set(lsa_col_ind) == set(range(len(lsa_col_ind))) - #assert ls == C[lsa_row_ind, lsa_col_ind].sum() + # assert C.shape[0] == len(lsa_row_ind) + # assert C.shape[1] == len(lsa_col_ind) + # assert len(lsa_row_ind) == len(lsa_col_ind) + # assert set(lsa_row_ind) == set(range(len(lsa_row_ind))) + # assert set(lsa_col_ind) == set(range(len(lsa_col_ind))) + # assert ls == C[lsa_row_ind, lsa_col_ind].sum() self.C = C self.lsa_row_ind = lsa_row_ind self.lsa_col_ind = lsa_col_ind self.ls = ls def make_CostMatrix(C, m, n): - #assert(C.shape == (m + n, m + n)) + # assert(C.shape == (m + n, m + n)) lsa_row_ind, lsa_col_ind = linear_sum_assignment(C) # Fixup dummy assignments: - # each substitution i<->j should have corresponding dummy assignment m+j<->n+i + # each substitution i<->j should have dummy assignment m+j<->n+i # NOTE: fast reduce of Cv relies on it - #assert len(lsa_row_ind) == len(lsa_col_ind) - subst_ind = list(k for k, i, j in zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind) - if i < m and j < n) - dummy_ind = list(k for k, i, j in zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind) - if i >= m and j >= n) - #assert len(subst_ind) == len(dummy_ind) + # assert len(lsa_row_ind) == len(lsa_col_ind) + indexes = zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind) + subst_ind = list(k for k, i, j in indexes if i < m and j < n) + indexes = zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind) + dummy_ind = list(k for k, i, j in indexes if i >= m and j >= n) + # assert len(subst_ind) == len(dummy_ind) lsa_row_ind[dummy_ind] = lsa_col_ind[subst_ind] + m lsa_col_ind[dummy_ind] = lsa_row_ind[subst_ind] + n - return CostMatrix(C, lsa_row_ind, lsa_col_ind, C[lsa_row_ind, lsa_col_ind].sum()) + return CostMatrix( + C, lsa_row_ind, lsa_col_ind, C[lsa_row_ind, lsa_col_ind].sum() + ) def extract_C(C, i, j, m, n): - #assert(C.shape == (m + n, m + n)) + # assert(C.shape == (m + n, m + n)) row_ind = [k in i or k - m in j for k in range(m + n)] col_ind = [k in j or k - n in i for k in range(m + n)] return C[row_ind, :][:, col_ind] def reduce_C(C, i, j, m, n): - #assert(C.shape == (m + n, m + n)) + # assert(C.shape == (m + n, m + n)) row_ind = [k not in i and k - m not in j for k in range(m + n)] col_ind = [k not in j and k - n not in i for k in range(m + n)] return C[row_ind, :][:, col_ind] def reduce_ind(ind, i): - #assert set(ind) == set(range(len(ind))) + # assert set(ind) == set(range(len(ind))) rind = ind[[k not in i for k in ind]] for k in set(i): rind[rind >= k] -= 1 @@ -625,37 +735,60 @@ def match_edges(u, v, pending_g, pending_h, Ce, matched_uv=[]): """ M = len(pending_g) N = len(pending_h) - #assert Ce.C.shape == (M + N, M + N) - - g_ind = list(i for i in range(M) - if any(pending_g[i] in ((p, u), (u, p), (u, u)) - for p, q in matched_uv)) - h_ind = list(j for j in range(N) - if any(pending_h[j] in ((q, v), (v, q), (v, v)) - for p, q in matched_uv)) + # assert Ce.C.shape == (M + N, M + N) + + g_ind = [ + i + for i in range(M) + if 
pending_g[i][:2] == (u, u) + or any(pending_g[i][:2] in ((p, u), (u, p)) for p, q in matched_uv) + ] + h_ind = [ + j + for j in range(N) + if pending_h[j][:2] == (v, v) + or any(pending_h[j][:2] in ((q, v), (v, q)) for p, q in matched_uv) + ] m = len(g_ind) n = len(h_ind) if m or n: C = extract_C(Ce.C, g_ind, h_ind, M, N) - #assert C.shape == (m + n, m + n) + # assert C.shape == (m + n, m + n) # Forbid structurally invalid matches - inf = min(min(Ce.C.sum(axis=0)), min(Ce.C.sum(axis=1))) + 1 + # NOTE: inf remembered from Ce construction for k, i in zip(range(m), g_ind): - g = pending_g[i] + g = pending_g[i][:2] for l, j in zip(range(n), h_ind): - h = pending_h[j] - if not any(g in ((p, u), (u, p)) and h in ((q, v), (v, q)) - or g == (u, u) and h == (v, v) - for p, q in matched_uv): - C[k, l] = inf + h = pending_h[j][:2] + if nx.is_directed(G1) or nx.is_directed(G2): + if any( + g == (p, u) and h == (q, v) or g == (u, p) and h == (v, q) + for p, q in matched_uv + ): + continue + else: + if any( + g in ((p, u), (u, p)) and h in ((q, v), (v, q)) + for p, q in matched_uv + ): + continue + if g == (u, u): + continue + if h == (v, v): + continue + C[k, l] = inf localCe = make_CostMatrix(C, m, n) - ij = list((g_ind[k] if k < m else M + h_ind[l], - h_ind[l] if l < n else N + g_ind[k]) - for k, l in zip(localCe.lsa_row_ind, localCe.lsa_col_ind) - if k < m or l < n) + ij = list( + ( + g_ind[k] if k < m else M + h_ind[l], + h_ind[l] if l < n else N + g_ind[k], + ) + for k, l in zip(localCe.lsa_row_ind, localCe.lsa_col_ind) + if k < m or l < n + ) else: ij = [] @@ -672,8 +805,9 @@ def reduce_Ce(Ce, ij, m, n): else: return Ce - def get_edit_ops(matched_uv, pending_u, pending_v, Cv, - pending_g, pending_h, Ce, matched_cost): + def get_edit_ops( + matched_uv, pending_u, pending_v, Cv, pending_g, pending_h, Ce, matched_cost + ): """ Parameters: matched_uv: partial vertex edit path @@ -698,59 +832,89 @@ def get_edit_ops(matched_uv, pending_u, pending_v, Cv, """ m = len(pending_u) n = len(pending_v) - #assert Cv.C.shape == (m + n, m + n) + # assert Cv.C.shape == (m + n, m + n) # 1) a vertex mapping from optimal linear sum assignment - i, j = min((k, l) for k, l in zip(Cv.lsa_row_ind, Cv.lsa_col_ind) - if k < m or l < n) - xy, localCe = match_edges(pending_u[i] if i < m else None, pending_v[j] if j < n else None, - pending_g, pending_h, Ce, matched_uv) + i, j = min( + (k, l) for k, l in zip(Cv.lsa_row_ind, Cv.lsa_col_ind) if k < m or l < n + ) + xy, localCe = match_edges( + pending_u[i] if i < m else None, + pending_v[j] if j < n else None, + pending_g, + pending_h, + Ce, + matched_uv, + ) Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h)) - #assert Ce.ls <= localCe.ls + Ce_xy.ls + # assert Ce.ls <= localCe.ls + Ce_xy.ls if prune(matched_cost + Cv.ls + localCe.ls + Ce_xy.ls): pass else: - # # get reduced Cv efficiently - Cv_ij = CostMatrix(reduce_C(Cv.C, (i,), (j,), m, n), - reduce_ind(Cv.lsa_row_ind, (i, m + j)), - reduce_ind(Cv.lsa_col_ind, (j, n + i)), - Cv.ls - Cv.C[i, j]) + # get reduced Cv efficiently + Cv_ij = CostMatrix( + reduce_C(Cv.C, (i,), (j,), m, n), + reduce_ind(Cv.lsa_row_ind, (i, m + j)), + reduce_ind(Cv.lsa_col_ind, (j, n + i)), + Cv.ls - Cv.C[i, j], + ) yield (i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls # 2) other candidates, sorted by lower-bound cost estimate other = list() fixed_i, fixed_j = i, j if m <= n: - candidates = ((t, fixed_j) for t in range(m + n) - if t != fixed_i and (t < m or t == m + fixed_j)) + candidates = ( + (t, fixed_j) + for t in range(m + n) + if t != 
fixed_i and (t < m or t == m + fixed_j) + ) else: - candidates = ((fixed_i, t) for t in range(m + n) - if t != fixed_j and (t < n or t == n + fixed_i)) + candidates = ( + (fixed_i, t) + for t in range(m + n) + if t != fixed_j and (t < n or t == n + fixed_i) + ) for i, j in candidates: if prune(matched_cost + Cv.C[i, j] + Ce.ls): continue - Cv_ij = make_CostMatrix(reduce_C(Cv.C, (i,), (j,), m, n), - m - 1 if i < m else m, - n - 1 if j < n else n) - #assert Cv.ls <= Cv.C[i, j] + Cv_ij.ls + Cv_ij = make_CostMatrix( + reduce_C(Cv.C, (i,), (j,), m, n), + m - 1 if i < m else m, + n - 1 if j < n else n, + ) + # assert Cv.ls <= Cv.C[i, j] + Cv_ij.ls if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + Ce.ls): continue - xy, localCe = match_edges(pending_u[i] if i < m else None, pending_v[j] if j < n else None, - pending_g, pending_h, Ce, matched_uv) + xy, localCe = match_edges( + pending_u[i] if i < m else None, + pending_v[j] if j < n else None, + pending_g, + pending_h, + Ce, + matched_uv, + ) if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls): continue Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h)) - #assert Ce.ls <= localCe.ls + Ce_xy.ls + # assert Ce.ls <= localCe.ls + Ce_xy.ls if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls + Ce_xy.ls): continue other.append(((i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls)) - # yield from - for t in sorted(other, key=lambda t: t[4] + t[1].ls + t[3].ls): - yield t - - def get_edit_paths(matched_uv, pending_u, pending_v, Cv, - matched_gh, pending_g, pending_h, Ce, matched_cost): + yield from sorted(other, key=lambda t: t[4] + t[1].ls + t[3].ls) + + def get_edit_paths( + matched_uv, + pending_u, + pending_v, + Cv, + matched_gh, + pending_g, + pending_h, + Ce, + matched_cost, + ): """ Parameters: matched_uv: partial vertex edit path @@ -776,38 +940,46 @@ def get_edit_paths(matched_uv, pending_u, pending_v, Cv, cost: total cost of edit path NOTE: path costs are non-increasing """ - #debug_print('matched-uv:', matched_uv) - #debug_print('matched-gh:', matched_gh) - #debug_print('matched-cost:', matched_cost) - #debug_print('pending-u:', pending_u) - #debug_print('pending-v:', pending_v) + # debug_print('matched-uv:', matched_uv) + # debug_print('matched-gh:', matched_gh) + # debug_print('matched-cost:', matched_cost) + # debug_print('pending-u:', pending_u) + # debug_print('pending-v:', pending_v) # debug_print(Cv.C) - #assert list(sorted(G1.nodes)) == list(sorted(list(u for u, v in matched_uv if u is not None) + pending_u)) - #assert list(sorted(G2.nodes)) == list(sorted(list(v for u, v in matched_uv if v is not None) + pending_v)) - #debug_print('pending-g:', pending_g) - #debug_print('pending-h:', pending_h) + # assert list(sorted(G1.nodes)) == list(sorted(list(u for u, v in matched_uv if u is not None) + pending_u)) + # assert list(sorted(G2.nodes)) == list(sorted(list(v for u, v in matched_uv if v is not None) + pending_v)) + # debug_print('pending-g:', pending_g) + # debug_print('pending-h:', pending_h) # debug_print(Ce.C) - #assert list(sorted(G1.edges)) == list(sorted(list(g for g, h in matched_gh if g is not None) + pending_g)) - #assert list(sorted(G2.edges)) == list(sorted(list(h for g, h in matched_gh if h is not None) + pending_h)) + # assert list(sorted(G1.edges)) == list(sorted(list(g for g, h in matched_gh if g is not None) + pending_g)) + # assert list(sorted(G2.edges)) == list(sorted(list(h for g, h in matched_gh if h is not None) + pending_h)) # debug_print() if prune(matched_cost + Cv.ls + Ce.ls): return if not 
max(len(pending_u), len(pending_v)): - #assert not len(pending_g) - #assert not len(pending_h) + # assert not len(pending_g) + # assert not len(pending_h) # path completed! - #assert matched_cost <= maxcost.value + # assert matched_cost <= maxcost.value maxcost.value = min(maxcost.value, matched_cost) yield matched_uv, matched_gh, matched_cost else: - edit_ops = get_edit_ops(matched_uv, pending_u, pending_v, Cv, - pending_g, pending_h, Ce, matched_cost) + edit_ops = get_edit_ops( + matched_uv, + pending_u, + pending_v, + Cv, + pending_g, + pending_h, + Ce, + matched_cost, + ) for ij, Cv_ij, xy, Ce_xy, edit_cost in edit_ops: i, j = ij - #assert Cv.C[i, j] + sum(Ce.C[t] for t in xy) == edit_cost + # assert Cv.C[i, j] + sum(Ce.C[t] for t in xy) == edit_cost if prune(matched_cost + edit_cost + Cv_ij.ls + Ce_xy.ls): continue @@ -816,25 +988,41 @@ def get_edit_paths(matched_uv, pending_u, pending_v, Cv, v = pending_v.pop(j) if j < len(pending_v) else None matched_uv.append((u, v)) for x, y in xy: - matched_gh.append((pending_g[x] if x < len(pending_g) else None, - pending_h[y] if y < len(pending_h) else None)) + len_g = len(pending_g) + len_h = len(pending_h) + matched_gh.append( + ( + pending_g[x] if x < len_g else None, + pending_h[y] if y < len_h else None, + ) + ) sortedx = list(sorted(x for x, y in xy)) sortedy = list(sorted(y for x, y in xy)) - G = list((pending_g.pop(x) if x < len(pending_g) else None) - for x in reversed(sortedx)) - H = list((pending_h.pop(y) if y < len(pending_h) else None) - for y in reversed(sortedy)) - - # yield from - for t in get_edit_paths(matched_uv, pending_u, pending_v, Cv_ij, - matched_gh, pending_g, pending_h, Ce_xy, - matched_cost + edit_cost): - yield t + G = list( + (pending_g.pop(x) if x < len(pending_g) else None) + for x in reversed(sortedx) + ) + H = list( + (pending_h.pop(y) if y < len(pending_h) else None) + for y in reversed(sortedy) + ) + + yield from get_edit_paths( + matched_uv, + pending_u, + pending_v, + Cv_ij, + matched_gh, + pending_g, + pending_h, + Ce_xy, + matched_cost + edit_cost, + ) # backtrack - if not u is None: + if u is not None: pending_u.insert(i, u) - if not v is None: + if v is not None: pending_v.insert(j, v) matched_uv.pop() for x, g in zip(sortedx, reversed(G)): @@ -851,37 +1039,63 @@ def get_edit_paths(matched_uv, pending_u, pending_v, Cv, pending_u = list(G1.nodes) pending_v = list(G2.nodes) + initial_cost = 0 + if roots: + root_u, root_v = roots + if root_u not in pending_u or root_v not in pending_v: + raise nx.NodeNotFound("Root node not in graph.") + + # remove roots from pending + pending_u.remove(root_u) + pending_v.remove(root_v) + # cost matrix of vertex mappings m = len(pending_u) n = len(pending_v) C = np.zeros((m + n, m + n)) if node_subst_cost: - C[0:m, 0:n] = np.array([node_subst_cost(G1.nodes[u], G2.nodes[v]) - for u in pending_u for v in pending_v]).reshape(m, n) + C[0:m, 0:n] = np.array( + [ + node_subst_cost(G1.nodes[u], G2.nodes[v]) + for u in pending_u + for v in pending_v + ] + ).reshape(m, n) + if roots: + initial_cost = node_subst_cost(G1.nodes[root_u], G2.nodes[root_v]) elif node_match: - C[0:m, 0:n] = np.array([1 - int(node_match(G1.nodes[u], G2.nodes[v])) - for u in pending_u for v in pending_v]).reshape(m, n) + C[0:m, 0:n] = np.array( + [ + 1 - int(node_match(G1.nodes[u], G2.nodes[v])) + for u in pending_u + for v in pending_v + ] + ).reshape(m, n) + if roots: + initial_cost = 1 - node_match(G1.nodes[root_u], G2.nodes[root_v]) else: # all zeroes pass - #assert not min(m, n) or C[0:m, 0:n].min() >= 
0 + # assert not min(m, n) or C[0:m, 0:n].min() >= 0 if node_del_cost: del_costs = [node_del_cost(G1.nodes[u]) for u in pending_u] else: del_costs = [1] * len(pending_u) - #assert not m or min(del_costs) >= 0 + # assert not m or min(del_costs) >= 0 if node_ins_cost: ins_costs = [node_ins_cost(G2.nodes[v]) for v in pending_v] else: ins_costs = [1] * len(pending_v) - #assert not n or min(ins_costs) >= 0 + # assert not n or min(ins_costs) >= 0 inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1 - C[0:m, n:n + m] = np.array([del_costs[i] if i == j else inf - for i in range(m) for j in range(m)]).reshape(m, m) - C[m:m + n, 0:n] = np.array([ins_costs[i] if i == j else inf - for i in range(n) for j in range(n)]).reshape(n, n) + C[0:m, n : n + m] = np.array( + [del_costs[i] if i == j else inf for i in range(m) for j in range(m)] + ).reshape(m, m) + C[m : m + n, 0:n] = np.array( + [ins_costs[i] if i == j else inf for i in range(n) for j in range(n)] + ).reshape(n, n) Cv = make_CostMatrix(C, m, n) - #debug_print('Cv: {} x {}'.format(m, n)) + # debug_print(f"Cv: {m} x {n}") # debug_print(Cv.C) pending_g = list(G1.edges) @@ -892,32 +1106,44 @@ def get_edit_paths(matched_uv, pending_u, pending_v, Cv, n = len(pending_h) C = np.zeros((m + n, m + n)) if edge_subst_cost: - C[0:m, 0:n] = np.array([edge_subst_cost(G1.edges[g], G2.edges[h]) - for g in pending_g for h in pending_h]).reshape(m, n) + C[0:m, 0:n] = np.array( + [ + edge_subst_cost(G1.edges[g], G2.edges[h]) + for g in pending_g + for h in pending_h + ] + ).reshape(m, n) elif edge_match: - C[0:m, 0:n] = np.array([1 - int(edge_match(G1.edges[g], G2.edges[h])) - for g in pending_g for h in pending_h]).reshape(m, n) + C[0:m, 0:n] = np.array( + [ + 1 - int(edge_match(G1.edges[g], G2.edges[h])) + for g in pending_g + for h in pending_h + ] + ).reshape(m, n) else: # all zeroes pass - #assert not min(m, n) or C[0:m, 0:n].min() >= 0 + # assert not min(m, n) or C[0:m, 0:n].min() >= 0 if edge_del_cost: del_costs = [edge_del_cost(G1.edges[g]) for g in pending_g] else: del_costs = [1] * len(pending_g) - #assert not m or min(del_costs) >= 0 + # assert not m or min(del_costs) >= 0 if edge_ins_cost: ins_costs = [edge_ins_cost(G2.edges[h]) for h in pending_h] else: ins_costs = [1] * len(pending_h) - #assert not n or min(ins_costs) >= 0 + # assert not n or min(ins_costs) >= 0 inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1 - C[0:m, n:n + m] = np.array([del_costs[i] if i == j else inf - for i in range(m) for j in range(m)]).reshape(m, m) - C[m:m + n, 0:n] = np.array([ins_costs[i] if i == j else inf - for i in range(n) for j in range(n)]).reshape(n, n) + C[0:m, n : n + m] = np.array( + [del_costs[i] if i == j else inf for i in range(m) for j in range(m)] + ).reshape(m, m) + C[m : m + n, 0:n] = np.array( + [ins_costs[i] if i == j else inf for i in range(n) for j in range(n)] + ).reshape(n, n) Ce = make_CostMatrix(C, m, n) - #debug_print('Ce: {} x {}'.format(m, n)) + # debug_print(f'Ce: {m} x {n}') # debug_print(Ce.C) # debug_print() @@ -926,9 +1152,18 @@ def __init__(self): # initial upper-bound estimate # NOTE: should work for empty graph self.value = Cv.C.sum() + Ce.C.sum() + 1 + maxcost = MaxCost() + if timeout is not None: + if timeout <= 0: + raise nx.NetworkXError("Timeout value must be greater than 0") + start = time.perf_counter() + def prune(cost): + if timeout is not None: + if time.perf_counter() - start > timeout: + return True if upper_bound is not None: if cost > upper_bound: return True @@ -939,26 +1174,271 @@ def prune(cost): # 
Now go! - for vertex_path, edge_path, cost in \ - get_edit_paths([], pending_u, pending_v, Cv, - [], pending_g, pending_h, Ce, 0): - #assert list(sorted(G1.nodes)) == list(sorted(list(u for u, v in vertex_path if u is not None))) - #assert list(sorted(G2.nodes)) == list(sorted(list(v for u, v in vertex_path if v is not None))) - #assert list(sorted(G1.edges)) == list(sorted(list(g for g, h in edge_path if g is not None))) - #assert list(sorted(G2.edges)) == list(sorted(list(h for g, h in edge_path if h is not None))) - #print(vertex_path, edge_path, cost, file = sys.stderr) - #assert cost == maxcost.value + done_uv = [] if roots is None else [roots] + + for vertex_path, edge_path, cost in get_edit_paths( + done_uv, pending_u, pending_v, Cv, [], pending_g, pending_h, Ce, initial_cost + ): + # assert sorted(G1.nodes) == sorted(u for u, v in vertex_path if u is not None) + # assert sorted(G2.nodes) == sorted(v for u, v in vertex_path if v is not None) + # assert sorted(G1.edges) == sorted(g for g, h in edge_path if g is not None) + # assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None) + # print(vertex_path, edge_path, cost, file = sys.stderr) + # assert cost == maxcost.value yield list(vertex_path), list(edge_path), cost -def setup_module(module): - """Fixture for nose tests.""" - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") +def _is_close(d1, d2, atolerance=0, rtolerance=0): + """Determines whether two adjacency matrices are within + a provided tolerance. + + Parameters + ---------- + d1 : dict + Adjacency dictionary + + d2 : dict + Adjacency dictionary + + atolerance : float + Some scalar tolerance value to determine closeness + + rtolerance : float + A scalar tolerance value that will be some proportion + of ``d2``'s value + + Returns + ------- + closeness : bool + If all of the nodes within ``d1`` and ``d2`` are within + a predefined tolerance, they are considered "close" and + this method will return True. Otherwise, this method will + return False. + + """ + # Pre-condition: d1 and d2 have the same keys at each level if they + # are dictionaries. + if not isinstance(d1, dict) and not isinstance(d2, dict): + return abs(d1 - d2) <= atolerance + rtolerance * abs(d2) + return all(all(_is_close(d1[u][v], d2[u][v]) for v in d1[u]) for u in d1) + + +def simrank_similarity( + G, + source=None, + target=None, + importance_factor=0.9, + max_iterations=100, + tolerance=1e-4, +): + """Returns the SimRank similarity of nodes in the graph ``G``. + + SimRank is a similarity metric that says "two objects are considered + to be similar if they are referenced by similar objects." [1]_. + + The pseudo-code definition from the paper is:: + + def simrank(G, u, v): + in_neighbors_u = G.predecessors(u) + in_neighbors_v = G.predecessors(v) + scale = C / (len(in_neighbors_u) * len(in_neighbors_v)) + return scale * sum(simrank(G, w, x) + for w, x in product(in_neighbors_u, + in_neighbors_v)) + + where ``G`` is the graph, ``u`` is the source, ``v`` is the target, + and ``C`` is a float decay or importance factor between 0 and 1. + + The SimRank algorithm for determining node similarity is defined in + [2]_. + + Parameters + ---------- + G : NetworkX graph + A NetworkX graph + + source : node + If this is specified, the returned dictionary maps each node + ``v`` in the graph to the similarity between ``source`` and + ``v``. 
+ + target : node + If both ``source`` and ``target`` are specified, the similarity + value between ``source`` and ``target`` is returned. If + ``target`` is specified but ``source`` is not, this argument is + ignored. + + importance_factor : float + The relative importance of indirect neighbors with respect to + direct neighbors. + + max_iterations : integer + Maximum number of iterations. + + tolerance : float + Error tolerance used to check convergence. When an iteration of + the algorithm finds that no similarity value changes more than + this amount, the algorithm halts. + + Returns + ------- + similarity : dictionary or float + If ``source`` and ``target`` are both ``None``, this returns a + dictionary of dictionaries, where keys are node pairs and value + are similarity of the pair of nodes. + + If ``source`` is not ``None`` but ``target`` is, this returns a + dictionary mapping node to the similarity of ``source`` and that + node. + + If neither ``source`` nor ``target`` is ``None``, this returns + the similarity value for the given pair of nodes. + + Examples + -------- + If the nodes of the graph are numbered from zero to *n - 1*, where *n* + is the number of nodes in the graph, you can create a SimRank matrix + from the return value of this function where the node numbers are + the row and column indices of the matrix:: + + >>> from numpy import array + >>> G = nx.cycle_graph(4) + >>> sim = nx.simrank_similarity(G) + >>> lol = [[sim[u][v] for v in sorted(sim[u])] for u in sorted(sim)] + >>> sim_array = array(lol) + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/SimRank + .. [2] G. Jeh and J. Widom. + "SimRank: a measure of structural-context similarity", + In KDD'02: Proceedings of the Eighth ACM SIGKDD + International Conference on Knowledge Discovery and Data Mining, + pp. 538--543. ACM Press, 2002. + """ + prevsim = None + + # build up our similarity adjacency dictionary output + newsim = {u: {v: 1 if u == v else 0 for v in G} for u in G} + + # These functions compute the update to the similarity value of the nodes + # `u` and `v` with respect to the previous similarity values. + def avg_sim(s): + return sum(newsim[w][x] for (w, x) in s) / len(s) if s else 0.0 + + def sim(u, v): + Gadj = G.pred if G.is_directed() else G.adj + return importance_factor * avg_sim(list(product(Gadj[u], Gadj[v]))) + + for _ in range(max_iterations): + if prevsim and _is_close(prevsim, newsim, tolerance): + break + prevsim = newsim + newsim = { + u: {v: sim(u, v) if u is not v else 1 for v in newsim[u]} for u in newsim + } + + if source is not None and target is not None: + return newsim[source][target] + if source is not None: + return newsim[source] + return newsim + + +def simrank_similarity_numpy( + G, + source=None, + target=None, + importance_factor=0.9, + max_iterations=100, + tolerance=1e-4, +): + """Calculate SimRank of nodes in ``G`` using matrices with ``numpy``. + + The SimRank algorithm for determining node similarity is defined in + [1]_. + + Parameters + ---------- + G : NetworkX graph + A NetworkX graph + + source : node + If this is specified, the returned dictionary maps each node + ``v`` in the graph to the similarity between ``source`` and + ``v``. + + target : node + If both ``source`` and ``target`` are specified, the similarity + value between ``source`` and ``target`` is returned. If + ``target`` is specified but ``source`` is not, this argument is + ignored. 
+ + importance_factor : float + The relative importance of indirect neighbors with respect to + direct neighbors. + + max_iterations : integer + Maximum number of iterations. + + tolerance : float + Error tolerance used to check convergence. When an iteration of + the algorithm finds that no similarity value changes more than + this amount, the algorithm halts. + + Returns + ------- + similarity : numpy matrix, numpy array or float + If ``source`` and ``target`` are both ``None``, this returns a + Matrix containing SimRank scores of the nodes. + + If ``source`` is not ``None`` but ``target`` is, this returns an + Array containing SimRank scores of ``source`` and that + node. + + If neither ``source`` nor ``target`` is ``None``, this returns + the similarity value for the given pair of nodes. + + Examples + -------- + >>> from numpy import array + >>> G = nx.cycle_graph(4) + >>> sim = nx.simrank_similarity_numpy(G) + + References + ---------- + .. [1] G. Jeh and J. Widom. + "SimRank: a measure of structural-context similarity", + In KDD'02: Proceedings of the Eighth ACM SIGKDD + International Conference on Knowledge Discovery and Data Mining, + pp. 538--543. ACM Press, 2002. + """ + # This algorithm follows roughly + # + # S = max{C * (A.T * S * A), I} + # + # where C is the importance factor, A is the column normalized + # adjacency matrix, and I is the identity matrix. + import numpy as np + + adjacency_matrix = nx.to_numpy_array(G) + + # column-normalize the ``adjacency_matrix`` + adjacency_matrix /= adjacency_matrix.sum(axis=0) + + newsim = np.eye(adjacency_matrix.shape[0], dtype=np.float64) + for _ in range(max_iterations): + prevsim = np.copy(newsim) + newsim = importance_factor * np.matmul( + np.matmul(adjacency_matrix.T, prevsim), adjacency_matrix + ) + np.fill_diagonal(newsim, 1.0) + + if np.allclose(prevsim, newsim, atol=tolerance): + break + + if source is not None and target is not None: + return newsim[source, target] + if source is not None: + return newsim[source] + return newsim diff --git a/networkx/algorithms/simple_paths.py b/networkx/algorithms/simple_paths.py index 09a20ae..4d71876 100644 --- a/networkx/algorithms/simple_paths.py +++ b/networkx/algorithms/simple_paths.py @@ -1,30 +1,22 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2012 by -# Sergio Nery Simoes -# All rights reserved. -# BSD license. from heapq import heappush, heappop from itertools import count import networkx as nx from networkx.utils import not_implemented_for from networkx.utils import pairwise - -__author__ = """\n""".join(['Sérgio Nery Simões ', - 'Aric Hagberg ', - 'Andrey Paramonov', - 'Jordi Torrents ']) +from networkx.utils import empty_generator +from networkx.algorithms.shortest_paths.weighted import _weight_function __all__ = [ - 'all_simple_paths', - 'is_simple_path', - 'shortest_simple_paths', + "all_simple_paths", + "is_simple_path", + "shortest_simple_paths", + "all_simple_edge_paths", ] def is_simple_path(G, nodes): - """Returns True if and only if the given nodes form a simple path in - `G`. + """Returns True if and only if `nodes` form a simple path in `G`. A *simple path* in a graph is a nonempty sequence of nodes in which no node appears more than once in the sequence, and each adjacent @@ -38,12 +30,11 @@ def is_simple_path(G, nodes): Returns ------- bool - Whether the given list of nodes represents a simple path in - `G`. + Whether the given list of nodes represents a simple path in `G`. 
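A short sketch of this convention (the cycle graph is illustrative):

    >>> G = nx.cycle_graph(4)
    >>> nx.is_simple_path(G, [0, 1, 2])
    True
    >>> nx.is_simple_path(G, [0, 1, 0])  # repeated node
    False
    >>> nx.is_simple_path(G, [])  # an empty list is not a path
    False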
Notes ----- - A list of zero nodes is not a path and a list of one node is a + An empty list of nodes is not a path but a list of one node is a path. Here's an explanation why. This function operates on *node paths*. One could also consider @@ -86,8 +77,7 @@ def is_simple_path(G, nodes): return nodes[0] in G # Test that no node appears more than once, and that each # adjacent pair of nodes is adjacent. - return (len(set(nodes)) == len(nodes) and - all(v in G[u] for u, v in pairwise(nodes))) + return len(set(nodes)) == len(nodes) and all(v in G[u] for u, v in pairwise(nodes)) def all_simple_paths(G, source, target, cutoff=None): @@ -102,8 +92,8 @@ def all_simple_paths(G, source, target, cutoff=None): source : node Starting node for path - target : node - Ending node for path + target : nodes + Single node or iterable of nodes at which to end path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. @@ -148,6 +138,23 @@ def all_simple_paths(G, source, target, cutoff=None): [(0, 2), (2, 3)] [(0, 3)] + Pass an iterable of nodes as target to generate all paths ending in any of several nodes:: + + >>> G = nx.complete_graph(4) + >>> for path in nx.all_simple_paths(G, source=0, target=[3, 2]): + ... print(path) + ... + [0, 1, 2] + [0, 1, 2, 3] + [0, 1, 3] + [0, 1, 3, 2] + [0, 2] + [0, 2, 1, 3] + [0, 2, 3] + [0, 3] + [0, 3, 1, 2] + [0, 3, 2] + Iterate over each path from the root nodes to the leaf nodes in a directed acyclic graph using a functional programming approach:: @@ -178,6 +185,20 @@ def all_simple_paths(G, source, target, cutoff=None): >>> all_paths [[0, 1, 2], [0, 3, 2]] + Iterate over each path from the root nodes to the leaf nodes in a + directed acyclic graph passing all leaves together to avoid unnecessary + compute:: + + >>> G = nx.DiGraph([(0, 1), (2, 1), (1, 3), (1, 4)]) + >>> roots = (v for v, d in G.in_degree() if d == 0) + >>> leaves = [v for v, d in G.out_degree() if d == 0] + >>> all_paths = [] + >>> for root in roots: + ... paths = nx.all_simple_paths(G, root, leaves) + ... 
all_paths.extend(paths) + >>> all_paths + [[0, 1, 3], [0, 1, 4], [2, 1, 3], [2, 1, 4]] + Notes ----- This algorithm uses a modified depth-first search to generate the @@ -196,48 +217,180 @@ def all_simple_paths(G, source, target, cutoff=None): """ if source not in G: - raise nx.NodeNotFound('source node %s not in graph' % source) - if target not in G: - raise nx.NodeNotFound('target node %s not in graph' % target) - if source == target: - return [] + raise nx.NodeNotFound(f"source node {source} not in graph") + if target in G: + targets = {target} + else: + try: + targets = set(target) + except TypeError as e: + raise nx.NodeNotFound(f"target node {target} not in graph") from e + if source in targets: + return empty_generator() if cutoff is None: cutoff = len(G) - 1 + if cutoff < 1: + return empty_generator() if G.is_multigraph(): - return _all_simple_paths_multigraph(G, source, target, cutoff=cutoff) + return _all_simple_paths_multigraph(G, source, targets, cutoff) else: - return _all_simple_paths_graph(G, source, target, cutoff=cutoff) + return _all_simple_paths_graph(G, source, targets, cutoff) -def _all_simple_paths_graph(G, source, target, cutoff=None): - if cutoff < 1: - return - visited = [source] +def _all_simple_paths_graph(G, source, targets, cutoff): + visited = dict.fromkeys([source]) stack = [iter(G[source])] while stack: children = stack[-1] child = next(children, None) if child is None: stack.pop() - visited.pop() + visited.popitem() elif len(visited) < cutoff: - if child == target: - yield visited + [target] - elif child not in visited: - visited.append(child) + if child in visited: + continue + if child in targets: + yield list(visited) + [child] + visited[child] = None + if targets - set(visited.keys()): # expand stack until find all targets stack.append(iter(G[child])) + else: + visited.popitem() # maybe other ways to child else: # len(visited) == cutoff: - if child == target or target in children: - yield visited + [target] + for target in (targets & (set(children) | {child})) - set(visited.keys()): + yield list(visited) + [target] stack.pop() - visited.pop() + visited.popitem() + + +def _all_simple_paths_multigraph(G, source, targets, cutoff): + visited = dict.fromkeys([source]) + stack = [(v for u, v in G.edges(source))] + while stack: + children = stack[-1] + child = next(children, None) + if child is None: + stack.pop() + visited.popitem() + elif len(visited) < cutoff: + if child in visited: + continue + if child in targets: + yield list(visited) + [child] + visited[child] = None + if targets - set(visited.keys()): + stack.append((v for u, v in G.edges(child))) + else: + visited.popitem() + else: # len(visited) == cutoff: + for target in targets - set(visited.keys()): + count = ([child] + list(children)).count(target) + for i in range(count): + yield list(visited) + [target] + stack.pop() + visited.popitem() + + +def all_simple_edge_paths(G, source, target, cutoff=None): + """Generate lists of edges for all simple paths in G from source to target. + + A simple path is a path with no repeated nodes. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path + + target : nodes + Single node or iterable of nodes at which to end path + + cutoff : integer, optional + Depth to stop the search. Only paths of length <= cutoff are returned. + + Returns + ------- + path_generator: generator + A generator that produces lists of simple paths. 
If there are no paths between the source and target
+       within the given cutoff, the generator produces no output.
+       For multigraphs, the lists of edges have elements of the form
+       `(u, v, k)`, where `k` corresponds to the edge key.
+
+    Examples
+    --------
+
+    Print the simple path edges of a Graph::
+
+        >>> g = nx.Graph([(1, 2), (2, 4), (1, 3), (3, 4)])
+        >>> for path in sorted(nx.all_simple_edge_paths(g, 1, 4)):
+        ...     print(path)
+        [(1, 2), (2, 4)]
+        [(1, 3), (3, 4)]
+
+    Print the simple path edges of a MultiGraph. Returned edges come with
+    their associated keys::
+
+        >>> mg = nx.MultiGraph()
+        >>> mg.add_edge(1, 2, key="k0")
+        'k0'
+        >>> mg.add_edge(1, 2, key="k1")
+        'k1'
+        >>> mg.add_edge(2, 3, key="k0")
+        'k0'
+        >>> for path in sorted(nx.all_simple_edge_paths(mg, 1, 3)):
+        ...     print(path)
+        [(1, 2, 'k0'), (2, 3, 'k0')]
+        [(1, 2, 'k1'), (2, 3, 'k0')]
+
+
+    Notes
+    -----
+    This algorithm uses a modified depth-first search to generate the
+    paths [1]_. A single path can be found in $O(V+E)$ time but the
+    number of simple paths in a graph can be very large, e.g. $O(n!)$ in
+    the complete graph of order $n$.
 
     References
     ----------
     .. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms",
        Addison Wesley Professional, 3rd ed., 2001.
 
-def _all_simple_paths_multigraph(G, source, target, cutoff=None):
+    See Also
+    --------
+    all_shortest_paths, shortest_path, all_simple_paths
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound("source node %s not in graph" % source)
+    if target in G:
+        targets = {target}
+    else:
+        try:
+            targets = set(target)
+        except TypeError:
+            raise nx.NodeNotFound("target node %s not in graph" % target)
+    if source in targets:
+        return []
+    if cutoff is None:
+        cutoff = len(G) - 1
     if cutoff < 1:
-        return
+        return []
+    if G.is_multigraph():
+        for simp_path in _all_simple_edge_paths_multigraph(G, source, targets, cutoff):
+            yield simp_path
+    else:
+        for simp_path in _all_simple_paths_graph(G, source, targets, cutoff):
+            yield list(zip(simp_path[:-1], simp_path[1:]))
+
+
+def _all_simple_edge_paths_multigraph(G, source, targets, cutoff):
+    if not cutoff or cutoff < 1:
+        return []
     visited = [source]
-    stack = [(v for u, v in G.edges(source))]
+    stack = [iter(G.edges(source, keys=True))]
+
     while stack:
         children = stack[-1]
         child = next(children, None)
@@ -245,20 +398,20 @@ def _all_simple_paths_multigraph(G, source, target, cutoff=None):
             stack.pop()
             visited.pop()
         elif len(visited) < cutoff:
-            if child == target:
-                yield visited + [target]
-            elif child not in visited:
+            if child[1] in targets:
+                yield visited[1:] + [child]
+            elif child[1] not in [v[0] for v in visited[1:]]:
                 visited.append(child)
-                stack.append((v for u, v in G.edges(child)))
+                stack.append(iter(G.edges(child[1], keys=True)))
         else:  # len(visited) == cutoff:
-            count = ([child] + list(children)).count(target)
-            for i in range(count):
-                yield visited + [target]
+            for (u, v, k) in [child] + list(children):
+                if v in targets:
+                    yield visited[1:] + [(u, v, k)]
             stack.pop()
             visited.pop()
 
 
-@not_implemented_for('multigraph')
+@not_implemented_for("multigraph")
 def shortest_simple_paths(G, source, target, weight=None):
     """Generate all simple paths in the graph G from source to target,
     starting from shortest ones.
 
@@ -266,7 +419,7 @@
     A simple path is a path with no repeated nodes.
 
     If a weighted shortest path search is to be used, no negative weights
-    are allawed.
+    are allowed.
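+
+    For example, because `weight` may be a function as well as an attribute
+    name (an illustrative sketch; the constant function below just reproduces
+    unweighted behavior)::
+
+        >>> G = nx.cycle_graph(7)
+        >>> paths = nx.shortest_simple_paths(G, 0, 3, weight=lambda u, v, d: 1)
+        >>> next(paths)
+        [0, 1, 2, 3]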
Parameters ---------- @@ -278,9 +431,17 @@ def shortest_simple_paths(G, source, target, weight=None): target : node Ending node for path - weight : string - Name of the edge attribute to be used as a weight. If None all - edges are considered to have unit weight. Default value None. + weight : string or function + If it is a string, it is the name of the edge attribute to be + used as a weight. + + If it is a function, the weight of an edge is the value returned + by the function. The function must accept exactly three positional + arguments: the two endpoints of an edge and the dictionary of edge + attributes for that edge. The function must return a number. + + If None all edges are considered to have unit weight. Default + value None. Returns ------- @@ -312,7 +473,9 @@ def shortest_simple_paths(G, source, target, weight=None): >>> from itertools import islice >>> def k_shortest_paths(G, source, target, k, weight=None): - ... return list(islice(nx.shortest_simple_paths(G, source, target, weight=weight), k)) + ... return list( + ... islice(nx.shortest_simple_paths(G, source, target, weight=weight), k) + ... ) >>> for path in k_shortest_paths(G, 0, 3, 2): ... print(path) [0, 1, 2, 3] @@ -337,17 +500,22 @@ def shortest_simple_paths(G, source, target, weight=None): """ if source not in G: - raise nx.NodeNotFound('source node %s not in graph' % source) + raise nx.NodeNotFound(f"source node {source} not in graph") if target not in G: - raise nx.NodeNotFound('target node %s not in graph' % target) + raise nx.NodeNotFound(f"target node {target} not in graph") if weight is None: length_func = len shortest_path_func = _bidirectional_shortest_path else: + wt = _weight_function(G, weight) + def length_func(path): - return sum(G.adj[u][v][weight] for (u, v) in zip(path, path[1:])) + return sum( + wt(u, v, G.get_edge_data(u, v)) for (u, v) in zip(path, path[1:]) + ) + shortest_path_func = _bidirectional_dijkstra listA = list() @@ -367,10 +535,14 @@ def length_func(path): if path[:i] == root: ignore_edges.add((path[i - 1], path[i])) try: - length, spur = shortest_path_func(G, root[-1], target, - ignore_nodes=ignore_nodes, - ignore_edges=ignore_edges, - weight=weight) + length, spur = shortest_path_func( + G, + root[-1], + target, + ignore_nodes=ignore_nodes, + ignore_edges=ignore_edges, + weight=weight, + ) path = root[:-1] + spur listB.push(root_length + length, path) except nx.NetworkXNoPath: @@ -386,8 +558,7 @@ def length_func(path): break -class PathBuffer(object): - +class PathBuffer: def __init__(self): self.paths = set() self.sortedpaths = list() @@ -409,11 +580,10 @@ def pop(self): return path -def _bidirectional_shortest_path(G, source, target, - ignore_nodes=None, - ignore_edges=None, - weight=None): - """Return the shortest path between source and target ignoring +def _bidirectional_shortest_path( + G, source, target, ignore_nodes=None, ignore_edges=None, weight=None +): + """Returns the shortest path between source and target ignoring nodes and edges in the containers ignore_nodes and ignore_edges. This is a custom modification of the standard bidirectional shortest @@ -481,7 +651,7 @@ def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges= """ # does BFS from both source and target and meets in the middle if ignore_nodes and (source in ignore_nodes or target in ignore_nodes): - raise nx.NetworkXNoPath("No path between %s and %s." 
% (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") if target == source: return ({target: None}, {source: None}, source) @@ -495,11 +665,13 @@ def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges= # support optional nodes filter if ignore_nodes: + def filter_iter(nodes): def iterate(v): for w in nodes(v): if w not in ignore_nodes: yield w + return iterate Gpred = filter_iter(Gpred) @@ -508,11 +680,13 @@ def iterate(v): # support optional edges filter if ignore_edges: if G.is_directed(): + def filter_pred_iter(pred_iter): def iterate(v): for w in pred_iter(v): if (w, v) not in ignore_edges: yield w + return iterate def filter_succ_iter(succ_iter): @@ -520,18 +694,20 @@ def iterate(v): for w in succ_iter(v): if (v, w) not in ignore_edges: yield w + return iterate Gpred = filter_pred_iter(Gpred) Gsucc = filter_succ_iter(Gsucc) else: + def filter_iter(nodes): def iterate(v): for w in nodes(v): - if (v, w) not in ignore_edges \ - and (w, v) not in ignore_edges: + if (v, w) not in ignore_edges and (w, v) not in ignore_edges: yield w + return iterate Gpred = filter_iter(Gpred) @@ -569,11 +745,12 @@ def iterate(v): # found path return pred, succ, w - raise nx.NetworkXNoPath("No path between %s and %s." % (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -def _bidirectional_dijkstra(G, source, target, weight='weight', - ignore_nodes=None, ignore_edges=None): +def _bidirectional_dijkstra( + G, source, target, weight="weight", ignore_nodes=None, ignore_edges=None +): """Dijkstra's algorithm for shortest paths using bidirectional search. This function returns the shortest path between source and target @@ -593,8 +770,8 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', target : node Ending node. - weight: string, optional (default='weight') - Edge data key corresponding to the edge weight + weight: string, function, optional (default='weight') + Edge data key or weight function corresponding to the edge weight ignore_nodes : container of nodes nodes to ignore, optional @@ -641,7 +818,7 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', shortest_path_length """ if ignore_nodes and (source in ignore_nodes or target in ignore_nodes): - raise nx.NetworkXNoPath("No path between %s and %s." 
% (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") if source == target: return (0, [source]) @@ -655,11 +832,13 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', # support optional nodes filter if ignore_nodes: + def filter_iter(nodes): def iterate(v): for w in nodes(v): if w not in ignore_nodes: yield w + return iterate Gpred = filter_iter(Gpred) @@ -668,11 +847,13 @@ def iterate(v): # support optional edges filter if ignore_edges: if G.is_directed(): + def filter_pred_iter(pred_iter): def iterate(v): for w in pred_iter(v): if (w, v) not in ignore_edges: yield w + return iterate def filter_succ_iter(succ_iter): @@ -680,18 +861,20 @@ def iterate(v): for w in succ_iter(v): if (v, w) not in ignore_edges: yield w + return iterate Gpred = filter_pred_iter(Gpred) Gsucc = filter_succ_iter(Gsucc) else: + def filter_iter(nodes): def iterate(v): for w in nodes(v): - if (v, w) not in ignore_edges \ - and (w, v) not in ignore_edges: + if (v, w) not in ignore_edges and (w, v) not in ignore_edges: yield w + return iterate Gpred = filter_iter(Gpred) @@ -700,11 +883,11 @@ def iterate(v): push = heappush pop = heappop # Init: Forward Backward - dists = [{}, {}] # dictionary of final distances + dists = [{}, {}] # dictionary of final distances paths = [{source: [source]}, {target: [target]}] # dictionary of paths - fringe = [[], []] # heap of (distance, node) tuples for + fringe = [[], []] # heap of (distance, node) tuples for # extracting next node to expand - seen = [{source: 0}, {target: 0}] # dictionary of distances to + seen = [{source: 0}, {target: 0}] # dictionary of distances to # nodes seen c = count() # initialize fringe heap @@ -713,7 +896,7 @@ def iterate(v): # neighs for extracting correct neighbor information neighs = [Gsucc, Gpred] # variables to hold shortest discovered path - #finaldist = 1e30000 + # finaldist = 1e30000 finalpath = [] dir = 1 while fringe[0] and fringe[1]: @@ -732,26 +915,18 @@ def iterate(v): # we have now discovered the shortest path return (finaldist, finalpath) + wt = _weight_function(G, weight) for w in neighs[dir](v): - if(dir == 0): # forward - if G.is_multigraph(): - minweight = min((dd.get(weight, 1) - for k, dd in G[v][w].items())) - else: - minweight = G[v][w].get(weight, 1) - vwLength = dists[dir][v] + minweight # G[v][w].get(weight,1) + if dir == 0: # forward + minweight = wt(v, w, G.get_edge_data(v, w)) + vwLength = dists[dir][v] + minweight else: # back, must remember to change v,w->w,v - if G.is_multigraph(): - minweight = min((dd.get(weight, 1) - for k, dd in G[w][v].items())) - else: - minweight = G[w][v].get(weight, 1) - vwLength = dists[dir][v] + minweight # G[w][v].get(weight,1) + minweight = wt(w, v, G.get_edge_data(w, v)) + vwLength = dists[dir][v] + minweight if w in dists[dir]: if vwLength < dists[dir][w]: - raise ValueError( - "Contradictory paths found: negative weights?") + raise ValueError("Contradictory paths found: negative weights?") elif w not in seen[dir] or vwLength < seen[dir][w]: # relaxing seen[dir][w] = vwLength @@ -766,4 +941,4 @@ def iterate(v): revpath = paths[1][w][:] revpath.reverse() finalpath = paths[0][w] + revpath[1:] - raise nx.NetworkXNoPath("No path between %s and %s." 
% (source, target)) + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py new file mode 100644 index 0000000..eb16417 --- /dev/null +++ b/networkx/algorithms/smallworld.py @@ -0,0 +1,378 @@ +"""Functions for estimating the small-world-ness of graphs. + +A small world network is characterized by a small average shortest path length, +and a large clustering coefficient. + +Small-worldness is commonly measured with the coefficient sigma or omega. + +Both coefficients compare the average clustering coefficient and shortest path +length of a given graph against the same quantities for an equivalent random +or lattice graph. + +For more information, see the Wikipedia article on small-world network [1]_. + +.. [1] Small-world network:: https://en.wikipedia.org/wiki/Small-world_network + +""" +import networkx as nx +from networkx.utils import not_implemented_for +from networkx.utils import py_random_state + +__all__ = ["random_reference", "lattice_reference", "sigma", "omega"] + + +@py_random_state(3) +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def random_reference(G, niter=1, connectivity=True, seed=None): + """Compute a random graph by swapping edges of a given graph. + + Parameters + ---------- + G : graph + An undirected graph with 4 or more nodes. + + niter : integer (optional, default=1) + An edge is rewired approximately `niter` times. + + connectivity : boolean (optional, default=True) + When True, ensure connectivity for the randomized graph. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : graph + The randomized graph. + + Notes + ----- + The implementation is adapted from the algorithm by Maslov and Sneppen + (2002) [1]_. + + References + ---------- + .. [1] Maslov, Sergei, and Kim Sneppen. + "Specificity and stability in topology of protein networks." + Science 296.5569 (2002): 910-913. + """ + if G.is_directed(): + msg = "random_reference() not defined for directed graphs." 
+        raise nx.NetworkXError(msg)
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has less than four nodes.")
+
+    from networkx.utils import cumulative_distribution, discrete_sequence
+
+    local_conn = nx.connectivity.local_edge_connectivity
+
+    G = G.copy()
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = cumulative_distribution(degrees)  # cdf of degree
+    nnodes = len(G)
+    nedges = nx.number_of_edges(G)
+    niter = niter * nedges
+    ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))
+    swapcount = 0
+
+    for i in range(niter):
+        n = 0
+        while n < ntries:
+            # pick two random edges without creating edge list
+            # choose source node indices from discrete distribution
+            (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+            if ai == ci:
+                continue  # same source, skip
+            a = keys[ai]  # convert index to label
+            c = keys[ci]
+            # choose target uniformly from neighbors
+            b = seed.choice(list(G.neighbors(a)))
+            d = seed.choice(list(G.neighbors(c)))
+            bi = keys.index(b)
+            di = keys.index(d)
+            if b in [a, c, d] or d in [a, b, c]:
+                continue  # all vertices should be different
+
+            # don't create parallel edges
+            if (d not in G[a]) and (b not in G[c]):
+                G.add_edge(a, d)
+                G.add_edge(c, b)
+                G.remove_edge(a, b)
+                G.remove_edge(c, d)
+
+                # Check if the graph is still connected
+                if connectivity and local_conn(G, a, b) == 0:
+                    # Not connected, revert the swap
+                    G.remove_edge(a, d)
+                    G.remove_edge(c, b)
+                    G.add_edge(a, b)
+                    G.add_edge(c, d)
+                else:
+                    swapcount += 1
+                    break
+            n += 1
+    return G
+
+
+@py_random_state(4)
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def lattice_reference(G, niter=1, D=None, connectivity=True, seed=None):
+    """Latticize the given graph by swapping edges.
+
+    Parameters
+    ----------
+    G : graph
+        An undirected graph with 4 or more nodes.
+
+    niter : integer (optional, default=1)
+        An edge is rewired approximately `niter` times.
+
+    D : numpy.array (optional, default=None)
+        Distance to the diagonal matrix.
+
+    connectivity : boolean (optional, default=True)
+        Ensure connectivity for the latticized graph when set to True.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G : graph
+        The latticized graph.
+
+    Notes
+    -----
+    The implementation is adapted from the algorithm by Sporns et al. [1]_,
+    which is inspired by the original work by Maslov and Sneppen (2002) [2]_.
+
+    References
+    ----------
+    .. [1] Sporns, Olaf, and Jonathan D. Zwi.
+       "The small world of the cerebral cortex."
+       Neuroinformatics 2.2 (2004): 145-162.
+    .. [2] Maslov, Sergei, and Kim Sneppen.
+       "Specificity and stability in topology of protein networks."
+       Science 296.5569 (2002): 910-913.
+    """
+    import numpy as np
+    from networkx.utils import cumulative_distribution, discrete_sequence
+
+    local_conn = nx.connectivity.local_edge_connectivity
+
+    if G.is_directed():
+        msg = "lattice_reference() not defined for directed graphs."
+        raise nx.NetworkXError(msg)
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has less than four nodes.")
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
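+    # (Explanatory note: ``cumulative_distribution`` below turns the degree
+    # sequence into a CDF and ``discrete_sequence`` draws node indices from
+    # it, so higher-degree nodes are proposed for rewiring proportionally
+    # more often.)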
+ G = G.copy() + keys, degrees = zip(*G.degree()) # keys, degree + cdf = cumulative_distribution(degrees) # cdf of degree + + nnodes = len(G) + nedges = nx.number_of_edges(G) + if D is None: + D = np.zeros((nnodes, nnodes)) + un = np.arange(1, nnodes) + um = np.arange(nnodes - 1, 0, -1) + u = np.append((0,), np.where(un < um, un, um)) + + for v in range(int(np.ceil(nnodes / 2))): + D[nnodes - v - 1, :] = np.append(u[v + 1 :], u[: v + 1]) + D[v, :] = D[nnodes - v - 1, :][::-1] + + niter = niter * nedges + ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2)) + swapcount = 0 + + for i in range(niter): + n = 0 + while n < ntries: + # pick two random edges without creating edge list + # choose source node indices from discrete distribution + (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed) + if ai == ci: + continue # same source, skip + a = keys[ai] # convert index to label + c = keys[ci] + # choose target uniformly from neighbors + b = seed.choice(list(G.neighbors(a))) + d = seed.choice(list(G.neighbors(c))) + bi = keys.index(b) + di = keys.index(d) + + if b in [a, c, d] or d in [a, b, c]: + continue # all vertices should be different + + # don't create parallel edges + if (d not in G[a]) and (b not in G[c]): + if D[ai, bi] + D[ci, di] >= D[ai, ci] + D[bi, di]: + # only swap if we get closer to the diagonal + G.add_edge(a, d) + G.add_edge(c, b) + G.remove_edge(a, b) + G.remove_edge(c, d) + + # Check if the graph is still connected + if connectivity and local_conn(G, a, b) == 0: + # Not connected, revert the swap + G.remove_edge(a, d) + G.remove_edge(c, b) + G.add_edge(a, b) + G.add_edge(c, d) + else: + swapcount += 1 + break + n += 1 + + return G + + +@py_random_state(3) +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def sigma(G, niter=100, nrand=10, seed=None): + """Returns the small-world coefficient (sigma) of the given graph. + + The small-world coefficient is defined as: + sigma = C/Cr / L/Lr + where C and L are respectively the average clustering coefficient and + average shortest path length of G. Cr and Lr are respectively the average + clustering coefficient and average shortest path length of an equivalent + random graph. + + A graph is commonly classified as small-world if sigma>1. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + niter : integer (optional, default=100) + Approximate number of rewiring per edge to compute the equivalent + random graph. + nrand : integer (optional, default=10) + Number of random graphs generated to compute the average clustering + coefficient (Cr) and average shortest path length (Lr). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + sigma : float + The small-world coefficient of G. + + Notes + ----- + The implementation is adapted from Humphries et al. [1]_ [2]_. + + References + ---------- + .. [1] The brainstem reticular formation is a small-world, not scale-free, + network M. D. Humphries, K. Gurney and T. J. Prescott, + Proc. Roy. Soc. B 2006 273, 503-511, doi:10.1098/rspb.2005.3354. + .. [2] Humphries and Gurney (2008). + "Network 'Small-World-Ness': A Quantitative Method for Determining + Canonical Network Equivalence". + PLoS One. 3 (4). PMID 18446219. doi:10.1371/journal.pone.0002051. 
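+
+    Examples
+    --------
+    An illustrative sketch (results vary with the seed; the small ``niter``
+    and ``nrand`` values here only keep the example fast)::
+
+        >>> G = nx.connected_watts_strogatz_graph(20, 4, 0.1, seed=42)
+        >>> s = nx.sigma(G, niter=2, nrand=2, seed=42)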
+    """
+    import numpy as np
+
+    # Compute the mean clustering coefficient and average shortest path length
+    # for an equivalent random graph
+    randMetrics = {"C": [], "L": []}
+    for i in range(nrand):
+        Gr = random_reference(G, niter=niter, seed=seed)
+        randMetrics["C"].append(nx.transitivity(Gr))
+        randMetrics["L"].append(nx.average_shortest_path_length(Gr))
+
+    C = nx.transitivity(G)
+    L = nx.average_shortest_path_length(G)
+    Cr = np.mean(randMetrics["C"])
+    Lr = np.mean(randMetrics["L"])
+
+    sigma = (C / Cr) / (L / Lr)
+
+    return sigma
+
+
+@py_random_state(3)
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def omega(G, niter=100, nrand=10, seed=None):
+    """Returns the small-world coefficient (omega) of a graph
+
+    The small-world coefficient of a graph G is:
+
+    omega = Lr/L - C/Cl
+
+    where C and L are respectively the average clustering coefficient and
+    average shortest path length of G. Lr is the average shortest path length
+    of an equivalent random graph and Cl is the average clustering coefficient
+    of an equivalent lattice graph.
+
+    The small-world coefficient (omega) ranges between -1 and 1. Values close
+    to 0 mean that G features small-world characteristics. Values close to -1
+    mean G has a lattice shape, whereas values close to 1 mean G is a random
+    graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    niter: integer (optional, default=100)
+        Approximate number of rewirings per edge to compute the equivalent
+        random graph.
+
+    nrand: integer (optional, default=10)
+        Number of random graphs generated to compute the average clustering
+        coefficient (Cl) and average shortest path length (Lr).
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    omega : float
+        The small-world coefficient (omega)
+
+    Notes
+    -----
+    The implementation is adapted from the algorithm by Telesford et al. [1]_.
+
+    References
+    ----------
+    .. [1] Telesford, Joyce, Hayasaka, Burdette, and Laurienti (2011).
+       "The Ubiquity of Small-World Networks".
+       Brain Connectivity. 1 (0038): 367-75. PMC 3604768. PMID 22432451.
+       doi:10.1089/brain.2011.0038.
+    """
+    import numpy as np
+
+    # Compute the mean clustering coefficient and average shortest path length
+    # for an equivalent random graph
+    randMetrics = {"C": [], "L": []}
+    for i in range(nrand):
+        Gr = random_reference(G, niter=niter, seed=seed)
+        Gl = lattice_reference(G, niter=niter, seed=seed)
+        randMetrics["C"].append(nx.transitivity(Gl))
+        randMetrics["L"].append(nx.average_shortest_path_length(Gr))
+
+    C = nx.transitivity(G)
+    L = nx.average_shortest_path_length(G)
+    Cl = np.mean(randMetrics["C"])
+    Lr = np.mean(randMetrics["L"])
+
+    omega = (Lr / L) - (C / Cl)
+
+    return omega
diff --git a/networkx/algorithms/smetric.py b/networkx/algorithms/smetric.py
index 75bcfbd..5ea7303 100644
--- a/networkx/algorithms/smetric.py
+++ b/networkx/algorithms/smetric.py
@@ -1,9 +1,8 @@
 import networkx as nx
-#from networkx.generators.smax import li_smax_graph
 
 
 def s_metric(G, normalized=True):
-    """Return the s-metric of graph.
+    """Returns the s-metric of graph.
 
     The s-metric is defined as the sum of the products deg(u)*deg(v)
     for every edge (u,v) in G. If norm is provided construct the
@@ -31,7 +30,7 @@
     """
     if normalized:
         raise nx.NetworkXError("Normalization not implemented")
-#    Gmax = li_smax_graph(list(G.degree().values()))
-#    return s_metric(G,normalized=False)/s_metric(Gmax,normalized=False)
-#    else:
+    #    Gmax = li_smax_graph(list(G.degree().values()))
+    #    return s_metric(G,normalized=False)/s_metric(Gmax,normalized=False)
+    #    else:
     return float(sum([G.degree(u) * G.degree(v) for (u, v) in G.edges()]))
diff --git a/networkx/algorithms/sparsifiers.py b/networkx/algorithms/sparsifiers.py
new file mode 100644
index 0000000..75a5508
--- /dev/null
+++ b/networkx/algorithms/sparsifiers.py
@@ -0,0 +1,293 @@
+"""Functions for computing sparsifiers of graphs."""
+import math
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = ["spanner"]
+
+
+@py_random_state(3)
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def spanner(G, stretch, weight=None, seed=None):
+    """Returns a spanner of the given graph with the given stretch.
+
+    A spanner of a graph G = (V, E) with stretch t is a subgraph
+    H = (V, E_S) such that E_S is a subset of E and the distance between
+    any pair of nodes in H is at most t times the distance between the
+    nodes in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected simple graph.
+
+    stretch : float
+        The stretch of the spanner.
+
+    weight : object
+        The edge attribute to use as distance.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    NetworkX graph
+        A spanner of the given graph with the given stretch.
+
+    Raises
+    ------
+    ValueError
+        If a stretch less than 1 is given.
+
+    Notes
+    -----
+    This function implements the spanner algorithm by Baswana and Sen,
+    see [1].
+
+    This algorithm is a randomized Las Vegas algorithm: The expected
+    running time is O(km) where k = (stretch + 1) // 2 and m is the
+    number of edges in G. The returned graph is always a spanner of the
+    given graph with the specified stretch. For weighted graphs the
+    number of edges in the spanner is O(k * n^(1 + 1 / k)) where k is
+    defined as above and n is the number of nodes in G. For unweighted
+    graphs the number of edges is O(n^(1 + 1 / k) + kn).
+
+    References
+    ----------
+    [1] S. Baswana, S. Sen. A Simple and Linear Time Randomized
+    Algorithm for Computing Sparse Spanners in Weighted Graphs.
+    Random Struct. Algorithms 30(4): 532-563 (2007).
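+
+    Examples
+    --------
+    A small illustrative run (the spanner is randomized, so the exact edge
+    set varies with the seed; the bounds below always hold)::
+
+        >>> G = nx.complete_graph(10)
+        >>> H = nx.spanner(G, stretch=3, seed=42)
+        >>> H.number_of_nodes() == G.number_of_nodes()
+        True
+        >>> H.number_of_edges() <= G.number_of_edges()
+        True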
+ """ + if stretch < 1: + raise ValueError("stretch must be at least 1") + + k = (stretch + 1) // 2 + + # initialize spanner H with empty edge set + H = nx.empty_graph() + H.add_nodes_from(G.nodes) + + # phase 1: forming the clusters + # the residual graph has V' from the paper as its node set + # and E' from the paper as its edge set + residual_graph = _setup_residual_graph(G, weight) + # clustering is a dictionary that maps nodes in a cluster to the + # cluster center + clustering = {v: v for v in G.nodes} + sample_prob = math.pow(G.number_of_nodes(), -1 / k) + size_limit = 2 * math.pow(G.number_of_nodes(), 1 + 1 / k) + + i = 0 + while i < k - 1: + # step 1: sample centers + sampled_centers = set() + for center in set(clustering.values()): + if seed.random() < sample_prob: + sampled_centers.add(center) + + # combined loop for steps 2 and 3 + edges_to_add = set() + edges_to_remove = set() + new_clustering = {} + for v in residual_graph.nodes: + if clustering[v] in sampled_centers: + continue + + # step 2: find neighboring (sampled) clusters and + # lightest edges to them + lightest_edge_neighbor, lightest_edge_weight = _lightest_edge_dicts( + residual_graph, clustering, v + ) + neighboring_sampled_centers = ( + set(lightest_edge_weight.keys()) & sampled_centers + ) + + # step 3: add edges to spanner + if not neighboring_sampled_centers: + # connect to each neighboring center via lightest edge + for neighbor in lightest_edge_neighbor.values(): + edges_to_add.add((v, neighbor)) + # remove all incident edges + for neighbor in residual_graph.adj[v]: + edges_to_remove.add((v, neighbor)) + + else: # there is a neighboring sampled center + closest_center = min( + neighboring_sampled_centers, key=lightest_edge_weight.get + ) + closest_center_weight = lightest_edge_weight[closest_center] + closest_center_neighbor = lightest_edge_neighbor[closest_center] + + edges_to_add.add((v, closest_center_neighbor)) + new_clustering[v] = closest_center + + # connect to centers with edge weight less than + # closest_center_weight + for center, edge_weight in lightest_edge_weight.items(): + if edge_weight < closest_center_weight: + neighbor = lightest_edge_neighbor[center] + edges_to_add.add((v, neighbor)) + + # remove edges to centers with edge weight less than + # closest_center_weight + for neighbor in residual_graph.adj[v]: + neighbor_cluster = clustering[neighbor] + neighbor_weight = lightest_edge_weight[neighbor_cluster] + if ( + neighbor_cluster == closest_center + or neighbor_weight < closest_center_weight + ): + edges_to_remove.add((v, neighbor)) + + # check whether iteration added too many edges to spanner, + # if so repeat + if len(edges_to_add) > size_limit: + # an iteration is repeated O(1) times on expectation + continue + + # iteration succeeded + i = i + 1 + + # actually add edges to spanner + for u, v in edges_to_add: + _add_edge_to_spanner(H, residual_graph, u, v, weight) + + # actually delete edges from residual graph + residual_graph.remove_edges_from(edges_to_remove) + + # copy old clustering data to new_clustering + for node, center in clustering.items(): + if center in sampled_centers: + new_clustering[node] = center + clustering = new_clustering + + # step 4: remove intra-cluster edges + for u in residual_graph.nodes: + for v in list(residual_graph.adj[u]): + if clustering[u] == clustering[v]: + residual_graph.remove_edge(u, v) + + # update residual graph node set + for v in list(residual_graph.nodes): + if v not in clustering: + residual_graph.remove_node(v) + + # phase 2: vertex-cluster 
joining
+    for v in residual_graph.nodes:
+        lightest_edge_neighbor, _ = _lightest_edge_dicts(residual_graph, clustering, v)
+        for neighbor in lightest_edge_neighbor.values():
+            _add_edge_to_spanner(H, residual_graph, v, neighbor, weight)
+
+    return H
+
+
+def _setup_residual_graph(G, weight):
+    """Set up the residual graph as a copy of G with unique edge weights.
+
+    The node set of the residual graph corresponds to the set V' from
+    the Baswana-Sen paper and the edge set corresponds to the set E'
+    from the paper.
+
+    This function associates distinct weights to the edges of the
+    residual graph (even for unweighted input graphs), as required by
+    the algorithm.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected simple graph.
+
+    weight : object
+        The edge attribute to use as distance.
+
+    Returns
+    -------
+    NetworkX graph
+        The residual graph used for the Baswana-Sen algorithm.
+    """
+    residual_graph = G.copy()
+
+    # establish unique edge weights, even for unweighted graphs
+    for u, v in G.edges():
+        if not weight:
+            residual_graph[u][v]["weight"] = (id(u), id(v))
+        else:
+            residual_graph[u][v]["weight"] = (G[u][v][weight], id(u), id(v))
+
+    return residual_graph
+
+
+def _lightest_edge_dicts(residual_graph, clustering, node):
+    """Find the lightest edge to each cluster.
+
+    Searches for the minimum-weight edge to each cluster adjacent to
+    the given node.
+
+    Parameters
+    ----------
+    residual_graph : NetworkX graph
+        The residual graph used by the Baswana-Sen algorithm.
+
+    clustering : dictionary
+        The current clustering of the nodes.
+
+    node : node
+        The node from which the search originates.
+
+    Returns
+    -------
+    lightest_edge_neighbor, lightest_edge_weight : dictionary, dictionary
+        lightest_edge_neighbor is a dictionary that maps a center C to
+        a node v in the corresponding cluster such that the edge from
+        the given node to v is the lightest edge from the given node to
+        any node in the cluster. lightest_edge_weight maps a center C to
+        the weight of the aforementioned edge.
+
+    Notes
+    -----
+    If a cluster has no node that is adjacent to the given node in the
+    residual graph, then the center of the cluster is not a key in the
+    returned dictionaries.
+    """
+    lightest_edge_neighbor = {}
+    lightest_edge_weight = {}
+    for neighbor in residual_graph.adj[node]:
+        neighbor_center = clustering[neighbor]
+        weight = residual_graph[node][neighbor]["weight"]
+        if (
+            neighbor_center not in lightest_edge_weight
+            or weight < lightest_edge_weight[neighbor_center]
+        ):
+            lightest_edge_neighbor[neighbor_center] = neighbor
+            lightest_edge_weight[neighbor_center] = weight
+    return lightest_edge_neighbor, lightest_edge_weight
+
+
+def _add_edge_to_spanner(H, residual_graph, u, v, weight):
+    """Add the edge {u, v} to the spanner H and take weight from
+    the residual graph.
+
+    Parameters
+    ----------
+    H : NetworkX graph
+        The spanner under construction.
+
+    residual_graph : NetworkX graph
+        The residual graph used by the Baswana-Sen algorithm. The weight
+        for the edge is taken from this graph.
+
+    u : node
+        One endpoint of the edge.
+
+    v : node
+        The other endpoint of the edge.
+
+    weight : object
+        The edge attribute to use as distance.
+ """ + H.add_edge(u, v) + if weight: + H[u][v][weight] = residual_graph[u][v]["weight"][0] diff --git a/networkx/algorithms/structuralholes.py b/networkx/algorithms/structuralholes.py index d61e61b..fd4e200 100644 --- a/networkx/algorithms/structuralholes.py +++ b/networkx/algorithms/structuralholes.py @@ -1,17 +1,8 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2008-2018 NetworkX developers. -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """Functions for computing measures of structural holes.""" -from __future__ import division import networkx as nx -__all__ = ['constraint', 'local_constraint', 'effective_size'] +__all__ = ["constraint", "local_constraint", "effective_size"] def mutual_weight(G, u, v, weight=None): @@ -52,8 +43,7 @@ def normalized_mutual_weight(G, u, v, norm=sum, weight=None): attribute used as weight. """ - scale = norm(mutual_weight(G, u, w, weight) - for w in set(nx.all_neighbors(G, u))) + scale = norm(mutual_weight(G, u, w, weight) for w in set(nx.all_neighbors(G, u))) return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale @@ -107,7 +97,7 @@ def effective_size(G, nodes=None, weight=None): Returns ------- dict - Dictionary with nodes as keys and the constraint on the node as values. + Dictionary with nodes as keys and the effective size of the node as values. Notes ----- @@ -136,11 +126,15 @@ def effective_size(G, nodes=None, weight=None): http://www.analytictech.com/connections/v20(1)/holes.htm """ + def redundancy(G, u, v, weight=None): nmw = normalized_mutual_weight - r = sum(nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight) - for w in set(nx.all_neighbors(G, u))) + r = sum( + nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight) + for w in set(nx.all_neighbors(G, u)) + ) return 1 - r + effective_size = {} if nodes is None: nodes = G @@ -149,7 +143,7 @@ def redundancy(G, u, v, weight=None): for v in nodes: # Effective size is not defined for isolated nodes if len(G[v]) == 0: - effective_size[v] = float('nan') + effective_size[v] = float("nan") continue E = nx.ego_graph(G, v, center=False, undirected=True) effective_size[v] = len(E) - (2 * E.size()) / len(E) @@ -157,10 +151,11 @@ def redundancy(G, u, v, weight=None): for v in nodes: # Effective size is not defined for isolated nodes if len(G[v]) == 0: - effective_size[v] = float('nan') + effective_size[v] = float("nan") continue - effective_size[v] = sum(redundancy(G, v, u, weight) - for u in set(nx.all_neighbors(G, v))) + effective_size[v] = sum( + redundancy(G, v, u, weight) for u in set(nx.all_neighbors(G, v)) + ) return effective_size @@ -216,10 +211,11 @@ def constraint(G, nodes=None, weight=None): for v in nodes: # Constraint is not defined for isolated nodes if len(G[v]) == 0: - constraint[v] = float('nan') + constraint[v] = float("nan") continue - constraint[v] = sum(local_constraint(G, v, n, weight) - for n in set(nx.all_neighbors(G, v))) + constraint[v] = sum( + local_constraint(G, v, n, weight) for n in set(nx.all_neighbors(G, v)) + ) return constraint @@ -275,6 +271,8 @@ def local_constraint(G, u, v, weight=None): """ nmw = normalized_mutual_weight direct = nmw(G, u, v, weight=weight) - indirect = sum(nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight) - for w in set(nx.all_neighbors(G, u))) + indirect = sum( + nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight) + for w in set(nx.all_neighbors(G, u)) + ) return (direct + indirect) ** 2 diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py index 
bd16a2e..8a06a5f 100644 --- a/networkx/algorithms/swap.py +++ b/networkx/algorithms/swap.py @@ -1,30 +1,16 @@ -# -*- coding: utf-8 -*- """Swap edges in a graph. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -from __future__ import division import math -import random +from networkx.utils import py_random_state import networkx as nx -__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult (dschult@colgate.edu)', - 'Joel Miller (joel.c.miller.research@gmail.com)', - 'Ben Edwards']) +__all__ = ["double_edge_swap", "connected_double_edge_swap"] -__all__ = ['double_edge_swap', - 'connected_double_edge_swap'] - -def double_edge_swap(G, nswap=1, max_tries=100): +@py_random_state(3) +def double_edge_swap(G, nswap=1, max_tries=100, seed=None): """Swap two edges in the graph while keeping the node degrees fixed. A double-edge swap removes two randomly chosen edges u-v and x-y @@ -48,6 +34,10 @@ def double_edge_swap(G, nswap=1, max_tries=100): max_tries : integer (optional) Maximum number of attempts to swap edges + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- G : graph @@ -60,8 +50,7 @@ def double_edge_swap(G, nswap=1, max_tries=100): The graph G is modified in place. """ if G.is_directed(): - raise nx.NetworkXError( - "double_edge_swap() not defined for directed graphs.") + raise nx.NetworkXError("double_edge_swap() not defined for directed graphs.") if nswap > max_tries: raise nx.NetworkXError("Number of swaps > number of tries allowed.") if len(G) < 4: @@ -73,18 +62,19 @@ def double_edge_swap(G, nswap=1, max_tries=100): swapcount = 0 keys, degrees = zip(*G.degree()) # keys, degree cdf = nx.utils.cumulative_distribution(degrees) # cdf of degree + discrete_sequence = nx.utils.discrete_sequence while swapcount < nswap: # if random.random() < 0.5: continue # trick to avoid periodicities? # pick two random edges without creating edge list # choose source node indices from discrete distribution - (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf) + (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed) if ui == xi: continue # same source, skip u = keys[ui] # convert index to label x = keys[xi] # choose target uniformly from neighbors - v = random.choice(list(G[u])) - y = random.choice(list(G[x])) + v = seed.choice(list(G[u])) + y = seed.choice(list(G[x])) if v == y: continue # same target, skip if (x not in G[u]) and (y not in G[v]): # don't create parallel edges @@ -94,14 +84,17 @@ def double_edge_swap(G, nswap=1, max_tries=100): G.remove_edge(x, y) swapcount += 1 if n >= max_tries: - e = ('Maximum number of swap attempts (%s) exceeded ' % n + - 'before desired swaps achieved (%s).' % nswap) + e = ( + f"Maximum number of swap attempts ({n}) exceeded " + f"before desired swaps achieved ({nswap})." + ) raise nx.NetworkXAlgorithmError(e) n += 1 return G -def connected_double_edge_swap(G, nswap=1, _window_threshold=3): +@py_random_state(3) +def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None): """Attempts the specified number of double-edge swaps in the graph `G`. 
A double-edge swap removes two randomly chosen edges `(u, v)` and `(x, @@ -139,6 +132,10 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3): size is above this threshold, then the algorithm performs do all the swaps in the window and only then check if the graph is still connected. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- int @@ -175,6 +172,7 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3): # Label key for nodes dk = list(n for n, d in G.degree()) cdf = nx.utils.cumulative_distribution(list(d for n, d in G.degree())) + discrete_sequence = nx.utils.discrete_sequence window = 1 while n < nswap: wcount = 0 @@ -188,7 +186,7 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3): while wcount < window and n < nswap: # Pick two random edges without creating the edge list. Choose # source nodes from the discrete degree distribution. - (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf) + (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed) # If the source nodes are the same, skip this pair. if ui == xi: continue @@ -196,8 +194,8 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3): u = dk[ui] x = dk[xi] # Choose targets uniformly from neighbors. - v = random.choice(list(G.neighbors(u))) - y = random.choice(list(G.neighbors(x))) + v = seed.choice(list(G.neighbors(u))) + y = seed.choice(list(G.neighbors(x))) # If the target nodes are the same, skip this pair. if v == y: continue @@ -240,8 +238,8 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3): u = dk[ui] x = dk[xi] # Choose targets uniformly from neighbors. - v = random.choice(list(G.neighbors(u))) - y = random.choice(list(G.neighbors(x))) + v = seed.choice(list(G.neighbors(u))) + y = seed.choice(list(G.neighbors(x))) # If the target nodes are the same, skip this pair. if v == y: continue diff --git a/networkx/algorithms/tests/__init__.py b/networkx/algorithms/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/tests/test_asteroidal.py b/networkx/algorithms/tests/test_asteroidal.py new file mode 100644 index 0000000..b0487af --- /dev/null +++ b/networkx/algorithms/tests/test_asteroidal.py @@ -0,0 +1,24 @@ +import networkx as nx + + +def test_is_at_free(): + + is_at_free = nx.asteroidal.is_at_free + + cycle = nx.cycle_graph(6) + assert not is_at_free(cycle) + + path = nx.path_graph(6) + assert is_at_free(path) + + small_graph = nx.complete_graph(2) + assert is_at_free(small_graph) + + petersen = nx.petersen_graph() + assert not is_at_free(petersen) + + clique = nx.complete_graph(6) + assert is_at_free(clique) + + line_clique = nx.line_graph(clique) + assert not is_at_free(line_clique) diff --git a/networkx/algorithms/tests/test_boundary.py b/networkx/algorithms/tests/test_boundary.py index ff53489..46dd0c5 100644 --- a/networkx/algorithms/tests/test_boundary.py +++ b/networkx/algorithms/tests/test_boundary.py @@ -1,56 +1,44 @@ -# test_boundary.py - unit tests for the boundary module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
"""Unit tests for the :mod:`networkx.algorithms.boundary` module.""" -from __future__ import division from itertools import combinations -from nose.tools import assert_almost_equals -from nose.tools import assert_equal - import networkx as nx -from networkx.testing.utils import * +from networkx.testing import almost_equal, assert_edges_equal from networkx import convert_node_labels_to_integers as cnlti -class TestNodeBoundary(object): +class TestNodeBoundary: """Unit tests for the :func:`~networkx.node_boundary` function.""" def test_null_graph(self): """Tests that the null graph has empty node boundaries.""" null = nx.null_graph() - assert_equal(nx.node_boundary(null, []), set()) - assert_equal(nx.node_boundary(null, [], []), set()) - assert_equal(nx.node_boundary(null, [1, 2, 3]), set()) - assert_equal(nx.node_boundary(null, [1, 2, 3], [4, 5, 6]), set()) - assert_equal(nx.node_boundary(null, [1, 2, 3], [3, 4, 5]), set()) + assert nx.node_boundary(null, []) == set() + assert nx.node_boundary(null, [], []) == set() + assert nx.node_boundary(null, [1, 2, 3]) == set() + assert nx.node_boundary(null, [1, 2, 3], [4, 5, 6]) == set() + assert nx.node_boundary(null, [1, 2, 3], [3, 4, 5]) == set() def test_path_graph(self): P10 = cnlti(nx.path_graph(10), first_label=1) - assert_equal(nx.node_boundary(P10, []), set()) - assert_equal(nx.node_boundary(P10, [], []), set()) - assert_equal(nx.node_boundary(P10, [1, 2, 3]), {4}) - assert_equal(nx.node_boundary(P10, [4, 5, 6]), {3, 7}) - assert_equal(nx.node_boundary(P10, [3, 4, 5, 6, 7]), {2, 8}) - assert_equal(nx.node_boundary(P10, [8, 9, 10]), {7}) - assert_equal(nx.node_boundary(P10, [4, 5, 6], [9, 10]), set()) + assert nx.node_boundary(P10, []) == set() + assert nx.node_boundary(P10, [], []) == set() + assert nx.node_boundary(P10, [1, 2, 3]) == {4} + assert nx.node_boundary(P10, [4, 5, 6]) == {3, 7} + assert nx.node_boundary(P10, [3, 4, 5, 6, 7]) == {2, 8} + assert nx.node_boundary(P10, [8, 9, 10]) == {7} + assert nx.node_boundary(P10, [4, 5, 6], [9, 10]) == set() def test_complete_graph(self): K10 = cnlti(nx.complete_graph(10), first_label=1) - assert_equal(nx.node_boundary(K10, []), set()) - assert_equal(nx.node_boundary(K10, [], []), set()) - assert_equal(nx.node_boundary(K10, [1, 2, 3]), {4, 5, 6, 7, 8, 9, 10}) - assert_equal(nx.node_boundary(K10, [4, 5, 6]), {1, 2, 3, 7, 8, 9, 10}) - assert_equal(nx.node_boundary(K10, [3, 4, 5, 6, 7]), {1, 2, 8, 9, 10}) - assert_equal(nx.node_boundary(K10, [4, 5, 6], []), set()) - assert_equal(nx.node_boundary(K10, K10), set()) - assert_equal(nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]), {4, 5}) + assert nx.node_boundary(K10, []) == set() + assert nx.node_boundary(K10, [], []) == set() + assert nx.node_boundary(K10, [1, 2, 3]) == {4, 5, 6, 7, 8, 9, 10} + assert nx.node_boundary(K10, [4, 5, 6]) == {1, 2, 3, 7, 8, 9, 10} + assert nx.node_boundary(K10, [3, 4, 5, 6, 7]) == {1, 2, 8, 9, 10} + assert nx.node_boundary(K10, [4, 5, 6], []) == set() + assert nx.node_boundary(K10, K10) == set() + assert nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]) == {4, 5} def test_petersen(self): """Check boundaries in the petersen graph @@ -60,15 +48,14 @@ def test_petersen(self): """ def cheeger(G, k): - return min(len(nx.node_boundary(G, nn)) / k - for nn in combinations(G, k)) + return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k)) P = nx.petersen_graph() - assert_almost_equals(cheeger(P, 1), 3.00, places=2) - assert_almost_equals(cheeger(P, 2), 2.00, places=2) - assert_almost_equals(cheeger(P, 3), 1.67, places=2) - 
assert_almost_equals(cheeger(P, 4), 1.00, places=2) - assert_almost_equals(cheeger(P, 5), 0.80, places=2) + assert almost_equal(cheeger(P, 1), 3.00, places=2) + assert almost_equal(cheeger(P, 2), 2.00, places=2) + assert almost_equal(cheeger(P, 3), 1.67, places=2) + assert almost_equal(cheeger(P, 4), 1.00, places=2) + assert almost_equal(cheeger(P, 5), 0.80, places=2) def test_directed(self): """Tests the node boundary of a directed graph.""" @@ -76,7 +63,7 @@ def test_directed(self): S = {0, 1} boundary = nx.node_boundary(G, S) expected = {2} - assert_equal(boundary, expected) + assert boundary == expected def test_multigraph(self): """Tests the node boundary of a multigraph.""" @@ -84,7 +71,7 @@ def test_multigraph(self): S = {0, 1} boundary = nx.node_boundary(G, S) expected = {2, 4} - assert_equal(boundary, expected) + assert boundary == expected def test_multidigraph(self): """Tests the edge boundary of a multdiigraph.""" @@ -93,49 +80,51 @@ def test_multidigraph(self): S = {0, 1} boundary = nx.node_boundary(G, S) expected = {2} - assert_equal(boundary, expected) + assert boundary == expected -class TestEdgeBoundary(object): +class TestEdgeBoundary: """Unit tests for the :func:`~networkx.edge_boundary` function.""" def test_null_graph(self): null = nx.null_graph() - assert_equal(list(nx.edge_boundary(null, [])), []) - assert_equal(list(nx.edge_boundary(null, [], [])), []) - assert_equal(list(nx.edge_boundary(null, [1, 2, 3])), []) - assert_equal(list(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6])), []) - assert_equal(list(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5])), []) + assert list(nx.edge_boundary(null, [])) == [] + assert list(nx.edge_boundary(null, [], [])) == [] + assert list(nx.edge_boundary(null, [1, 2, 3])) == [] + assert list(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6])) == [] + assert list(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5])) == [] def test_path_graph(self): P10 = cnlti(nx.path_graph(10), first_label=1) - assert_equal(list(nx.edge_boundary(P10, [])), []) - assert_equal(list(nx.edge_boundary(P10, [], [])), []) - assert_equal(list(nx.edge_boundary(P10, [1, 2, 3])), [(3, 4)]) - assert_equal(sorted(nx.edge_boundary(P10, [4, 5, 6])), - [(4, 3), (6, 7)]) - assert_equal(sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])), - [(3, 2), (7, 8)]) - assert_equal(list(nx.edge_boundary(P10, [8, 9, 10])), [(8, 7)]) - assert_equal(sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])), []) - assert_equal(list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])), - [(2, 3), (3, 4)]) + assert list(nx.edge_boundary(P10, [])) == [] + assert list(nx.edge_boundary(P10, [], [])) == [] + assert list(nx.edge_boundary(P10, [1, 2, 3])) == [(3, 4)] + assert sorted(nx.edge_boundary(P10, [4, 5, 6])) == [(4, 3), (6, 7)] + assert sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == [(3, 2), (7, 8)] + assert list(nx.edge_boundary(P10, [8, 9, 10])) == [(8, 7)] + assert sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])) == [] + assert list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == [(2, 3), (3, 4)] def test_complete_graph(self): K10 = cnlti(nx.complete_graph(10), first_label=1) - def ilen(iterable): return sum(1 for i in iterable) - assert_equal(list(nx.edge_boundary(K10, [])), []) - assert_equal(list(nx.edge_boundary(K10, [], [])), []) - assert_equal(ilen(nx.edge_boundary(K10, [1, 2, 3])), 21) - assert_equal(ilen(nx.edge_boundary(K10, [4, 5, 6, 7])), 24) - assert_equal(ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])), 25) - assert_equal(ilen(nx.edge_boundary(K10, [8, 9, 10])), 21) - 
assert_edges_equal(nx.edge_boundary(K10, [4, 5, 6], [9, 10]), - [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)]) - assert_edges_equal(nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]), - [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), - (2, 5), (3, 4), (3, 5)]) + def ilen(iterable): + return sum(1 for i in iterable) + + assert list(nx.edge_boundary(K10, [])) == [] + assert list(nx.edge_boundary(K10, [], [])) == [] + assert ilen(nx.edge_boundary(K10, [1, 2, 3])) == 21 + assert ilen(nx.edge_boundary(K10, [4, 5, 6, 7])) == 24 + assert ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])) == 25 + assert ilen(nx.edge_boundary(K10, [8, 9, 10])) == 21 + assert_edges_equal( + nx.edge_boundary(K10, [4, 5, 6], [9, 10]), + [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)], + ) + assert_edges_equal( + nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]), + [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)], + ) def test_directed(self): """Tests the edge boundary of a directed graph.""" @@ -143,7 +132,7 @@ def test_directed(self): S = {0, 1} boundary = list(nx.edge_boundary(G, S)) expected = [(1, 2)] - assert_equal(boundary, expected) + assert boundary == expected def test_multigraph(self): """Tests the edge boundary of a multigraph.""" @@ -151,7 +140,7 @@ def test_multigraph(self): S = {0, 1} boundary = list(nx.edge_boundary(G, S)) expected = [(0, 4), (0, 4), (1, 2), (1, 2)] - assert_equal(boundary, expected) + assert boundary == expected def test_multidigraph(self): """Tests the edge boundary of a multdiigraph.""" @@ -160,4 +149,4 @@ def test_multidigraph(self): S = {0, 1} boundary = list(nx.edge_boundary(G, S)) expected = [(1, 2), (1, 2)] - assert_equal(boundary, expected) + assert boundary == expected diff --git a/networkx/algorithms/tests/test_bridges.py b/networkx/algorithms/tests/test_bridges.py index 2af31de..8f6b0a8 100644 --- a/networkx/algorithms/tests/test_bridges.py +++ b/networkx/algorithms/tests/test_bridges.py @@ -1,72 +1,74 @@ -# test_bridges.py - unit tests for bridge-finding algorithms -# -# Copyright 2004-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for bridge-finding algorithms.""" -from unittest import TestCase -from nose.tools import assert_equal, assert_in import networkx as nx -class TestBridges(TestCase): +class TestBridges: """Unit tests for the bridge-finding function.""" def test_single_bridge(self): edges = [ # DFS tree edges. - (1, 2), (2, 3), (3, 4), (3, 5), (5, 6), (6, 7), (7, 8), (5, 9), + (1, 2), + (2, 3), + (3, 4), + (3, 5), + (5, 6), + (6, 7), + (7, 8), + (5, 9), (9, 10), # Nontree edges. - (1, 3), (1, 4), (2, 5), (5, 10), (6, 8) + (1, 3), + (1, 4), + (2, 5), + (5, 10), + (6, 8), ] G = nx.Graph(edges) source = 1 bridges = list(nx.bridges(G, source)) - self.assertEqual(bridges, [(5, 6)]) + assert bridges == [(5, 6)] def test_barbell_graph(self): # The (3, 0) barbell graph has two triangles joined by a single edge. 
G = nx.barbell_graph(3, 0) source = 0 bridges = list(nx.bridges(G, source)) - self.assertEqual(bridges, [(2, 3)]) + assert bridges == [(2, 3)] -class TestLocalBridges(TestCase): +class TestLocalBridges: """Unit tests for the local_bridges function.""" - def setUp(self): - self.BB = nx.barbell_graph(4, 0) - self.square = nx.cycle_graph(4) - self.tri = nx.cycle_graph(3) + @classmethod + def setup_class(cls): + cls.BB = nx.barbell_graph(4, 0) + cls.square = nx.cycle_graph(4) + cls.tri = nx.cycle_graph(3) def test_nospan(self): expected = {(3, 4), (4, 3)} - assert_in(next(nx.local_bridges(self.BB, with_span=False)), expected) - assert_equal(set(nx.local_bridges(self.square, with_span=False)), self.square.edges) - assert_equal(list(nx.local_bridges(self.tri, with_span=False)), []) + assert next(nx.local_bridges(self.BB, with_span=False)) in expected + assert set(nx.local_bridges(self.square, with_span=False)) == self.square.edges + assert list(nx.local_bridges(self.tri, with_span=False)) == [] def test_no_weight(self): - inf = float('inf') + inf = float("inf") expected = {(3, 4, inf), (4, 3, inf)} - assert_in(next(nx.local_bridges(self.BB)), expected) + assert next(nx.local_bridges(self.BB)) in expected expected = {(u, v, 3) for u, v, in self.square.edges} - assert_equal(set(nx.local_bridges(self.square)), expected) - assert_equal(list(nx.local_bridges(self.tri)), []) + assert set(nx.local_bridges(self.square)) == expected + assert list(nx.local_bridges(self.tri)) == [] def test_weight(self): - inf = float('inf') + inf = float("inf") G = self.square.copy() - G.edges[1, 2]['weight'] = 2 - expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data='weight', default=1)} - assert_equal(set(nx.local_bridges(G, weight='weight')), expected) + G.edges[1, 2]["weight"] = 2 + expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data="weight", default=1)} + assert set(nx.local_bridges(G, weight="weight")) == expected expected = {(u, v, 6) for u, v in G.edges} lb = nx.local_bridges(G, weight=lambda u, v, d: 2) - assert_equal(set(lb), expected) + assert set(lb) == expected diff --git a/networkx/algorithms/tests/test_chains.py b/networkx/algorithms/tests/test_chains.py index b210e6d..9c825e1 100644 --- a/networkx/algorithms/tests/test_chains.py +++ b/networkx/algorithms/tests/test_chains.py @@ -1,15 +1,6 @@ -# test_chains.py - unit tests for the chains module -# -# Copyright 2004-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information.
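A quick orientation for the chain-decomposition tests that follow (an illustrative sketch, using only the public nx.chain_decomposition generator exercised below): each chain is a cycle or path of edges, and an edge is a bridge exactly when it appears in no chain. For a lone triangle the decomposition is a single chain:

    import networkx as nx

    G = nx.cycle_graph(3)                    # one triangle
    chains = list(nx.chain_decomposition(G))
    assert len(chains) == 1                  # the triangle itself is the chain
    # each node of the cycle occurs in exactly two of the chain's edges
    assert sorted(n for e in chains[0] for n in e) == [0, 0, 1, 1, 2, 2]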
"""Unit tests for the chain decomposition functions.""" from itertools import cycle from itertools import islice -from unittest import TestCase import networkx as nx @@ -19,7 +10,7 @@ def cycles(seq): For example:: - >>> list(cycles('abc')) + >>> list(cycles("abc")) [('a', 'b', 'c'), ('b', 'c', 'a'), ('c', 'a', 'b')] """ @@ -35,9 +26,9 @@ def cyclic_equals(seq1, seq2): For example:: - >>> cyclic_equals('xyz', 'zxy') + >>> cyclic_equals("xyz", "zxy") True - >>> cyclic_equals('xyz', 'zyx') + >>> cyclic_equals("xyz", "zyx") False """ @@ -46,7 +37,7 @@ def cyclic_equals(seq1, seq2): return any(x == tuple(seq2) for x in cycles(seq1)) -class TestChainDecomposition(TestCase): +class TestChainDecomposition: """Unit tests for the chain decomposition function.""" def assertContainsChain(self, chain, expected): @@ -60,15 +51,26 @@ def assertContainsChain(self, chain, expected): if cyclic_equals(reversed_chain, candidate): break else: - self.fail('chain not found') + self.fail("chain not found") def test_decomposition(self): edges = [ # DFS tree edges. - (1, 2), (2, 3), (3, 4), (3, 5), (5, 6), (6, 7), (7, 8), (5, 9), + (1, 2), + (2, 3), + (3, 4), + (3, 5), + (5, 6), + (6, 7), + (7, 8), + (5, 9), (9, 10), # Nontree edges. - (1, 3), (1, 4), (2, 5), (5, 10), (6, 8) + (1, 3), + (1, 4), + (2, 5), + (5, 10), + (6, 8), ] G = nx.Graph(edges) expected = [ @@ -79,21 +81,19 @@ def test_decomposition(self): [(6, 8), (8, 7), (7, 6)], ] chains = list(nx.chain_decomposition(G, root=1)) - self.assertEqual(len(chains), len(expected)) -# This chain decomposition isn't unique -# for chain in chains: -# print(chain) -# self.assertContainsChain(chain, expected) + assert len(chains) == len(expected) + + # This chain decomposition isn't unique + # for chain in chains: + # print(chain) + # self.assertContainsChain(chain, expected) def test_barbell_graph(self): # The (3, 0) barbell graph has two triangles joined by a single edge. 
G = nx.barbell_graph(3, 0) chains = list(nx.chain_decomposition(G, root=0)) - expected = [ - [(0, 1), (1, 2), (2, 0)], - [(3, 4), (4, 5), (5, 3)], - ] - self.assertEqual(len(chains), len(expected)) + expected = [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]] + assert len(chains) == len(expected) for chain in chains: self.assertContainsChain(chain, expected) @@ -101,17 +101,17 @@ def test_disconnected_graph(self): """Test for a graph with multiple connected components.""" G = nx.barbell_graph(3, 0) H = nx.barbell_graph(3, 0) - mapping = dict(zip(range(6), 'abcdef')) + mapping = dict(zip(range(6), "abcdef")) nx.relabel_nodes(H, mapping, copy=False) G = nx.union(G, H) chains = list(nx.chain_decomposition(G)) expected = [ [(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)], - [('a', 'b'), ('b', 'c'), ('c', 'a')], - [('d', 'e'), ('e', 'f'), ('f', 'd')], + [("a", "b"), ("b", "c"), ("c", "a")], + [("d", "e"), ("e", "f"), ("f", "d")], ] - self.assertEqual(len(chains), len(expected)) + assert len(chains) == len(expected) for chain in chains: self.assertContainsChain(chain, expected) @@ -119,14 +119,14 @@ def test_disconnected_graph_root_node(self): """Test for a single component of a disconnected graph.""" G = nx.barbell_graph(3, 0) H = nx.barbell_graph(3, 0) - mapping = dict(zip(range(6), 'abcdef')) + mapping = dict(zip(range(6), "abcdef")) nx.relabel_nodes(H, mapping, copy=False) G = nx.union(G, H) - chains = list(nx.chain_decomposition(G, root='a')) + chains = list(nx.chain_decomposition(G, root="a")) expected = [ - [('a', 'b'), ('b', 'c'), ('c', 'a')], - [('d', 'e'), ('e', 'f'), ('f', 'd')], + [("a", "b"), ("b", "c"), ("c", "a")], + [("d", "e"), ("e", "f"), ("f", "d")], ] - self.assertEqual(len(chains), len(expected)) + assert len(chains) == len(expected) for chain in chains: self.assertContainsChain(chain, expected) diff --git a/networkx/algorithms/tests/test_chordal.py b/networkx/algorithms/tests/test_chordal.py index c6cbf46..5ca2cae 100644 --- a/networkx/algorithms/tests/test_chordal.py +++ b/networkx/algorithms/tests/test_chordal.py @@ -1,59 +1,109 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx class TestMCS: - - def setUp(self): + @classmethod + def setup_class(cls): # simple graph connected_chordal_G = nx.Graph() - connected_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4), - (3, 5), (3, 6), (4, 5), (4, 6), (5, 6)]) - self.connected_chordal_G = connected_chordal_G + connected_chordal_G.add_edges_from( + [ + (1, 2), + (1, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 5), + (3, 6), + (4, 5), + (4, 6), + (5, 6), + ] + ) + cls.connected_chordal_G = connected_chordal_G chordal_G = nx.Graph() - chordal_G.add_edges_from([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4), - (3, 5), (3, 6), (4, 5), (4, 6), (5, 6), (7, 8)]) + chordal_G.add_edges_from( + [ + (1, 2), + (1, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 5), + (3, 6), + (4, 5), + (4, 6), + (5, 6), + (7, 8), + ] + ) chordal_G.add_node(9) - self.chordal_G = chordal_G + cls.chordal_G = chordal_G non_chordal_G = nx.Graph() non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)]) - self.non_chordal_G = non_chordal_G + cls.non_chordal_G = non_chordal_G def test_is_chordal(self): - assert_false(nx.is_chordal(self.non_chordal_G)) - assert_true(nx.is_chordal(self.chordal_G)) - assert_true(nx.is_chordal(self.connected_chordal_G)) - assert_true(nx.is_chordal(nx.complete_graph(3))) - assert_true(nx.is_chordal(nx.cycle_graph(3))) - assert_false(nx.is_chordal(nx.cycle_graph(5))) + assert 
not nx.is_chordal(self.non_chordal_G) + assert nx.is_chordal(self.chordal_G) + assert nx.is_chordal(self.connected_chordal_G) + assert nx.is_chordal(nx.complete_graph(3)) + assert nx.is_chordal(nx.cycle_graph(3)) + assert not nx.is_chordal(nx.cycle_graph(5)) def test_induced_nodes(self): G = nx.generators.classic.path_graph(10) - I = nx.find_induced_nodes(G, 1, 9, 2) - assert_equal(I, set([1, 2, 3, 4, 5, 6, 7, 8, 9])) - assert_raises(nx.NetworkXTreewidthBoundExceeded, - nx.find_induced_nodes, G, 1, 9, 1) - I = nx.find_induced_nodes(self.chordal_G, 1, 6) - assert_equal(I, set([1, 2, 4, 6])) - assert_raises(nx.NetworkXError, - nx.find_induced_nodes, self.non_chordal_G, 1, 5) + Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2) + assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9} + pytest.raises( + nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1 + ) + Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6) + assert Induced_nodes == {1, 2, 4, 6} + pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5) def test_chordal_find_cliques(self): - cliques = set([frozenset([9]), frozenset([7, 8]), frozenset([1, 2, 3]), - frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])]) - assert_equal(nx.chordal_graph_cliques(self.chordal_G), cliques) + cliques = { + frozenset([9]), + frozenset([7, 8]), + frozenset([1, 2, 3]), + frozenset([2, 3, 4]), + frozenset([3, 4, 5, 6]), + } + assert nx.chordal_graph_cliques(self.chordal_G) == cliques def test_chordal_find_cliques_path(self): G = nx.path_graph(10) cliqueset = nx.chordal_graph_cliques(G) for (u, v) in G.edges(): - assert_true(frozenset([u, v]) in cliqueset - or frozenset([v, u]) in cliqueset) + assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset def test_chordal_find_cliquesCC(self): - cliques = set([frozenset([1, 2, 3]), frozenset([2, 3, 4]), - frozenset([3, 4, 5, 6])]) - assert_equal(nx.chordal_graph_cliques(self.connected_chordal_G), cliques) + cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])} + cgc = nx.chordal_graph_cliques + assert cgc(self.connected_chordal_G) == cliques + + def test_complete_to_chordal_graph(self): + fgrg = nx.fast_gnp_random_graph + test_graphs = [ + nx.barbell_graph(6, 2), + nx.cycle_graph(15), + nx.wheel_graph(20), + nx.grid_graph([10, 4]), + nx.ladder_graph(15), + nx.star_graph(5), + nx.bull_graph(), + fgrg(20, 0.3, seed=1), + ] + for G in test_graphs: + H, a = nx.complete_to_chordal_graph(G) + assert nx.is_chordal(H) + assert len(a) == H.number_of_nodes() + if nx.is_chordal(G): + assert G.number_of_edges() == H.number_of_edges() + assert set(a.values()) == {0} + else: + assert len(set(a.values())) == H.number_of_nodes() diff --git a/networkx/algorithms/tests/test_clique.py b/networkx/algorithms/tests/test_clique.py index 26d0922..ae230ee 100644 --- a/networkx/algorithms/tests/test_clique.py +++ b/networkx/algorithms/tests/test_clique.py @@ -1,17 +1,15 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx from networkx import convert_node_labels_to_integers as cnlti class TestCliques: - - def setUp(self): + def setup_method(self): z = [3, 4, 3, 4, 2, 4, 2, 1, 1, 1, 1] self.G = cnlti(nx.generators.havel_hakimi_graph(z), first_label=1) self.cl = list(nx.find_cliques(self.G)) H = nx.complete_graph(6) - H = nx.relabel_nodes(H, dict([(i, i + 1) for i in range(6)])) + H = nx.relabel_nodes(H, {i: i + 1 for i in range(6)}) H.remove_edges_from([(2, 6), (2, 5), (2, 4), (1, 3), (5, 3)]) self.H = H @@ -19,96 +17,165 
@@ def test_find_cliques1(self): cl = list(nx.find_cliques(self.G)) rcl = nx.find_cliques_recursive(self.G) expected = [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]] - assert_equal(sorted(map(sorted, cl)), sorted(map(sorted, rcl))) - assert_equal(sorted(map(sorted, cl)), sorted(map(sorted, expected))) + assert sorted(map(sorted, cl)) == sorted(map(sorted, rcl)) + assert sorted(map(sorted, cl)) == sorted(map(sorted, expected)) def test_selfloops(self): self.G.add_edge(1, 1) cl = list(nx.find_cliques(self.G)) - rcl = nx.find_cliques_recursive(self.G) - assert_equal(sorted(map(sorted, cl)), sorted(map(sorted, rcl))) - assert_equal(cl, - [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]) + rcl = list(nx.find_cliques_recursive(self.G)) + assert set(map(frozenset, cl)) == set(map(frozenset, rcl)) + answer = [{2, 6, 1, 3}, {2, 6, 4}, {5, 4, 7}, {8, 9}, {10, 11}] + assert len(answer) == len(cl) + assert all(set(c) in answer for c in cl) def test_find_cliques2(self): hcl = list(nx.find_cliques(self.H)) - assert_equal(sorted(map(sorted, hcl)), - [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]]) + assert sorted(map(sorted, hcl)) == [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]] def test_clique_number(self): G = self.G - assert_equal(nx.graph_clique_number(G), 4) - assert_equal(nx.graph_clique_number(G, cliques=self.cl), 4) + assert nx.graph_clique_number(G) == 4 + assert nx.graph_clique_number(G, cliques=self.cl) == 4 + + def test_clique_number2(self): + G = nx.Graph() + G.add_nodes_from([1, 2, 3]) + assert nx.graph_clique_number(G) == 1 + + def test_clique_number3(self): + G = nx.Graph() + assert nx.graph_clique_number(G) == 0 def test_number_of_cliques(self): G = self.G - assert_equal(nx.graph_number_of_cliques(G), 5) - assert_equal(nx.graph_number_of_cliques(G, cliques=self.cl), 5) - assert_equal(nx.number_of_cliques(G, 1), 1) - assert_equal(list(nx.number_of_cliques(G, [1]).values()), [1]) - assert_equal(list(nx.number_of_cliques(G, [1, 2]).values()), [1, 2]) - assert_equal(nx.number_of_cliques(G, [1, 2]), {1: 1, 2: 2}) - assert_equal(nx.number_of_cliques(G, 2), 2) - assert_equal(nx.number_of_cliques(G), - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) - assert_equal(nx.number_of_cliques(G, nodes=list(G)), - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) - assert_equal(nx.number_of_cliques(G, nodes=[2, 3, 4]), - {2: 2, 3: 1, 4: 2}) - assert_equal(nx.number_of_cliques(G, cliques=self.cl), - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) - assert_equal(nx.number_of_cliques(G, list(G), cliques=self.cl), - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) + assert nx.graph_number_of_cliques(G) == 5 + assert nx.graph_number_of_cliques(G, cliques=self.cl) == 5 + assert nx.number_of_cliques(G, 1) == 1 + assert list(nx.number_of_cliques(G, [1]).values()) == [1] + assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2] + assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2} + assert nx.number_of_cliques(G, 2) == 2 + assert nx.number_of_cliques(G) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, nodes=list(G)) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2} + assert nx.number_of_cliques(G, cliques=self.cl) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, 
+ } + assert nx.number_of_cliques(G, list(G), cliques=self.cl) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } def test_node_clique_number(self): G = self.G - assert_equal(nx.node_clique_number(G, 1), 4) - assert_equal(list(nx.node_clique_number(G, [1]).values()), [4]) - assert_equal(list(nx.node_clique_number(G, [1, 2]).values()), [4, 4]) - assert_equal(nx.node_clique_number(G, [1, 2]), {1: 4, 2: 4}) - assert_equal(nx.node_clique_number(G, 1), 4) - assert_equal(nx.node_clique_number(G), - {1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4, - 7: 3, 8: 2, 9: 2, 10: 2, 11: 2}) - assert_equal(nx.node_clique_number(G, cliques=self.cl), - {1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4, - 7: 3, 8: 2, 9: 2, 10: 2, 11: 2}) + assert nx.node_clique_number(G, 1) == 4 + assert list(nx.node_clique_number(G, [1]).values()) == [4] + assert list(nx.node_clique_number(G, [1, 2]).values()) == [4, 4] + assert nx.node_clique_number(G, [1, 2]) == {1: 4, 2: 4} + assert nx.node_clique_number(G, 1) == 4 + assert nx.node_clique_number(G) == { + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 4, + 7: 3, + 8: 2, + 9: 2, + 10: 2, + 11: 2, + } + assert nx.node_clique_number(G, cliques=self.cl) == { + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 4, + 7: 3, + 8: 2, + 9: 2, + 10: 2, + 11: 2, + } def test_cliques_containing_node(self): G = self.G - assert_equal(nx.cliques_containing_node(G, 1), - [[2, 6, 1, 3]]) - assert_equal(list(nx.cliques_containing_node(G, [1]).values()), - [[[2, 6, 1, 3]]]) - assert_equal(list(nx.cliques_containing_node(G, [1, 2]).values()), - [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]]) - assert_equal(nx.cliques_containing_node(G, [1, 2]), - {1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]}) - assert_equal(nx.cliques_containing_node(G, 1), - [[2, 6, 1, 3]]) - assert_equal(nx.cliques_containing_node(G, 2), - [[2, 6, 1, 3], [2, 6, 4]]) - assert_equal(nx.cliques_containing_node(G, 2, cliques=self.cl), - [[2, 6, 1, 3], [2, 6, 4]]) - assert_equal(len(nx.cliques_containing_node(G)), 11) + assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]] + assert list(nx.cliques_containing_node(G, [1]).values()) == [[[2, 6, 1, 3]]] + assert [ + sorted(c) for c in list(nx.cliques_containing_node(G, [1, 2]).values()) + ] == [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]] + result = nx.cliques_containing_node(G, [1, 2]) + for k, v in result.items(): + result[k] = sorted(v) + assert result == {1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]} + assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]] + expected = [{2, 6, 1, 3}, {2, 6, 4}] + answer = [set(c) for c in nx.cliques_containing_node(G, 2)] + assert answer in (expected, list(reversed(expected))) + + answer = [set(c) for c in nx.cliques_containing_node(G, 2, cliques=self.cl)] + assert answer in (expected, list(reversed(expected))) + assert len(nx.cliques_containing_node(G)) == 11 def test_make_clique_bipartite(self): G = self.G B = nx.make_clique_bipartite(G) - assert_equal(sorted(B), - [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + assert sorted(B) == [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] # Project onto the nodes of the original graph. H = nx.project(B, range(1, 12)) - assert_equal(H.adj, G.adj) + assert H.adj == G.adj # Project onto the nodes representing the cliques. H1 = nx.project(B, range(-5, 0)) # Relabel the negative numbers as positive ones. 
H1 = nx.relabel_nodes(H1, {-v: v for v in range(1, 6)}) - assert_equal(sorted(H1), [1, 2, 3, 4, 5]) + assert sorted(H1) == [1, 2, 3, 4, 5] def test_make_max_clique_graph(self): """Tests that the maximal clique graph is the same as the bipartite @@ -124,77 +191,90 @@ def test_make_max_clique_graph(self): # 0. H1 = nx.relabel_nodes(H1, {-v: v - 1 for v in range(1, 6)}) H2 = nx.make_max_clique_graph(G) - assert_equal(H1.adj, H2.adj) + assert H1.adj == H2.adj - @raises(nx.NetworkXNotImplemented) def test_directed(self): - cliques = nx.find_cliques(nx.DiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + cliques = nx.find_cliques(nx.DiGraph()) class TestEnumerateAllCliques: - def test_paper_figure_4(self): # Same graph as given in Fig. 4 of paper enumerate_all_cliques is # based on. # http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1559964&isnumber=33129 G = nx.Graph() - edges_fig_4 = [('a', 'b'), ('a', 'c'), ('a', 'd'), ('a', 'e'), - ('b', 'c'), ('b', 'd'), ('b', 'e'), - ('c', 'd'), ('c', 'e'), - ('d', 'e'), - ('f', 'b'), ('f', 'c'), ('f', 'g'), - ('g', 'f'), ('g', 'c'), ('g', 'd'), ('g', 'e')] + edges_fig_4 = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("b", "c"), + ("b", "d"), + ("b", "e"), + ("c", "d"), + ("c", "e"), + ("d", "e"), + ("f", "b"), + ("f", "c"), + ("f", "g"), + ("g", "f"), + ("g", "c"), + ("g", "d"), + ("g", "e"), + ] G.add_edges_from(edges_fig_4) cliques = list(nx.enumerate_all_cliques(G)) clique_sizes = list(map(len, cliques)) - assert_equal(sorted(clique_sizes), clique_sizes) - - expected_cliques = [['a'], - ['b'], - ['c'], - ['d'], - ['e'], - ['f'], - ['g'], - ['a', 'b'], - ['a', 'b', 'd'], - ['a', 'b', 'd', 'e'], - ['a', 'b', 'e'], - ['a', 'c'], - ['a', 'c', 'd'], - ['a', 'c', 'd', 'e'], - ['a', 'c', 'e'], - ['a', 'd'], - ['a', 'd', 'e'], - ['a', 'e'], - ['b', 'c'], - ['b', 'c', 'd'], - ['b', 'c', 'd', 'e'], - ['b', 'c', 'e'], - ['b', 'c', 'f'], - ['b', 'd'], - ['b', 'd', 'e'], - ['b', 'e'], - ['b', 'f'], - ['c', 'd'], - ['c', 'd', 'e'], - ['c', 'd', 'e', 'g'], - ['c', 'd', 'g'], - ['c', 'e'], - ['c', 'e', 'g'], - ['c', 'f'], - ['c', 'f', 'g'], - ['c', 'g'], - ['d', 'e'], - ['d', 'e', 'g'], - ['d', 'g'], - ['e', 'g'], - ['f', 'g'], - ['a', 'b', 'c'], - ['a', 'b', 'c', 'd'], - ['a', 'b', 'c', 'd', 'e'], - ['a', 'b', 'c', 'e']] - - assert_equal(sorted(map(sorted, cliques)), - sorted(map(sorted, expected_cliques))) + assert sorted(clique_sizes) == clique_sizes + + expected_cliques = [ + ["a"], + ["b"], + ["c"], + ["d"], + ["e"], + ["f"], + ["g"], + ["a", "b"], + ["a", "b", "d"], + ["a", "b", "d", "e"], + ["a", "b", "e"], + ["a", "c"], + ["a", "c", "d"], + ["a", "c", "d", "e"], + ["a", "c", "e"], + ["a", "d"], + ["a", "d", "e"], + ["a", "e"], + ["b", "c"], + ["b", "c", "d"], + ["b", "c", "d", "e"], + ["b", "c", "e"], + ["b", "c", "f"], + ["b", "d"], + ["b", "d", "e"], + ["b", "e"], + ["b", "f"], + ["c", "d"], + ["c", "d", "e"], + ["c", "d", "e", "g"], + ["c", "d", "g"], + ["c", "e"], + ["c", "e", "g"], + ["c", "f"], + ["c", "f", "g"], + ["c", "g"], + ["d", "e"], + ["d", "e", "g"], + ["d", "g"], + ["e", "g"], + ["f", "g"], + ["a", "b", "c"], + ["a", "b", "c", "d"], + ["a", "b", "c", "d", "e"], + ["a", "b", "c", "e"], + ] + + assert sorted(map(sorted, cliques)) == sorted(map(sorted, expected_cliques)) diff --git a/networkx/algorithms/tests/test_cluster.py b/networkx/algorithms/tests/test_cluster.py index b1b9487..c3e9aac 100644 --- a/networkx/algorithms/tests/test_cluster.py +++ b/networkx/algorithms/tests/test_cluster.py @@ -1,217 +1,436 @@ 
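The hunk below reworks every clustering test, so the quantity under test is worth restating (editorial note, not patch content): for an unweighted graph the local clustering coefficient is c_u = 2 T(u) / (deg(u) (deg(u) - 1)), where T(u) is the number of triangles through u. A small worked example matching the triangle-plus-pendant-edge case used in these tests:

    import networkx as nx

    G = nx.cycle_graph(3)   # triangle on {0, 1, 2}
    G.add_edge(0, 4)        # pendant edge at node 0
    # deg(0) = 3 and one triangle passes through node 0,
    # so c_0 = 2 * 1 / (3 * 2) = 1 / 3
    assert nx.clustering(G)[0] == 1 / 3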
-#!/usr/bin/env python -from nose.tools import * import networkx as nx class TestTriangles: - def test_empty(self): G = nx.Graph() - assert_equal(list(nx.triangles(G).values()), []) + assert list(nx.triangles(G).values()) == [] def test_path(self): G = nx.path_graph(10) - assert_equal(list(nx.triangles(G).values()), - [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) - assert_equal(nx.triangles(G), - {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, - 5: 0, 6: 0, 7: 0, 8: 0, 9: 0}) + assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + assert nx.triangles(G) == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } def test_cubical(self): G = nx.cubical_graph() - assert_equal(list(nx.triangles(G).values()), - [0, 0, 0, 0, 0, 0, 0, 0]) - assert_equal(nx.triangles(G, 1), 0) - assert_equal(list(nx.triangles(G, [1, 2]).values()), [0, 0]) - assert_equal(nx.triangles(G, 1), 0) - assert_equal(nx.triangles(G, [1, 2]), {1: 0, 2: 0}) + assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0] + assert nx.triangles(G, 1) == 0 + assert list(nx.triangles(G, [1, 2]).values()) == [0, 0] + assert nx.triangles(G, 1) == 0 + assert nx.triangles(G, [1, 2]) == {1: 0, 2: 0} def test_k5(self): G = nx.complete_graph(5) - assert_equal(list(nx.triangles(G).values()), [6, 6, 6, 6, 6]) - assert_equal(sum(nx.triangles(G).values()) / 3.0, 10) - assert_equal(nx.triangles(G, 1), 6) + assert list(nx.triangles(G).values()) == [6, 6, 6, 6, 6] + assert sum(nx.triangles(G).values()) / 3.0 == 10 + assert nx.triangles(G, 1) == 6 G.remove_edge(1, 2) - assert_equal(list(nx.triangles(G).values()), [5, 3, 3, 5, 5]) - assert_equal(nx.triangles(G, 1), 3) + assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5] + assert nx.triangles(G, 1) == 3 -class TestWeightedClustering: +class TestDirectedClustering: + def test_clustering(self): + G = nx.DiGraph() + assert list(nx.clustering(G).values()) == [] + assert nx.clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10, create_using=nx.DiGraph()) + assert list(nx.clustering(G).values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G) == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } + + def test_k5(self): + G = nx.complete_graph(5, create_using=nx.DiGraph()) + assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G) == 1 + G.remove_edge(1, 2) + assert list(nx.clustering(G).values()) == [ + 11.0 / 12.0, + 1.0, + 1.0, + 11.0 / 12.0, + 11.0 / 12.0, + ] + assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 11.0 / 12.0} + G.remove_edge(2, 1) + assert list(nx.clustering(G).values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] + assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 0.83333333333333337} + + def test_triangle_and_edge(self): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(0, 4) + assert nx.clustering(G)[0] == 1.0 / 6.0 + + +class TestDirectedWeightedClustering: + def test_clustering(self): + G = nx.DiGraph() + assert list(nx.clustering(G, weight="weight").values()) == [] + assert nx.clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10, create_using=nx.DiGraph()) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G, weight="weight") == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } + + def 
test_k5(self): + G = nx.complete_graph(5, create_using=nx.DiGraph()) + assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G, weight="weight") == 1 + G.remove_edge(1, 2) + assert list(nx.clustering(G, weight="weight").values()) == [ + 11.0 / 12.0, + 1.0, + 1.0, + 11.0 / 12.0, + 11.0 / 12.0, + ] + assert nx.clustering(G, [1, 4], weight="weight") == {1: 1.0, 4: 11.0 / 12.0} + G.remove_edge(2, 1) + assert list(nx.clustering(G, weight="weight").values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] + assert nx.clustering(G, [1, 4], weight="weight") == { + 1: 1.0, + 4: 0.83333333333333337, + } + + def test_triangle_and_edge(self): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(0, 4, weight=2) + assert nx.clustering(G)[0] == 1.0 / 6.0 + assert nx.clustering(G, weight="weight")[0] == 1.0 / 12.0 + +class TestWeightedClustering: def test_clustering(self): G = nx.Graph() - assert_equal(list(nx.clustering(G, weight='weight').values()), []) - assert_equal(nx.clustering(G), {}) + assert list(nx.clustering(G, weight="weight").values()) == [] + assert nx.clustering(G) == {} def test_path(self): G = nx.path_graph(10) - assert_equal(list(nx.clustering(G, weight='weight').values()), - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert_equal(nx.clustering(G, weight='weight'), - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G, weight="weight") == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_cubical(self): G = nx.cubical_graph() - assert_equal(list(nx.clustering(G, weight='weight').values()), - [0, 0, 0, 0, 0, 0, 0, 0]) - assert_equal(nx.clustering(G, 1), 0) - assert_equal(list(nx.clustering(G, [1, 2], weight='weight').values()), [0, 0]) - assert_equal(nx.clustering(G, 1, weight='weight'), 0) - assert_equal(nx.clustering(G, [1, 2], weight='weight'), {1: 0, 2: 0}) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert nx.clustering(G, 1) == 0 + assert list(nx.clustering(G, [1, 2], weight="weight").values()) == [0, 0] + assert nx.clustering(G, 1, weight="weight") == 0 + assert nx.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0} def test_k5(self): G = nx.complete_graph(5) - assert_equal(list(nx.clustering(G, weight='weight').values()), [1, 1, 1, 1, 1]) - assert_equal(nx.average_clustering(G, weight='weight'), 1) + assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G, weight="weight") == 1 G.remove_edge(1, 2) - assert_equal(list(nx.clustering(G, weight='weight').values()), - [5. / 6., 1.0, 1.0, 5. / 6., 5. 
/ 6.]) - assert_equal(nx.clustering(G, [1, 4], weight='weight'), {1: 1.0, 4: 0.83333333333333337}) + assert list(nx.clustering(G, weight="weight").values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] + assert nx.clustering(G, [1, 4], weight="weight") == { + 1: 1.0, + 4: 0.83333333333333337, + } def test_triangle_and_edge(self): G = nx.cycle_graph(3) G.add_edge(0, 4, weight=2) - assert_equal(nx.clustering(G)[0], 1.0 / 3.0) - assert_equal(nx.clustering(G, weight='weight')[0], 1.0 / 6.0) + assert nx.clustering(G)[0] == 1.0 / 3.0 + assert nx.clustering(G, weight="weight")[0] == 1.0 / 6.0 class TestClustering: - def test_clustering(self): G = nx.Graph() - assert_equal(list(nx.clustering(G).values()), []) - assert_equal(nx.clustering(G), {}) + assert list(nx.clustering(G).values()) == [] + assert nx.clustering(G) == {} def test_path(self): G = nx.path_graph(10) - assert_equal(list(nx.clustering(G).values()), - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert_equal(nx.clustering(G), - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.clustering(G).values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G) == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_cubical(self): G = nx.cubical_graph() - assert_equal(list(nx.clustering(G).values()), - [0, 0, 0, 0, 0, 0, 0, 0]) - assert_equal(nx.clustering(G, 1), 0) - assert_equal(list(nx.clustering(G, [1, 2]).values()), [0, 0]) - assert_equal(nx.clustering(G, 1), 0) - assert_equal(nx.clustering(G, [1, 2]), {1: 0, 2: 0}) + assert list(nx.clustering(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0] + assert nx.clustering(G, 1) == 0 + assert list(nx.clustering(G, [1, 2]).values()) == [0, 0] + assert nx.clustering(G, 1) == 0 + assert nx.clustering(G, [1, 2]) == {1: 0, 2: 0} def test_k5(self): G = nx.complete_graph(5) - assert_equal(list(nx.clustering(G).values()), [1, 1, 1, 1, 1]) - assert_equal(nx.average_clustering(G), 1) + assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G) == 1 G.remove_edge(1, 2) - assert_equal(list(nx.clustering(G).values()), - [5. / 6., 1.0, 1.0, 5. / 6., 5. 
/ 6.]) - assert_equal(nx.clustering(G, [1, 4]), {1: 1.0, 4: 0.83333333333333337}) + assert list(nx.clustering(G).values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] + assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 0.83333333333333337} class TestTransitivity: - def test_transitivity(self): G = nx.Graph() - assert_equal(nx.transitivity(G), 0.0) + assert nx.transitivity(G) == 0.0 def test_path(self): G = nx.path_graph(10) - assert_equal(nx.transitivity(G), 0.0) + assert nx.transitivity(G) == 0.0 def test_cubical(self): G = nx.cubical_graph() - assert_equal(nx.transitivity(G), 0.0) + assert nx.transitivity(G) == 0.0 def test_k5(self): G = nx.complete_graph(5) - assert_equal(nx.transitivity(G), 1.0) + assert nx.transitivity(G) == 1.0 G.remove_edge(1, 2) - assert_equal(nx.transitivity(G), 0.875) - - # def test_clustering_transitivity(self): - # # check that weighted average of clustering is transitivity - # G = nx.complete_graph(5) - # G.remove_edge(1,2) - # t1=nx.transitivity(G) - # (cluster_d2,weights)=nx.clustering(G,weights=True) - # trans=[] - # for v in G.nodes(): - # trans.append(cluster_d2[v]*weights[v]) - # t2=sum(trans) - # assert_almost_equal(abs(t1-t2),0) + assert nx.transitivity(G) == 0.875 class TestSquareClustering: - def test_clustering(self): G = nx.Graph() - assert_equal(list(nx.square_clustering(G).values()), []) - assert_equal(nx.square_clustering(G), {}) + assert list(nx.square_clustering(G).values()) == [] + assert nx.square_clustering(G) == {} def test_path(self): G = nx.path_graph(10) - assert_equal(list(nx.square_clustering(G).values()), - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert_equal(nx.square_clustering(G), - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.square_clustering(G).values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.square_clustering(G) == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_cubical(self): G = nx.cubical_graph() - assert_equal(list(nx.square_clustering(G).values()), - [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]) - assert_equal(list(nx.square_clustering(G, [1, 2]).values()), [0.5, 0.5]) - assert_equal(nx.square_clustering(G, [1])[1], 0.5) - assert_equal(nx.square_clustering(G, [1, 2]), {1: 0.5, 2: 0.5}) + assert list(nx.square_clustering(G).values()) == [ + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + ] + assert list(nx.square_clustering(G, [1, 2]).values()) == [0.5, 0.5] + assert nx.square_clustering(G, [1])[1] == 0.5 + assert nx.square_clustering(G, [1, 2]) == {1: 0.5, 2: 0.5} def test_k5(self): G = nx.complete_graph(5) - assert_equal(list(nx.square_clustering(G).values()), [1, 1, 1, 1, 1]) + assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1] def test_bipartite_k5(self): G = nx.complete_bipartite_graph(5, 5) - assert_equal(list(nx.square_clustering(G).values()), - [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]) + assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] def test_lind_square_clustering(self): """Test C4 for figure 1 Lind et al (2005)""" - G = nx.Graph([(1, 2), (1, 3), (1, 6), (1, 7), (2, 4), (2, 5), - (3, 4), (3, 5), (6, 7), (7, 8), (6, 8), (7, 9), - (7, 10), (6, 11), (6, 12), (2, 13), (2, 14), (3, 15), (3, 16)]) + G = nx.Graph( + [ + (1, 2), + (1, 3), + (1, 6), + (1, 7), + (2, 4), + (2, 5), + (3, 4), + (3, 5), + (6, 7), + (7, 8), + (6, 8), + (7, 9), + (7, 10), + (6, 11), + (6, 12), + (2, 13), + 
(2, 14), + (3, 15), + (3, 16), + ] + ) G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16]) G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12]) - assert_equal(nx.square_clustering(G, [1])[1], 3 / 75.0) - assert_equal(nx.square_clustering(G1, [1])[1], 2 / 6.0) - assert_equal(nx.square_clustering(G2, [1])[1], 1 / 5.0) + assert nx.square_clustering(G, [1])[1] == 3 / 75.0 + assert nx.square_clustering(G1, [1])[1] == 2 / 6.0 + assert nx.square_clustering(G2, [1])[1] == 1 / 5.0 def test_average_clustering(): G = nx.cycle_graph(3) G.add_edge(2, 3) - assert_equal(nx.average_clustering(G), (1 + 1 + 1 / 3.0) / 4.0) - assert_equal(nx.average_clustering(G, count_zeros=True), (1 + 1 + 1 / 3.0) / 4.0) - assert_equal(nx.average_clustering(G, count_zeros=False), (1 + 1 + 1 / 3.0) / 3.0) + assert nx.average_clustering(G) == (1 + 1 + 1 / 3.0) / 4.0 + assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3.0) / 4.0 + assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3.0) / 3.0 class TestGeneralizedDegree: - def test_generalized_degree(self): G = nx.Graph() - assert_equal(nx.generalized_degree(G), {}) + assert nx.generalized_degree(G) == {} def test_path(self): G = nx.path_graph(5) - assert_equal(nx.generalized_degree(G, 0), {0: 1}) - assert_equal(nx.generalized_degree(G, 1), {0: 2}) + assert nx.generalized_degree(G, 0) == {0: 1} + assert nx.generalized_degree(G, 1) == {0: 2} def test_cubical(self): G = nx.cubical_graph() - assert_equal(nx.generalized_degree(G, 0), {0: 3}) + assert nx.generalized_degree(G, 0) == {0: 3} def test_k5(self): G = nx.complete_graph(5) - assert_equal(nx.generalized_degree(G, 0), {3: 4}) + assert nx.generalized_degree(G, 0) == {3: 4} G.remove_edge(0, 1) - assert_equal(nx.generalized_degree(G, 0), {2: 3}) + assert nx.generalized_degree(G, 0) == {2: 3} diff --git a/networkx/algorithms/tests/test_communicability.py b/networkx/algorithms/tests/test_communicability.py index 2ab487c..c35cd33 100644 --- a/networkx/algorithms/tests/test_communicability.py +++ b/networkx/algorithms/tests/test_communicability.py @@ -1,83 +1,82 @@ from collections import defaultdict -from nose.tools import * -from nose import SkipTest + +import pytest + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") + import networkx as nx -from networkx.algorithms.communicability_alg import * +from networkx.testing import almost_equal +from networkx.algorithms.communicability_alg import communicability, communicability_exp class TestCommunicability: - @classmethod - def setupClass(cls): - global numpy - global scipy - try: - import numpy - except ImportError: - raise SkipTest('NumPy not available.') - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') - def test_communicability(self): - answer = {0: {0: 1.5430806348152435, - 1: 1.1752011936438012 - }, - 1: {0: 1.1752011936438012, - 1: 1.5430806348152435 - } - } -# answer={(0, 0): 1.5430806348152435, -# (0, 1): 1.1752011936438012, -# (1, 0): 1.1752011936438012, -# (1, 1): 1.5430806348152435} + answer = { + 0: {0: 1.5430806348152435, 1: 1.1752011936438012}, + 1: {0: 1.1752011936438012, 1: 1.5430806348152435}, + } + # answer={(0, 0): 1.5430806348152435, + # (0, 1): 1.1752011936438012, + # (1, 0): 1.1752011936438012, + # (1, 1): 1.5430806348152435} result = communicability(nx.path_graph(2)) for k1, val in result.items(): for k2 in val: - assert_almost_equal(answer[k1][k2], result[k1][k2], places=7) + assert almost_equal(answer[k1][k2], result[k1][k2], places=7) def test_communicability2(self): - 
answer_orig = {('1', '1'): 1.6445956054135658, - ('1', 'Albert'): 0.7430186221096251, - ('1', 'Aric'): 0.7430186221096251, - ('1', 'Dan'): 1.6208126320442937, - ('1', 'Franck'): 0.42639707170035257, - ('Albert', '1'): 0.7430186221096251, - ('Albert', 'Albert'): 2.4368257358712189, - ('Albert', 'Aric'): 1.4368257358712191, - ('Albert', 'Dan'): 2.0472097037446453, - ('Albert', 'Franck'): 1.8340111678944691, - ('Aric', '1'): 0.7430186221096251, - ('Aric', 'Albert'): 1.4368257358712191, - ('Aric', 'Aric'): 2.4368257358712193, - ('Aric', 'Dan'): 2.0472097037446457, - ('Aric', 'Franck'): 1.8340111678944691, - ('Dan', '1'): 1.6208126320442937, - ('Dan', 'Albert'): 2.0472097037446453, - ('Dan', 'Aric'): 2.0472097037446457, - ('Dan', 'Dan'): 3.1306328496328168, - ('Dan', 'Franck'): 1.4860372442192515, - ('Franck', '1'): 0.42639707170035257, - ('Franck', 'Albert'): 1.8340111678944691, - ('Franck', 'Aric'): 1.8340111678944691, - ('Franck', 'Dan'): 1.4860372442192515, - ('Franck', 'Franck'): 2.3876142275231915} + answer_orig = { + ("1", "1"): 1.6445956054135658, + ("1", "Albert"): 0.7430186221096251, + ("1", "Aric"): 0.7430186221096251, + ("1", "Dan"): 1.6208126320442937, + ("1", "Franck"): 0.42639707170035257, + ("Albert", "1"): 0.7430186221096251, + ("Albert", "Albert"): 2.4368257358712189, + ("Albert", "Aric"): 1.4368257358712191, + ("Albert", "Dan"): 2.0472097037446453, + ("Albert", "Franck"): 1.8340111678944691, + ("Aric", "1"): 0.7430186221096251, + ("Aric", "Albert"): 1.4368257358712191, + ("Aric", "Aric"): 2.4368257358712193, + ("Aric", "Dan"): 2.0472097037446457, + ("Aric", "Franck"): 1.8340111678944691, + ("Dan", "1"): 1.6208126320442937, + ("Dan", "Albert"): 2.0472097037446453, + ("Dan", "Aric"): 2.0472097037446457, + ("Dan", "Dan"): 3.1306328496328168, + ("Dan", "Franck"): 1.4860372442192515, + ("Franck", "1"): 0.42639707170035257, + ("Franck", "Albert"): 1.8340111678944691, + ("Franck", "Aric"): 1.8340111678944691, + ("Franck", "Dan"): 1.4860372442192515, + ("Franck", "Franck"): 2.3876142275231915, + } answer = defaultdict(dict) for (k1, k2), v in answer_orig.items(): answer[k1][k2] = v - G1 = nx.Graph([('Franck', 'Aric'), ('Aric', 'Dan'), ('Dan', 'Albert'), - ('Albert', 'Franck'), ('Dan', '1'), ('Franck', 'Albert')]) + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) result = communicability(G1) for k1, val in result.items(): for k2 in val: - assert_almost_equal(answer[k1][k2], result[k1][k2], places=7) + assert almost_equal(answer[k1][k2], result[k1][k2], places=7) result = communicability_exp(G1) for k1, val in result.items(): for k2 in val: - assert_almost_equal(answer[k1][k2], result[k1][k2], places=7) + assert almost_equal(answer[k1][k2], result[k1][k2], places=7) diff --git a/networkx/algorithms/tests/test_core.py b/networkx/algorithms/tests/test_core.py index 802072d..a539277 100644 --- a/networkx/algorithms/tests/test_core.py +++ b/networkx/algorithms/tests/test_core.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx -from networkx.testing.utils import * +from networkx.testing.utils import assert_nodes_equal class TestCore: - def setUp(self): + @classmethod + def setup_class(cls): # G is the example graph in Figure 1 from Batagelj and # Zaversnik's paper titled An O(m) Algorithm for Cores # Decomposition of Networks, 2003, @@ -16,12 +15,29 @@ def setUp(self): t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1) t2 = 
nx.convert_node_labels_to_integers(t1, 5) G = nx.union(t1, t2) - G.add_edges_from([(3, 7), (2, 11), (11, 5), (11, 12), (5, 12), - (12, 19), (12, 18), (3, 9), (7, 9), (7, 10), - (9, 10), (9, 20), (17, 13), (13, 14), (14, 15), - (15, 16), (16, 13)]) + G.add_edges_from( + [ + (3, 7), + (2, 11), + (11, 5), + (11, 12), + (5, 12), + (12, 19), + (12, 18), + (3, 9), + (7, 9), + (7, 10), + (9, 10), + (9, 20), + (17, 13), + (13, 14), + (14, 15), + (15, 16), + (16, 13), + ] + ) G.add_node(21) - self.G = G + cls.G = G # Create the graph H resulting from the degree sequence # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm. @@ -29,17 +45,18 @@ def setUp(self): degseq = [0, 1, 2, 2, 2, 2, 3] H = nx.havel_hakimi_graph(degseq) mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5} - self.H = nx.relabel_nodes(H, mapping) + cls.H = nx.relabel_nodes(H, mapping) def test_trivial(self): """Empty graph""" G = nx.Graph() - assert_equal(nx.find_cores(G), {}) + assert nx.find_cores(G) == {} def test_find_cores(self): core = nx.find_cores(self.G) - nodes_by_core = [sorted([n for n in core if core[n] == val]) - for val in range(4)] + nodes_by_core = [ + sorted([n for n in core if core[n] == val]) for val in range(4) + ] assert_nodes_equal(nodes_by_core[0], [21]) assert_nodes_equal(nodes_by_core[1], [17, 18, 19, 20]) assert_nodes_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16]) @@ -51,76 +68,112 @@ def test_core_number(self): def test_find_cores2(self): core = nx.find_cores(self.H) - nodes_by_core = [sorted([n for n in core if core[n] == val]) - for val in range(3)] + nodes_by_core = [ + sorted([n for n in core if core[n] == val]) for val in range(3) + ] assert_nodes_equal(nodes_by_core[0], [0]) assert_nodes_equal(nodes_by_core[1], [1, 3]) assert_nodes_equal(nodes_by_core[2], [2, 4, 5, 6]) def test_directed_find_cores(self): - '''core number had a bug for directed graphs found in issue #1959''' + """core number had a bug for directed graphs found in issue #1959""" # small example where too timid edge removal can make cn[2] = 3 G = nx.DiGraph() edges = [(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)] G.add_edges_from(edges) - assert_equal(nx.core_number(G), {1: 2, 2: 2, 3: 2, 4: 2}) + assert nx.core_number(G) == {1: 2, 2: 2, 3: 2, 4: 2} # small example where too aggressive edge removal can make cn[2] = 2 more_edges = [(1, 5), (3, 5), (4, 5), (3, 6), (4, 6), (5, 6)] G.add_edges_from(more_edges) - assert_equal(nx.core_number(G), {1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3}) + assert nx.core_number(G) == {1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3} def test_main_core(self): main_core_subgraph = nx.k_core(self.H) - assert_equal(sorted(main_core_subgraph.nodes()), [2, 4, 5, 6]) + assert sorted(main_core_subgraph.nodes()) == [2, 4, 5, 6] def test_k_core(self): # k=0 k_core_subgraph = nx.k_core(self.H, k=0) - assert_equal(sorted(k_core_subgraph.nodes()), sorted(self.H.nodes())) + assert sorted(k_core_subgraph.nodes()) == sorted(self.H.nodes()) # k=1 k_core_subgraph = nx.k_core(self.H, k=1) - assert_equal(sorted(k_core_subgraph.nodes()), [1, 2, 3, 4, 5, 6]) + assert sorted(k_core_subgraph.nodes()) == [1, 2, 3, 4, 5, 6] # k = 2 k_core_subgraph = nx.k_core(self.H, k=2) - assert_equal(sorted(k_core_subgraph.nodes()), [2, 4, 5, 6]) + assert sorted(k_core_subgraph.nodes()) == [2, 4, 5, 6] def test_main_crust(self): main_crust_subgraph = nx.k_crust(self.H) - assert_equal(sorted(main_crust_subgraph.nodes()), [0, 1, 3]) + assert sorted(main_crust_subgraph.nodes()) == [0, 1, 3] def test_k_crust(self): # k = 0 k_crust_subgraph = 
nx.k_crust(self.H, k=2) - assert_equal(sorted(k_crust_subgraph.nodes()), sorted(self.H.nodes())) + assert sorted(k_crust_subgraph.nodes()) == sorted(self.H.nodes()) # k=1 k_crust_subgraph = nx.k_crust(self.H, k=1) - assert_equal(sorted(k_crust_subgraph.nodes()), [0, 1, 3]) + assert sorted(k_crust_subgraph.nodes()) == [0, 1, 3] # k=2 k_crust_subgraph = nx.k_crust(self.H, k=0) - assert_equal(sorted(k_crust_subgraph.nodes()), [0]) + assert sorted(k_crust_subgraph.nodes()) == [0] def test_main_shell(self): main_shell_subgraph = nx.k_shell(self.H) - assert_equal(sorted(main_shell_subgraph.nodes()), [2, 4, 5, 6]) + assert sorted(main_shell_subgraph.nodes()) == [2, 4, 5, 6] def test_k_shell(self): # k=0 k_shell_subgraph = nx.k_shell(self.H, k=2) - assert_equal(sorted(k_shell_subgraph.nodes()), [2, 4, 5, 6]) + assert sorted(k_shell_subgraph.nodes()) == [2, 4, 5, 6] # k=1 k_shell_subgraph = nx.k_shell(self.H, k=1) - assert_equal(sorted(k_shell_subgraph.nodes()), [1, 3]) + assert sorted(k_shell_subgraph.nodes()) == [1, 3] # k=2 k_shell_subgraph = nx.k_shell(self.H, k=0) - assert_equal(sorted(k_shell_subgraph.nodes()), [0]) + assert sorted(k_shell_subgraph.nodes()) == [0] def test_k_corona(self): # k=0 k_corona_subgraph = nx.k_corona(self.H, k=2) - assert_equal(sorted(k_corona_subgraph.nodes()), [2, 4, 5, 6]) + assert sorted(k_corona_subgraph.nodes()) == [2, 4, 5, 6] # k=1 k_corona_subgraph = nx.k_corona(self.H, k=1) - assert_equal(sorted(k_corona_subgraph.nodes()), [1]) + assert sorted(k_corona_subgraph.nodes()) == [1] # k=2 k_corona_subgraph = nx.k_corona(self.H, k=0) - assert_equal(sorted(k_corona_subgraph.nodes()), [0]) + assert sorted(k_corona_subgraph.nodes()) == [0] + + def test_k_truss(self): + # k=-1 + k_truss_subgraph = nx.k_truss(self.G, -1) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=0 + k_truss_subgraph = nx.k_truss(self.G, 0) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=1 + k_truss_subgraph = nx.k_truss(self.G, 1) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=2 + k_truss_subgraph = nx.k_truss(self.G, 2) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=3 + k_truss_subgraph = nx.k_truss(self.G, 3) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 13)) + + k_truss_subgraph = nx.k_truss(self.G, 4) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 9)) + + k_truss_subgraph = nx.k_truss(self.G, 5) + assert sorted(k_truss_subgraph.nodes()) == [] + + def test_onion_layers(self): + layers = nx.onion_layers(self.G) + nodes_by_layer = [ + sorted([n for n in layers if layers[n] == val]) for val in range(1, 7) + ] + assert_nodes_equal(nodes_by_layer[0], [21]) + assert_nodes_equal(nodes_by_layer[1], [17, 18, 19, 20]) + assert_nodes_equal(nodes_by_layer[2], [10, 12, 13, 14, 15, 16]) + assert_nodes_equal(nodes_by_layer[3], [9, 11]) + assert_nodes_equal(nodes_by_layer[4], [1, 2, 4, 5, 6, 8]) + assert_nodes_equal(nodes_by_layer[5], [3, 7]) diff --git a/networkx/algorithms/tests/test_covering.py b/networkx/algorithms/tests/test_covering.py index d13adbd..78487b7 100644 --- a/networkx/algorithms/tests/test_covering.py +++ b/networkx/algorithms/tests/test_covering.py @@ -1,10 +1,3 @@ -# Copyright 2016-2018 NetworkX developers. -# Copyright (C) 2016 by -# Nishant Nikhil -# All rights reserved. -# BSD license. 
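Context for the covering tests that follow (an illustrative sketch, public API only): an edge cover is a set of edges incident to every node of the graph, and min_edge_cover derives a minimum one from a maximum matching:

    import networkx as nx

    G = nx.path_graph(4)               # 0-1-2-3
    cover = nx.min_edge_cover(G)
    assert nx.is_edge_cover(G, cover)  # every node touches a cover edge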
- -from nose.tools import assert_equal, assert_true, assert_false import networkx as nx @@ -13,36 +6,35 @@ class TestMinEdgeCover: def test_empty_graph(self): G = nx.Graph() - assert_equal(nx.min_edge_cover(G), set()) + assert nx.min_edge_cover(G) == set() def test_graph_with_loop(self): G = nx.Graph() G.add_edge(0, 0) - assert_equal(nx.min_edge_cover(G), {(0, 0)}) + assert nx.min_edge_cover(G) == {(0, 0)} def test_graph_single_edge(self): G = nx.Graph() G.add_edge(0, 1) - assert_equal(nx.min_edge_cover(G), - {(0, 1)}) + assert nx.min_edge_cover(G) in ({(0, 1)}, {(1, 0)}) def test_bipartite_explicit(self): G = nx.Graph() G.add_nodes_from([1, 2, 3, 4], bipartite=0) - G.add_nodes_from(['a', 'b', 'c'], bipartite=1) - G.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 'a')]) - min_cover = nx.min_edge_cover(G, nx.algorithms.bipartite.matching. - eppstein_matching) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + min_cover = nx.min_edge_cover( + G, nx.algorithms.bipartite.matching.eppstein_matching + ) min_cover2 = nx.min_edge_cover(G) - assert_true(nx.is_edge_cover(G, min_cover)) - assert_equal(len(min_cover), 8) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 8 def test_complete_graph(self): G = nx.complete_graph(10) min_cover = nx.min_edge_cover(G) - assert_true(nx.is_edge_cover(G, min_cover)) - assert_equal(len(min_cover), 5) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 5 class TestIsEdgeCover: @@ -50,17 +42,17 @@ class TestIsEdgeCover: def test_empty_graph(self): G = nx.Graph() - assert_true(nx.is_edge_cover(G, set())) + assert nx.is_edge_cover(G, set()) def test_graph_with_loop(self): G = nx.Graph() G.add_edge(1, 1) - assert_true(nx.is_edge_cover(G, {(1, 1)})) + assert nx.is_edge_cover(G, {(1, 1)}) def test_graph_single_edge(self): G = nx.Graph() G.add_edge(0, 1) - assert_true(nx.is_edge_cover(G, {(0, 0), (1, 1)})) - assert_true(nx.is_edge_cover(G, {(0, 1), (1, 0)})) - assert_true(nx.is_edge_cover(G, {(0, 1)})) - assert_false(nx.is_edge_cover(G, {(0, 0)})) + assert nx.is_edge_cover(G, {(0, 0), (1, 1)}) + assert nx.is_edge_cover(G, {(0, 1), (1, 0)}) + assert nx.is_edge_cover(G, {(0, 1)}) + assert not nx.is_edge_cover(G, {(0, 0)}) diff --git a/networkx/algorithms/tests/test_cuts.py b/networkx/algorithms/tests/test_cuts.py index 7e0d29f..8eea293 100644 --- a/networkx/algorithms/tests/test_cuts.py +++ b/networkx/algorithms/tests/test_cuts.py @@ -1,20 +1,10 @@ -# test_cuts.py - unit tests for the cuts module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
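The quantities tested below are all ratios built from cut_size and volume; for example, conductance(S, T) = cut(S, T) / min(vol(S), vol(T)). A worked example on the barbell graph these tests favor (illustrative only):

    import networkx as nx

    G = nx.barbell_graph(5, 0)               # two K5s joined by one edge
    S, T = set(range(5)), set(range(5, 10))
    assert nx.cut_size(G, S, T) == 1         # only the joining edge crosses
    assert nx.volume(G, S) == 4 * 4 + 5      # degree sum over S is 21
    assert nx.conductance(G, S, T) == 1 / 21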
"""Unit tests for the :mod:`networkx.algorithms.cuts` module.""" -from __future__ import division -from nose.tools import assert_equal import networkx as nx -class TestCutSize(object): +class TestCutSize: """Unit tests for the :func:`~networkx.cut_size` function.""" def test_symmetric(self): @@ -22,62 +12,62 @@ def test_symmetric(self): G = nx.barbell_graph(3, 0) S = {0, 1, 4} T = {2, 3, 5} - assert_equal(nx.cut_size(G, S, T), 4) - assert_equal(nx.cut_size(G, T, S), 4) + assert nx.cut_size(G, S, T) == 4 + assert nx.cut_size(G, T, S) == 4 def test_single_edge(self): """Tests for a cut of a single edge.""" G = nx.barbell_graph(3, 0) S = {0, 1, 2} T = {3, 4, 5} - assert_equal(nx.cut_size(G, S, T), 1) - assert_equal(nx.cut_size(G, T, S), 1) + assert nx.cut_size(G, S, T) == 1 + assert nx.cut_size(G, T, S) == 1 def test_directed(self): """Tests that each directed edge is counted once in the cut.""" G = nx.barbell_graph(3, 0).to_directed() S = {0, 1, 2} T = {3, 4, 5} - assert_equal(nx.cut_size(G, S, T), 2) - assert_equal(nx.cut_size(G, T, S), 2) + assert nx.cut_size(G, S, T) == 2 + assert nx.cut_size(G, T, S) == 2 def test_directed_symmetric(self): """Tests that a cut in a directed graph is symmetric.""" G = nx.barbell_graph(3, 0).to_directed() S = {0, 1, 4} T = {2, 3, 5} - assert_equal(nx.cut_size(G, S, T), 8) - assert_equal(nx.cut_size(G, T, S), 8) + assert nx.cut_size(G, S, T) == 8 + assert nx.cut_size(G, T, S) == 8 def test_multigraph(self): """Tests that parallel edges are each counted for a cut.""" - G = nx.MultiGraph(['ab', 'ab']) - assert_equal(nx.cut_size(G, {'a'}, {'b'}), 2) + G = nx.MultiGraph(["ab", "ab"]) + assert nx.cut_size(G, {"a"}, {"b"}) == 2 -class TestVolume(object): +class TestVolume: """Unit tests for the :func:`~networkx.volume` function.""" def test_graph(self): G = nx.cycle_graph(4) - assert_equal(nx.volume(G, {0, 1}), 4) + assert nx.volume(G, {0, 1}) == 4 def test_digraph(self): G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0)]) - assert_equal(nx.volume(G, {0, 1}), 2) + assert nx.volume(G, {0, 1}) == 2 def test_multigraph(self): edges = list(nx.cycle_graph(4).edges()) G = nx.MultiGraph(edges * 2) - assert_equal(nx.volume(G, {0, 1}), 8) + assert nx.volume(G, {0, 1}) == 8 def test_multidigraph(self): edges = [(0, 1), (1, 2), (2, 3), (3, 0)] G = nx.MultiDiGraph(edges * 2) - assert_equal(nx.volume(G, {0, 1}), 4) + assert nx.volume(G, {0, 1}) == 4 -class TestNormalizedCutSize(object): +class TestNormalizedCutSize: """Unit tests for the :func:`~networkx.normalized_cut_size` function. 
@@ -90,7 +80,7 @@ def test_graph(self): size = nx.normalized_cut_size(G, S, T) # The cut looks like this: o-{-o--o-}-o expected = 2 * ((1 / 4) + (1 / 2)) - assert_equal(expected, size) + assert expected == size def test_directed(self): G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) @@ -99,10 +89,10 @@ def test_directed(self): size = nx.normalized_cut_size(G, S, T) # The cut looks like this: o-{->o-->o-}->o expected = 2 * ((1 / 2) + (1 / 1)) - assert_equal(expected, size) + assert expected == size -class TestConductance(object): +class TestConductance: """Unit tests for the :func:`~networkx.conductance` function.""" def test_graph(self): @@ -113,10 +103,10 @@ def test_graph(self): T = {5} conductance = nx.conductance(G, S, T) expected = 1 / 5 - assert_equal(expected, conductance) + assert expected == conductance -class TestEdgeExpansion(object): +class TestEdgeExpansion: """Unit tests for the :func:`~networkx.edge_expansion` function.""" def test_graph(self): @@ -125,10 +115,10 @@ def test_graph(self): T = set(G) - S expansion = nx.edge_expansion(G, S, T) expected = 1 / 5 - assert_equal(expected, expansion) + assert expected == expansion -class TestNodeExpansion(object): +class TestNodeExpansion: """Unit tests for the :func:`~networkx.node_expansion` function. """ @@ -140,10 +130,10 @@ def test_graph(self): # The neighborhood of S has cardinality five, and S has # cardinality three. expected = 5 / 3 - assert_equal(expected, expansion) + assert expected == expansion -class TestBoundaryExpansion(object): +class TestBoundaryExpansion: """Unit tests for the :func:`~networkx.boundary_expansion` function. """ @@ -155,10 +145,10 @@ def test_graph(self): # The node boundary of S has cardinality six, and S has # cardinality four. expected = 6 / 4 - assert_equal(expected, expansion) + assert expected == expansion -class TestMixingExpansion(object): +class TestMixingExpansion: """Unit tests for the :func:`~networkx.mixing_expansion` function. """ @@ -172,4 +162,4 @@ def test_graph(self): # graph is twice the total number of edges in a clique of size # five, plus one more for the bridge.
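# Spelled out (illustrative): each K5 has 5 * 4 / 2 = 10 edges, so the
# whole graph has 2 * 10 + 1 = 21 edges, and the mixing expansion is
# cut_size / (2 * num_edges) = 1 / 42, written below as 1 / (2 * (5 * 4 + 1)).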
expected = 1 / (2 * (5 * 4 + 1)) - assert_equal(expected, expansion) + assert expected == expansion diff --git a/networkx/algorithms/tests/test_cycles.py b/networkx/algorithms/tests/test_cycles.py index 1b71047..d509e41 100644 --- a/networkx/algorithms/tests/test_cycles.py +++ b/networkx/algorithms/tests/test_cycles.py @@ -1,5 +1,4 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx import networkx as nx @@ -11,90 +10,92 @@ class TestCycles: - def setUp(self): + @classmethod + def setup_class(cls): G = networkx.Graph() nx.add_cycle(G, [0, 1, 2, 3]) nx.add_cycle(G, [0, 3, 4, 5]) nx.add_cycle(G, [0, 1, 6, 7, 8]) G.add_edge(8, 9) - self.G = G + cls.G = G def is_cyclic_permutation(self, a, b): n = len(a) if len(b) != n: return False l = a + a - return any(l[i:i + n] == b for i in range(2 * n - n + 1)) + return any(l[i : i + n] == b for i in range(n)) def test_cycle_basis(self): G = self.G cy = networkx.cycle_basis(G, 0) sort_cy = sorted(sorted(c) for c in cy) - assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]]) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]] cy = networkx.cycle_basis(G, 1) sort_cy = sorted(sorted(c) for c in cy) - assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]]) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]] cy = networkx.cycle_basis(G, 9) sort_cy = sorted(sorted(c) for c in cy) - assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]]) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]] # test disconnected graphs nx.add_cycle(G, "ABC") cy = networkx.cycle_basis(G, 9) sort_cy = sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])] - assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5], - ['A', 'B', 'C']]) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5], ["A", "B", "C"]] - @raises(nx.NetworkXNotImplemented) - def test_cycle_basis(self): - G = nx.DiGraph() - cy = networkx.cycle_basis(G, 0) + def test_cycle_basis2(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + cy = networkx.cycle_basis(G, 0) - @raises(nx.NetworkXNotImplemented) - def test_cycle_basis(self): - G = nx.MultiGraph() - cy = networkx.cycle_basis(G, 0) + def test_cycle_basis3(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.MultiGraph() + cy = networkx.cycle_basis(G, 0) def test_simple_cycles(self): edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)] G = nx.DiGraph(edges) cc = sorted(nx.simple_cycles(G)) ca = [[0], [0, 1, 2], [0, 2], [1, 2], [2]] + assert len(cc) == len(ca) for c in cc: - assert_true(any(self.is_cyclic_permutation(c, rc) for rc in ca)) + assert any(self.is_cyclic_permutation(c, rc) for rc in ca) - @raises(nx.NetworkXNotImplemented) def test_simple_cycles_graph(self): - G = nx.Graph() - c = sorted(nx.simple_cycles(G)) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.Graph() + c = sorted(nx.simple_cycles(G)) def test_unsortable(self): # TODO What does this test do? 
das 6/2013 G = nx.DiGraph() - nx.add_cycle(G, ['a', 1]) + nx.add_cycle(G, ["a", 1]) c = list(nx.simple_cycles(G)) def test_simple_cycles_small(self): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3]) c = sorted(nx.simple_cycles(G)) - assert_equal(len(c), 1) - assert_true(self.is_cyclic_permutation(c[0], [1, 2, 3])) + assert len(c) == 1 + assert self.is_cyclic_permutation(c[0], [1, 2, 3]) nx.add_cycle(G, [10, 20, 30]) cc = sorted(nx.simple_cycles(G)) + assert len(cc) == 2 ca = [[1, 2, 3], [10, 20, 30]] for c in cc: - assert_true(any(self.is_cyclic_permutation(c, rc) for rc in ca)) + assert any(self.is_cyclic_permutation(c, rc) for rc in ca) def test_simple_cycles_empty(self): G = nx.DiGraph() - assert_equal(list(nx.simple_cycles(G)), []) + assert list(nx.simple_cycles(G)) == [] def test_complete_directed_graph(self): # see table 2 in Johnson's paper ncircuits = [1, 5, 20, 84, 409, 2365, 16064] for n, c in zip(range(2, 9), ncircuits): G = nx.DiGraph(nx.complete_graph(n)) - assert_equal(len(list(nx.simple_cycles(G))), c) + assert len(list(nx.simple_cycles(G))) == c def worst_case_graph(self, k): # see figure 1 in Johnson's paper @@ -119,89 +120,144 @@ def test_worst_case_graph(self): for k in range(3, 10): G = self.worst_case_graph(k) l = len(list(nx.simple_cycles(G))) - assert_equal(l, 3 * k) + assert l == 3 * k def test_recursive_simple_and_not(self): for k in range(2, 10): G = self.worst_case_graph(k) cc = sorted(nx.simple_cycles(G)) rcc = sorted(nx.recursive_simple_cycles(G)) - assert_equal(len(cc), len(rcc)) + assert len(cc) == len(rcc) for c in cc: - assert_true(any(self.is_cyclic_permutation(c, r) for r in rcc)) + assert any(self.is_cyclic_permutation(c, r) for r in rcc) for rc in rcc: - assert_true(any(self.is_cyclic_permutation(rc, c) for c in cc)) + assert any(self.is_cyclic_permutation(rc, c) for c in cc) def test_simple_graph_with_reported_bug(self): G = nx.DiGraph() - edges = [(0, 2), (0, 3), (1, 0), (1, 3), (2, 1), (2, 4), - (3, 2), (3, 4), (4, 0), (4, 1), (4, 5), (5, 0), - (5, 1), (5, 2), (5, 3)] + edges = [ + (0, 2), + (0, 3), + (1, 0), + (1, 3), + (2, 1), + (2, 4), + (3, 2), + (3, 4), + (4, 0), + (4, 1), + (4, 5), + (5, 0), + (5, 1), + (5, 2), + (5, 3), + ] G.add_edges_from(edges) cc = sorted(nx.simple_cycles(G)) - assert_equal(len(cc), 26) + assert len(cc) == 26 rcc = sorted(nx.recursive_simple_cycles(G)) - assert_equal(len(cc), len(rcc)) + assert len(cc) == len(rcc) for c in cc: - assert_true(any(self.is_cyclic_permutation(c, rc) for rc in rcc)) + assert any(self.is_cyclic_permutation(c, rc) for rc in rcc) for rc in rcc: - assert_true(any(self.is_cyclic_permutation(rc, c) for c in cc)) + assert any(self.is_cyclic_permutation(rc, c) for c in cc) + # These tests might fail with hash randomization since they depend on # edge_dfs. 
For more information, see the comments in: # networkx/algorithms/traversal/tests/test_edgedfs.py -class TestFindCycle(object): - def setUp(self): - self.nodes = [0, 1, 2, 3] - self.edges = [(-1, 0), (0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] +class TestFindCycle: + @classmethod + def setup_class(cls): + cls.nodes = [0, 1, 2, 3] + cls.edges = [(-1, 0), (0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] + + def test_graph_nocycle(self): + G = nx.Graph(self.edges) + pytest.raises(nx.exception.NetworkXNoCycle, find_cycle, G, self.nodes) + + def test_graph_cycle(self): + G = nx.Graph(self.edges) + G.add_edge(2, 0) + x = list(find_cycle(G, self.nodes)) + x_ = [(0, 1), (1, 2), (2, 0)] + assert x == x_ + + def test_graph_orientation_none(self): + G = nx.Graph(self.edges) + G.add_edge(2, 0) + x = list(find_cycle(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 2), (2, 0)] + assert x == x_ - def test_graph(self): + def test_graph_orientation_original(self): G = nx.Graph(self.edges) - assert_raises(nx.exception.NetworkXNoCycle, find_cycle, G, self.nodes) + G.add_edge(2, 0) + x = list(find_cycle(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 0, FORWARD)] + assert x == x_ def test_digraph(self): G = nx.DiGraph(self.edges) x = list(find_cycle(G, self.nodes)) x_ = [(0, 1), (1, 0)] - assert_equal(x, x_) + assert x == x_ + + def test_digraph_orientation_none(self): + G = nx.DiGraph(self.edges) + x = list(find_cycle(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 0)] + assert x == x_ + + def test_digraph_orientation_original(self): + G = nx.DiGraph(self.edges) + x = list(find_cycle(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD)] + assert x == x_ def test_multigraph(self): G = nx.MultiGraph(self.edges) x = list(find_cycle(G, self.nodes)) x_ = [(0, 1, 0), (1, 0, 1)] # or (1, 0, 2) # Hash randomization...could be any edge. - assert_equal(x[0], x_[0]) - assert_equal(x[1][:2], x_[1][:2]) + assert x[0] == x_[0] + assert x[1][:2] == x_[1][:2] def test_multidigraph(self): G = nx.MultiDiGraph(self.edges) x = list(find_cycle(G, self.nodes)) x_ = [(0, 1, 0), (1, 0, 0)] # (1, 0, 1) - assert_equal(x[0], x_[0]) - assert_equal(x[1][:2], x_[1][:2]) + assert x[0] == x_[0] + assert x[1][:2] == x_[1][:2] def test_digraph_ignore(self): G = nx.DiGraph(self.edges) - x = list(find_cycle(G, self.nodes, orientation='ignore')) + x = list(find_cycle(G, self.nodes, orientation="ignore")) x_ = [(0, 1, FORWARD), (1, 0, FORWARD)] - assert_equal(x, x_) + assert x == x_ + + def test_digraph_reverse(self): + G = nx.DiGraph(self.edges) + x = list(find_cycle(G, self.nodes, orientation="reverse")) + x_ = [(1, 0, REVERSE), (0, 1, REVERSE)] + assert x == x_ def test_multidigraph_ignore(self): G = nx.MultiDiGraph(self.edges) - x = list(find_cycle(G, self.nodes, orientation='ignore')) + x = list(find_cycle(G, self.nodes, orientation="ignore")) x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD)] # or (1, 0, 1, 1) - assert_equal(x[0], x_[0]) - assert_equal(x[1][:2], x_[1][:2]) - assert_equal(x[1][3], x_[1][3]) + assert x[0] == x_[0] + assert x[1][:2] == x_[1][:2] + assert x[1][3] == x_[1][3] def test_multidigraph_ignore2(self): # Loop traversed an edge while ignoring its orientation. 
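# With orientation="ignore" the traversal may use an edge in either
# direction, and each reported edge carries a FORWARD or REVERSE flag
# recording how it was actually traversed; here the two parallel edges
# between 1 and 2 close the cycle in opposite directions.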
G = nx.MultiDiGraph([(0, 1), (1, 2), (1, 2)]) - x = list(find_cycle(G, [0, 1, 2], orientation='ignore')) + x = list(find_cycle(G, [0, 1, 2], orientation="ignore")) x_ = [(1, 2, 0, FORWARD), (1, 2, 1, REVERSE)] - assert_equal(x, x_) + assert x == x_ def test_multidigraph_original(self): # Node 2 doesn't need to be searched again from visited from 4. @@ -209,61 +265,68 @@ def test_multidigraph_original(self): # when 4 is visited from the first time (so we must make sure that 4 # is not visited from 2, and hence, we respect the edge orientation). G = nx.MultiDiGraph([(0, 1), (1, 2), (2, 3), (4, 2)]) - assert_raises(nx.exception.NetworkXNoCycle, - find_cycle, G, [0, 1, 2, 3, 4], orientation='original') + pytest.raises( + nx.exception.NetworkXNoCycle, + find_cycle, + G, + [0, 1, 2, 3, 4], + orientation="original", + ) def test_dag(self): G = nx.DiGraph([(0, 1), (0, 2), (1, 2)]) - assert_raises(nx.exception.NetworkXNoCycle, - find_cycle, G, orientation='original') - x = list(find_cycle(G, orientation='ignore')) - assert_equal(x, [(0, 1, FORWARD), (1, 2, FORWARD), (0, 2, REVERSE)]) + pytest.raises( + nx.exception.NetworkXNoCycle, find_cycle, G, orientation="original" + ) + x = list(find_cycle(G, orientation="ignore")) + assert x == [(0, 1, FORWARD), (1, 2, FORWARD), (0, 2, REVERSE)] def test_prev_explored(self): # https://github.com/networkx/networkx/issues/2323 G = nx.DiGraph() G.add_edges_from([(1, 0), (2, 0), (1, 2), (2, 1)]) - assert_raises(nx.NetworkXNoCycle, find_cycle, G, source=0) + pytest.raises(nx.NetworkXNoCycle, find_cycle, G, source=0) x = list(nx.find_cycle(G, 1)) x_ = [(1, 2), (2, 1)] - assert_equal(x, x_) + assert x == x_ x = list(nx.find_cycle(G, 2)) x_ = [(2, 1), (1, 2)] - assert_equal(x, x_) + assert x == x_ x = list(nx.find_cycle(G)) x_ = [(1, 2), (2, 1)] - assert_equal(x, x_) + assert x == x_ def test_no_cycle(self): # https://github.com/networkx/networkx/issues/2439 G = nx.DiGraph() G.add_edges_from([(1, 2), (2, 0), (3, 1), (3, 2)]) - assert_raises(nx.NetworkXNoCycle, find_cycle, G, source=0) - assert_raises(nx.NetworkXNoCycle, find_cycle, G) + pytest.raises(nx.NetworkXNoCycle, find_cycle, G, source=0) + pytest.raises(nx.NetworkXNoCycle, find_cycle, G) def assert_basis_equal(a, b): - assert_list_equal(sorted(a), sorted(b)) + assert sorted(a) == sorted(b) -class TestMinimumCycles(object): - def setUp(self): +class TestMinimumCycles: + @classmethod + def setup_class(cls): T = nx.Graph() - T.add_cycle([1, 2, 3, 4], weight=1) + nx.add_cycle(T, [1, 2, 3, 4], weight=1) T.add_edge(2, 4, weight=5) - self.diamond_graph = T + cls.diamond_graph = T def test_unweighted_diamond(self): mcb = minimum_cycle_basis(self.diamond_graph) - assert_basis_equal(mcb, [[1, 2, 4], [2, 3, 4]]) + assert_basis_equal([sorted(c) for c in mcb], [[1, 2, 4], [2, 3, 4]]) def test_weighted_diamond(self): - mcb = minimum_cycle_basis(self.diamond_graph, weight='weight') - assert_basis_equal(mcb, [[1, 2, 4], [1, 2, 3, 4]]) + mcb = minimum_cycle_basis(self.diamond_graph, weight="weight") + assert_basis_equal([sorted(c) for c in mcb], [[1, 2, 4], [1, 2, 3, 4]]) def test_dimensionality(self): # checks |MCB|=|E|-|V|+|NC| @@ -275,13 +338,13 @@ def test_dimensionality(self): ncomp = nx.number_connected_components(rg) dim_mcb = len(minimum_cycle_basis(rg)) - assert_equal(dim_mcb, nedges - nnodes + ncomp) + assert dim_mcb == nedges - nnodes + ncomp def test_complete_graph(self): cg = nx.complete_graph(5) mcb = minimum_cycle_basis(cg) - assert_true(all([len(cycle) == 3 for cycle in mcb])) + assert all([len(cycle) == 3 for 
cycle in mcb]) def test_tree_graph(self): tg = nx.balanced_tree(3, 3) - assert_false(minimum_cycle_basis(tg)) + assert not minimum_cycle_basis(tg) diff --git a/networkx/algorithms/tests/test_d_separation.py b/networkx/algorithms/tests/test_d_separation.py new file mode 100644 index 0000000..a522962 --- /dev/null +++ b/networkx/algorithms/tests/test_d_separation.py @@ -0,0 +1,156 @@ +from itertools import combinations +import pytest +import networkx as nx + + +def path_graph(): + """Return a path graph of length three.""" + G = nx.path_graph(3, create_using=nx.DiGraph) + G.graph["name"] = "path" + nx.freeze(G) + return G + + +def fork_graph(): + """Return a three node fork graph.""" + G = nx.DiGraph(name="fork") + G.add_edges_from([(0, 1), (0, 2)]) + nx.freeze(G) + return G + + +def collider_graph(): + """Return a collider/v-structure graph with three nodes.""" + G = nx.DiGraph(name="collider") + G.add_edges_from([(0, 2), (1, 2)]) + nx.freeze(G) + return G + + +def naive_bayes_graph(): + """Return a simply Naive Bayes PGM graph.""" + G = nx.DiGraph(name="naive_bayes") + G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4)]) + nx.freeze(G) + return G + + +def asia_graph(): + """Return the 'Asia' PGM graph.""" + G = nx.DiGraph(name="asia") + G.add_edges_from( + [ + ("asia", "tuberculosis"), + ("smoking", "cancer"), + ("smoking", "bronchitis"), + ("tuberculosis", "either"), + ("cancer", "either"), + ("either", "xray"), + ("either", "dyspnea"), + ("bronchitis", "dyspnea"), + ] + ) + nx.freeze(G) + return G + + +@pytest.fixture(name="path_graph") +def path_graph_fixture(): + return path_graph() + + +@pytest.fixture(name="fork_graph") +def fork_graph_fixture(): + return fork_graph() + + +@pytest.fixture(name="collider_graph") +def collider_graph_fixture(): + return collider_graph() + + +@pytest.fixture(name="naive_bayes_graph") +def naive_bayes_graph_fixture(): + return naive_bayes_graph() + + +@pytest.fixture(name="asia_graph") +def asia_graph_fixture(): + return asia_graph() + + +@pytest.mark.parametrize( + "graph", + [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph()], +) +def test_markov_condition(graph): + """Test that the Markov condition holds for each PGM graph.""" + for node in graph.nodes: + parents = set(graph.predecessors(node)) + non_descendants = graph.nodes - nx.descendants(graph, node) - {node} - parents + assert nx.d_separated(graph, {node}, non_descendants, parents) + + +def test_path_graph_dsep(path_graph): + """Example-based test of d-separation for path_graph.""" + assert nx.d_separated(path_graph, {0}, {2}, {1}) + assert not nx.d_separated(path_graph, {0}, {2}, {}) + + +def test_fork_graph_dsep(fork_graph): + """Example-based test of d-separation for fork_graph.""" + assert nx.d_separated(fork_graph, {1}, {2}, {0}) + assert not nx.d_separated(fork_graph, {1}, {2}, {}) + + +def test_collider_graph_dsep(collider_graph): + """Example-based test of d-separation for collider_graph.""" + assert nx.d_separated(collider_graph, {0}, {1}, {}) + assert not nx.d_separated(collider_graph, {0}, {1}, {2}) + + +def test_naive_bayes_dsep(naive_bayes_graph): + """Example-based test of d-separation for naive_bayes_graph.""" + for u, v in combinations(range(1, 5), 2): + assert nx.d_separated(naive_bayes_graph, {u}, {v}, {0}) + assert not nx.d_separated(naive_bayes_graph, {u}, {v}, {}) + + +def test_asia_graph_dsep(asia_graph): + """Example-based test of d-separation for asia_graph.""" + assert nx.d_separated( + asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, 
{"bronchitis", "either"} + ) + assert nx.d_separated( + asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"} + ) + + +def test_undirected_graphs_are_not_supported(): + """ + Test that undirected graphs are not supported. + + d-separation does not apply in the case of undirected graphs. + """ + with pytest.raises(nx.NetworkXNotImplemented): + g = nx.path_graph(3, nx.Graph) + nx.d_separated(g, {0}, {1}, {2}) + + +def test_cyclic_graphs_raise_error(): + """ + Test that cycle graphs should cause erroring. + + This is because PGMs assume a directed acyclic graph. + """ + with pytest.raises(nx.NetworkXError): + g = nx.cycle_graph(3, nx.DiGraph) + nx.d_separated(g, {0}, {1}, {2}) + + +def test_invalid_nodes_raise_error(asia_graph): + """ + Test that graphs that have invalid nodes passed in raise errors. + """ + with pytest.raises(nx.NodeNotFound): + nx.d_separated(asia_graph, {0}, {1}, {2}) diff --git a/networkx/algorithms/tests/test_dag.py b/networkx/algorithms/tests/test_dag.py index d184d75..ee89f19 100644 --- a/networkx/algorithms/tests/test_dag.py +++ b/networkx/algorithms/tests/test_dag.py @@ -1,47 +1,39 @@ -from itertools import combinations +from itertools import combinations, permutations -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_in -from nose.tools import assert_raises -from nose.tools import assert_true -from nose.tools import raises -from nose.tools import ok_ +import pytest import networkx as nx from networkx.testing.utils import assert_edges_equal -from networkx.utils import arbitrary_element from networkx.utils import consume from networkx.utils import pairwise -class TestDagLongestPath(object): +class TestDagLongestPath: """Unit tests computing the longest path in a directed acyclic graph.""" def test_empty(self): G = nx.DiGraph() - assert_equal(nx.dag_longest_path(G), []) + assert nx.dag_longest_path(G) == [] def test_unweighted1(self): edges = [(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (3, 7)] G = nx.DiGraph(edges) - assert_equal(nx.dag_longest_path(G), [1, 2, 3, 5, 6]) + assert nx.dag_longest_path(G) == [1, 2, 3, 5, 6] def test_unweighted2(self): edges = [(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)] G = nx.DiGraph(edges) - assert_equal(nx.dag_longest_path(G), [1, 2, 3, 4, 5]) + assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5] def test_weighted(self): G = nx.DiGraph() - edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), - (1, 6, 2)] + edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)] G.add_weighted_edges_from(edges) - assert_equal(nx.dag_longest_path(G), [2, 3, 5]) + assert nx.dag_longest_path(G) == [2, 3, 5] def test_undirected_not_implemented(self): G = nx.Graph() - assert_raises(nx.NetworkXNotImplemented, nx.dag_longest_path, G) + pytest.raises(nx.NetworkXNotImplemented, nx.dag_longest_path, G) def test_unorderable_nodes(self): """Tests that computing the longest path does not depend on @@ -50,21 +42,9 @@ def test_unorderable_nodes(self): For more information, see issue #1989. """ - # TODO In Python 3, instances of the `object` class are - # unorderable by default, so we wouldn't need to define our own - # class here, we could just instantiate an instance of the - # `object` class. However, we still support Python 2; when - # support for Python 2 is dropped, this test can be simplified - # by replacing `Unorderable()` by `object()`. 
- class Unorderable(object): - def __lt__(self, other): - error_msg = "< not supported between instances of " \ - "{} and {}".format(type(self).__name__, type(other).__name__) - raise TypeError(error_msg) - # Create the directed path graph on four nodes in a diamond shape, # with nodes represented as (unorderable) Python objects. - nodes = [Unorderable() for n in range(4)] + nodes = [object() for n in range(4)] G = nx.DiGraph() G.add_edge(nodes[0], nodes[1]) G.add_edge(nodes[0], nodes[2]) @@ -75,7 +55,7 @@ def __lt__(self, other): nx.dag_longest_path(G) -class TestDagLongestPathLength(object): +class TestDagLongestPathLength: """Unit tests for computing the length of a longest path in a directed acyclic graph. @@ -84,76 +64,82 @@ class TestDagLongestPathLength(object): def test_unweighted(self): edges = [(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)] G = nx.DiGraph(edges) - assert_equal(nx.dag_longest_path_length(G), 4) + assert nx.dag_longest_path_length(G) == 4 edges = [(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)] G = nx.DiGraph(edges) - assert_equal(nx.dag_longest_path_length(G), 4) + assert nx.dag_longest_path_length(G) == 4 # test degenerate graphs G = nx.DiGraph() G.add_node(1) - assert_equal(nx.dag_longest_path_length(G), 0) + assert nx.dag_longest_path_length(G) == 0 def test_undirected_not_implemented(self): G = nx.Graph() - assert_raises(nx.NetworkXNotImplemented, nx.dag_longest_path_length, G) + pytest.raises(nx.NetworkXNotImplemented, nx.dag_longest_path_length, G) def test_weighted(self): - edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), - (1, 6, 2)] + edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)] G = nx.DiGraph() G.add_weighted_edges_from(edges) - assert_equal(nx.dag_longest_path_length(G), 5) + assert nx.dag_longest_path_length(G) == 5 class TestDAG: - - def setUp(self): + @classmethod + def setup_class(cls): pass def test_topological_sort1(self): DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)]) - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: - assert_equal(tuple(algorithm(DG)), (1, 2, 3)) + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + assert tuple(algorithm(DG)) == (1, 2, 3) DG.add_edge(3, 2) - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: - assert_raises(nx.NetworkXUnfeasible, consume, algorithm(DG)) + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + pytest.raises(nx.NetworkXUnfeasible, consume, algorithm(DG)) DG.remove_edge(2, 3) - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: - assert_equal(tuple(algorithm(DG)), (1, 3, 2)) + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + assert tuple(algorithm(DG)) == (1, 3, 2) DG.remove_edge(3, 2) - assert_in(tuple(nx.topological_sort(DG)), {(1, 2, 3), (1, 3, 2)}) - assert_equal(tuple(nx.lexicographical_topological_sort(DG)), (1, 2, 3)) + assert tuple(nx.topological_sort(DG)) in {(1, 2, 3), (1, 3, 2)} + assert tuple(nx.lexicographical_topological_sort(DG)) == (1, 2, 3) def test_is_directed_acyclic_graph(self): G = nx.generators.complete_graph(2) - assert_false(nx.is_directed_acyclic_graph(G)) - assert_false(nx.is_directed_acyclic_graph(G.to_directed())) - assert_false(nx.is_directed_acyclic_graph(nx.Graph([(3, 4), (4, 5)]))) - assert_true(nx.is_directed_acyclic_graph(nx.DiGraph([(3, 4), (4, 5)]))) + assert not nx.is_directed_acyclic_graph(G) + assert not 
nx.is_directed_acyclic_graph(G.to_directed()) + assert not nx.is_directed_acyclic_graph(nx.Graph([(3, 4), (4, 5)])) + assert nx.is_directed_acyclic_graph(nx.DiGraph([(3, 4), (4, 5)])) def test_topological_sort2(self): - DG = nx.DiGraph({1: [2], 2: [3], 3: [4], - 4: [5], 5: [1], 11: [12], - 12: [13], 13: [14], 14: [15]}) - assert_raises(nx.NetworkXUnfeasible, consume, nx.topological_sort(DG)) - - assert_false(nx.is_directed_acyclic_graph(DG)) + DG = nx.DiGraph( + { + 1: [2], + 2: [3], + 3: [4], + 4: [5], + 5: [1], + 11: [12], + 12: [13], + 13: [14], + 14: [15], + } + ) + pytest.raises(nx.NetworkXUnfeasible, consume, nx.topological_sort(DG)) + + assert not nx.is_directed_acyclic_graph(DG) DG.remove_edge(1, 2) consume(nx.topological_sort(DG)) - assert_true(nx.is_directed_acyclic_graph(DG)) + assert nx.is_directed_acyclic_graph(DG) def test_topological_sort3(self): DG = nx.DiGraph() @@ -163,29 +149,30 @@ def test_topological_sort3(self): DG.add_edges_from([(4, i) for i in range(12, 15)]) def validate(order): - ok_(isinstance(order, list)) - assert_equal(set(order), set(DG)) + assert isinstance(order, list) + assert set(order) == set(DG) for u, v in combinations(order, 2): - assert_false(nx.has_path(DG, v, u)) + assert not nx.has_path(DG, v, u) + validate(list(nx.topological_sort(DG))) DG.add_edge(14, 1) - assert_raises(nx.NetworkXUnfeasible, consume, nx.topological_sort(DG)) + pytest.raises(nx.NetworkXUnfeasible, consume, nx.topological_sort(DG)) def test_topological_sort4(self): G = nx.Graph() G.add_edge(1, 2) # Only directed graphs can be topologically sorted. - assert_raises(nx.NetworkXError, consume, nx.topological_sort(G)) + pytest.raises(nx.NetworkXError, consume, nx.topological_sort(G)) def test_topological_sort5(self): G = nx.DiGraph() G.add_edge(0, 1) - assert_equal(list(nx.topological_sort(G)), [0, 1]) + assert list(nx.topological_sort(G)) == [0, 1] def test_topological_sort6(self): - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + def runtime_error(): DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) first = True @@ -210,52 +197,156 @@ def runtime_error2(): first = False DG.remove_node(2) - assert_raises(RuntimeError, runtime_error) - assert_raises(RuntimeError, runtime_error2) - assert_raises(nx.NetworkXUnfeasible, unfeasible_error) + pytest.raises(RuntimeError, runtime_error) + pytest.raises(RuntimeError, runtime_error2) + pytest.raises(nx.NetworkXUnfeasible, unfeasible_error) + + def test_all_topological_sorts_1(self): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5)]) + assert list(nx.all_topological_sorts(DG)) == [[1, 2, 3, 4, 5]] + + def test_all_topological_sorts_2(self): + DG = nx.DiGraph([(1, 3), (2, 1), (2, 4), (4, 3), (4, 5)]) + assert sorted(nx.all_topological_sorts(DG)) == [ + [2, 1, 4, 3, 5], + [2, 1, 4, 5, 3], + [2, 4, 1, 3, 5], + [2, 4, 1, 5, 3], + [2, 4, 5, 1, 3], + ] + + def test_all_topological_sorts_3(self): + def unfeasible(): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 2), (4, 5)]) + # convert to list to execute generator + list(nx.all_topological_sorts(DG)) + + def not_implemented(): + G = nx.Graph([(1, 2), (2, 3)]) + # convert to list to execute generator + list(nx.all_topological_sorts(G)) + + def not_implemted_2(): + G = nx.MultiGraph([(1, 2), (1, 2), (2, 3)]) + list(nx.all_topological_sorts(G)) + + pytest.raises(nx.NetworkXUnfeasible, unfeasible) + pytest.raises(nx.NetworkXNotImplemented, not_implemented) + pytest.raises(nx.NetworkXNotImplemented, 
not_implemted_2) + + def test_all_topological_sorts_4(self): + DG = nx.DiGraph() + for i in range(7): + DG.add_node(i) + assert sorted(map(list, permutations(DG.nodes))) == sorted( + nx.all_topological_sorts(DG) + ) + + def test_all_topological_sorts_multigraph_1(self): + DG = nx.MultiDiGraph([(1, 2), (1, 2), (2, 3), (3, 4), (3, 5), (3, 5), (3, 5)]) + assert sorted(nx.all_topological_sorts(DG)) == sorted( + [[1, 2, 3, 4, 5], [1, 2, 3, 5, 4]] + ) + + def test_all_topological_sorts_multigraph_2(self): + N = 9 + edges = [] + for i in range(1, N): + edges.extend([(i, i + 1)] * i) + DG = nx.MultiDiGraph(edges) + assert list(nx.all_topological_sorts(DG)) == [list(range(1, N + 1))] def test_ancestors(self): G = nx.DiGraph() ancestors = nx.algorithms.dag.ancestors - G.add_edges_from([ - (1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) - assert_equal(ancestors(G, 6), set([1, 2, 4, 5])) - assert_equal(ancestors(G, 3), set([1, 4])) - assert_equal(ancestors(G, 1), set()) - assert_raises(nx.NetworkXError, ancestors, G, 8) + G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) + assert ancestors(G, 6) == {1, 2, 4, 5} + assert ancestors(G, 3) == {1, 4} + assert ancestors(G, 1) == set() + pytest.raises(nx.NetworkXError, ancestors, G, 8) def test_descendants(self): G = nx.DiGraph() descendants = nx.algorithms.dag.descendants - G.add_edges_from([ - (1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) - assert_equal(descendants(G, 1), set([2, 3, 6])) - assert_equal(descendants(G, 4), set([2, 3, 5, 6])) - assert_equal(descendants(G, 3), set()) - assert_raises(nx.NetworkXError, descendants, G, 8) + G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) + assert descendants(G, 1) == {2, 3, 6} + assert descendants(G, 4) == {2, 3, 5, 6} + assert descendants(G, 3) == set() + pytest.raises(nx.NetworkXError, descendants, G, 8) def test_transitive_closure(self): G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) - transitive_closure = nx.algorithms.dag.transitive_closure + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + assert_edges_equal(nx.transitive_closure(G).edges(), solution) + G = nx.DiGraph([(1, 2), (2, 3), (2, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)] + assert_edges_equal(nx.transitive_closure(G).edges(), solution) + G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + solution = [(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)] + soln = sorted(solution + [(n, n) for n in G]) + assert_edges_equal(sorted(nx.transitive_closure(G).edges()), soln) + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + pytest.raises(nx.NetworkXNotImplemented, nx.transitive_closure, G) + + # test if edge data is copied + G = nx.DiGraph([(1, 2, {"a": 3}), (2, 3, {"b": 0}), (3, 4)]) + H = nx.transitive_closure(G) + for u, v in G.edges(): + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) + + k = 10 + G = nx.DiGraph((i, i + 1, {"f": "b", "weight": i}) for i in range(k)) + H = nx.transitive_closure(G) + for u, v in G.edges(): + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) + + def test_reflexive_transitive_closure(self): + G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert_edges_equal(nx.transitive_closure(G).edges(), solution) + assert_edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert_edges_equal(nx.transitive_closure(G, True).edges(), soln) + assert_edges_equal(nx.transitive_closure(G, None).edges(), solution) + + G = nx.DiGraph([(1, 2), 
(2, 3), (2, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert_edges_equal(nx.transitive_closure(G).edges(), solution) + assert_edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert_edges_equal(nx.transitive_closure(G, True).edges(), soln) + assert_edges_equal(nx.transitive_closure(G, None).edges(), solution) + + G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + solution = sorted([(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)]) + soln = sorted(solution + [(n, n) for n in G]) + assert_edges_equal(sorted(nx.transitive_closure(G).edges()), soln) + assert_edges_equal(sorted(nx.transitive_closure(G, False).edges()), soln) + assert_edges_equal(sorted(nx.transitive_closure(G, None).edges()), solution) + assert_edges_equal(sorted(nx.transitive_closure(G, True).edges()), soln) + + def test_transitive_closure_dag(self): + G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + transitive_closure = nx.algorithms.dag.transitive_closure_dag solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] assert_edges_equal(transitive_closure(G).edges(), solution) G = nx.DiGraph([(1, 2), (2, 3), (2, 4)]) solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)] assert_edges_equal(transitive_closure(G).edges(), solution) G = nx.Graph([(1, 2), (2, 3), (3, 4)]) - assert_raises(nx.NetworkXNotImplemented, transitive_closure, G) + pytest.raises(nx.NetworkXNotImplemented, transitive_closure, G) # test if edge data is copied G = nx.DiGraph([(1, 2, {"a": 3}), (2, 3, {"b": 0}), (3, 4)]) H = transitive_closure(G) for u, v in G.edges(): - assert_equal(G.get_edge_data(u, v), H.get_edge_data(u, v)) + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) k = 10 G = nx.DiGraph((i, i + 1, {"foo": "bar", "weight": i}) for i in range(k)) H = transitive_closure(G) for u, v in G.edges(): - assert_equal(G.get_edge_data(u, v), H.get_edge_data(u, v)) + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) def test_transitive_reduction(self): G = nx.DiGraph([(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]) @@ -267,12 +358,12 @@ def test_transitive_reduction(self): solution = [(1, 2), (2, 3), (2, 4)] assert_edges_equal(transitive_reduction(G).edges(), solution) G = nx.Graph([(1, 2), (2, 3), (3, 4)]) - assert_raises(nx.NetworkXNotImplemented, transitive_reduction, G) + pytest.raises(nx.NetworkXNotImplemented, transitive_reduction, G) def _check_antichains(self, solution, result): sol = [frozenset(a) for a in solution] res = [frozenset(a) for a in result] - assert_true(set(sol) == set(res)) + assert set(sol) == set(res) def test_antichains(self): antichains = nx.algorithms.dag.antichains @@ -280,12 +371,41 @@ def test_antichains(self): solution = [[], [4], [3], [2], [1]] self._check_antichains(list(antichains(G)), solution) G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)]) - solution = [[], [4], [7], [7, 4], [6], [6, 4], [6, 7], [6, 7, 4], - [5], [5, 4], [3], [3, 4], [2], [1]] + solution = [ + [], + [4], + [7], + [7, 4], + [6], + [6, 4], + [6, 7], + [6, 7, 4], + [5], + [5, 4], + [3], + [3, 4], + [2], + [1], + ] self._check_antichains(list(antichains(G)), solution) G = nx.DiGraph([(1, 2), (1, 3), (3, 4), (3, 5), (5, 6)]) - solution = [[], [6], [5], [4], [4, 6], [4, 5], [3], [2], [2, 6], - [2, 5], [2, 4], [2, 4, 6], [2, 4, 5], [2, 3], [1]] + solution = [ + [], + [6], + [5], + [4], + [4, 6], + [4, 5], + [3], + [2], + [2, 6], + [2, 5], + [2, 4], + [2, 4, 6], + [2, 4, 5], + [2, 3], + [1], + ] self._check_antichains(list(antichains(G)), solution) G = nx.DiGraph({0: [1, 
2], 1: [4], 2: [3], 3: [4]}) solution = [[], [4], [3], [2], [1], [1, 3], [1, 2], [0]] @@ -297,74 +417,109 @@ def test_antichains(self): solution = [[], [0], [1], [1, 0], [2], [2, 0], [2, 1], [2, 1, 0]] self._check_antichains(list(antichains(G)), solution) - def f(x): return list(antichains(x)) + def f(x): + return list(antichains(x)) + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) - assert_raises(nx.NetworkXNotImplemented, f, G) + pytest.raises(nx.NetworkXNotImplemented, f, G) G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) - assert_raises(nx.NetworkXUnfeasible, f, G) + pytest.raises(nx.NetworkXUnfeasible, f, G) def test_lexicographical_topological_sort(self): G = nx.DiGraph([(1, 2), (2, 3), (1, 4), (1, 5), (2, 6)]) - assert_equal(list(nx.lexicographical_topological_sort(G)), - [1, 2, 3, 4, 5, 6]) - assert_equal(list(nx.lexicographical_topological_sort( - G, key=lambda x: x)), - [1, 2, 3, 4, 5, 6]) - assert_equal(list(nx.lexicographical_topological_sort( - G, key=lambda x: -x)), - [1, 5, 4, 2, 6, 3]) + assert list(nx.lexicographical_topological_sort(G)) == [1, 2, 3, 4, 5, 6] + assert list(nx.lexicographical_topological_sort(G, key=lambda x: x)) == [ + 1, + 2, + 3, + 4, + 5, + 6, + ] + assert list(nx.lexicographical_topological_sort(G, key=lambda x: -x)) == [ + 1, + 5, + 4, + 2, + 6, + 3, + ] + + def test_lexicographical_topological_sort2(self): + """ + Check the case of two or more nodes with same key value. + Want to avoid exception raised due to comparing nodes directly. + See Issue #3493 + """ + + class Test_Node: + def __init__(self, n): + self.label = n + self.priority = 1 + + def __repr__(self): + return f"Node({self.label})" + + def sorting_key(node): + return node.priority + + test_nodes = [Test_Node(n) for n in range(4)] + G = nx.DiGraph() + edges = [(0, 1), (0, 2), (0, 3), (2, 3)] + G.add_edges_from((test_nodes[a], test_nodes[b]) for a, b in edges) + + sorting = list(nx.lexicographical_topological_sort(G, key=sorting_key)) + assert sorting == test_nodes def test_is_aperiodic_cycle(): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3, 4]) - assert_false(nx.is_aperiodic(G)) + assert not nx.is_aperiodic(G) def test_is_aperiodic_cycle2(): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3, 4]) nx.add_cycle(G, [3, 4, 5, 6, 7]) - assert_true(nx.is_aperiodic(G)) + assert nx.is_aperiodic(G) def test_is_aperiodic_cycle3(): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3, 4]) nx.add_cycle(G, [3, 4, 5, 6]) - assert_false(nx.is_aperiodic(G)) + assert not nx.is_aperiodic(G) def test_is_aperiodic_cycle4(): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3, 4]) G.add_edge(1, 3) - assert_true(nx.is_aperiodic(G)) + assert nx.is_aperiodic(G) def test_is_aperiodic_selfloop(): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3, 4]) G.add_edge(1, 1) - assert_true(nx.is_aperiodic(G)) + assert nx.is_aperiodic(G) def test_is_aperiodic_raise(): G = nx.Graph() - assert_raises(nx.NetworkXError, - nx.is_aperiodic, - G) + pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) def test_is_aperiodic_bipartite(): # Bipartite graph G = nx.DiGraph(nx.davis_southern_women_graph()) - assert_false(nx.is_aperiodic(G)) + assert not nx.is_aperiodic(G) def test_is_aperiodic_rary_tree(): G = nx.full_rary_tree(3, 27, create_using=nx.DiGraph()) - assert_false(nx.is_aperiodic(G)) + assert not nx.is_aperiodic(G) def test_is_aperiodic_disconnected(): @@ -372,20 +527,20 @@ def test_is_aperiodic_disconnected(): G = nx.DiGraph() nx.add_cycle(G, [1, 2, 3, 4]) nx.add_cycle(G, [5, 6, 7, 8]) - assert_false(nx.is_aperiodic(G)) + assert not nx.is_aperiodic(G) G.add_edge(1, 3) G.add_edge(5, 
7) - assert_true(nx.is_aperiodic(G)) + assert nx.is_aperiodic(G) def test_is_aperiodic_disconnected2(): G = nx.DiGraph() nx.add_cycle(G, [0, 1, 2]) G.add_edge(3, 3) - assert_false(nx.is_aperiodic(G)) + assert not nx.is_aperiodic(G) -class TestDagToBranching(object): +class TestDagToBranching: """Unit tests for the :func:`networkx.dag_to_branching` function.""" def test_single_root(self): @@ -396,8 +551,8 @@ def test_single_root(self): G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)]) B = nx.dag_to_branching(G) expected = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 4)]) - assert_true(nx.is_arborescence(B)) - assert_true(nx.is_isomorphic(B, expected)) + assert nx.is_arborescence(B) + assert nx.is_isomorphic(B, expected) def test_multiple_roots(self): """Tests that a directed acyclic graph with multiple degree zero @@ -408,9 +563,9 @@ def test_multiple_roots(self): G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3), (5, 2)]) B = nx.dag_to_branching(G) expected = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 4), (5, 6), (6, 7)]) - assert_true(nx.is_branching(B)) - assert_false(nx.is_arborescence(B)) - assert_true(nx.is_isomorphic(B, expected)) + assert nx.is_branching(B) + assert not nx.is_arborescence(B) + assert nx.is_isomorphic(B, expected) # # Attributes are not copied by this function. If they were, this would # # be a good test to uncomment. @@ -446,7 +601,7 @@ def test_already_arborescence(self): """ A = nx.balanced_tree(2, 2, create_using=nx.DiGraph()) B = nx.dag_to_branching(A) - assert_true(nx.is_isomorphic(A, B)) + assert nx.is_isomorphic(A, B) def test_already_branching(self): """Tests that a directed acyclic graph that is already a @@ -457,22 +612,22 @@ def test_already_branching(self): T2 = nx.balanced_tree(2, 2, create_using=nx.DiGraph()) G = nx.disjoint_union(T1, T2) B = nx.dag_to_branching(G) - assert_true(nx.is_isomorphic(G, B)) + assert nx.is_isomorphic(G, B) - @raises(nx.HasACycle) def test_not_acyclic(self): """Tests that a non-acyclic graph causes an exception.""" - G = nx.DiGraph(pairwise('abc', cyclic=True)) - nx.dag_to_branching(G) + with pytest.raises(nx.HasACycle): + G = nx.DiGraph(pairwise("abc", cyclic=True)) + nx.dag_to_branching(G) - @raises(nx.NetworkXNotImplemented) def test_undirected(self): - nx.dag_to_branching(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + nx.dag_to_branching(nx.Graph()) - @raises(nx.NetworkXNotImplemented) def test_multigraph(self): - nx.dag_to_branching(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + nx.dag_to_branching(nx.MultiGraph()) - @raises(nx.NetworkXNotImplemented) def test_multidigraph(self): - nx.dag_to_branching(nx.MultiDiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + nx.dag_to_branching(nx.MultiDiGraph()) diff --git a/networkx/algorithms/tests/test_distance_measures.py b/networkx/algorithms/tests/test_distance_measures.py index f2daa3f..142deaa 100644 --- a/networkx/algorithms/tests/test_distance_measures.py +++ b/networkx/algorithms/tests/test_distance_measures.py @@ -1,83 +1,272 @@ -#!/usr/bin/env python -from nose.tools import * -import networkx +from random import Random +import pytest + + +import networkx as nx +from networkx import convert_node_labels_to_integers as cnlti -class TestDistance: - def setUp(self): - G = networkx.Graph() - from networkx import convert_node_labels_to_integers as cnlti - G = cnlti(networkx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") +class TestDistance: + def setup_method(self): + G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") 
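# The fixture is the 4x4 grid with nodes relabeled 1..16 in sorted
# order; the corner nodes {1, 4, 13, 16} realize the diameter of 6 and
# the four central nodes {6, 7, 10, 11} realize the radius of 4, which
# the assertions below rely on.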
self.G = G def test_eccentricity(self): - assert_equal(networkx.eccentricity(self.G, 1), 6) - e = networkx.eccentricity(self.G) - assert_equal(e[1], 6) - sp = dict(networkx.shortest_path_length(self.G)) - e = networkx.eccentricity(self.G, sp=sp) - assert_equal(e[1], 6) - e = networkx.eccentricity(self.G, v=1) - assert_equal(e, 6) + assert nx.eccentricity(self.G, 1) == 6 + e = nx.eccentricity(self.G) + assert e[1] == 6 + + sp = dict(nx.shortest_path_length(self.G)) + e = nx.eccentricity(self.G, sp=sp) + assert e[1] == 6 + + e = nx.eccentricity(self.G, v=1) + assert e == 6 + # This behavior changed in version 1.8 (ticket #739) - e = networkx.eccentricity(self.G, v=[1, 1]) - assert_equal(e[1], 6) - e = networkx.eccentricity(self.G, v=[1, 2]) - assert_equal(e[1], 6) + e = nx.eccentricity(self.G, v=[1, 1]) + assert e[1] == 6 + e = nx.eccentricity(self.G, v=[1, 2]) + assert e[1] == 6 + # test against graph with one node - G = networkx.path_graph(1) - e = networkx.eccentricity(G) - assert_equal(e[0], 0) - e = networkx.eccentricity(G, v=0) - assert_equal(e, 0) - assert_raises(networkx.NetworkXError, networkx.eccentricity, G, 1) + G = nx.path_graph(1) + e = nx.eccentricity(G) + assert e[0] == 0 + e = nx.eccentricity(G, v=0) + assert e == 0 + pytest.raises(nx.NetworkXError, nx.eccentricity, G, 1) + # test against empty graph - G = networkx.empty_graph() - e = networkx.eccentricity(G) - assert_equal(e, {}) + G = nx.empty_graph() + e = nx.eccentricity(G) + assert e == {} def test_diameter(self): - assert_equal(networkx.diameter(self.G), 6) + assert nx.diameter(self.G) == 6 def test_radius(self): - assert_equal(networkx.radius(self.G), 4) + assert nx.radius(self.G) == 4 def test_periphery(self): - assert_equal(set(networkx.periphery(self.G)), set([1, 4, 13, 16])) + assert set(nx.periphery(self.G)) == {1, 4, 13, 16} def test_center(self): - assert_equal(set(networkx.center(self.G)), set([6, 7, 10, 11])) + assert set(nx.center(self.G)) == {6, 7, 10, 11} def test_bound_diameter(self): - assert_equal(networkx.diameter(self.G, usebounds=True), 6) + assert nx.diameter(self.G, usebounds=True) == 6 def test_bound_radius(self): - assert_equal(networkx.radius(self.G, usebounds=True), 4) + assert nx.radius(self.G, usebounds=True) == 4 def test_bound_periphery(self): - assert_equal(set(networkx.periphery(self.G, usebounds=True)), set([1, 4, 13, 16])) + result = {1, 4, 13, 16} + assert set(nx.periphery(self.G, usebounds=True)) == result def test_bound_center(self): - assert_equal(set(networkx.center(self.G, usebounds=True)), set([6, 7, 10, 11])) + result = {6, 7, 10, 11} + assert set(nx.center(self.G, usebounds=True)) == result def test_radius_exception(self): - G = networkx.Graph() + G = nx.Graph() G.add_edge(1, 2) G.add_edge(3, 4) - assert_raises(networkx.NetworkXError, networkx.diameter, G) + pytest.raises(nx.NetworkXError, nx.diameter, G) - @raises(networkx.NetworkXError) def test_eccentricity_infinite(self): - G = networkx.Graph([(1, 2), (3, 4)]) - e = networkx.eccentricity(G) + with pytest.raises(nx.NetworkXError): + G = nx.Graph([(1, 2), (3, 4)]) + e = nx.eccentricity(G) - @raises(networkx.NetworkXError) def test_eccentricity_undirected_not_connected(self): - G = networkx.Graph([(1, 2), (3, 4)]) - e = networkx.eccentricity(G, sp=1) + with pytest.raises(nx.NetworkXError): + G = nx.Graph([(1, 2), (3, 4)]) + e = nx.eccentricity(G, sp=1) - @raises(networkx.NetworkXError) def test_eccentricity_directed_weakly_connected(self): - DG = networkx.DiGraph([(1, 2), (1, 3)]) - networkx.eccentricity(DG) + with 
pytest.raises(nx.NetworkXError): + DG = nx.DiGraph([(1, 2), (1, 3)]) + nx.eccentricity(DG) + + +class TestResistanceDistance: + @classmethod + def setup_class(cls): + global np + global sp_sparse + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") + sp_sparse = pytest.importorskip("scipy.sparse") + + def setup_method(self): + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(2, 3, weight=4) + G.add_edge(3, 4, weight=1) + G.add_edge(1, 4, weight=3) + self.G = G + + def test_laplacian_submatrix(self): + from networkx.algorithms.distance_measures import _laplacian_submatrix + + M = sp_sparse.csr_matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32) + N = sp_sparse.csr_matrix([[5, 6], [8, 9]], dtype=np.float32) + Mn, Mn_nodelist = _laplacian_submatrix(1, M, [1, 2, 3]) + assert Mn_nodelist == [2, 3] + assert np.allclose(Mn.toarray(), N.toarray()) + + def test_laplacian_submatrix_square(self): + with pytest.raises(nx.NetworkXError): + from networkx.algorithms.distance_measures import _laplacian_submatrix + + M = sp_sparse.csr_matrix([[1, 2], [4, 5], [7, 8]], dtype=np.float32) + _laplacian_submatrix(1, M, [1, 2, 3]) + + def test_laplacian_submatrix_matrix_node_dim(self): + with pytest.raises(nx.NetworkXError): + from networkx.algorithms.distance_measures import _laplacian_submatrix + + M = sp_sparse.csr_matrix( + [[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32 + ) + _laplacian_submatrix(1, M, [1, 2, 3, 4]) + + def test_resistance_distance(self): + rd = nx.resistance_distance(self.G, 1, 3, "weight", True) + test_data = 1 / (1 / (2 + 4) + 1 / (1 + 3)) + assert round(rd, 5) == round(test_data, 5) + + def test_resistance_distance_noinv(self): + rd = nx.resistance_distance(self.G, 1, 3, "weight", False) + test_data = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1 + 1 / 3)) + assert round(rd, 5) == round(test_data, 5) + + def test_resistance_distance_no_weight(self): + rd = nx.resistance_distance(self.G, 1, 3) + assert round(rd, 5) == 1 + + def test_resistance_distance_neg_weight(self): + self.G[2][3]["weight"] = -4 + rd = nx.resistance_distance(self.G, 1, 3, "weight", True) + test_data = 1 / (1 / (2 + -4) + 1 / (1 + 3)) + assert round(rd, 5) == round(test_data, 5) + + def test_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(2, 3, weight=4) + G.add_edge(3, 4, weight=1) + G.add_edge(1, 4, weight=3) + rd = nx.resistance_distance(G, 1, 3, "weight", True) + assert np.isclose(rd, 1 / (1 / (2 + 4) + 1 / (1 + 3))) + + def test_resistance_distance_div0(self): + with pytest.raises(ZeroDivisionError): + self.G[1][2]["weight"] = 0 + nx.resistance_distance(self.G, 1, 3, "weight") + + def test_resistance_distance_not_connected(self): + with pytest.raises(nx.NetworkXError): + self.G.add_node(5) + nx.resistance_distance(self.G, 1, 5) + + def test_resistance_distance_same_node(self): + with pytest.raises(nx.NetworkXError): + nx.resistance_distance(self.G, 1, 1) + + def test_resistance_distance_nodeA_not_in_graph(self): + with pytest.raises(nx.NetworkXError): + nx.resistance_distance(self.G, 9, 1) + + def test_resistance_distance_nodeB_not_in_graph(self): + with pytest.raises(nx.NetworkXError): + nx.resistance_distance(self.G, 1, 9) + + +class TestBarycenter: + """Test :func:`networkx.algorithms.distance_measures.barycenter`.""" + + def barycenter_as_subgraph(self, g, **kwargs): + """Return the subgraph induced on the barycenter of g""" + b = nx.barycenter(g, **kwargs) + assert isinstance(b, list) + assert set(b) <= set(g) + return g.subgraph(b) + + def 
test_must_be_connected(self): + pytest.raises(nx.NetworkXNoPath, nx.barycenter, nx.empty_graph(5)) + + def test_sp_kwarg(self): + # Complete graph K_5. Normally it works... + K_5 = nx.complete_graph(5) + sp = dict(nx.shortest_path_length(K_5)) + assert nx.barycenter(K_5, sp=sp) == list(K_5) + + # ...but not with the weight argument + for u, v, data in K_5.edges.data(): + data["weight"] = 1 + pytest.raises(ValueError, nx.barycenter, K_5, sp=sp, weight="weight") + + # ...and a corrupted sp can make it seem like K_5 is disconnected + del sp[0][1] + pytest.raises(nx.NetworkXNoPath, nx.barycenter, K_5, sp=sp) + + def test_trees(self): + """The barycenter of a tree is a single vertex or an edge. + + See [West01]_, p. 78. + """ + prng = Random(0xDEADBEEF) + for i in range(50): + RT = nx.random_tree(prng.randint(1, 75), prng) + b = self.barycenter_as_subgraph(RT) + if len(b) == 2: + assert b.size() == 1 + else: + assert len(b) == 1 + assert b.size() == 0 + + def test_this_one_specific_tree(self): + """Test the tree pictured at the bottom of [West01]_, p. 78.""" + g = nx.Graph( + { + "a": ["b"], + "b": ["a", "x"], + "x": ["b", "y"], + "y": ["x", "z"], + "z": ["y", 0, 1, 2, 3, 4], + 0: ["z"], + 1: ["z"], + 2: ["z"], + 3: ["z"], + 4: ["z"], + } + ) + b = self.barycenter_as_subgraph(g, attr="barycentricity") + assert list(b) == ["z"] + assert not b.edges + expected_barycentricity = { + 0: 23, + 1: 23, + 2: 23, + 3: 23, + 4: 23, + "a": 35, + "b": 27, + "x": 21, + "y": 17, + "z": 15, + } + for node, barycentricity in expected_barycentricity.items(): + assert g.nodes[node]["barycentricity"] == barycentricity + + # Doubling weights should do nothing but double the barycentricities + for edge in g.edges: + g.edges[edge]["weight"] = 2 + b = self.barycenter_as_subgraph(g, weight="weight", attr="barycentricity2") + assert list(b) == ["z"] + assert not b.edges + for node, barycentricity in expected_barycentricity.items(): + assert g.nodes[node]["barycentricity2"] == barycentricity * 2 diff --git a/networkx/algorithms/tests/test_distance_regular.py b/networkx/algorithms/tests/test_distance_regular.py index 37a6aa0..d336b18 100644 --- a/networkx/algorithms/tests/test_distance_regular.py +++ b/networkx/algorithms/tests/test_distance_regular.py @@ -1,52 +1,47 @@ -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true - import networkx as nx from networkx import is_strongly_regular -class TestDistanceRegular(object): - +class TestDistanceRegular: def test_is_distance_regular(self): - assert_true(nx.is_distance_regular(nx.icosahedral_graph())) - assert_true(nx.is_distance_regular(nx.petersen_graph())) - assert_true(nx.is_distance_regular(nx.cubical_graph())) - assert_true(nx.is_distance_regular(nx.complete_bipartite_graph(3, 3))) - assert_true(nx.is_distance_regular(nx.tetrahedral_graph())) - assert_true(nx.is_distance_regular(nx.dodecahedral_graph())) - assert_true(nx.is_distance_regular(nx.pappus_graph())) - assert_true(nx.is_distance_regular(nx.heawood_graph())) - assert_true(nx.is_distance_regular(nx.cycle_graph(3))) + assert nx.is_distance_regular(nx.icosahedral_graph()) + assert nx.is_distance_regular(nx.petersen_graph()) + assert nx.is_distance_regular(nx.cubical_graph()) + assert nx.is_distance_regular(nx.complete_bipartite_graph(3, 3)) + assert nx.is_distance_regular(nx.tetrahedral_graph()) + assert nx.is_distance_regular(nx.dodecahedral_graph()) + assert nx.is_distance_regular(nx.pappus_graph()) + assert nx.is_distance_regular(nx.heawood_graph()) + 
assert nx.is_distance_regular(nx.cycle_graph(3)) # no distance regular - assert_false(nx.is_distance_regular(nx.path_graph(4))) + assert not nx.is_distance_regular(nx.path_graph(4)) def test_not_connected(self): G = nx.cycle_graph(4) nx.add_cycle(G, [5, 6, 7]) - assert_false(nx.is_distance_regular(G)) + assert not nx.is_distance_regular(G) def test_global_parameters(self): b, c = nx.intersection_array(nx.cycle_graph(5)) g = nx.global_parameters(b, c) - assert_equal(list(g), [(0, 0, 2), (1, 0, 1), (1, 1, 0)]) + assert list(g) == [(0, 0, 2), (1, 0, 1), (1, 1, 0)] b, c = nx.intersection_array(nx.cycle_graph(3)) g = nx.global_parameters(b, c) - assert_equal(list(g), [(0, 0, 2), (1, 1, 0)]) + assert list(g) == [(0, 0, 2), (1, 1, 0)] def test_intersection_array(self): b, c = nx.intersection_array(nx.cycle_graph(5)) - assert_equal(b, [2, 1]) - assert_equal(c, [1, 1]) + assert b == [2, 1] + assert c == [1, 1] b, c = nx.intersection_array(nx.dodecahedral_graph()) - assert_equal(b, [3, 2, 1, 1, 1]) - assert_equal(c, [1, 1, 1, 2, 3]) + assert b == [3, 2, 1, 1, 1] + assert c == [1, 1, 1, 2, 3] b, c = nx.intersection_array(nx.icosahedral_graph()) - assert_equal(b, [5, 2, 1]) - assert_equal(c, [1, 2, 5]) + assert b == [5, 2, 1] + assert c == [1, 2, 5] -class TestStronglyRegular(object): +class TestStronglyRegular: """Unit tests for the :func:`~networkx.is_strongly_regular` function. @@ -58,14 +53,14 @@ def test_cycle_graph(self): """ G = nx.cycle_graph(5) - assert_true(is_strongly_regular(G)) + assert is_strongly_regular(G) def test_petersen_graph(self): """Tests that the Petersen graph is strongly regular.""" G = nx.petersen_graph() - assert_true(is_strongly_regular(G)) + assert is_strongly_regular(G) def test_path_graph(self): """Tests that the path graph is not strongly regular.""" G = nx.path_graph(4) - assert_false(is_strongly_regular(G)) + assert not is_strongly_regular(G) diff --git a/networkx/algorithms/tests/test_dominance.py b/networkx/algorithms/tests/test_dominance.py index 53d7f76..3246d5c 100644 --- a/networkx/algorithms/tests/test_dominance.py +++ b/networkx/algorithms/tests/test_dominance.py @@ -1,43 +1,41 @@ import networkx as nx -from nose.tools import * +import pytest -class TestImmediateDominators(object): - +class TestImmediateDominators: def test_exceptions(self): G = nx.Graph() G.add_node(0) - assert_raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) + pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) G = nx.MultiGraph(G) - assert_raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) + pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) G = nx.DiGraph([[0, 0]]) - assert_raises(nx.NetworkXError, nx.immediate_dominators, G, 1) + pytest.raises(nx.NetworkXError, nx.immediate_dominators, G, 1) def test_singleton(self): G = nx.DiGraph() G.add_node(0) - assert_equal(nx.immediate_dominators(G, 0), {0: 0}) + assert nx.immediate_dominators(G, 0) == {0: 0} G.add_edge(0, 0) - assert_equal(nx.immediate_dominators(G, 0), {0: 0}) + assert nx.immediate_dominators(G, 0) == {0: 0} def test_path(self): n = 5 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert_equal(nx.immediate_dominators(G, 0), - {i: max(i - 1, 0) for i in range(n)}) + assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)} def test_cycle(self): n = 5 G = nx.cycle_graph(n, create_using=nx.DiGraph()) - assert_equal(nx.immediate_dominators(G, 0), - {i: max(i - 1, 0) for i in range(n)}) + assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 
0) for i in range(n)} def test_unreachable(self): n = 5 - assert_greater(n, 1) + assert n > 1 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert_equal(nx.immediate_dominators(G, n // 2), - {i: max(i - 1, n // 2) for i in range(n // 2, n)}) + assert nx.immediate_dominators(G, n // 2) == { + i: max(i - 1, n // 2) for i in range(n // 2, n) + } def test_irreducible1(self): # Graph taken from Figure 2 of @@ -46,81 +44,72 @@ def test_irreducible1(self): # Software Practice & Experience, 4:110, 2001. edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)] G = nx.DiGraph(edges) - assert_equal(nx.immediate_dominators(G, 5), - {i: 5 for i in range(1, 6)}) + assert nx.immediate_dominators(G, 5) == {i: 5 for i in range(1, 6)} def test_irreducible2(self): # Graph taken from Figure 4 of # K. D. Cooper, T. J. Harvey, and K. Kennedy. # A simple, fast dominance algorithm. # Software Practice & Experience, 4:110, 2001. - edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), - (6, 4), (6, 5)] + edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)] G = nx.DiGraph(edges) - assert_equal(nx.immediate_dominators(G, 6), - {i: 6 for i in range(1, 7)}) + result = nx.immediate_dominators(G, 6) + assert result == {i: 6 for i in range(1, 7)} def test_domrel_png(self): # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)] G = nx.DiGraph(edges) - assert_equal(nx.immediate_dominators(G, 1), - {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2}) + result = nx.immediate_dominators(G, 1) + assert result == {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2} # Test postdominance. - with nx.utils.reversed(G): - assert_equal(nx.immediate_dominators(G, 6), - {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6}) + result = nx.immediate_dominators(G.reverse(copy=False), 6) + assert result == {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6} def test_boost_example(self): # Graph taken from Figure 1 of # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm - edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), - (5, 7), (6, 4)] + edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)] G = nx.DiGraph(edges) - assert_equal(nx.immediate_dominators(G, 0), - {0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1}) + result = nx.immediate_dominators(G, 0) + assert result == {0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1} # Test postdominance. 
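# Postdominance is dominance in the edge-reversed graph, so the
# immediate postdominators are computed on G.reverse(copy=False).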
- with nx.utils.reversed(G): - assert_equal(nx.immediate_dominators(G, 7), - {0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7}) - + result = nx.immediate_dominators(G.reverse(copy=False), 7) + assert result == {0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7} -class TestDominanceFrontiers(object): +class TestDominanceFrontiers: def test_exceptions(self): G = nx.Graph() G.add_node(0) - assert_raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) + pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) G = nx.MultiGraph(G) - assert_raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) + pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) G = nx.DiGraph([[0, 0]]) - assert_raises(nx.NetworkXError, nx.dominance_frontiers, G, 1) + pytest.raises(nx.NetworkXError, nx.dominance_frontiers, G, 1) def test_singleton(self): G = nx.DiGraph() G.add_node(0) - assert_equal(nx.dominance_frontiers(G, 0), {0: set()}) + assert nx.dominance_frontiers(G, 0) == {0: set()} G.add_edge(0, 0) - assert_equal(nx.dominance_frontiers(G, 0), {0: set()}) + assert nx.dominance_frontiers(G, 0) == {0: set()} def test_path(self): n = 5 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert_equal(nx.dominance_frontiers(G, 0), - {i: set() for i in range(n)}) + assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)} def test_cycle(self): n = 5 G = nx.cycle_graph(n, create_using=nx.DiGraph()) - assert_equal(nx.dominance_frontiers(G, 0), - {i: set() for i in range(n)}) + assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)} def test_unreachable(self): n = 5 - assert_greater(n, 1) + assert n > 1 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert_equal(nx.dominance_frontiers(G, n // 2), - {i: set() for i in range(n // 2, n)}) + assert nx.dominance_frontiers(G, n // 2) == {i: set() for i in range(n // 2, n)} def test_irreducible1(self): # Graph taken from Figure 2 of @@ -129,88 +118,121 @@ def test_irreducible1(self): # Software Practice & Experience, 4:110, 2001. edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)] G = nx.DiGraph(edges) - assert_equal({u: df - for u, df in nx.dominance_frontiers(G, 5).items()}, - {1: set([2]), 2: set([1]), 3: set([2]), - 4: set([1]), 5: set()}) + assert {u: df for u, df in nx.dominance_frontiers(G, 5).items()} == { + 1: {2}, + 2: {1}, + 3: {2}, + 4: {1}, + 5: set(), + } def test_irreducible2(self): # Graph taken from Figure 4 of # K. D. Cooper, T. J. Harvey, and K. Kennedy. # A simple, fast dominance algorithm. # Software Practice & Experience, 4:110, 2001. - edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), - (6, 4), (6, 5)] + edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)] G = nx.DiGraph(edges) - assert_equal(nx.dominance_frontiers(G, 6), - {1: set([2]), 2: set([1, 3]), 3: set([2]), 4: set([2, 3]), 5: set([1]), 6: set([])}) + assert nx.dominance_frontiers(G, 6) == { + 1: {2}, + 2: {1, 3}, + 3: {2}, + 4: {2, 3}, + 5: {1}, + 6: set(), + } def test_domrel_png(self): # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)] G = nx.DiGraph(edges) - assert_equal(nx.dominance_frontiers(G, 1), - {1: set([]), 2: set([2]), 3: set([5]), 4: set([5]), - 5: set([2]), 6: set()}) + assert nx.dominance_frontiers(G, 1) == { + 1: set(), + 2: {2}, + 3: {5}, + 4: {5}, + 5: {2}, + 6: set(), + } # Test postdominance. 
- with nx.utils.reversed(G): - assert_equal(nx.dominance_frontiers(G, 6), - {1: set(), 2: set([2]), 3: set([2]), 4: set([2]), - 5: set([2]), 6: set()}) + result = nx.dominance_frontiers(G.reverse(copy=False), 6) + assert result == {1: set(), 2: {2}, 3: {2}, 4: {2}, 5: {2}, 6: set()} def test_boost_example(self): # Graph taken from Figure 1 of # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm - edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), - (5, 7), (6, 4)] + edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)] G = nx.DiGraph(edges) - assert_equal(nx.dominance_frontiers(G, 0), - {0: set(), 1: set(), 2: set([7]), 3: set([7]), - 4: set([4, 7]), 5: set([7]), 6: set([4]), 7: set()}) + assert nx.dominance_frontiers(G, 0) == { + 0: set(), + 1: set(), + 2: {7}, + 3: {7}, + 4: {4, 7}, + 5: {7}, + 6: {4}, + 7: set(), + } # Test postdominance. - with nx.utils.reversed(G): - assert_equal(nx.dominance_frontiers(G, 7), - {0: set(), 1: set(), 2: set([1]), 3: set([1]), - 4: set([1, 4]), 5: set([1]), 6: set([4]), 7: set()}) + result = nx.dominance_frontiers(G.reverse(copy=False), 7) + expected = { + 0: set(), + 1: set(), + 2: {1}, + 3: {1}, + 4: {1, 4}, + 5: {1}, + 6: {4}, + 7: set(), + } + assert result == expected def test_discard_issue(self): # https://github.com/networkx/networkx/issues/2071 g = nx.DiGraph() - g.add_edges_from([ - ('b0', 'b1'), - ('b1', 'b2'), - ('b2', 'b3'), - ('b3', 'b1'), - ('b1', 'b5'), - ('b5', 'b6'), - ('b5', 'b8'), - ('b6', 'b7'), - ('b8', 'b7'), - ('b7', 'b3'), - ('b3', 'b4') - ] + g.add_edges_from( + [ + ("b0", "b1"), + ("b1", "b2"), + ("b2", "b3"), + ("b3", "b1"), + ("b1", "b5"), + ("b5", "b6"), + ("b5", "b8"), + ("b6", "b7"), + ("b8", "b7"), + ("b7", "b3"), + ("b3", "b4"), + ] ) - df = nx.dominance_frontiers(g, 'b0') - assert_equal(df, {'b4': set(), 'b5': set(['b3']), 'b6': set(['b7']), - 'b7': set(['b3']), - 'b0': set(), 'b1': set(['b1']), 'b2': set(['b3']), - 'b3': set(['b1']), 'b8': set(['b7'])}) + df = nx.dominance_frontiers(g, "b0") + assert df == { + "b4": set(), + "b5": {"b3"}, + "b6": {"b7"}, + "b7": {"b3"}, + "b0": set(), + "b1": {"b1"}, + "b2": {"b3"}, + "b3": {"b1"}, + "b8": {"b7"}, + } def test_loop(self): g = nx.DiGraph() - g.add_edges_from([('a', 'b'), ('b', 'c'), ('b', 'a')]) - df = nx.dominance_frontiers(g, 'a') - assert_equal(df, {'a': set(), 'b': set(), 'c': set()}) + g.add_edges_from([("a", "b"), ("b", "c"), ("b", "a")]) + df = nx.dominance_frontiers(g, "a") + assert df == {"a": set(), "b": set(), "c": set()} def test_missing_immediate_doms(self): # see https://github.com/networkx/networkx/issues/2070 g = nx.DiGraph() edges = [ - ('entry_1', 'b1'), - ('b1', 'b2'), - ('b2', 'b3'), - ('b3', 'exit'), - ('entry_2', 'b3') + ("entry_1", "b1"), + ("b1", "b2"), + ("b2", "b3"), + ("b3", "exit"), + ("entry_2", "b3"), ] # entry_1 @@ -226,35 +248,37 @@ def test_missing_immediate_doms(self): g.add_edges_from(edges) # formerly raised KeyError on entry_2 when parsing b3 # because entry_2 does not have immediate doms (no path) - nx.dominance_frontiers(g, 'entry_1') + nx.dominance_frontiers(g, "entry_1") def test_loops_larger(self): # from # http://ecee.colorado.edu/~waite/Darmstadt/motion.html g = nx.DiGraph() edges = [ - ('entry', 'exit'), - ('entry', '1'), - ('1', '2'), - ('2', '3'), - ('3', '4'), - ('4', '5'), - ('5', '6'), - ('6', 'exit'), - ('6', '2'), - ('5', '3'), - ('4', '4') + ("entry", "exit"), + ("entry", "1"), + ("1", "2"), + ("2", "3"), + ("3", "4"), + ("4", "5"), + ("5", "6"), + 
("6", "exit"), + ("6", "2"), + ("5", "3"), + ("4", "4"), ] g.add_edges_from(edges) - df = nx.dominance_frontiers(g, 'entry') - answer = {'entry': set(), - '1': set(['exit']), - '2': set(['exit', '2']), - '3': set(['exit', '3', '2']), - '4': set(['exit', '4', '3', '2']), - '5': set(['exit', '3', '2']), - '6': set(['exit', '2']), - 'exit': set()} + df = nx.dominance_frontiers(g, "entry") + answer = { + "entry": set(), + "1": {"exit"}, + "2": {"exit", "2"}, + "3": {"exit", "3", "2"}, + "4": {"exit", "4", "3", "2"}, + "5": {"exit", "3", "2"}, + "6": {"exit", "2"}, + "exit": set(), + } for n in df: - assert_equal(set(df[n]), set(answer[n])) + assert set(df[n]) == set(answer[n]) diff --git a/networkx/algorithms/tests/test_dominating.py b/networkx/algorithms/tests/test_dominating.py index 7a49d16..958989b 100644 --- a/networkx/algorithms/tests/test_dominating.py +++ b/networkx/algorithms/tests/test_dominating.py @@ -1,13 +1,13 @@ -from nose.tools import assert_equal, assert_true, assert_false, raises +import pytest import networkx as nx def test_dominating_set(): G = nx.gnp_random_graph(100, 0.1) D = nx.dominating_set(G) - assert_true(nx.is_dominating_set(G, D)) + assert nx.is_dominating_set(G, D) D = nx.dominating_set(G, start_with=0) - assert_true(nx.is_dominating_set(G, D)) + assert nx.is_dominating_set(G, D) def test_complete(): @@ -15,25 +15,25 @@ def test_complete(): Thus the dominating set has to be of cardinality 1. """ K4 = nx.complete_graph(4) - assert_equal(len(nx.dominating_set(K4)), 1) + assert len(nx.dominating_set(K4)) == 1 K5 = nx.complete_graph(5) - assert_equal(len(nx.dominating_set(K5)), 1) + assert len(nx.dominating_set(K5)) == 1 -@raises(nx.NetworkXError) def test_raise_dominating_set(): - G = nx.path_graph(4) - D = nx.dominating_set(G, start_with=10) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + D = nx.dominating_set(G, start_with=10) def test_is_dominating_set(): G = nx.path_graph(4) - d = set([1, 3]) - assert_true(nx.is_dominating_set(G, d)) - d = set([0, 2]) - assert_true(nx.is_dominating_set(G, d)) - d = set([1]) - assert_false(nx.is_dominating_set(G, d)) + d = {1, 3} + assert nx.is_dominating_set(G, d) + d = {0, 2} + assert nx.is_dominating_set(G, d) + d = {1} + assert not nx.is_dominating_set(G, d) def test_wikipedia_is_dominating_set(): @@ -41,6 +41,6 @@ def test_wikipedia_is_dominating_set(): """ G = nx.cycle_graph(4) G.add_edges_from([(0, 4), (1, 4), (2, 5)]) - assert_true(nx.is_dominating_set(G, set([4, 3, 5]))) - assert_true(nx.is_dominating_set(G, set([0, 2]))) - assert_true(nx.is_dominating_set(G, set([1, 2]))) + assert nx.is_dominating_set(G, {4, 3, 5}) + assert nx.is_dominating_set(G, {0, 2}) + assert nx.is_dominating_set(G, {1, 2}) diff --git a/networkx/algorithms/tests/test_efficiency.py b/networkx/algorithms/tests/test_efficiency.py index a294ef0..9a2e7d0 100644 --- a/networkx/algorithms/tests/test_efficiency.py +++ b/networkx/algorithms/tests/test_efficiency.py @@ -1,21 +1,10 @@ -# test_efficiency.py - unit tests for the efficiency module -# -# Copyright 2015-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
"""Unit tests for the :mod:`networkx.algorithms.efficiency` module.""" -from __future__ import division -from nose.tools import assert_equal import networkx as nx class TestEfficiency: - - def __init__(self): + def setup_method(self): # G1 is a disconnected graph self.G1 = nx.Graph() self.G1.add_nodes_from([1, 2, 3]) @@ -28,20 +17,20 @@ def test_efficiency_disconnected_nodes(self): """ When nodes are disconnected, efficiency is 0 """ - assert_equal(nx.efficiency(self.G1, 1, 2), 0) + assert nx.efficiency(self.G1, 1, 2) == 0 def test_local_efficiency_disconnected_graph(self): """ In a disconnected graph the efficiency is 0 """ - assert_equal(nx.local_efficiency(self.G1), 0) + assert nx.local_efficiency(self.G1) == 0 def test_efficiency(self): - assert_equal(nx.efficiency(self.G2, 0, 1), 1) - assert_equal(nx.efficiency(self.G2, 0, 2), 1 / 2) + assert nx.efficiency(self.G2, 0, 1) == 1 + assert nx.efficiency(self.G2, 0, 2) == 1 / 2 def test_global_efficiency(self): - assert_equal(nx.global_efficiency(self.G2), 5 / 6) + assert nx.global_efficiency(self.G2) == 5 / 6 def test_global_efficiency_complete_graph(self): """ @@ -49,7 +38,7 @@ def test_global_efficiency_complete_graph(self): """ for n in range(2, 10): G = nx.complete_graph(n) - assert_equal(nx.global_efficiency(G), 1) + assert nx.global_efficiency(G) == 1 def test_local_efficiency_complete_graph(self): """ @@ -59,11 +48,11 @@ def test_local_efficiency_complete_graph(self): """ for n in range(3, 10): G = nx.complete_graph(n) - assert_equal(nx.local_efficiency(G), 1) + assert nx.local_efficiency(G) == 1 def test_using_ego_graph(self): """ Test that the ego graph is used when computing local efficiency. For more information, see GitHub issue #2710. """ - assert_equal(nx.local_efficiency(self.G3), 7 / 12) + assert nx.local_efficiency(self.G3) == 7 / 12 diff --git a/networkx/algorithms/tests/test_euler.py b/networkx/algorithms/tests/test_euler.py index 66501e2..f136ab0 100644 --- a/networkx/algorithms/tests/test_euler.py +++ b/networkx/algorithms/tests/test_euler.py @@ -1,114 +1,191 @@ -from unittest import TestCase +import collections -from nose.tools import assert_equal -from nose.tools import assert_false -try: - from nose.tools import assert_count_equal -except ImportError: - from nose.tools import assert_items_equal as assert_count_equal -from nose.tools import assert_true -from nose.tools import raises +import pytest import networkx as nx -from networkx import is_eulerian, eulerian_circuit -class TestIsEulerian(TestCase): - +class TestIsEulerian: def test_is_eulerian(self): - assert_true(is_eulerian(nx.complete_graph(5))) - assert_true(is_eulerian(nx.complete_graph(7))) - assert_true(is_eulerian(nx.hypercube_graph(4))) - assert_true(is_eulerian(nx.hypercube_graph(6))) + assert nx.is_eulerian(nx.complete_graph(5)) + assert nx.is_eulerian(nx.complete_graph(7)) + assert nx.is_eulerian(nx.hypercube_graph(4)) + assert nx.is_eulerian(nx.hypercube_graph(6)) - assert_false(is_eulerian(nx.complete_graph(4))) - assert_false(is_eulerian(nx.complete_graph(6))) - assert_false(is_eulerian(nx.hypercube_graph(3))) - assert_false(is_eulerian(nx.hypercube_graph(5))) + assert not nx.is_eulerian(nx.complete_graph(4)) + assert not nx.is_eulerian(nx.complete_graph(6)) + assert not nx.is_eulerian(nx.hypercube_graph(3)) + assert not nx.is_eulerian(nx.hypercube_graph(5)) - assert_false(is_eulerian(nx.petersen_graph())) - assert_false(is_eulerian(nx.path_graph(4))) + assert not nx.is_eulerian(nx.petersen_graph()) + assert not nx.is_eulerian(nx.path_graph(4)) def 
test_is_eulerian2(self): # not connected G = nx.Graph() G.add_nodes_from([1, 2, 3]) - assert_false(is_eulerian(G)) + assert not nx.is_eulerian(G) # not strongly connected G = nx.DiGraph() G.add_nodes_from([1, 2, 3]) - assert_false(is_eulerian(G)) + assert not nx.is_eulerian(G) G = nx.MultiDiGraph() G.add_edge(1, 2) G.add_edge(2, 3) G.add_edge(2, 3) G.add_edge(3, 1) - assert_false(is_eulerian(G)) - + assert not nx.is_eulerian(G) -class TestEulerianCircuit(TestCase): +class TestEulerianCircuit: def test_eulerian_circuit_cycle(self): G = nx.cycle_graph(4) - edges = list(eulerian_circuit(G, source=0)) + edges = list(nx.eulerian_circuit(G, source=0)) nodes = [u for u, v in edges] - assert_equal(nodes, [0, 3, 2, 1]) - assert_equal(edges, [(0, 3), (3, 2), (2, 1), (1, 0)]) + assert nodes == [0, 3, 2, 1] + assert edges == [(0, 3), (3, 2), (2, 1), (1, 0)] - edges = list(eulerian_circuit(G, source=1)) + edges = list(nx.eulerian_circuit(G, source=1)) nodes = [u for u, v in edges] - assert_equal(nodes, [1, 2, 3, 0]) - assert_equal(edges, [(1, 2), (2, 3), (3, 0), (0, 1)]) + assert nodes == [1, 2, 3, 0] + assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)] G = nx.complete_graph(3) - edges = list(eulerian_circuit(G, source=0)) + edges = list(nx.eulerian_circuit(G, source=0)) nodes = [u for u, v in edges] - assert_equal(nodes, [0, 2, 1]) - assert_equal(edges, [(0, 2), (2, 1), (1, 0)]) + assert nodes == [0, 2, 1] + assert edges == [(0, 2), (2, 1), (1, 0)] - edges = list(eulerian_circuit(G, source=1)) + edges = list(nx.eulerian_circuit(G, source=1)) nodes = [u for u, v in edges] - assert_equal(nodes, [1, 2, 0]) - assert_equal(edges, [(1, 2), (2, 0), (0, 1)]) + assert nodes == [1, 2, 0] + assert edges == [(1, 2), (2, 0), (0, 1)] def test_eulerian_circuit_digraph(self): G = nx.DiGraph() nx.add_cycle(G, [0, 1, 2, 3]) - edges = list(eulerian_circuit(G, source=0)) + edges = list(nx.eulerian_circuit(G, source=0)) nodes = [u for u, v in edges] - assert_equal(nodes, [0, 1, 2, 3]) - assert_equal(edges, [(0, 1), (1, 2), (2, 3), (3, 0)]) + assert nodes == [0, 1, 2, 3] + assert edges == [(0, 1), (1, 2), (2, 3), (3, 0)] - edges = list(eulerian_circuit(G, source=1)) + edges = list(nx.eulerian_circuit(G, source=1)) nodes = [u for u, v in edges] - assert_equal(nodes, [1, 2, 3, 0]) - assert_equal(edges, [(1, 2), (2, 3), (3, 0), (0, 1)]) + assert nodes == [1, 2, 3, 0] + assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)] def test_multigraph(self): G = nx.MultiGraph() nx.add_cycle(G, [0, 1, 2, 3]) G.add_edge(1, 2) G.add_edge(1, 2) - edges = list(eulerian_circuit(G, source=0)) + edges = list(nx.eulerian_circuit(G, source=0)) nodes = [u for u, v in edges] - assert_equal(nodes, [0, 3, 2, 1, 2, 1]) - assert_equal(edges, [(0, 3), (3, 2), (2, 1), (1, 2), (2, 1), (1, 0)]) + assert nodes == [0, 3, 2, 1, 2, 1] + assert edges == [(0, 3), (3, 2), (2, 1), (1, 2), (2, 1), (1, 0)] def test_multigraph_with_keys(self): G = nx.MultiGraph() nx.add_cycle(G, [0, 1, 2, 3]) G.add_edge(1, 2) G.add_edge(1, 2) - edges = list(eulerian_circuit(G, source=0, keys=True)) + edges = list(nx.eulerian_circuit(G, source=0, keys=True)) nodes = [u for u, v, k in edges] - assert_equal(nodes, [0, 3, 2, 1, 2, 1]) - assert_equal(edges[:2], [(0, 3, 0), (3, 2, 0)]) - assert_count_equal(edges[2:5], [(2, 1, 0), (1, 2, 1), (2, 1, 2)]) - assert_equal(edges[5:], [(1, 0, 0)]) + assert nodes == [0, 3, 2, 1, 2, 1] + assert edges[:2] == [(0, 3, 0), (3, 2, 0)] + assert collections.Counter(edges[2:5]) == collections.Counter( + [(2, 1, 0), (1, 2, 1), (2, 1, 2)] + ) + assert edges[5:] == 
[(1, 0, 0)] - @raises(nx.NetworkXError) def test_not_eulerian(self): - f = list(eulerian_circuit(nx.complete_graph(4))) + with pytest.raises(nx.NetworkXError): + f = list(nx.eulerian_circuit(nx.complete_graph(4))) + + +class TestIsSemiEulerian: + def test_is_semieulerian(self): + # Test that graphs with Eulerian paths but no cycles return True. + assert nx.is_semieulerian(nx.path_graph(4)) + G = nx.path_graph(6, create_using=nx.DiGraph) + assert nx.is_semieulerian(G) + + # Test that graphs with Eulerian cycles return False. + assert not nx.is_semieulerian(nx.complete_graph(5)) + assert not nx.is_semieulerian(nx.complete_graph(7)) + assert not nx.is_semieulerian(nx.hypercube_graph(4)) + assert not nx.is_semieulerian(nx.hypercube_graph(6)) + + +class TestHasEulerianPath: + def test_has_eulerian_path_cyclic(self): + # Test that graphs with Eulerian cycles return True. + assert nx.has_eulerian_path(nx.complete_graph(5)) + assert nx.has_eulerian_path(nx.complete_graph(7)) + assert nx.has_eulerian_path(nx.hypercube_graph(4)) + assert nx.has_eulerian_path(nx.hypercube_graph(6)) + + def test_has_eulerian_path_non_cyclic(self): + # Test that graphs with Eulerian paths but no cycles return True. + assert nx.has_eulerian_path(nx.path_graph(4)) + G = nx.path_graph(6, create_using=nx.DiGraph) + assert nx.has_eulerian_path(G) + + +class TestFindPathStart: + def test_find_path_start(self): + find_path_start = nx.algorithms.euler._find_path_start + # Test that digraphs return the correct starting node. + G = nx.path_graph(6, create_using=nx.DiGraph) + assert find_path_start(G) == 0 + edges = [(0, 1), (1, 2), (2, 0), (4, 0)] + assert find_path_start(nx.DiGraph(edges)) == 4 + + # Test that a graph with no Eulerian path returns None. + edges = [(0, 1), (1, 2), (2, 3), (2, 4)] + assert find_path_start(nx.DiGraph(edges)) is None + + +class TestEulerianPath: + def test_eulerian_path(self): + x = [(4, 0), (0, 1), (1, 2), (2, 0)] + for e1, e2 in zip(x, nx.eulerian_path(nx.DiGraph(x))): + assert e1 == e2 + + +class TestEulerize: + def test_disconnected(self): + with pytest.raises(nx.NetworkXError): + G = nx.from_edgelist([(0, 1), (2, 3)]) + nx.eulerize(G) + + def test_null_graph(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.eulerize(nx.Graph()) + + def test_null_multigraph(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.eulerize(nx.MultiGraph()) + + def test_on_empty_graph(self): + with pytest.raises(nx.NetworkXError): + nx.eulerize(nx.empty_graph(3)) + + def test_on_eulerian(self): + G = nx.cycle_graph(3) + H = nx.eulerize(G) + assert nx.is_isomorphic(G, H) + + def test_on_eulerian_multigraph(self): + G = nx.MultiGraph(nx.cycle_graph(3)) + G.add_edge(0, 1) + H = nx.eulerize(G) + assert nx.is_eulerian(H) + + def test_on_complete_graph(self): + G = nx.complete_graph(4) + assert nx.is_eulerian(nx.eulerize(G)) + assert nx.is_eulerian(nx.eulerize(nx.MultiGraph(G))) diff --git a/networkx/algorithms/tests/test_graph_hashing.py b/networkx/algorithms/tests/test_graph_hashing.py new file mode 100644 index 0000000..719b541 --- /dev/null +++ b/networkx/algorithms/tests/test_graph_hashing.py @@ -0,0 +1,42 @@ +import networkx as nx + + +def test_empty_graph_hash(): + G1 = nx.empty_graph() + G2 = nx.empty_graph() + + h1 = nx.weisfeiler_lehman_graph_hash(G1) + h2 = nx.weisfeiler_lehman_graph_hash(G2) + + assert h1 == h2 + + +def test_relabel(): + G1 = nx.Graph() + G1.add_edges_from( + [ + (1, 2, {"label": "A"}), + (2, 3, {"label": "A"}), + (3, 1, {"label": "A"}), + (1, 4, {"label": "B"}), + ] + ) + h_before = 
nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label") + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + h_after = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label") + + assert h_after == h_before + + +def test_directed(): + G1 = nx.DiGraph() + G1.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 5)]) + + h_directed = nx.weisfeiler_lehman_graph_hash(G1) + + G2 = G1.to_undirected() + h_undirected = nx.weisfeiler_lehman_graph_hash(G2) + + assert h_directed != h_undirected diff --git a/networkx/algorithms/tests/test_graphical.py b/networkx/algorithms/tests/test_graphical.py index c440f36..5c5a881 100644 --- a/networkx/algorithms/tests/test_graphical.py +++ b/networkx/algorithms/tests/test_graphical.py @@ -1,17 +1,15 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest import networkx as nx def test_valid_degree_sequence1(): n = 100 - p = .3 + p = 0.3 for i in range(10): G = nx.erdos_renyi_graph(n, p) deg = (d for n, d in G.degree()) - assert_true(nx.is_graphical(deg, method='eg')) - assert_true(nx.is_graphical(deg, method='hh')) + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") def test_valid_degree_sequence2(): @@ -19,62 +17,66 @@ def test_valid_degree_sequence2(): for i in range(10): G = nx.barabasi_albert_graph(n, 1) deg = (d for n, d in G.degree()) - assert_true(nx.is_graphical(deg, method='eg')) - assert_true(nx.is_graphical(deg, method='hh')) + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") -@raises(nx.NetworkXException) def test_string_input(): - a = nx.is_graphical([], 'foo') + pytest.raises(nx.NetworkXException, nx.is_graphical, [], "foo") + pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "hh") + pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "eg") + + +def test_non_integer_input(): + pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "eg") + pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "hh") def test_negative_input(): - assert_false(nx.is_graphical([-1], 'hh')) - assert_false(nx.is_graphical([-1], 'eg')) - assert_false(nx.is_graphical([72.5], 'eg')) + assert not nx.is_graphical([-1], "hh") + assert not nx.is_graphical([-1], "eg") -class TestAtlas(object): +class TestAtlas: @classmethod - def setupClass(cls): + def setup_class(cls): global atlas - import platform - if platform.python_implementation() == 'Jython': - raise SkipTest('graph atlas not available under Jython.') + # import platform + # if platform.python_implementation() == 'Jython': + # raise SkipTest('graph atlas not available under Jython.') import networkx.generators.atlas as atlas - def setUp(self): - self.GAG = atlas.graph_atlas_g() + cls.GAG = atlas.graph_atlas_g() def test_atlas(self): for graph in self.GAG: deg = (d for n, d in graph.degree()) - assert_true(nx.is_graphical(deg, method='eg')) - assert_true(nx.is_graphical(deg, method='hh')) + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") def test_small_graph_true(): z = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert_true(nx.is_graphical(z, method='hh')) - assert_true(nx.is_graphical(z, method='eg')) + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2] - assert_true(nx.is_graphical(z, method='hh')) - assert_true(nx.is_graphical(z, method='eg')) + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - 
assert_true(nx.is_graphical(z, method='hh')) - assert_true(nx.is_graphical(z, method='eg')) + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") def test_small_graph_false(): z = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert_false(nx.is_graphical(z, method='hh')) - assert_false(nx.is_graphical(z, method='eg')) + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") z = [6, 5, 4, 4, 2, 1, 1, 1] - assert_false(nx.is_graphical(z, method='hh')) - assert_false(nx.is_graphical(z, method='eg')) + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - assert_false(nx.is_graphical(z, method='hh')) - assert_false(nx.is_graphical(z, method='eg')) + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") def test_directed_degree_sequence(): @@ -85,51 +87,79 @@ def test_directed_degree_sequence(): G = nx.erdos_renyi_graph(n, p * (i + 1), None, True) din = (d for n, d in G.in_degree()) dout = (d for n, d in G.out_degree()) - assert_true(nx.is_digraphical(din, dout)) + assert nx.is_digraphical(din, dout) def test_small_directed_sequences(): dout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] din = [3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 1] - assert_true(nx.is_digraphical(din, dout)) + assert nx.is_digraphical(din, dout) # Test nongraphical directed sequence dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] din = [103, 102, 102, 102, 102, 102, 102, 102, 102, 102] - assert_false(nx.is_digraphical(din, dout)) + assert not nx.is_digraphical(din, dout) # Test digraphical small sequence dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] din = [2, 2, 2, 2, 2, 2, 2, 2, 1, 1] - assert_true(nx.is_digraphical(din, dout)) + assert nx.is_digraphical(din, dout) # Test nonmatching sum din = [2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1] - assert_false(nx.is_digraphical(din, dout)) + assert not nx.is_digraphical(din, dout) # Test for negative integer in sequence din = [2, 2, 2, -2, 2, 2, 2, 2, 1, 1, 4] - assert_false(nx.is_digraphical(din, dout)) + assert not nx.is_digraphical(din, dout) + # Test for noninteger + din = dout = [1, 1, 1.1, 1] + assert not nx.is_digraphical(din, dout) + din = dout = [1, 1, "rer", 1] + assert not nx.is_digraphical(din, dout) def test_multi_sequence(): # Test nongraphical multi sequence seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1] - assert_false(nx.is_multigraphical(seq)) + assert not nx.is_multigraphical(seq) # Test small graphical multi sequence seq = [6, 5, 4, 4, 2, 1, 1, 1] - assert_true(nx.is_multigraphical(seq)) + assert nx.is_multigraphical(seq) # Test for negative integer in sequence seq = [6, 5, 4, -4, 2, 1, 1, 1] - assert_false(nx.is_multigraphical(seq)) + assert not nx.is_multigraphical(seq) # Test for sequence with odd sum seq = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - assert_false(nx.is_multigraphical(seq)) + assert not nx.is_multigraphical(seq) + # Test for noninteger + seq = [1, 1, 1.1, 1] + assert not nx.is_multigraphical(seq) + seq = [1, 1, "rer", 1] + assert not nx.is_multigraphical(seq) def test_pseudo_sequence(): # Test small valid pseudo sequence seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1] - assert_true(nx.is_pseudographical(seq)) + assert nx.is_pseudographical(seq) # Test for sequence with odd sum seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert_false(nx.is_pseudographical(seq)) + assert not nx.is_pseudographical(seq) # Test for negative integer in sequence seq = [1000, 3, 3, 3, 3, 2, 2, -2, 1, 1] - assert_false(nx.is_pseudographical(seq)) + assert not 
nx.is_pseudographical(seq) + # Test for noninteger + seq = [1, 1, 1.1, 1] + assert not nx.is_pseudographical(seq) + seq = [1, 1, "rer", 1] + assert not nx.is_pseudographical(seq) + + +def test_numpy_degree_sequence(): + numpy = pytest.importorskip("numpy") + ds = numpy.array([1, 2, 2, 2, 1], dtype=numpy.int64) + assert nx.is_graphical(ds, "eg") + assert nx.is_graphical(ds, "hh") + ds = numpy.array([1, 2, 2, 2, 1], dtype=numpy.float64) + assert nx.is_graphical(ds, "eg") + assert nx.is_graphical(ds, "hh") + ds = numpy.array([1.1, 2, 2, 2, 1], dtype=numpy.float64) + pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "eg") + pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "hh") diff --git a/networkx/algorithms/tests/test_hierarchy.py b/networkx/algorithms/tests/test_hierarchy.py index 5aa38d0..9e7c6e3 100644 --- a/networkx/algorithms/tests/test_hierarchy.py +++ b/networkx/algorithms/tests/test_hierarchy.py @@ -1,35 +1,38 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx as nx def test_hierarchy_exception(): G = nx.cycle_graph(5) - assert_raises(nx.NetworkXError, nx.flow_hierarchy, G) + pytest.raises(nx.NetworkXError, nx.flow_hierarchy, G) def test_hierarchy_cycle(): G = nx.cycle_graph(5, create_using=nx.DiGraph()) - assert_equal(nx.flow_hierarchy(G), 0.0) + assert nx.flow_hierarchy(G) == 0.0 def test_hierarchy_tree(): G = nx.full_rary_tree(2, 16, create_using=nx.DiGraph()) - assert_equal(nx.flow_hierarchy(G), 1.0) + assert nx.flow_hierarchy(G) == 1.0 def test_hierarchy_1(): G = nx.DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 1), (3, 4), (0, 4)]) - assert_equal(nx.flow_hierarchy(G), 0.5) + assert nx.flow_hierarchy(G) == 0.5 def test_hierarchy_weight(): G = nx.DiGraph() - G.add_edges_from([(0, 1, {'weight': .3}), - (1, 2, {'weight': .1}), - (2, 3, {'weight': .1}), - (3, 1, {'weight': .1}), - (3, 4, {'weight': .3}), - (0, 4, {'weight': .3})]) - assert_equal(nx.flow_hierarchy(G, weight='weight'), .75) + G.add_edges_from( + [ + (0, 1, {"weight": 0.3}), + (1, 2, {"weight": 0.1}), + (2, 3, {"weight": 0.1}), + (3, 1, {"weight": 0.1}), + (3, 4, {"weight": 0.3}), + (0, 4, {"weight": 0.3}), + ] + ) + assert nx.flow_hierarchy(G, weight="weight") == 0.75 diff --git a/networkx/algorithms/tests/test_hybrid.py b/networkx/algorithms/tests/test_hybrid.py index 76137b8..6af0016 100644 --- a/networkx/algorithms/tests/test_hybrid.py +++ b/networkx/algorithms/tests/test_hybrid.py @@ -1,4 +1,3 @@ -from nose.tools import * import networkx as nx @@ -6,10 +5,10 @@ def test_2d_grid_graph(): # FC article claims 2d grid graph of size n is (3,3)-connected # and (5,9)-connected, but I don't think it is (5,9)-connected G = nx.grid_2d_graph(8, 8, periodic=True) - assert_true(nx.is_kl_connected(G, 3, 3)) - assert_false(nx.is_kl_connected(G, 5, 9)) + assert nx.is_kl_connected(G, 3, 3) + assert not nx.is_kl_connected(G, 5, 9) (H, graphOK) = nx.kl_connected_subgraph(G, 5, 9, same_as_graph=True) - assert_false(graphOK) + assert not graphOK def test_small_graph(): @@ -17,9 +16,9 @@ def test_small_graph(): G.add_edge(1, 2) G.add_edge(1, 3) G.add_edge(2, 3) - assert_true(nx.is_kl_connected(G, 2, 2)) + assert nx.is_kl_connected(G, 2, 2) H = nx.kl_connected_subgraph(G, 2, 2) - (H, graphOK) = nx.kl_connected_subgraph(G, 2, 2, - low_memory=True, - same_as_graph=True) - assert_true(graphOK) + (H, graphOK) = nx.kl_connected_subgraph( + G, 2, 2, low_memory=True, same_as_graph=True + ) + assert graphOK diff --git a/networkx/algorithms/tests/test_isolate.py 
b/networkx/algorithms/tests/test_isolate.py index 369b730..d29b306 100644 --- a/networkx/algorithms/tests/test_isolate.py +++ b/networkx/algorithms/tests/test_isolate.py @@ -1,15 +1,4 @@ -# test_isolate.py - unit tests for the isolate module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.isolates` module.""" -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true import networkx as nx @@ -18,20 +7,20 @@ def test_is_isolate(): G = nx.Graph() G.add_edge(0, 1) G.add_node(2) - assert_false(nx.is_isolate(G, 0)) - assert_false(nx.is_isolate(G, 1)) - assert_true(nx.is_isolate(G, 2)) + assert not nx.is_isolate(G, 0) + assert not nx.is_isolate(G, 1) + assert nx.is_isolate(G, 2) def test_isolates(): G = nx.Graph() G.add_edge(0, 1) G.add_nodes_from([2, 3]) - assert_equal(sorted(nx.isolates(G)), [2, 3]) + assert sorted(nx.isolates(G)) == [2, 3] def test_number_of_isolates(): G = nx.Graph() G.add_edge(0, 1) G.add_nodes_from([2, 3]) - assert_equal(nx.number_of_isolates(G), 2) + assert nx.number_of_isolates(G) == 2 diff --git a/networkx/algorithms/tests/test_link_prediction.py b/networkx/algorithms/tests/test_link_prediction.py index e725738..cb3e58c 100644 --- a/networkx/algorithms/tests/test_link_prediction.py +++ b/networkx/algorithms/tests/test_link_prediction.py @@ -1,25 +1,26 @@ import math from functools import partial -from nose.tools import * +import pytest import networkx as nx def _test_func(G, ebunch, expected, predict_func, **kwargs): result = predict_func(G, ebunch, **kwargs) - exp_dict = dict((tuple(sorted([u, v])), score) for u, v, score in expected) - res_dict = dict((tuple(sorted([u, v])), score) for u, v, score in result) + exp_dict = {tuple(sorted([u, v])): score for u, v, score in expected} + res_dict = {tuple(sorted([u, v])): score for u, v, score in result} - assert_equal(len(exp_dict), len(res_dict)) + assert len(exp_dict) == len(res_dict) for p in exp_dict: - assert_almost_equal(exp_dict[p], res_dict[p]) + assert nx.testing.almost_equal(exp_dict[p], res_dict[p]) -class TestResourceAllocationIndex(): - def setUp(self): - self.func = nx.resource_allocation_index - self.test = partial(_test_func, predict_func=self.func) +class TestResourceAllocationIndex: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.resource_allocation_index) + cls.test = partial(_test_func, predict_func=cls.func) def test_K5(self): G = nx.complete_graph(5) @@ -33,23 +34,22 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(1, 2)], [(1, 2, 0.25)]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) + def test_notimplemented(self): + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def 
test_no_common_neighbor(self): G = nx.Graph() @@ -66,10 +66,11 @@ def test_all_nonexistent_edges(self): self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)]) -class TestJaccardCoefficient(): - def setUp(self): - self.func = nx.jaccard_coefficient - self.test = partial(_test_func, predict_func=self.func) +class TestJaccardCoefficient: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.jaccard_coefficient) + cls.test = partial(_test_func, predict_func=cls.func) def test_K5(self): G = nx.complete_graph(5) @@ -79,23 +80,22 @@ def test_P4(self): G = nx.path_graph(4) self.test(G, [(0, 2)], [(0, 2, 0.5)]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) + def test_notimplemented(self): + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_no_common_neighbor(self): G = nx.Graph() @@ -113,10 +113,11 @@ def test_all_nonexistent_edges(self): self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)]) -class TestAdamicAdarIndex(): - def setUp(self): - self.func = nx.adamic_adar_index - self.test = partial(_test_func, predict_func=self.func) +class TestAdamicAdarIndex: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.adamic_adar_index) + cls.test = partial(_test_func, predict_func=cls.func) def test_K5(self): G = nx.complete_graph(5) @@ -130,23 +131,22 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) + def test_notimplemented(self): + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_no_common_neighbor(self): G = nx.Graph() @@ -160,14 +160,16 @@ def test_equal_nodes(self): def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - self.test(G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), - (1, 3, 0)]) + self.test( + G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)] + ) -class TestPreferentialAttachment(): - def setUp(self): - self.func = nx.preferential_attachment - self.test = partial(_test_func, predict_func=self.func) +class TestPreferentialAttachment: + @classmethod + def setup_class(cls): + cls.func = 
staticmethod(nx.preferential_attachment) + cls.test = partial(_test_func, predict_func=cls.func) def test_K5(self): G = nx.complete_graph(5) @@ -181,23 +183,22 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(0, 2)], [(0, 2, 4)]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - self.func(G, [(0, 2)]) + def test_notimplemented(self): + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_zero_degrees(self): G = nx.Graph() @@ -210,394 +211,332 @@ def test_all_nonexistent_edges(self): self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)]) -class TestCNSoundarajanHopcroft(): - def setUp(self): - self.func = nx.cn_soundarajan_hopcroft - self.test = partial(_test_func, predict_func=self.func, - community='community') +class TestCNSoundarajanHopcroft: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.cn_soundarajan_hopcroft) + cls.test = partial(_test_func, predict_func=cls.func, community="community") def test_K5(self): G = nx.complete_graph(5) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 self.test(G, [(0, 1)], [(0, 1, 5)]) def test_P3(self): G = nx.path_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 self.test(G, [(0, 2)], [(0, 2, 1)]) def test_S4(self): G = nx.star_graph(4) - G.nodes[0]['community'] = 1 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 1 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 2)]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) + def test_notimplemented(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + G = 
nx.MultiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + G = nx.MultiDiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 self.test(G, [(0, 1)], [(0, 1, 0)]) def test_equal_nodes(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 self.test(G, [(0, 0)], [(0, 0, 4)]) def test_different_community(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 self.test(G, [(0, 3)], [(0, 3, 2)]) - @raises(nx.NetworkXAlgorithmError) def test_no_community_information(self): G = nx.complete_graph(5) - list(self.func(G, [(0, 1)])) + assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)])) - @raises(nx.NetworkXAlgorithmError) def test_insufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[3]['community'] = 0 - list(self.func(G, [(0, 3)])) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) def test_sufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 4)], [(1, 4, 4)]) def test_custom_community_attribute_name(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['cmty'] = 0 - G.nodes[1]['cmty'] = 0 - G.nodes[2]['cmty'] = 0 - G.nodes[3]['cmty'] = 1 - self.test(G, [(0, 3)], [(0, 3, 2)], community='cmty') + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 1 + self.test(G, [(0, 3)], [(0, 3, 2)], community="cmty") def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, None, [(0, 3, 2), (1, 2, 1), (1, 3, 0)]) -class TestRAIndexSoundarajanHopcroft(): - def setUp(self): - self.func = nx.ra_index_soundarajan_hopcroft - self.test = partial(_test_func, predict_func=self.func, - community='community') +class TestRAIndexSoundarajanHopcroft: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.ra_index_soundarajan_hopcroft) + cls.test = partial(_test_func, predict_func=cls.func, community="community") def test_K5(self): G = nx.complete_graph(5) - G.nodes[0]['community'] = 0 - 
G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 self.test(G, [(0, 1)], [(0, 1, 0.5)]) def test_P3(self): G = nx.path_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 self.test(G, [(0, 2)], [(0, 2, 0)]) def test_S4(self): G = nx.star_graph(4) - G.nodes[0]['community'] = 1 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 1 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 0.25)]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) + def test_notimplemented(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + G = nx.MultiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + G = nx.MultiDiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 self.test(G, [(0, 1)], [(0, 1, 0)]) def test_equal_nodes(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 self.test(G, [(0, 0)], [(0, 0, 1)]) def test_different_community(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 self.test(G, [(0, 3)], [(0, 3, 0)]) - @raises(nx.NetworkXAlgorithmError) def test_no_community_information(self): G = nx.complete_graph(5) - list(self.func(G, [(0, 1)])) + assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)])) - @raises(nx.NetworkXAlgorithmError) def test_insufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[3]['community'] = 0 - list(self.func(G, [(0, 3)])) + G.nodes[0]["community"] = 
0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) def test_sufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 4)], [(1, 4, 1)]) def test_custom_community_attribute_name(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['cmty'] = 0 - G.nodes[1]['cmty'] = 0 - G.nodes[2]['cmty'] = 0 - G.nodes[3]['cmty'] = 1 - self.test(G, [(0, 3)], [(0, 3, 0)], community='cmty') + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 1 + self.test(G, [(0, 3)], [(0, 3, 0)], community="cmty") def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, None, [(0, 3, 0.5), (1, 2, 0), (1, 3, 0)]) -class TestWithinInterCluster(): - def setUp(self): - self.delta = 0.001 - self.func = nx.within_inter_cluster - self.test = partial(_test_func, predict_func=self.func, - delta=self.delta, community='community') +class TestWithinInterCluster: + @classmethod + def setup_class(cls): + cls.delta = 0.001 + cls.func = staticmethod(nx.within_inter_cluster) + cls.test = partial( + _test_func, predict_func=cls.func, delta=cls.delta, community="community" + ) def test_K5(self): G = nx.complete_graph(5) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 self.test(G, [(0, 1)], [(0, 1, 2 / (1 + self.delta))]) def test_P3(self): G = nx.path_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 self.test(G, [(0, 2)], [(0, 2, 0)]) def test_S4(self): G = nx.star_graph(4) - G.nodes[0]['community'] = 1 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 1 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)]) - @raises(nx.NetworkXNotImplemented) - def test_digraph(self): - G = nx.DiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multigraph(self): - G = nx.MultiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) - - @raises(nx.NetworkXNotImplemented) - def test_multidigraph(self): - G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (1, 2)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - 
G.nodes[2]['community'] = 0 - self.func(G, [(0, 2)]) + def test_notimplemented(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + G = nx.MultiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + G = nx.MultiDiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 self.test(G, [(0, 1)], [(0, 1, 0)]) def test_equal_nodes(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 self.test(G, [(0, 0)], [(0, 0, 2 / self.delta)]) def test_different_community(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 self.test(G, [(0, 3)], [(0, 3, 0)]) def test_no_inter_cluster_common_neighbor(self): G = nx.complete_graph(4) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)]) - @raises(nx.NetworkXAlgorithmError) def test_no_community_information(self): G = nx.complete_graph(5) - list(self.func(G, [(0, 1)])) + assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)])) - @raises(nx.NetworkXAlgorithmError) def test_insufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[3]['community'] = 0 - list(self.func(G, [(0, 3)])) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) def test_sufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 4)], [(1, 4, 2 / self.delta)]) - @raises(nx.NetworkXAlgorithmError) - def test_zero_delta(self): - G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - list(self.func(G, [(0, 1)], 0)) - - @raises(nx.NetworkXAlgorithmError) - def test_negative_delta(self): + def test_invalid_delta(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - list(self.func(G, [(0, 1)], -0.5)) + G.add_nodes_from([0, 1, 2], community=0) + assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], 0) + assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], 
-0.5) def test_custom_community_attribute_name(self): G = nx.complete_graph(4) - G.nodes[0]['cmty'] = 0 - G.nodes[1]['cmty'] = 0 - G.nodes[2]['cmty'] = 0 - G.nodes[3]['cmty'] = 0 - self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community='cmty') + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 0 + self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community="cmty") def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, None, [(0, 3, 1 / self.delta), (1, 2, 0), (1, 3, 0)]) diff --git a/networkx/algorithms/tests/test_lowest_common_ancestors.py b/networkx/algorithms/tests/test_lowest_common_ancestors.py index 3d984b3..fb09e1d 100644 --- a/networkx/algorithms/tests/test_lowest_common_ancestors.py +++ b/networkx/algorithms/tests/test_lowest_common_ancestors.py @@ -1,4 +1,4 @@ -from nose.tools import * +import pytest from itertools import chain, combinations, product import networkx as nx @@ -14,52 +14,56 @@ def get_pair(dictionary, n1, n2): return dictionary[n2, n1] -class TestTreeLCA(object): - def setUp(self): - self.DG = nx.DiGraph() +class TestTreeLCA: + @classmethod + def setup_class(cls): + cls.DG = nx.DiGraph() edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)] - self.DG.add_edges_from(edges) - self.ans = dict(tree_all_pairs_lca(self.DG, 0)) - gold = dict([((n, n), n) for n in self.DG]) - gold.update(dict(((0, i), 0) for i in range(1, 7))) - gold.update({(1, 2): 0, - (1, 3): 1, - (1, 4): 1, - (1, 5): 0, - (1, 6): 0, - (2, 3): 0, - (2, 4): 0, - (2, 5): 2, - (2, 6): 2, - (3, 4): 1, - (3, 5): 0, - (3, 6): 0, - (4, 5): 0, - (4, 6): 0, - (5, 6): 2}) - - self.gold = gold + cls.DG.add_edges_from(edges) + cls.ans = dict(tree_all_pairs_lca(cls.DG, 0)) + gold = {(n, n): n for n in cls.DG} + gold.update({(0, i): 0 for i in range(1, 7)}) + gold.update( + { + (1, 2): 0, + (1, 3): 1, + (1, 4): 1, + (1, 5): 0, + (1, 6): 0, + (2, 3): 0, + (2, 4): 0, + (2, 5): 2, + (2, 6): 2, + (3, 4): 1, + (3, 5): 0, + (3, 6): 0, + (4, 5): 0, + (4, 6): 0, + (5, 6): 2, + } + ) + + cls.gold = gold @staticmethod def assert_has_same_pairs(d1, d2): for (a, b) in ((min(pair), max(pair)) for pair in chain(d1, d2)): - assert_equal(get_pair(d1, a, b), get_pair(d2, a, b)) + assert get_pair(d1, a, b) == get_pair(d2, a, b) def test_tree_all_pairs_lowest_common_ancestor1(self): """Specifying the root is optional.""" - assert_equal(dict(tree_all_pairs_lca(self.DG)), self.ans) + assert dict(tree_all_pairs_lca(self.DG)) == self.ans def test_tree_all_pairs_lowest_common_ancestor2(self): """Specifying only some pairs gives only those pairs.""" test_pairs = [(0, 1), (0, 1), (1, 0)] ans = dict(tree_all_pairs_lca(self.DG, 0, test_pairs)) - assert_true((0, 1) in ans and (1, 0) in ans) - assert_equal(len(ans), 2) + assert (0, 1) in ans and (1, 0) in ans + assert len(ans) == 2 def test_tree_all_pairs_lowest_common_ancestor3(self): """Specifying no pairs same as specifying all.""" - all_pairs = chain(combinations(self.DG, 2), - ((node, node) for node in self.DG)) + all_pairs = chain(combinations(self.DG, 2), ((node, node) for node in self.DG)) ans = dict(tree_all_pairs_lca(self.DG, 0, all_pairs)) self.assert_has_same_pairs(ans, self.ans) @@ -72,135 +76,142 @@ def 
test_tree_all_pairs_lowest_common_ancestor4(self): def test_tree_all_pairs_lowest_common_ancestor5(self): """Handles invalid input correctly.""" empty_digraph = tree_all_pairs_lca(nx.DiGraph()) - assert_raises(nx.NetworkXPointlessConcept, list, empty_digraph) + pytest.raises(nx.NetworkXPointlessConcept, list, empty_digraph) bad_pairs_digraph = tree_all_pairs_lca(self.DG, pairs=[(-1, -2)]) - assert_raises(nx.NodeNotFound, list, bad_pairs_digraph) + pytest.raises(nx.NodeNotFound, list, bad_pairs_digraph) def test_tree_all_pairs_lowest_common_ancestor6(self): """Works on subtrees.""" ans = dict(tree_all_pairs_lca(self.DG, 1)) - gold = dict((pair, lca) for (pair, lca) in self.gold.items() - if all(n in (1, 3, 4) for n in pair)) + gold = { + pair: lca + for (pair, lca) in self.gold.items() + if all(n in (1, 3, 4) for n in pair) + } self.assert_has_same_pairs(gold, ans) def test_tree_all_pairs_lowest_common_ancestor7(self): """Works on disconnected nodes.""" G = nx.DiGraph() G.add_node(1) - assert_equal({(1, 1): 1}, dict(tree_all_pairs_lca(G))) + assert {(1, 1): 1} == dict(tree_all_pairs_lca(G)) G.add_node(0) - assert_equal({(1, 1): 1}, dict(tree_all_pairs_lca(G, 1))) - assert_equal({(0, 0): 0}, dict(tree_all_pairs_lca(G, 0))) + assert {(1, 1): 1} == dict(tree_all_pairs_lca(G, 1)) + assert {(0, 0): 0} == dict(tree_all_pairs_lca(G, 0)) - assert_raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) def test_tree_all_pairs_lowest_common_ancestor8(self): """Raises right errors if not a tree.""" # Cycle G = nx.DiGraph([(1, 2), (2, 1)]) - assert_raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) # DAG G = nx.DiGraph([(0, 2), (1, 2)]) - assert_raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) def test_tree_all_pairs_lowest_common_ancestor9(self): """Test that pairs works correctly as a generator.""" pairs = iter([(0, 1), (0, 1), (1, 0)]) some_pairs = dict(tree_all_pairs_lca(self.DG, 0, pairs)) - assert_true((0, 1) in some_pairs and (1, 0) in some_pairs) - assert_equal(len(some_pairs), 2) + assert (0, 1) in some_pairs and (1, 0) in some_pairs + assert len(some_pairs) == 2 def test_tree_all_pairs_lowest_common_ancestor10(self): """Test that pairs not in the graph raises error.""" lca = tree_all_pairs_lca(self.DG, 0, [(-1, -1)]) - assert_raises(nx.NodeNotFound, list, lca) + pytest.raises(nx.NodeNotFound, list, lca) def test_tree_all_pairs_lowest_common_ancestor11(self): """Test that None as a node in the graph raises an error.""" G = nx.DiGraph([(None, 3)]) - assert_raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) - assert_raises(nx.NodeNotFound, list, - tree_all_pairs_lca(self.DG, pairs=G.edges())) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + pytest.raises( + nx.NodeNotFound, list, tree_all_pairs_lca(self.DG, pairs=G.edges()) + ) def test_tree_all_pairs_lowest_common_ancestor12(self): """Test that tree routine bails on DAGs.""" G = nx.DiGraph([(3, 4), (5, 4)]) - assert_raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) def test_not_implemented_for(self): NNI = nx.NetworkXNotImplemented G = nx.Graph([(0, 1)]) - assert_raises(NNI, tree_all_pairs_lca, G) - assert_raises(NNI, all_pairs_lca, G) - assert_raises(NNI, nx.lowest_common_ancestor, G, 0, 1) + pytest.raises(NNI, tree_all_pairs_lca, G) + pytest.raises(NNI, 
all_pairs_lca, G) + pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1) G = nx.MultiGraph([(0, 1)]) - assert_raises(NNI, tree_all_pairs_lca, G) - assert_raises(NNI, all_pairs_lca, G) - assert_raises(NNI, nx.lowest_common_ancestor, G, 0, 1) + pytest.raises(NNI, tree_all_pairs_lca, G) + pytest.raises(NNI, all_pairs_lca, G) + pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1) G = nx.MultiDiGraph([(0, 1)]) - assert_raises(NNI, tree_all_pairs_lca, G) - assert_raises(NNI, all_pairs_lca, G) - assert_raises(NNI, nx.lowest_common_ancestor, G, 0, 1) + pytest.raises(NNI, tree_all_pairs_lca, G) + pytest.raises(NNI, all_pairs_lca, G) + pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1) def test_tree_all_pairs_lowest_common_ancestor13(self): """Test that it works on non-empty trees with no LCAs.""" G = nx.DiGraph() G.add_node(3) ans = list(tree_all_pairs_lca(G)) - assert_equal(ans, [((3, 3), 3)]) + assert ans == [((3, 3), 3)] class TestDAGLCA: - def setUp(self): - self.DG = nx.DiGraph() - nx.add_path(self.DG, (0, 1, 2, 3)) - nx.add_path(self.DG, (0, 4, 3)) - nx.add_path(self.DG, (0, 5, 6, 8, 3)) - nx.add_path(self.DG, (5, 7, 8)) - self.DG.add_edge(6, 2) - self.DG.add_edge(7, 2) - - self.root_distance = nx.shortest_path_length(self.DG, source=0) - - self.gold = {(1, 1): 1, - (1, 2): 1, - (1, 3): 1, - (1, 4): 0, - (1, 5): 0, - (1, 6): 0, - (1, 7): 0, - (1, 8): 0, - (2, 2): 2, - (2, 3): 2, - (2, 4): 0, - (2, 5): 5, - (2, 6): 6, - (2, 7): 7, - (2, 8): 7, - (3, 3): 8, - (3, 4): 4, - (3, 5): 5, - (3, 6): 6, - (3, 7): 7, - (3, 8): 8, - (4, 4): 4, - (4, 5): 0, - (4, 6): 0, - (4, 7): 0, - (4, 8): 0, - (5, 5): 5, - (5, 6): 5, - (5, 7): 5, - (5, 8): 5, - (6, 6): 6, - (6, 7): 5, - (6, 8): 6, - (7, 7): 7, - (7, 8): 7, - (8, 8): 8} - self.gold.update(((0, n), 0) for n in self.DG) + @classmethod + def setup_class(cls): + cls.DG = nx.DiGraph() + nx.add_path(cls.DG, (0, 1, 2, 3)) + nx.add_path(cls.DG, (0, 4, 3)) + nx.add_path(cls.DG, (0, 5, 6, 8, 3)) + nx.add_path(cls.DG, (5, 7, 8)) + cls.DG.add_edge(6, 2) + cls.DG.add_edge(7, 2) + + cls.root_distance = nx.shortest_path_length(cls.DG, source=0) + + cls.gold = { + (1, 1): 1, + (1, 2): 1, + (1, 3): 1, + (1, 4): 0, + (1, 5): 0, + (1, 6): 0, + (1, 7): 0, + (1, 8): 0, + (2, 2): 2, + (2, 3): 2, + (2, 4): 0, + (2, 5): 5, + (2, 6): 6, + (2, 7): 7, + (2, 8): 7, + (3, 3): 8, + (3, 4): 4, + (3, 5): 5, + (3, 6): 6, + (3, 7): 7, + (3, 8): 8, + (4, 4): 4, + (4, 5): 0, + (4, 6): 0, + (4, 7): 0, + (4, 8): 0, + (5, 5): 5, + (5, 6): 5, + (5, 7): 5, + (5, 8): 5, + (6, 6): 6, + (6, 7): 5, + (6, 8): 6, + (7, 7): 7, + (7, 8): 7, + (8, 8): 8, + } + cls.gold.update(((0, n), 0) for n in cls.DG) def assert_lca_dicts_same(self, d1, d2, G=None): """Checks if d1 and d2 contain the same pairs and @@ -211,12 +222,13 @@ def assert_lca_dicts_same(self, d1, d2, G=None): root_distance = self.root_distance else: roots = [n for n, deg in G.in_degree if deg == 0] - assert(len(roots) == 1) + assert len(roots) == 1 root_distance = nx.shortest_path_length(G, source=roots[0]) for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)): - assert_equal(root_distance[get_pair(d1, a, b)], - root_distance[get_pair(d2, a, b)]) + assert ( + root_distance[get_pair(d1, a, b)] == root_distance[get_pair(d2, a, b)] + ) def test_all_pairs_lowest_common_ancestor1(self): """Produces the correct results.""" @@ -256,49 +268,44 @@ def test_all_pairs_lowest_common_ancestor4(self): def test_all_pairs_lowest_common_ancestor5(self): """Test that pairs not in the graph raises error.""" - assert_raises(nx.NodeNotFound, 
all_pairs_lca, self.DG, [(-1, -1)]) + pytest.raises(nx.NodeNotFound, all_pairs_lca, self.DG, [(-1, -1)]) def test_all_pairs_lowest_common_ancestor6(self): """Test that pairs with no LCA specified emits nothing.""" G = self.DG.copy() G.add_node(-1) gen = all_pairs_lca(G, [(-1, -1), (-1, 0)]) - assert_equal(dict(gen), {(-1, -1): -1}) + assert dict(gen) == {(-1, -1): -1} def test_all_pairs_lowest_common_ancestor7(self): """Test that LCA on null graph bails.""" - assert_raises(nx.NetworkXPointlessConcept, - all_pairs_lca, - nx.DiGraph()) + pytest.raises(nx.NetworkXPointlessConcept, all_pairs_lca, nx.DiGraph()) def test_all_pairs_lowest_common_ancestor8(self): """Test that LCA on non-dags bails.""" - assert_raises(nx.NetworkXError, all_pairs_lca, - nx.DiGraph([(3, 4), (4, 3)])) + pytest.raises(nx.NetworkXError, all_pairs_lca, nx.DiGraph([(3, 4), (4, 3)])) def test_all_pairs_lowest_common_ancestor9(self): """Test that it works on non-empty graphs with no LCAs.""" G = nx.DiGraph() G.add_node(3) ans = list(all_pairs_lca(G)) - assert_equal(ans, [((3, 3), 3)]) + assert ans == [((3, 3), 3)] def test_all_pairs_lowest_common_ancestor10(self): """Test that it bails on None as a node.""" G = nx.DiGraph([(None, 3)]) - assert_raises(nx.NetworkXError, all_pairs_lca, G) - assert_raises(nx.NodeNotFound, all_pairs_lca, - self.DG, pairs=G.edges()) + pytest.raises(nx.NetworkXError, all_pairs_lca, G) + pytest.raises(nx.NodeNotFound, all_pairs_lca, self.DG, pairs=G.edges()) def test_lowest_common_ancestor1(self): """Test that the one-pair function works on default.""" G = nx.DiGraph([(0, 1), (2, 1)]) sentinel = object() - assert_is(nx.lowest_common_ancestor(G, 0, 2, default=sentinel), - sentinel) + assert nx.lowest_common_ancestor(G, 0, 2, default=sentinel) is sentinel def test_lowest_common_ancestor2(self): """Test that the one-pair function works on identity.""" G = nx.DiGraph() G.add_node(3) - assert_equal(nx.lowest_common_ancestor(G, 3, 3), 3) + assert nx.lowest_common_ancestor(G, 3, 3) == 3 diff --git a/networkx/algorithms/tests/test_matching.py b/networkx/algorithms/tests/test_matching.py index e7da353..5886ed6 100644 --- a/networkx/algorithms/tests/test_matching.py +++ b/networkx/algorithms/tests/test_matching.py @@ -1,15 +1,12 @@ from itertools import permutations import math -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true import networkx as nx from networkx.algorithms.matching import matching_dict_to_set from networkx.testing import assert_edges_equal -class TestMaxWeightMatching(object): +class TestMaxWeightMatching: """Unit tests for the :func:`~networkx.algorithms.matching.max_weight_matching` function. 
@@ -18,28 +15,31 @@ class TestMaxWeightMatching(object): def test_trivial1(self): """Empty graph""" G = nx.Graph() - assert_equal(nx.max_weight_matching(G), set()) + assert nx.max_weight_matching(G) == set() def test_trivial2(self): """Self loop""" G = nx.Graph() G.add_edge(0, 0, weight=100) - assert_equal(nx.max_weight_matching(G), set()) + assert nx.max_weight_matching(G) == set() def test_trivial3(self): """Single edge""" G = nx.Graph() G.add_edge(0, 1) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({0: 1, 1: 0})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({0: 1, 1: 0}) + ) def test_trivial4(self): """Small graph""" G = nx.Graph() - G.add_edge('one', 'two', weight=10) - G.add_edge('two', 'three', weight=11) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({'three': 'two', 'two': 'three'})) + G.add_edge("one", "two", weight=10) + G.add_edge("two", "three", weight=11) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({"three": "two", "two": "three"}), + ) def test_trivial5(self): """Path""" @@ -47,18 +47,22 @@ def test_trivial5(self): G.add_edge(1, 2, weight=5) G.add_edge(2, 3, weight=11) G.add_edge(3, 4, weight=5) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({2: 3, 3: 2})) - assert_edges_equal(nx.max_weight_matching(G, 1), - matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({2: 3, 3: 2}) + ) + assert_edges_equal( + nx.max_weight_matching(G, 1), matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}) + ) def test_trivial6(self): """Small graph with arbitrary weight attribute""" G = nx.Graph() - G.add_edge('one', 'two', weight=10, abcd=11) - G.add_edge('two', 'three', weight=11, abcd=10) - assert_edges_equal(nx.max_weight_matching(G, weight='abcd'), - matching_dict_to_set({'one': 'two', 'two': 'one'})) + G.add_edge("one", "two", weight=10, abcd=11) + G.add_edge("two", "three", weight=11, abcd=10) + assert_edges_equal( + nx.max_weight_matching(G, weight="abcd"), + matching_dict_to_set({"one": "two", "two": "one"}), + ) def test_floating_point_weights(self): """Floating point weights""" @@ -67,8 +71,9 @@ def test_floating_point_weights(self): G.add_edge(2, 3, weight=math.exp(1)) G.add_edge(1, 3, weight=3.0) G.add_edge(1, 4, weight=math.sqrt(2.0)) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1}) + ) def test_negative_weights(self): """Negative weights""" @@ -78,150 +83,299 @@ def test_negative_weights(self): G.add_edge(2, 3, weight=1) G.add_edge(2, 4, weight=-1) G.add_edge(3, 4, weight=-6) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1})) - assert_edges_equal(nx.max_weight_matching(G, 1), - matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({1: 2, 2: 1}) + ) + assert_edges_equal( + nx.max_weight_matching(G, 1), matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}) + ) def test_s_blossom(self): """Create S-blossom and use it for augmentation:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), - (2, 3, 10), (3, 4, 7)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})) + G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), (2, 3, 10), (3, 4, 7)]) + assert_edges_equal( + nx.max_weight_matching(G), 
matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}) + ) G.add_weighted_edges_from([(1, 6, 5), (4, 5, 6)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}), + ) def test_s_t_blossom(self): """Create S-blossom, relabel as T-blossom, use for augmentation:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 9), (1, 3, 8), (2, 3, 10), - (1, 4, 5), (4, 5, 4), (1, 6, 3)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})) + G.add_weighted_edges_from( + [(1, 2, 9), (1, 3, 8), (2, 3, 10), (1, 4, 5), (4, 5, 4), (1, 6, 3)] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}), + ) G.add_edge(4, 5, weight=3) G.add_edge(1, 6, weight=4) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}), + ) G.remove_edge(1, 6) G.add_edge(3, 6, weight=4) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3})) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3}), + ) def test_nested_s_blossom(self): """Create nested S-blossom, use for augmentation:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 9), (1, 3, 9), (2, 3, 10), - (2, 4, 8), (3, 5, 8), (4, 5, 10), - (5, 6, 6)]) - assert_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2, 5: 6, 6: 5})) + G.add_weighted_edges_from( + [ + (1, 2, 9), + (1, 3, 9), + (2, 3, 10), + (2, 4, 8), + (3, 5, 8), + (4, 5, 10), + (5, 6, 6), + ] + ) + dict_format = {1: 3, 2: 4, 3: 1, 4: 2, 5: 6, 6: 5} + expected = {frozenset(e) for e in matching_dict_to_set(dict_format)} + answer = {frozenset(e) for e in nx.max_weight_matching(G)} + assert answer == expected def test_nested_s_blossom_relabel(self): """Create S-blossom, relabel as S, include in nested S-blossom:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 10), (1, 7, 10), (2, 3, 12), - (3, 4, 20), (3, 5, 20), (4, 5, 25), - (5, 6, 10), (6, 7, 10), (7, 8, 8)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7})) + G.add_weighted_edges_from( + [ + (1, 2, 10), + (1, 7, 10), + (2, 3, 12), + (3, 4, 20), + (3, 5, 20), + (4, 5, 25), + (5, 6, 10), + (6, 7, 10), + (7, 8, 8), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7}), + ) def test_nested_s_blossom_expand(self): """Create nested S-blossom, augment, expand recursively:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 8), (1, 3, 8), (2, 3, 10), - (2, 4, 12), (3, 5, 12), (4, 5, 14), - (4, 6, 12), (5, 7, 12), (6, 7, 14), - (7, 8, 12)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7})) + G.add_weighted_edges_from( + [ + (1, 2, 8), + (1, 3, 8), + (2, 3, 10), + (2, 4, 12), + (3, 5, 12), + (4, 5, 14), + (4, 6, 12), + (5, 7, 12), + (6, 7, 14), + (7, 8, 12), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7}), + ) def test_s_blossom_relabel_expand(self): """Create S-blossom, relabel as T, expand:""" 
G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 23), (1, 5, 22), (1, 6, 15), - (2, 3, 25), (3, 4, 22), (4, 5, 25), - (4, 8, 14), (5, 7, 13)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4})) + G.add_weighted_edges_from( + [ + (1, 2, 23), + (1, 5, 22), + (1, 6, 15), + (2, 3, 25), + (3, 4, 22), + (4, 5, 25), + (4, 8, 14), + (5, 7, 13), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4}), + ) def test_nested_s_blossom_relabel_expand(self): """Create nested S-blossom, relabel as T, expand:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 19), (1, 3, 20), (1, 8, 8), - (2, 3, 25), (2, 4, 18), (3, 5, 18), - (4, 5, 13), (4, 7, 7), (5, 6, 7)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1})) + G.add_weighted_edges_from( + [ + (1, 2, 19), + (1, 3, 20), + (1, 8, 8), + (2, 3, 25), + (2, 4, 18), + (3, 5, 18), + (4, 5, 13), + (4, 7, 7), + (5, 6, 7), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1}), + ) def test_nasty_blossom1(self): """Create blossom, relabel as T in more than one way, expand, augment: """ G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 5, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 50), (1, 6, 30), - (3, 9, 35), (4, 8, 35), (5, 7, 26), - (9, 10, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, - 6: 1, 7: 5, 8: 4, 9: 10, 10: 9})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 35), + (5, 7, 26), + (9, 10, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9} + ), + ) def test_nasty_blossom2(self): """Again but slightly different:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 5, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 50), (1, 6, 30), - (3, 9, 35), (4, 8, 26), (5, 7, 40), - (9, 10, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, - 6: 1, 7: 5, 8: 4, 9: 10, 10: 9})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 26), + (5, 7, 40), + (9, 10, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9} + ), + ) def test_nasty_blossom_least_slack(self): """Create blossom, relabel as T, expand such that a new least-slack S-to-free edge is produced, augment: """ G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 5, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 50), (1, 6, 30), - (3, 9, 35), (4, 8, 28), (5, 7, 26), - (9, 10, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, - 6: 1, 7: 5, 8: 4, 9: 10, 10: 9})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 28), + (5, 7, 26), + (9, 10, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9} + ), + ) def test_nasty_blossom_augmenting(self): """Create nested blossom, relabel as T in more than
one way""" # expand outer blossom such that inner blossom ends up on an # augmenting path: G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 7, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 95), (4, 6, 94), - (5, 6, 94), (6, 7, 50), (1, 8, 30), - (3, 11, 35), (5, 9, 36), (7, 10, 26), - (11, 12, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 6, 5: 9, 6: 4, - 7: 10, 8: 1, 9: 5, 10: 7, 11: 12, 12: 11})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 7, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 95), + (4, 6, 94), + (5, 6, 94), + (6, 7, 50), + (1, 8, 30), + (3, 11, 35), + (5, 9, 36), + (7, 10, 26), + (11, 12, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + { + 1: 8, + 2: 3, + 3: 2, + 4: 6, + 5: 9, + 6: 4, + 7: 10, + 8: 1, + 9: 5, + 10: 7, + 11: 12, + 12: 11, + } + ), + ) def test_nasty_blossom_expand_recursively(self): """Create nested S-blossom, relabel as S, expand recursively:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 40), (1, 3, 40), (2, 3, 60), - (2, 4, 55), (3, 5, 55), (4, 5, 50), - (1, 8, 15), (5, 7, 30), (7, 6, 10), - (8, 10, 10), (4, 9, 30)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 9, 5: 3, - 6: 7, 7: 6, 8: 10, 9: 4, 10: 8})) - - -class TestIsMatching(object): + G.add_weighted_edges_from( + [ + (1, 2, 40), + (1, 3, 40), + (2, 3, 60), + (2, 4, 55), + (3, 5, 55), + (4, 5, 50), + (1, 8, 15), + (5, 7, 30), + (7, 6, 10), + (8, 10, 10), + (4, 9, 30), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 2, 2: 1, 3: 5, 4: 9, 5: 3, 6: 7, 7: 6, 8: 10, 9: 4, 10: 8} + ), + ) + + +class TestIsMatching: """Unit tests for the :func:`~networkx.algorithms.matching.is_matching` function. @@ -229,33 +383,33 @@ class TestIsMatching(object): def test_dict(self): G = nx.path_graph(4) - assert_true(nx.is_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})) + assert nx.is_matching(G, {0: 1, 1: 0, 2: 3, 3: 2}) def test_empty_matching(self): G = nx.path_graph(4) - assert_true(nx.is_matching(G, set())) + assert nx.is_matching(G, set()) def test_single_edge(self): G = nx.path_graph(4) - assert_true(nx.is_matching(G, {(1, 2)})) + assert nx.is_matching(G, {(1, 2)}) def test_edge_order(self): G = nx.path_graph(4) - assert_true(nx.is_matching(G, {(0, 1), (2, 3)})) - assert_true(nx.is_matching(G, {(1, 0), (2, 3)})) - assert_true(nx.is_matching(G, {(0, 1), (3, 2)})) - assert_true(nx.is_matching(G, {(1, 0), (3, 2)})) + assert nx.is_matching(G, {(0, 1), (2, 3)}) + assert nx.is_matching(G, {(1, 0), (2, 3)}) + assert nx.is_matching(G, {(0, 1), (3, 2)}) + assert nx.is_matching(G, {(1, 0), (3, 2)}) def test_valid(self): G = nx.path_graph(4) - assert_true(nx.is_matching(G, {(0, 1), (2, 3)})) + assert nx.is_matching(G, {(0, 1), (2, 3)}) def test_invalid(self): G = nx.path_graph(4) - assert_false(nx.is_matching(G, {(0, 1), (1, 2), (2, 3)})) + assert not nx.is_matching(G, {(0, 1), (1, 2), (2, 3)}) -class TestIsMaximalMatching(object): +class TestIsMaximalMatching: """Unit tests for the :func:`~networkx.algorithms.matching.is_maximal_matching` function. 
@@ -263,22 +417,56 @@ class TestIsMaximalMatching(object): def test_dict(self): G = nx.path_graph(4) - assert_true(nx.is_maximal_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})) + assert nx.is_maximal_matching(G, {0: 1, 1: 0, 2: 3, 3: 2}) def test_valid(self): G = nx.path_graph(4) - assert_true(nx.is_maximal_matching(G, {(0, 1), (2, 3)})) + assert nx.is_maximal_matching(G, {(0, 1), (2, 3)}) def test_not_matching(self): G = nx.path_graph(4) - assert_false(nx.is_maximal_matching(G, {(0, 1), (1, 2), (2, 3)})) + assert not nx.is_maximal_matching(G, {(0, 1), (1, 2), (2, 3)}) def test_not_maximal(self): G = nx.path_graph(4) - assert_false(nx.is_maximal_matching(G, {(0, 1)})) + assert not nx.is_maximal_matching(G, {(0, 1)}) + + +class TestIsPerfectMatching: + """Unit tests for the + :func:`~networkx.algorithms.matching.is_perfect_matching` function. + + """ + + def test_dict(self): + G = nx.path_graph(4) + assert nx.is_perfect_matching(G, {0: 1, 1: 0, 2: 3, 3: 2}) + + def test_valid(self): + G = nx.path_graph(4) + assert nx.is_perfect_matching(G, {(0, 1), (2, 3)}) + + def test_valid_not_path(self): + G = nx.cycle_graph(4) + G.add_edge(0, 4) + G.add_edge(1, 4) + G.add_edge(5, 2) + + assert nx.is_perfect_matching(G, {(1, 4), (0, 3), (5, 2)}) + + def test_not_matching(self): + G = nx.path_graph(4) + assert not nx.is_perfect_matching(G, {(0, 1), (1, 2), (2, 3)}) + + def test_maximal_but_not_perfect(self): + G = nx.cycle_graph(4) + G.add_edge(0, 4) + G.add_edge(1, 4) + + assert not nx.is_perfect_matching(G, {(1, 4), (0, 3)}) -class TestMaximalMatching(object): +class TestMaximalMatching: """Unit tests for the :func:`~networkx.algorithms.matching.maximal_matching`. @@ -288,24 +476,24 @@ def test_valid_matching(self): edges = [(1, 2), (1, 5), (2, 3), (2, 5), (3, 4), (3, 6), (5, 6)] G = nx.Graph(edges) matching = nx.maximal_matching(G) - assert_true(nx.is_maximal_matching(G, matching)) + assert nx.is_maximal_matching(G, matching) def test_single_edge_matching(self): # In the star graph, any maximal matching has just one edge. G = nx.star_graph(5) matching = nx.maximal_matching(G) - assert_equal(1, len(matching)) - assert_true(nx.is_maximal_matching(G, matching)) + assert 1 == len(matching) + assert nx.is_maximal_matching(G, matching) def test_self_loops(self): # Create the path graph with two self-loops. G = nx.path_graph(3) G.add_edges_from([(0, 0), (1, 1)]) matching = nx.maximal_matching(G) - assert_equal(len(matching), 1) + assert len(matching) == 1 # The matching should never include self-loops. - assert_false(any(u == v for u, v in matching)) - assert_true(nx.is_maximal_matching(G, matching)) + assert not any(u == v for u, v in matching) + assert nx.is_maximal_matching(G, matching) def test_ordering(self): """Tests that a maximal matching is computed correctly @@ -317,5 +505,5 @@ def test_ordering(self): G.add_nodes_from(nodes) G.add_edges_from([(0, 1), (0, 2)]) matching = nx.maximal_matching(G) - assert_equal(len(matching), 1) - assert_true(nx.is_maximal_matching(G, matching)) + assert len(matching) == 1 + assert nx.is_maximal_matching(G, matching) diff --git a/networkx/algorithms/tests/test_max_weight_clique.py b/networkx/algorithms/tests/test_max_weight_clique.py new file mode 100644 index 0000000..0c7984a --- /dev/null +++ b/networkx/algorithms/tests/test_max_weight_clique.py @@ -0,0 +1,180 @@ +"""Maximum weight clique test suite. 
+ +""" + +import networkx as nx +import pytest + + +class TestMaximumWeightClique: + def test_basic_cases(self): + def check_basic_case(graph_func, expected_weight, weight_accessor): + graph = graph_func() + clique, weight = nx.algorithms.max_weight_clique(graph, weight_accessor) + assert verify_clique( + graph, clique, weight, expected_weight, weight_accessor + ) + + for graph_func, (expected_weight, expected_size) in TEST_CASES.items(): + check_basic_case(graph_func, expected_weight, "weight") + check_basic_case(graph_func, expected_size, None) + + def test_key_error(self): + graph = two_node_graph() + with pytest.raises(KeyError): + nx.algorithms.max_weight_clique(graph, "non-existent-key") + + def test_error_on_non_integer_weight(self): + graph = two_node_graph() + graph.nodes[2]["weight"] = 1.5 + with pytest.raises(ValueError): + nx.algorithms.max_weight_clique(graph) + + def test_unaffected_by_self_loops(self): + graph = two_node_graph() + graph.add_edge(1, 1) + graph.add_edge(2, 2) + clique, weight = nx.algorithms.max_weight_clique(graph, "weight") + assert verify_clique(graph, clique, weight, 30, "weight") + graph = three_node_independent_set() + graph.add_edge(1, 1) + clique, weight = nx.algorithms.max_weight_clique(graph, "weight") + assert verify_clique(graph, clique, weight, 20, "weight") + + def test_30_node_prob(self): + G = nx.Graph() + G.add_nodes_from(range(1, 31)) + for i in range(1, 31): + G.nodes[i]["weight"] = i + 1 + # fmt: off + G.add_edges_from( + [ + (1, 12), (1, 13), (1, 15), (1, 16), (1, 18), (1, 19), (1, 20), + (1, 23), (1, 26), (1, 28), (1, 29), (1, 30), (2, 3), (2, 4), + (2, 5), (2, 8), (2, 9), (2, 10), (2, 14), (2, 17), (2, 18), + (2, 21), (2, 22), (2, 23), (2, 27), (3, 9), (3, 15), (3, 21), + (3, 22), (3, 23), (3, 24), (3, 27), (3, 28), (3, 29), (4, 5), + (4, 6), (4, 8), (4, 21), (4, 22), (4, 23), (4, 26), (4, 28), + (4, 30), (5, 6), (5, 8), (5, 9), (5, 13), (5, 14), (5, 15), + (5, 16), (5, 20), (5, 21), (5, 22), (5, 25), (5, 28), (5, 29), + (6, 7), (6, 8), (6, 13), (6, 17), (6, 18), (6, 19), (6, 24), + (6, 26), (6, 27), (6, 28), (6, 29), (7, 12), (7, 14), (7, 15), + (7, 16), (7, 17), (7, 20), (7, 25), (7, 27), (7, 29), (7, 30), + (8, 10), (8, 15), (8, 16), (8, 18), (8, 20), (8, 22), (8, 24), + (8, 26), (8, 27), (8, 28), (8, 30), (9, 11), (9, 12), (9, 13), + (9, 14), (9, 15), (9, 16), (9, 19), (9, 20), (9, 21), (9, 24), + (9, 30), (10, 12), (10, 15), (10, 18), (10, 19), (10, 20), + (10, 22), (10, 23), (10, 24), (10, 26), (10, 27), (10, 29), + (10, 30), (11, 13), (11, 15), (11, 16), (11, 17), (11, 18), + (11, 19), (11, 20), (11, 22), (11, 29), (11, 30), (12, 14), + (12, 17), (12, 18), (12, 19), (12, 20), (12, 21), (12, 23), + (12, 25), (12, 26), (12, 30), (13, 20), (13, 22), (13, 23), + (13, 24), (13, 30), (14, 16), (14, 20), (14, 21), (14, 22), + (14, 23), (14, 25), (14, 26), (14, 27), (14, 29), (14, 30), + (15, 17), (15, 18), (15, 20), (15, 21), (15, 26), (15, 27), + (15, 28), (16, 17), (16, 18), (16, 19), (16, 20), (16, 21), + (16, 29), (16, 30), (17, 18), (17, 21), (17, 22), (17, 25), + (17, 27), (17, 28), (17, 30), (18, 19), (18, 20), (18, 21), + (18, 22), (18, 23), (18, 24), (19, 20), (19, 22), (19, 23), + (19, 24), (19, 25), (19, 27), (19, 30), (20, 21), (20, 23), + (20, 24), (20, 26), (20, 28), (20, 29), (21, 23), (21, 26), + (21, 27), (21, 29), (22, 24), (22, 25), (22, 26), (22, 29), + (23, 25), (23, 30), (24, 25), (24, 26), (25, 27), (25, 29), + (26, 27), (26, 28), (26, 30), (28, 29), (29, 30), + ] + ) + # fmt: on + clique, weight = 
nx.algorithms.max_weight_clique(G) + assert verify_clique(G, clique, weight, 111, "weight") + + +# ############################ Utility functions ############################ +def verify_clique( + graph, clique, reported_clique_weight, expected_clique_weight, weight_accessor +): + for node1 in clique: + for node2 in clique: + if node1 == node2: + continue + if not graph.has_edge(node1, node2): + return False + + if weight_accessor is None: + clique_weight = len(clique) + else: + clique_weight = sum(graph.nodes[v]["weight"] for v in clique) + + if clique_weight != expected_clique_weight: + return False + if clique_weight != reported_clique_weight: + return False + + return True + + +# ############################ Graph Generation ############################ + + +def empty_graph(): + return nx.Graph() + + +def one_node_graph(): + graph = nx.Graph() + graph.add_nodes_from([1]) + graph.nodes[1]["weight"] = 10 + return graph + + +def two_node_graph(): + graph = nx.Graph() + graph.add_nodes_from([1, 2]) + graph.add_edges_from([(1, 2)]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + return graph + + +def three_node_clique(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3]) + graph.add_edges_from([(1, 2), (1, 3), (2, 3)]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + graph.nodes[3]["weight"] = 5 + return graph + + +def three_node_independent_set(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + graph.nodes[3]["weight"] = 5 + return graph + + +def disconnected(): + graph = nx.Graph() + graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + graph.nodes[3]["weight"] = 5 + graph.nodes[4]["weight"] = 100 + graph.nodes[5]["weight"] = 200 + graph.nodes[6]["weight"] = 50 + return graph + + +# -------------------------------------------------------------------------- +# Basic tests for all strategies +# For each basic graph function, specify expected weight of max weight clique +# and expected size of maximum clique +TEST_CASES = { + empty_graph: (0, 0), + one_node_graph: (10, 1), + two_node_graph: (30, 2), + three_node_clique: (35, 3), + three_node_independent_set: (20, 1), + disconnected: (300, 2), +} diff --git a/networkx/algorithms/tests/test_minors.py b/networkx/algorithms/tests/test_minors.py index 812faf5..2689f94 100644 --- a/networkx/algorithms/tests/test_minors.py +++ b/networkx/algorithms/tests/test_minors.py @@ -1,22 +1,12 @@ -# test_minors.py - unit tests for the minors module -# -# Copyright 2015 Jeffrey Finkelstein . -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.minors` module.""" -from nose.tools import assert_equal -from nose.tools import assert_true -from nose.tools import raises +import pytest import networkx as nx -from networkx.testing.utils import * +from networkx.testing.utils import assert_edges_equal, assert_nodes_equal from networkx.utils import arbitrary_element -class TestQuotient(object): +class TestQuotient: """Unit tests for computing quotient graphs.""" def test_quotient_graph_complete_multipartite(self): @@ -30,13 +20,13 @@ def test_quotient_graph_complete_multipartite(self): # neighbor set. 
def same_neighbors(u, v): - return (u not in G[v] and v not in G[u] and G[u] == G[v]) + return u not in G[v] and v not in G[u] and G[u] == G[v] expected = nx.complete_graph(3) actual = nx.quotient_graph(G, same_neighbors) # It won't take too long to run a graph isomorphism algorithm on such # small graphs. - assert_true(nx.is_isomorphic(expected, actual)) + assert nx.is_isomorphic(expected, actual) def test_quotient_graph_complete_bipartite(self): """Tests that the quotient graph of the complete bipartite graph under @@ -48,13 +38,13 @@ def test_quotient_graph_complete_bipartite(self): # neighbor set. def same_neighbors(u, v): - return (u not in G[v] and v not in G[u] and G[u] == G[v]) + return u not in G[v] and v not in G[u] and G[u] == G[v] expected = nx.complete_graph(2) actual = nx.quotient_graph(G, same_neighbors) # It won't take too long to run a graph isomorphism algorithm on such # small graphs. - assert_true(nx.is_isomorphic(expected, actual)) + assert nx.is_isomorphic(expected, actual) def test_quotient_graph_edge_relation(self): """Tests for specifying an alternate edge relation for the quotient @@ -67,13 +57,13 @@ def identity(u, v): return u == v def same_parity(b, c): - return (arbitrary_element(b) % 2 == arbitrary_element(c) % 2) + return arbitrary_element(b) % 2 == arbitrary_element(c) % 2 actual = nx.quotient_graph(G, identity, same_parity) expected = nx.Graph() expected.add_edges_from([(0, 2), (0, 4), (2, 4)]) expected.add_edge(1, 3) - assert_true(nx.is_isomorphic(actual, expected)) + assert nx.is_isomorphic(actual, expected) def test_condensation_as_quotient(self): """This tests that the condensation of a graph can be viewed as the @@ -83,20 +73,41 @@ def test_condensation_as_quotient(self): """ # This example graph comes from the file `test_strongly_connected.py`. G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 11), (2, 12), (3, 4), (4, 3), - (4, 5), (5, 6), (6, 5), (6, 7), (7, 8), (7, 9), - (7, 10), (8, 9), (9, 7), (10, 6), (11, 2), (11, 4), - (11, 6), (12, 6), (12, 11)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 11), + (2, 12), + (3, 4), + (4, 3), + (4, 5), + (5, 6), + (6, 5), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 9), + (9, 7), + (10, 6), + (11, 2), + (11, 4), + (11, 6), + (12, 6), + (12, 11), + ] + ) scc = list(nx.strongly_connected_components(G)) C = nx.condensation(G, scc) - component_of = C.graph['mapping'] + component_of = C.graph["mapping"] # Two nodes are equivalent if they are in the same connected component. 
def same_component(u, v): return component_of[u] == component_of[v] Q = nx.quotient_graph(G, same_component) - assert_true(nx.is_isomorphic(C, Q)) + assert nx.is_isomorphic(C, Q) def test_path(self): G = nx.path_graph(6) @@ -105,9 +116,9 @@ def test_path(self): assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 1) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 def test_multigraph_path(self): G = nx.MultiGraph(nx.path_graph(6)) @@ -116,9 +127,9 @@ def test_multigraph_path(self): assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 1) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 def test_directed_path(self): G = nx.DiGraph() @@ -128,9 +139,9 @@ def test_directed_path(self): assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 0.5) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 0.5 def test_directed_multigraph_path(self): G = nx.MultiDiGraph() @@ -140,30 +151,30 @@ def test_directed_multigraph_path(self): assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 0.5) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 0.5 - @raises(nx.NetworkXException) def test_overlapping_blocks(self): - G = nx.path_graph(6) - partition = [{0, 1, 2}, {2, 3}, {4, 5}] - nx.quotient_graph(G, partition) + with pytest.raises(nx.NetworkXException): + G = nx.path_graph(6) + partition = [{0, 1, 2}, {2, 3}, {4, 5}] + nx.quotient_graph(G, partition) def test_weighted_path(self): G = nx.path_graph(6) for i in range(5): - G[i][i + 1]['weight'] = i + 1 + G[i][i + 1]["weight"] = i + 1 partition = [{0, 1}, {2, 3}, {4, 5}] M = nx.quotient_graph(G, partition, relabel=True) assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) - assert_equal(M[0][1]['weight'], 2) - assert_equal(M[1][2]['weight'], 4) + assert M[0][1]["weight"] == 2 + assert M[1][2]["weight"] == 4 for n in M: - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 1) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 def test_barbell(self): G = nx.barbell_graph(3, 0) @@ -172,9 +183,9 @@ def test_barbell(self): assert_nodes_equal(M, [0, 1]) assert_edges_equal(M.edges(), [(0, 1)]) for n in M: - assert_equal(M.nodes[n]['nedges'], 3) - assert_equal(M.nodes[n]['nnodes'], 3) - assert_equal(M.nodes[n]['density'], 1) + assert M.nodes[n]["nedges"] == 3 + assert M.nodes[n]["nnodes"] == 3 + assert M.nodes[n]["density"] == 1 def test_barbell_plus(self): G = nx.barbell_graph(3, 0) @@ -184,11 +195,11 @@ def test_barbell_plus(self): M = nx.quotient_graph(G, partition, relabel=True) assert_nodes_equal(M, [0, 1]) assert_edges_equal(M.edges(), [(0, 1)]) - assert_equal(M[0][1]['weight'], 2) + 
assert M[0][1]["weight"] == 2 for n in M: - assert_equal(M.nodes[n]['nedges'], 3) - assert_equal(M.nodes[n]['nnodes'], 3) - assert_equal(M.nodes[n]['density'], 1) + assert M.nodes[n]["nedges"] == 3 + assert M.nodes[n]["nnodes"] == 3 + assert M.nodes[n]["density"] == 1 def test_blockmodel(self): G = nx.path_graph(6) @@ -197,21 +208,20 @@ def test_blockmodel(self): assert_nodes_equal(M.nodes(), [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M.nodes(): - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 1.0) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1.0 def test_multigraph_blockmodel(self): G = nx.MultiGraph(nx.path_graph(6)) partition = [[0, 1], [2, 3], [4, 5]] - M = nx.quotient_graph(G, partition, - create_using=nx.MultiGraph(), relabel=True) + M = nx.quotient_graph(G, partition, create_using=nx.MultiGraph(), relabel=True) assert_nodes_equal(M.nodes(), [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M.nodes(): - assert_equal(M.nodes[n]['nedges'], 1) - assert_equal(M.nodes[n]['nnodes'], 2) - assert_equal(M.nodes[n]['density'], 1.0) + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1.0 def test_quotient_graph_incomplete_partition(self): G = nx.path_graph(6) @@ -226,25 +236,46 @@ def test_quotient_graph_incomplete_partition(self): assert_edges_equal(H.edges(), [(0, 1)]) -class TestContraction(object): +class TestContraction: """Unit tests for node and edge contraction functions.""" def test_undirected_node_contraction(self): """Tests for node contraction in an undirected graph.""" G = nx.cycle_graph(4) actual = nx.contracted_nodes(G, 0, 1) - expected = nx.complete_graph(3) + expected = nx.cycle_graph(3) expected.add_edge(0, 0) - assert_true(nx.is_isomorphic(actual, expected)) + assert nx.is_isomorphic(actual, expected) def test_directed_node_contraction(self): """Tests for node contraction in a directed graph.""" G = nx.DiGraph(nx.cycle_graph(4)) actual = nx.contracted_nodes(G, 0, 1) - expected = nx.DiGraph(nx.complete_graph(3)) + expected = nx.DiGraph(nx.cycle_graph(3)) + expected.add_edge(0, 0) + expected.add_edge(0, 0) + assert nx.is_isomorphic(actual, expected) + + def test_undirected_node_contraction_no_copy(self): + """Tests for node contraction in an undirected graph + by making changes in place.""" + G = nx.cycle_graph(4) + actual = nx.contracted_nodes(G, 0, 1, copy=False) + expected = nx.cycle_graph(3) expected.add_edge(0, 0) + assert nx.is_isomorphic(actual, G) + assert nx.is_isomorphic(actual, expected) + + def test_directed_node_contraction_no_copy(self): + """Tests for node contraction in a directed graph + by making changes in place.""" + G = nx.DiGraph(nx.cycle_graph(4)) + actual = nx.contracted_nodes(G, 0, 1, copy=False) + expected = nx.DiGraph(nx.cycle_graph(3)) expected.add_edge(0, 0) - assert_true(nx.is_isomorphic(actual, expected)) + expected.add_edge(0, 0) + assert nx.is_isomorphic(actual, G) + assert nx.is_isomorphic(actual, expected) def test_create_multigraph(self): """Tests that using a MultiGraph creates multiple edges.""" @@ -280,25 +311,25 @@ def test_node_attributes(self): """Tests that node contraction preserves node attributes.""" G = nx.cycle_graph(4) # Add some data to the two nodes being contracted. 
- G.nodes[0]['foo'] = 'bar' - G.nodes[1]['baz'] = 'xyzzy' + G.nodes[0]["foo"] = "bar" + G.nodes[1]["baz"] = "xyzzy" actual = nx.contracted_nodes(G, 0, 1) # We expect that contracting the nodes 0 and 1 in C_4 yields K_3, but # with nodes labeled 0, 2, and 3, and with a self-loop on 0. expected = nx.complete_graph(3) expected = nx.relabel_nodes(expected, {1: 2, 2: 3}) expected.add_edge(0, 0) - cdict = {1: {'baz': 'xyzzy'}} - expected.nodes[0].update(dict(foo='bar', contraction=cdict)) - assert_true(nx.is_isomorphic(actual, expected)) - assert_equal(actual.nodes, expected.nodes) + cdict = {1: {"baz": "xyzzy"}} + expected.nodes[0].update(dict(foo="bar", contraction=cdict)) + assert nx.is_isomorphic(actual, expected) + assert actual.nodes == expected.nodes def test_without_self_loops(self): """Tests for node contraction without preserving self-loops.""" G = nx.cycle_graph(4) actual = nx.contracted_nodes(G, 0, 1, self_loops=False) expected = nx.complete_graph(3) - assert_true(nx.is_isomorphic(actual, expected)) + assert nx.is_isomorphic(actual, expected) def test_contract_selfloop_graph(self): """Tests for node contraction when nodes have selfloops.""" @@ -321,13 +352,13 @@ def test_undirected_edge_contraction(self): actual = nx.contracted_edge(G, (0, 1)) expected = nx.complete_graph(3) expected.add_edge(0, 0) - assert_true(nx.is_isomorphic(actual, expected)) + assert nx.is_isomorphic(actual, expected) - @raises(ValueError) def test_nonexistent_edge(self): """Tests that attempting to contract a non-existent edge raises an exception. """ - G = nx.cycle_graph(4) - nx.contracted_edge(G, (0, 2)) + with pytest.raises(ValueError): + G = nx.cycle_graph(4) + nx.contracted_edge(G, (0, 2)) diff --git a/networkx/algorithms/tests/test_mis.py b/networkx/algorithms/tests/test_mis.py index d787899..ad94200 100644 --- a/networkx/algorithms/tests/test_mis.py +++ b/networkx/algorithms/tests/test_mis.py @@ -1,84 +1,77 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# $Id: test_maximal_independent_set.py 577 2011-03-01 06:07:53Z lleeoo $ -# Copyright (C) 2004-2018 by -# Leo Lopes -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Leo Lopes """ Tests for maximal (not maximum) independent sets. 
""" -from nose.tools import * +import pytest import networkx as nx import random -class TestMaximalIndependantSet(object): +class TestMaximalIndependantSet: def setup(self): self.florentine = nx.Graph() - self.florentine.add_edge('Acciaiuoli', 'Medici') - self.florentine.add_edge('Castellani', 'Peruzzi') - self.florentine.add_edge('Castellani', 'Strozzi') - self.florentine.add_edge('Castellani', 'Barbadori') - self.florentine.add_edge('Medici', 'Barbadori') - self.florentine.add_edge('Medici', 'Ridolfi') - self.florentine.add_edge('Medici', 'Tornabuoni') - self.florentine.add_edge('Medici', 'Albizzi') - self.florentine.add_edge('Medici', 'Salviati') - self.florentine.add_edge('Salviati', 'Pazzi') - self.florentine.add_edge('Peruzzi', 'Strozzi') - self.florentine.add_edge('Peruzzi', 'Bischeri') - self.florentine.add_edge('Strozzi', 'Ridolfi') - self.florentine.add_edge('Strozzi', 'Bischeri') - self.florentine.add_edge('Ridolfi', 'Tornabuoni') - self.florentine.add_edge('Tornabuoni', 'Guadagni') - self.florentine.add_edge('Albizzi', 'Ginori') - self.florentine.add_edge('Albizzi', 'Guadagni') - self.florentine.add_edge('Bischeri', 'Guadagni') - self.florentine.add_edge('Guadagni', 'Lamberteschi') + self.florentine.add_edge("Acciaiuoli", "Medici") + self.florentine.add_edge("Castellani", "Peruzzi") + self.florentine.add_edge("Castellani", "Strozzi") + self.florentine.add_edge("Castellani", "Barbadori") + self.florentine.add_edge("Medici", "Barbadori") + self.florentine.add_edge("Medici", "Ridolfi") + self.florentine.add_edge("Medici", "Tornabuoni") + self.florentine.add_edge("Medici", "Albizzi") + self.florentine.add_edge("Medici", "Salviati") + self.florentine.add_edge("Salviati", "Pazzi") + self.florentine.add_edge("Peruzzi", "Strozzi") + self.florentine.add_edge("Peruzzi", "Bischeri") + self.florentine.add_edge("Strozzi", "Ridolfi") + self.florentine.add_edge("Strozzi", "Bischeri") + self.florentine.add_edge("Ridolfi", "Tornabuoni") + self.florentine.add_edge("Tornabuoni", "Guadagni") + self.florentine.add_edge("Albizzi", "Ginori") + self.florentine.add_edge("Albizzi", "Guadagni") + self.florentine.add_edge("Bischeri", "Guadagni") + self.florentine.add_edge("Guadagni", "Lamberteschi") + + def test_random_seed(self): + G = nx.complete_graph(5) + for node in G: + assert nx.maximal_independent_set(G, [node], seed=1) == [node] def test_K5(self): """Maximal independent set: K5""" G = nx.complete_graph(5) for node in G: - assert_equal(nx.maximal_independent_set(G, [node]), [node]) + assert nx.maximal_independent_set(G, [node]) == [node] def test_K55(self): """Maximal independent set: K55""" G = nx.complete_graph(55) for node in G: - assert_equal(nx.maximal_independent_set(G, [node]), [node]) + assert nx.maximal_independent_set(G, [node]) == [node] def test_exception(self): """Bad input should raise exception.""" G = self.florentine - assert_raises(nx.NetworkXUnfeasible, - nx.maximal_independent_set, G, ["Smith"]) - assert_raises(nx.NetworkXUnfeasible, - nx.maximal_independent_set, G, ["Salviati", "Pazzi"]) + pytest.raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"]) + pytest.raises( + nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"] + ) def test_digraph_exception(self): G = nx.DiGraph([(1, 2), (3, 4)]) - assert_raises(nx.NetworkXNotImplemented, nx.maximal_independent_set, G) + pytest.raises(nx.NetworkXNotImplemented, nx.maximal_independent_set, G) def test_florentine_family(self): G = self.florentine indep = nx.maximal_independent_set(G, ["Medici", 
"Bischeri"]) - assert_equal(sorted(indep), - sorted(["Medici", "Bischeri", "Castellani", "Pazzi", - "Ginori", "Lamberteschi"])) + assert sorted(indep) == sorted( + ["Medici", "Bischeri", "Castellani", "Pazzi", "Ginori", "Lamberteschi"] + ) def test_bipartite(self): G = nx.complete_bipartite_graph(12, 34) indep = nx.maximal_independent_set(G, [4, 5, 9, 10]) - assert_equal(sorted(indep), list(range(12))) + assert sorted(indep) == list(range(12)) def test_random_graphs(self): """Generate 50 random graphs of different types and sizes and @@ -86,7 +79,7 @@ def test_random_graphs(self): for i in range(0, 50, 10): G = nx.random_graphs.erdos_renyi_graph(i * 10 + 1, random.random()) IS = nx.maximal_independent_set(G) - assert_false(list(G.subgraph(IS).edges())) + assert not list(G.subgraph(IS).edges()) neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) for v in set(G.nodes()).difference(IS): - assert_true(v in neighbors_of_MIS) + assert v in neighbors_of_MIS diff --git a/networkx/algorithms/tests/test_moral.py b/networkx/algorithms/tests/test_moral.py new file mode 100644 index 0000000..dee4949 --- /dev/null +++ b/networkx/algorithms/tests/test_moral.py @@ -0,0 +1,16 @@ +import networkx as nx + +from networkx.algorithms.moral import moral_graph + + +def test_get_moral_graph(): + graph = nx.DiGraph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) + graph.add_edges_from([(1, 2), (3, 2), (4, 1), (4, 5), (6, 5), (7, 5)]) + H = moral_graph(graph) + assert not H.is_directed() + assert H.has_edge(1, 3) + assert H.has_edge(4, 6) + assert H.has_edge(6, 7) + assert H.has_edge(4, 7) + assert not H.has_edge(1, 5) diff --git a/networkx/algorithms/tests/test_non_randomness.py b/networkx/algorithms/tests/test_non_randomness.py new file mode 100644 index 0000000..17925eb --- /dev/null +++ b/networkx/algorithms/tests/test_non_randomness.py @@ -0,0 +1,14 @@ +import networkx as nx + +import pytest + +numpy = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") + + +def test_non_randomness(): + G = nx.karate_club_graph() + npt.assert_almost_equal(nx.non_randomness(G, 2)[0], 11.7, decimal=2) + npt.assert_almost_equal( + nx.non_randomness(G)[0], 7.21, decimal=2 + ) # infers 3 communities diff --git a/networkx/algorithms/tests/test_planar_drawing.py b/networkx/algorithms/tests/test_planar_drawing.py new file mode 100644 index 0000000..025e822 --- /dev/null +++ b/networkx/algorithms/tests/test_planar_drawing.py @@ -0,0 +1,272 @@ +import pytest +import networkx as nx +from networkx.algorithms.planar_drawing import triangulate_embedding +import math + + +def test_graph1(): + embedding_data = {0: [1, 2, 3], 1: [2, 0], 2: [3, 0, 1], 3: [2, 0]} + check_embedding_data(embedding_data) + + +def test_graph2(): + embedding_data = { + 0: [8, 6], + 1: [2, 6, 9], + 2: [8, 1, 7, 9, 6, 4], + 3: [9], + 4: [2], + 5: [6, 8], + 6: [9, 1, 0, 5, 2], + 7: [9, 2], + 8: [0, 2, 5], + 9: [1, 6, 2, 7, 3], + } + check_embedding_data(embedding_data) + + +def test_circle_graph(): + embedding_data = { + 0: [1, 9], + 1: [0, 2], + 2: [1, 3], + 3: [2, 4], + 4: [3, 5], + 5: [4, 6], + 6: [5, 7], + 7: [6, 8], + 8: [7, 9], + 9: [8, 0], + } + check_embedding_data(embedding_data) + + +def test_grid_graph(): + embedding_data = { + (0, 1): [(0, 0), (1, 1), (0, 2)], + (1, 2): [(1, 1), (2, 2), (0, 2)], + (0, 0): [(0, 1), (1, 0)], + (2, 1): [(2, 0), (2, 2), (1, 1)], + (1, 1): [(2, 1), (1, 2), (0, 1), (1, 0)], + (2, 0): [(1, 0), (2, 1)], + (2, 2): [(1, 2), (2, 1)], + (1, 0): [(0, 0), (2, 0), (1, 1)], + (0, 2): [(1, 2), (0, 1)], + } + 
check_embedding_data(embedding_data) + + +def test_one_node_graph(): + embedding_data = {0: []} + check_embedding_data(embedding_data) + + +def test_two_node_graph(): + embedding_data = {0: [1], 1: [0]} + check_embedding_data(embedding_data) + + +def test_three_node_graph(): + embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1]} + check_embedding_data(embedding_data) + + +def test_multiple_component_graph1(): + embedding_data = {0: [], 1: []} + check_embedding_data(embedding_data) + + +def test_multiple_component_graph2(): + embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1], 3: [4, 5], 4: [3, 5], 5: [3, 4]} + check_embedding_data(embedding_data) + + +def test_invalid_half_edge(): + with pytest.raises(nx.NetworkXException): + embedding_data = {1: [2, 3, 4], 2: [1, 3, 4], 3: [1, 2, 4], 4: [1, 2, 3]} + embedding = nx.PlanarEmbedding() + embedding.set_data(embedding_data) + nx.combinatorial_embedding_to_pos(embedding) + + +def test_triangulate_embedding1(): + embedding = nx.PlanarEmbedding() + embedding.add_node(1) + expected_embedding = {1: []} + check_triangulation(embedding, expected_embedding) + + +def test_triangulate_embedding2(): + embedding = nx.PlanarEmbedding() + embedding.connect_components(1, 2) + expected_embedding = {1: [2], 2: [1]} + check_triangulation(embedding, expected_embedding) + + +def check_triangulation(embedding, expected_embedding): + res_embedding, _ = triangulate_embedding(embedding, True) + assert ( + res_embedding.get_data() == expected_embedding + ), "Expected embedding incorrect" + res_embedding, _ = triangulate_embedding(embedding, False) + assert ( + res_embedding.get_data() == expected_embedding + ), "Expected embedding incorrect" + + +def check_embedding_data(embedding_data): + """Checks that the planar embedding of the input is correct""" + embedding = nx.PlanarEmbedding() + embedding.set_data(embedding_data) + pos_fully = nx.combinatorial_embedding_to_pos(embedding, False) + msg = "Planar drawing does not conform to the embedding (full " "triangulation)" + assert planar_drawing_conforms_to_embedding(embedding, pos_fully), msg + check_edge_intersections(embedding, pos_fully) + pos_internally = nx.combinatorial_embedding_to_pos(embedding, True) + msg = "Planar drawing does not conform to the embedding (internal " "triangulation)" + assert planar_drawing_conforms_to_embedding(embedding, pos_internally), msg + check_edge_intersections(embedding, pos_internally) + + +def is_close(a, b, rel_tol=1e-09, abs_tol=0.0): + # Check if float numbers are basically equal; for Python >= 3.5 there is a + # function for that in the standard library (math.isclose) + return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol) + + +def point_in_between(a, b, p): + # checks if p lies on the line segment between a and b + x1, y1 = a + x2, y2 = b + px, py = p + dist_1_2 = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2) + dist_1_p = math.sqrt((x1 - px) ** 2 + (y1 - py) ** 2) + dist_2_p = math.sqrt((x2 - px) ** 2 + (y2 - py) ** 2) + return is_close(dist_1_p + dist_2_p, dist_1_2) + + +def check_edge_intersections(G, pos): + """Check all edges in G for intersections. + + Raises an exception if an intersection is found.
+ + Parameters + ---------- + G : NetworkX graph + pos : dict + Maps every node to a tuple (x, y) representing its position + + """ + for a, b in G.edges(): + for c, d in G.edges(): + # Check if end points are different + if a != c and b != d and b != c and a != d: + x1, y1 = pos[a] + x2, y2 = pos[b] + x3, y3 = pos[c] + x4, y4 = pos[d] + determinant = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4) + if determinant != 0: # the lines are not parallel + # calculate intersection point, see: + # https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection + px = ( + (x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4) + ) / float(determinant) + py = ( + (x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4) + ) / float(determinant) + + # Check if intersection lies between the points + if point_in_between(pos[a], pos[b], (px, py)) and point_in_between( + pos[c], pos[d], (px, py) + ): + msg = f"There is an intersection at {px},{py}" + raise nx.NetworkXException(msg) + + # Check overlap + msg = "A node lies on an edge connecting two other nodes" + if ( + point_in_between(pos[a], pos[b], pos[c]) + or point_in_between(pos[a], pos[b], pos[d]) + or point_in_between(pos[c], pos[d], pos[a]) + or point_in_between(pos[c], pos[d], pos[b]) + ): + raise nx.NetworkXException(msg) + # No edge intersection found + + +class Vector: + """Compare vectors by their angle without loss of precision + + All vectors in direction [0, 1] are the smallest. + The vectors grow in clockwise direction. + """ + + __slots__ = ["x", "y", "node", "quadrant"] + + def __init__(self, x, y, node): + self.x = x + self.y = y + self.node = node + if self.x >= 0 and self.y > 0: + self.quadrant = 1 + elif self.x > 0 and self.y <= 0: + self.quadrant = 2 + elif self.x <= 0 and self.y < 0: + self.quadrant = 3 + else: + self.quadrant = 4 + + def __eq__(self, other): + return self.quadrant == other.quadrant and self.x * other.y == self.y * other.x + + def __lt__(self, other): + if self.quadrant < other.quadrant: + return True + elif self.quadrant > other.quadrant: + return False + else: + return self.x * other.y < self.y * other.x + + def __ne__(self, other): + return not self == other + + def __le__(self, other): + return not other < self + + def __gt__(self, other): + return other < self + + def __ge__(self, other): + return not self < other + + +def planar_drawing_conforms_to_embedding(embedding, pos): + """Checks if pos conforms to the planar embedding + + Returns true iff the neighbors are actually oriented in the orientation + specified by the embedding + """ + for v in embedding: + nbr_vectors = [] + v_pos = pos[v] + for nbr in embedding[v]: + new_vector = Vector(pos[nbr][0] - v_pos[0], pos[nbr][1] - v_pos[1], nbr) + nbr_vectors.append(new_vector) + # Sort neighbors according to their phi angle + nbr_vectors.sort() + for idx, nbr_vector in enumerate(nbr_vectors): + cw_vector = nbr_vectors[(idx + 1) % len(nbr_vectors)] + ccw_vector = nbr_vectors[idx - 1] + if ( + embedding[v][nbr_vector.node]["cw"] != cw_vector.node + or embedding[v][nbr_vector.node]["ccw"] != ccw_vector.node + ): + return False + if cw_vector.node != nbr_vector.node and cw_vector == nbr_vector: + # Lines overlap + return False + if ccw_vector.node != nbr_vector.node and ccw_vector == nbr_vector: + # Lines overlap + return False + return True diff --git a/networkx/algorithms/tests/test_planarity.py b/networkx/algorithms/tests/test_planarity.py new file mode 100644 index 0000000..89fb8e7 --- /dev/null +++ b/networkx/algorithms/tests/test_planarity.py @@
-0,0 +1,439 @@ +import pytest +import networkx as nx +from networkx.algorithms.planarity import get_counterexample +from networkx.algorithms.planarity import get_counterexample_recursive +from networkx.algorithms.planarity import check_planarity_recursive + + +class TestLRPlanarity: + """Unit tests for the :mod:`networkx.algorithms.planarity` module. + + Tests three things: + 1. Check that the result is correct + (returns planar if and only if the graph is actually planar) + 2. In case a counterexample is returned: Check if it is correct + 3. In case an embedding is returned: Check if it's actually an embedding + """ + + @staticmethod + def check_graph(G, is_planar=None): + """Raises an exception if the lr_planarity check returns a wrong result + + Parameters + ---------- + G : NetworkX graph + is_planar : bool + The expected result of the planarity check. + If set to None, only the counterexample or embedding is verified. + + """ + + # obtain results of planarity check + is_planar_lr, result = nx.check_planarity(G, True) + is_planar_lr_rec, result_rec = check_planarity_recursive(G, True) + + if is_planar is not None: + # set a message for the assert + if is_planar: + msg = "Wrong planarity check result. Should be planar." + else: + msg = "Wrong planarity check result. Should be non-planar." + + # check if the result is as expected + assert is_planar == is_planar_lr, msg + assert is_planar == is_planar_lr_rec, msg + + if is_planar_lr: + # check embedding + check_embedding(G, result) + check_embedding(G, result_rec) + else: + # check counterexample + check_counterexample(G, result) + check_counterexample(G, result_rec) + + def test_simple_planar_graph(self): + e = [ + (1, 2), + (2, 3), + (3, 4), + (4, 6), + (6, 7), + (7, 1), + (1, 5), + (5, 2), + (2, 4), + (4, 5), + (5, 7), + ] + self.check_graph(nx.Graph(e), is_planar=True) + + def test_planar_with_selfloop(self): + e = [ + (1, 1), + (2, 2), + (3, 3), + (4, 4), + (5, 5), + (1, 2), + (1, 3), + (1, 5), + (2, 5), + (2, 4), + (3, 4), + (3, 5), + (4, 5), + ] + self.check_graph(nx.Graph(e), is_planar=True) + + def test_k3_3(self): + self.check_graph(nx.complete_bipartite_graph(3, 3), is_planar=False) + + def test_k5(self): + self.check_graph(nx.complete_graph(5), is_planar=False) + + def test_multiple_components_planar(self): + e = [(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4)] + self.check_graph(nx.Graph(e), is_planar=True) + + def test_multiple_components_non_planar(self): + G = nx.complete_graph(5) + # add another planar component to the non-planar component + # G stays non-planar + G.add_edges_from([(6, 7), (7, 8), (8, 6)]) + self.check_graph(G, is_planar=False) + + def test_non_planar_with_selfloop(self): + G = nx.complete_graph(5) + # add self loops + for i in range(5): + G.add_edge(i, i) + self.check_graph(G, is_planar=False) + + def test_non_planar1(self): + # tests a graph that has no subgraph directly isomorphic to K5 or K3_3 + e = [ + (1, 5), + (1, 6), + (1, 7), + (2, 6), + (2, 3), + (3, 5), + (3, 7), + (4, 5), + (4, 6), + (4, 7), + ] + self.check_graph(nx.Graph(e), is_planar=False) + + def test_loop(self): + # test a graph with a selfloop + e = [(1, 2), (2, 2)] + G = nx.Graph(e) + self.check_graph(G, is_planar=True) + + def test_comp(self): + # test multiple component graph + e = [(1, 2), (3, 4)] + G = nx.Graph(e) + G.remove_edge(1, 2) + self.check_graph(G, is_planar=True) + + def test_goldner_harary(self): + # test the Goldner-Harary graph (a maximal planar graph) + e = [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 7), + (1, 8), +
(1, 10), + (1, 11), + (2, 3), + (2, 4), + (2, 6), + (2, 7), + (2, 9), + (2, 10), + (2, 11), + (3, 4), + (4, 5), + (4, 6), + (4, 7), + (5, 7), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 10), + (9, 10), + (10, 11), + ] + G = nx.Graph(e) + self.check_graph(G, is_planar=True) + + def test_planar_multigraph(self): + G = nx.MultiGraph([(1, 2), (1, 2), (1, 2), (1, 2), (2, 3), (3, 1)]) + self.check_graph(G, is_planar=True) + + def test_non_planar_multigraph(self): + G = nx.MultiGraph(nx.complete_graph(5)) + G.add_edges_from([(1, 2)] * 5) + self.check_graph(G, is_planar=False) + + def test_planar_digraph(self): + G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2)]) + self.check_graph(G, is_planar=True) + + def test_non_planar_digraph(self): + G = nx.DiGraph(nx.complete_graph(5)) + G.remove_edge(1, 2) + G.remove_edge(4, 1) + self.check_graph(G, is_planar=False) + + def test_single_component(self): + # Test a graph with only a single node + G = nx.Graph() + G.add_node(1) + self.check_graph(G, is_planar=True) + + def test_graph1(self): + G = nx.OrderedGraph( + [ + (3, 10), + (2, 13), + (1, 13), + (7, 11), + (0, 8), + (8, 13), + (0, 2), + (0, 7), + (0, 10), + (1, 7), + ] + ) + self.check_graph(G, is_planar=True) + + def test_graph2(self): + G = nx.OrderedGraph( + [ + (1, 2), + (4, 13), + (0, 13), + (4, 5), + (7, 10), + (1, 7), + (0, 3), + (2, 6), + (5, 6), + (7, 13), + (4, 8), + (0, 8), + (0, 9), + (2, 13), + (6, 7), + (3, 6), + (2, 8), + ] + ) + self.check_graph(G, is_planar=False) + + def test_graph3(self): + G = nx.OrderedGraph( + [ + (0, 7), + (3, 11), + (3, 4), + (8, 9), + (4, 11), + (1, 7), + (1, 13), + (1, 11), + (3, 5), + (5, 7), + (1, 3), + (0, 4), + (5, 11), + (5, 13), + ] + ) + self.check_graph(G, is_planar=False) + + def test_counterexample_planar(self): + with pytest.raises(nx.NetworkXException): + # Try to get a counterexample of a planar graph + G = nx.Graph() + G.add_node(1) + get_counterexample(G) + + def test_counterexample_planar_recursive(self): + with pytest.raises(nx.NetworkXException): + # Try to get a counterexample of a planar graph + G = nx.Graph() + G.add_node(1) + get_counterexample_recursive(G) + + +def check_embedding(G, embedding): + """Raises an exception if the combinatorial embedding is not correct + + Parameters + ---------- + G : NetworkX graph + embedding : a dict mapping nodes to a list of edges + This specifies the ordering of the outgoing edges from a node for + a combinatorial embedding + + Notes + ----- + Checks the following things: + - The type of the embedding is correct + - The nodes and edges match the original graph + - Every half edge has its matching opposite half edge + - No intersections of edges (checked by Euler's formula) + """ + + if not isinstance(embedding, nx.PlanarEmbedding): + raise nx.NetworkXException("Bad embedding. Not of type nx.PlanarEmbedding") + + # Check structure + embedding.check_structure() + + # Check that graphs are equivalent + + assert set(G.nodes) == set( + embedding.nodes + ), "Bad embedding. Nodes don't match the original graph." + + # Check that the edges are equal + g_edges = set() + for edge in G.edges: + if edge[0] != edge[1]: + g_edges.add((edge[0], edge[1])) + g_edges.add((edge[1], edge[0])) + assert g_edges == set( + embedding.edges + ), "Bad embedding. Edges don't match the original graph." + + +def check_counterexample(G, sub_graph): + """Raises an exception if the counterexample is wrong. 
+ + Parameters + ---------- + G : NetworkX graph + sub_graph : NetworkX graph + A subgraph of G claimed to be a counterexample, + i.e. a subdivision of K5 or K3_3 + """ + # 1. Create the subgraph + sub_graph = nx.Graph(sub_graph) + + # 2. Remove self loops + for u in sub_graph: + if sub_graph.has_edge(u, u): + sub_graph.remove_edge(u, u) + + # keep track of nodes we might need to contract + contract = list(sub_graph) + + # 3. Contract edges + while len(contract) > 0: + contract_node = contract.pop() + if contract_node not in sub_graph: + # Node was already contracted + continue + degree = sub_graph.degree[contract_node] + # Check if we can remove the node + if degree == 2: + # Get the two neighbors + neighbors = iter(sub_graph[contract_node]) + u = next(neighbors) + v = next(neighbors) + # Save nodes for later + contract.append(u) + contract.append(v) + # Contract edge + sub_graph.remove_node(contract_node) + sub_graph.add_edge(u, v) + + # 4. Check for isomorphism with K5 or K3_3 graphs + if len(sub_graph) == 5: + if not nx.is_isomorphic(nx.complete_graph(5), sub_graph): + raise nx.NetworkXException("Bad counter example.") + elif len(sub_graph) == 6: + if not nx.is_isomorphic(nx.complete_bipartite_graph(3, 3), sub_graph): + raise nx.NetworkXException("Bad counter example.") + else: + raise nx.NetworkXException("Bad counter example.") + + +class TestPlanarEmbeddingClass: + def test_get_data(self): + embedding = self.get_star_embedding(3) + data = embedding.get_data() + data_cmp = {0: [2, 1], 1: [0], 2: [0]} + assert data == data_cmp + + def test_missing_edge_orientation(self): + with pytest.raises(nx.NetworkXException): + embedding = nx.PlanarEmbedding() + embedding.add_edge(1, 2) + embedding.add_edge(2, 1) + # Invalid structure because the orientation of the edge was not set + embedding.check_structure() + + def test_invalid_edge_orientation(self): + with pytest.raises(nx.NetworkXException): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge_first(1, 2) + embedding.add_half_edge_first(2, 1) + embedding.add_edge(1, 3) + embedding.check_structure() + + def test_missing_half_edge(self): + with pytest.raises(nx.NetworkXException): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge_first(1, 2) + # Invalid structure because the other half edge is missing + embedding.check_structure() + + def test_not_fulfilling_euler_formula(self): + with pytest.raises(nx.NetworkXException): + embedding = nx.PlanarEmbedding() + for i in range(5): + for j in range(5): + if i != j: + embedding.add_half_edge_first(i, j) + embedding.check_structure() + + def test_missing_reference(self): + with pytest.raises(nx.NetworkXException): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge_cw(1, 2, 3) + + def test_connect_components(self): + embedding = nx.PlanarEmbedding() + embedding.connect_components(1, 2) + + def test_successful_face_traversal(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge_first(1, 2) + embedding.add_half_edge_first(2, 1) + face = embedding.traverse_face(1, 2) + assert face == [1, 2] + + def test_unsuccessful_face_traversal(self): + with pytest.raises(nx.NetworkXException): + embedding = nx.PlanarEmbedding() + embedding.add_edge(1, 2, ccw=2, cw=3) + embedding.add_edge(2, 1, ccw=1, cw=3) + embedding.traverse_face(1, 2) + + @staticmethod + def get_star_embedding(n): + embedding = nx.PlanarEmbedding() + for i in range(1, n): + embedding.add_half_edge_first(0, i) + embedding.add_half_edge_first(i, 0) + return embedding diff --git a/networkx/algorithms/tests/test_reciprocity.py
b/networkx/algorithms/tests/test_reciprocity.py index b1526ba..4da399a 100644 --- a/networkx/algorithms/tests/test_reciprocity.py +++ b/networkx/algorithms/tests/test_reciprocity.py @@ -1,8 +1,8 @@ -from nose.tools import * +import pytest import networkx as nx -class TestReciprocity(object): +class TestReciprocity: # test overall reicprocity by passing whole graph def test_reciprocity_digraph(self): @@ -11,10 +11,10 @@ def test_reciprocity_digraph(self): assert reciprocity == 1.0 # test empty graph's overall reciprocity which will throw an error - @raises(nx.NetworkXError) def test_overall_reciprocity_empty_graph(self): - DG = nx.DiGraph() - nx.overall_reciprocity(DG) + with pytest.raises(nx.NetworkXError): + DG = nx.DiGraph() + nx.overall_reciprocity(DG) # test for reciprocity for a list of nodes def test_reciprocity_graph_nodes(self): @@ -30,8 +30,8 @@ def test_reciprocity_graph_node(self): assert reciprocity == 0.6666666666666666 # test for reciprocity for an isolated node - @raises(nx.NetworkXError) def test_reciprocity_graph_isolated_nodes(self): - DG = nx.DiGraph([(1, 2)]) - DG.add_node(4) - nx.reciprocity(DG, 4) + with pytest.raises(nx.NetworkXError): + DG = nx.DiGraph([(1, 2)]) + DG.add_node(4) + nx.reciprocity(DG, 4) diff --git a/networkx/algorithms/tests/test_regular.py b/networkx/algorithms/tests/test_regular.py new file mode 100644 index 0000000..2aaeb32 --- /dev/null +++ b/networkx/algorithms/tests/test_regular.py @@ -0,0 +1,81 @@ +import pytest +import networkx +import networkx as nx + +import networkx.algorithms.regular as reg +import networkx.generators as gen + + +class TestKFactor: + def test_k_factor_trivial(self): + g = gen.cycle_graph(4) + f = reg.k_factor(g, 2) + assert g.edges == f.edges + + def test_k_factor1(self): + g = gen.grid_2d_graph(4, 4) + g_kf = reg.k_factor(g, 2) + for edge in g_kf.edges(): + assert g.has_edge(edge[0], edge[1]) + for _, degree in g_kf.degree(): + assert degree == 2 + + def test_k_factor2(self): + g = gen.complete_graph(6) + g_kf = reg.k_factor(g, 3) + for edge in g_kf.edges(): + assert g.has_edge(edge[0], edge[1]) + for _, degree in g_kf.degree(): + assert degree == 3 + + def test_k_factor3(self): + g = gen.grid_2d_graph(4, 4) + with pytest.raises(nx.NetworkXUnfeasible): + reg.k_factor(g, 3) + + def test_k_factor4(self): + g = gen.lattice.hexagonal_lattice_graph(4, 4) + # Perfect matching doesn't exist for 4,4 hexagonal lattice graph + with pytest.raises(nx.NetworkXUnfeasible): + reg.k_factor(g, 2) + + def test_k_factor5(self): + g = gen.complete_graph(6) + # small k to exercise SmallKGadget + g_kf = reg.k_factor(g, 2) + for edge in g_kf.edges(): + assert g.has_edge(edge[0], edge[1]) + for _, degree in g_kf.degree(): + assert degree == 2 + + +class TestIsRegular: + def test_is_regular1(self): + g = gen.cycle_graph(4) + assert reg.is_regular(g) + + def test_is_regular2(self): + g = gen.complete_graph(5) + assert reg.is_regular(g) + + def test_is_regular3(self): + g = gen.lollipop_graph(5, 5) + assert not reg.is_regular(g) + + +class TestIsKRegular: + def test_is_k_regular1(self): + g = gen.cycle_graph(4) + assert reg.is_k_regular(g, 2) + assert not reg.is_k_regular(g, 3) + + def test_is_k_regular2(self): + g = gen.complete_graph(5) + assert reg.is_k_regular(g, 4) + assert not reg.is_k_regular(g, 3) + assert not reg.is_k_regular(g, 6) + + def test_is_k_regular3(self): + g = gen.lollipop_graph(5, 5) + assert not reg.is_k_regular(g, 5) + assert not reg.is_k_regular(g, 6) diff --git a/networkx/algorithms/tests/test_richclub.py 
b/networkx/algorithms/tests/test_richclub.py index f9abc84..f933e61 100644 --- a/networkx/algorithms/tests/test_richclub.py +++ b/networkx/algorithms/tests/test_richclub.py @@ -1,73 +1,82 @@ +import pytest import networkx as nx -from nose.tools import * def test_richclub(): G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) rc = nx.richclub.rich_club_coefficient(G, normalized=False) - assert_equal(rc, {0: 12.0 / 30, 1: 8.0 / 12}) + assert rc == {0: 12.0 / 30, 1: 8.0 / 12} # test single value rc0 = nx.richclub.rich_club_coefficient(G, normalized=False)[0] - assert_equal(rc0, 12.0 / 30.0) + assert rc0 == 12.0 / 30.0 + + +def test_richclub_seed(): + G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) + rcNorm = nx.richclub.rich_club_coefficient(G, Q=2, seed=1) + assert rcNorm == {0: 1.0, 1: 1.0} def test_richclub_normalized(): G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) rcNorm = nx.richclub.rich_club_coefficient(G, Q=2) - assert_equal(rcNorm, {0: 1.0, 1: 1.0}) + assert rcNorm == {0: 1.0, 1: 1.0} def test_richclub2(): T = nx.balanced_tree(2, 10) rc = nx.richclub.rich_club_coefficient(T, normalized=False) - assert_equal(rc, {0: 4092 / (2047 * 2046.0), - 1: (2044.0 / (1023 * 1022)), - 2: (2040.0 / (1022 * 1021))}) + assert rc == { + 0: 4092 / (2047 * 2046.0), + 1: (2044.0 / (1023 * 1022)), + 2: (2040.0 / (1022 * 1021)), + } def test_richclub3(): # tests edgecase G = nx.karate_club_graph() rc = nx.rich_club_coefficient(G, normalized=False) - assert_equal(rc, {0: 156.0 / 1122, - 1: 154.0 / 1056, - 2: 110.0 / 462, - 3: 78.0 / 240, - 4: 44.0 / 90, - 5: 22.0 / 42, - 6: 10.0 / 20, - 7: 10.0 / 20, - 8: 10.0 / 20, - 9: 6.0 / 12, - 10: 2.0 / 6, - 11: 2.0 / 6, - 12: 0.0, - 13: 0.0, - 14: 0.0, - 15: 0.0, }) + assert rc == { + 0: 156.0 / 1122, + 1: 154.0 / 1056, + 2: 110.0 / 462, + 3: 78.0 / 240, + 4: 44.0 / 90, + 5: 22.0 / 42, + 6: 10.0 / 20, + 7: 10.0 / 20, + 8: 10.0 / 20, + 9: 6.0 / 12, + 10: 2.0 / 6, + 11: 2.0 / 6, + 12: 0.0, + 13: 0.0, + 14: 0.0, + 15: 0.0, + } def test_richclub4(): G = nx.Graph() - G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)]) + G.add_edges_from( + [(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)] + ) rc = nx.rich_club_coefficient(G, normalized=False) - assert_equal(rc, {0: 18 / 90.0, - 1: 6 / 12.0, - 2: 0.0, - 3: 0.0}) + assert rc == {0: 18 / 90.0, 1: 6 / 12.0, 2: 0.0, 3: 0.0} -@raises(nx.NetworkXNotImplemented) def test_richclub_exception(): - G = nx.DiGraph() - nx.rich_club_coefficient(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + nx.rich_club_coefficient(G) -@raises(nx.NetworkXNotImplemented) def test_rich_club_exception2(): - G = nx.MultiGraph() - nx.rich_club_coefficient(G) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.MultiGraph() + nx.rich_club_coefficient(G) # def test_richclub2_normalized(): diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py index 17f25d6..66adac2 100644 --- a/networkx/algorithms/tests/test_similarity.py +++ b/networkx/algorithms/tests/test_similarity.py @@ -1,25 +1,62 @@ -#!/usr/bin/env python -from nose.tools import * -from nose import SkipTest +import pytest + import networkx as nx -from networkx.algorithms.similarity import * -from networkx.generators.classic import * +from networkx.algorithms.similarity import ( + graph_edit_distance, + optimal_edit_paths, + optimize_graph_edit_distance, +) +from networkx.generators.classic import ( + 
circular_ladder_graph, + cycle_graph, + path_graph, + wheel_graph, +) -class TestSimilarity: +def nmatch(n1, n2): + return n1 == n2 + + +def ematch(e1, e2): + return e1 == e2 + + +def getCanonical(): + G = nx.Graph() + G.add_node("A", label="A") + G.add_node("B", label="B") + G.add_node("C", label="C") + G.add_node("D", label="D") + G.add_edge("A", "B", label="a-b") + G.add_edge("B", "C", label="b-c") + G.add_edge("B", "D", label="b-d") + return G + +class TestSimilarity: @classmethod - def setupClass(cls): + def setup_class(cls): global numpy global scipy - try: - import numpy - except ImportError: - raise SkipTest('NumPy not available.') - try: - import scipy - except ImportError: - raise SkipTest('SciPy not available.') + numpy = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") + + def test_graph_edit_distance_roots_and_timeout(self): + G0 = nx.star_graph(5) + G1 = G0.copy() + pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2]) + pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2, 3, 4]) + pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 3)) + pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(3, 9)) + pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 9)) + assert graph_edit_distance(G0, G1, roots=(1, 2)) == 0 + assert graph_edit_distance(G0, G1, roots=(0, 1)) == 8 + assert graph_edit_distance(G0, G1, roots=(1, 2), timeout=5) == 0 + assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=5) == 8 + assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=0.0001) is None + # test raise on 0 timeout + pytest.raises(nx.NetworkXError, graph_edit_distance, G0, G1, timeout=0) def test_graph_edit_distance(self): G0 = nx.Graph() @@ -27,144 +64,652 @@ def test_graph_edit_distance(self): G2 = cycle_graph(6) G3 = wheel_graph(7) - assert_equal(graph_edit_distance(G0, G0), 0) - assert_equal(graph_edit_distance(G0, G1), 11) - assert_equal(graph_edit_distance(G1, G0), 11) - assert_equal(graph_edit_distance(G0, G2), 12) - assert_equal(graph_edit_distance(G2, G0), 12) - assert_equal(graph_edit_distance(G0, G3), 19) - assert_equal(graph_edit_distance(G3, G0), 19) + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 11 + assert graph_edit_distance(G1, G0) == 11 + assert graph_edit_distance(G0, G2) == 12 + assert graph_edit_distance(G2, G0) == 12 + assert graph_edit_distance(G0, G3) == 19 + assert graph_edit_distance(G3, G0) == 19 - assert_equal(graph_edit_distance(G1, G1), 0) - assert_equal(graph_edit_distance(G1, G2), 1) - assert_equal(graph_edit_distance(G2, G1), 1) - assert_equal(graph_edit_distance(G1, G3), 8) - assert_equal(graph_edit_distance(G3, G1), 8) + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 1 + assert graph_edit_distance(G2, G1) == 1 + assert graph_edit_distance(G1, G3) == 8 + assert graph_edit_distance(G3, G1) == 8 - assert_equal(graph_edit_distance(G2, G2), 0) - assert_equal(graph_edit_distance(G2, G3), 7) - assert_equal(graph_edit_distance(G3, G2), 7) + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 7 + assert graph_edit_distance(G3, G2) == 7 - assert_equal(graph_edit_distance(G3, G3), 0) + assert graph_edit_distance(G3, G3) == 0 def test_graph_edit_distance_node_match(self): G1 = cycle_graph(5) G2 = cycle_graph(5) for n, attr in G1.nodes.items(): - attr['color'] = 'red' if n % 2 == 0 else 'blue' + attr["color"] = "red" if n % 2 == 0 else "blue" for n, attr in G2.nodes.items(): - 
attr['color'] = 'red' if n % 2 == 1 else 'blue' - assert_equal(graph_edit_distance(G1, G2), 0) - assert_equal(graph_edit_distance(G1, G2, node_match=lambda n1, n2: n1['color'] == n2['color']), 1) + attr["color"] = "red" if n % 2 == 1 else "blue" + assert graph_edit_distance(G1, G2) == 0 + assert ( + graph_edit_distance( + G1, G2, node_match=lambda n1, n2: n1["color"] == n2["color"] + ) + == 1 + ) def test_graph_edit_distance_edge_match(self): G1 = path_graph(6) G2 = path_graph(6) for e, attr in G1.edges.items(): - attr['color'] = 'red' if min(e) % 2 == 0 else 'blue' + attr["color"] = "red" if min(e) % 2 == 0 else "blue" for e, attr in G2.edges.items(): - attr['color'] = 'red' if min(e) // 3 == 0 else 'blue' - assert_equal(graph_edit_distance(G1, G2), 0) - assert_equal(graph_edit_distance(G1, G2, edge_match=lambda e1, e2: e1['color'] == e2['color']), 2) + attr["color"] = "red" if min(e) // 3 == 0 else "blue" + assert graph_edit_distance(G1, G2) == 0 + assert ( + graph_edit_distance( + G1, G2, edge_match=lambda e1, e2: e1["color"] == e2["color"] + ) + == 2 + ) def test_graph_edit_distance_node_cost(self): G1 = path_graph(6) G2 = path_graph(6) for n, attr in G1.nodes.items(): - attr['color'] = 'red' if n % 2 == 0 else 'blue' + attr["color"] = "red" if n % 2 == 0 else "blue" for n, attr in G2.nodes.items(): - attr['color'] = 'red' if n % 2 == 1 else 'blue' + attr["color"] = "red" if n % 2 == 1 else "blue" def node_subst_cost(uattr, vattr): - if uattr['color'] == vattr['color']: + if uattr["color"] == vattr["color"]: return 1 else: return 10 def node_del_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 20 else: return 50 def node_ins_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 40 else: return 100 - assert_equal(graph_edit_distance(G1, G2, - node_subst_cost=node_subst_cost, - node_del_cost=node_del_cost, - node_ins_cost=node_ins_cost), 6) + assert ( + graph_edit_distance( + G1, + G2, + node_subst_cost=node_subst_cost, + node_del_cost=node_del_cost, + node_ins_cost=node_ins_cost, + ) + == 6 + ) def test_graph_edit_distance_edge_cost(self): G1 = path_graph(6) G2 = path_graph(6) for e, attr in G1.edges.items(): - attr['color'] = 'red' if min(e) % 2 == 0 else 'blue' + attr["color"] = "red" if min(e) % 2 == 0 else "blue" for e, attr in G2.edges.items(): - attr['color'] = 'red' if min(e) // 3 == 0 else 'blue' + attr["color"] = "red" if min(e) // 3 == 0 else "blue" def edge_subst_cost(gattr, hattr): - if gattr['color'] == hattr['color']: + if gattr["color"] == hattr["color"]: return 0.01 else: return 0.1 def edge_del_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 0.2 else: return 0.5 def edge_ins_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 0.4 else: return 1.0 - assert_equal(graph_edit_distance(G1, G2, - edge_subst_cost=edge_subst_cost, - edge_del_cost=edge_del_cost, - edge_ins_cost=edge_ins_cost), 0.23) + assert ( + graph_edit_distance( + G1, + G2, + edge_subst_cost=edge_subst_cost, + edge_del_cost=edge_del_cost, + edge_ins_cost=edge_ins_cost, + ) + == 0.23 + ) def test_graph_edit_distance_upper_bound(self): G1 = circular_ladder_graph(2) G2 = circular_ladder_graph(6) - assert_equal(graph_edit_distance(G1, G2, upper_bound=5), None) - assert_equal(graph_edit_distance(G1, G2, upper_bound=24), 22) - assert_equal(graph_edit_distance(G1, G2), 22) + assert graph_edit_distance(G1, G2, upper_bound=5) is None + assert graph_edit_distance(G1, G2, upper_bound=24) == 22 + assert 
graph_edit_distance(G1, G2) == 22 def test_optimal_edit_paths(self): G1 = path_graph(3) G2 = cycle_graph(3) paths, cost = optimal_edit_paths(G1, G2) - assert_equal(cost, 1) - assert_equal(len(paths), 6) + assert cost == 1 + assert len(paths) == 6 def canonical(vertex_path, edge_path): - return tuple(sorted(vertex_path)), tuple(sorted(edge_path, key=lambda x: (None in x, x))) + return ( + tuple(sorted(vertex_path)), + tuple(sorted(edge_path, key=lambda x: (None in x, x))), + ) - expected_paths = [([(0, 0), (1, 1), (2, 2)], [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))]), - ([(0, 0), (1, 2), (2, 1)], [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))]), - ([(0, 1), (1, 0), (2, 2)], [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))]), - ([(0, 1), (1, 2), (2, 0)], [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))]), - ([(0, 2), (1, 0), (2, 1)], [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))]), - ([(0, 2), (1, 1), (2, 0)], [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))])] - assert_equal(set(canonical(*p) for p in paths), - set(canonical(*p) for p in expected_paths)) + expected_paths = [ + ( + [(0, 0), (1, 1), (2, 2)], + [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))], + ), + ( + [(0, 0), (1, 2), (2, 1)], + [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))], + ), + ( + [(0, 1), (1, 0), (2, 2)], + [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))], + ), + ( + [(0, 1), (1, 2), (2, 0)], + [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))], + ), + ( + [(0, 2), (1, 0), (2, 1)], + [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))], + ), + ( + [(0, 2), (1, 1), (2, 0)], + [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))], + ), + ] + assert {canonical(*p) for p in paths} == {canonical(*p) for p in expected_paths} def test_optimize_graph_edit_distance(self): G1 = circular_ladder_graph(2) G2 = circular_ladder_graph(6) bestcost = 1000 for cost in optimize_graph_edit_distance(G1, G2): - assert_less(cost, bestcost) + assert cost < bestcost bestcost = cost - assert_equal(bestcost, 22) + assert bestcost == 22 # def test_graph_edit_distance_bigger(self): # G1 = circular_ladder_graph(12) # G2 = circular_ladder_graph(16) # assert_equal(graph_edit_distance(G1, G2), 22) + + def test_selfloops(self): + G0 = nx.Graph() + G1 = nx.Graph() + G1.add_edges_from((("A", "A"), ("A", "B"))) + G2 = nx.Graph() + G2.add_edges_from((("A", "B"), ("B", "B"))) + G3 = nx.Graph() + G3.add_edges_from((("A", "A"), ("A", "B"), ("B", "B"))) + + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 4 + assert graph_edit_distance(G1, G0) == 4 + assert graph_edit_distance(G0, G2) == 4 + assert graph_edit_distance(G2, G0) == 4 + assert graph_edit_distance(G0, G3) == 5 + assert graph_edit_distance(G3, G0) == 5 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 0 + assert graph_edit_distance(G2, G1) == 0 + assert graph_edit_distance(G1, G3) == 1 + assert graph_edit_distance(G3, G1) == 1 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 1 + assert graph_edit_distance(G3, G2) == 1 + + assert graph_edit_distance(G3, G3) == 0 + + def test_digraph(self): + G0 = nx.DiGraph() + G1 = nx.DiGraph() + G1.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("D", "A"))) + G2 = nx.DiGraph() + G2.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("A", "D"))) + G3 = nx.DiGraph() + G3.add_edges_from((("A", "B"), ("A", "C"), ("B", "D"), ("C", "D"))) + + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 8 
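+ # Edit distance from the empty graph is just the cost of inserting every
+ # node and every edge: each of G1, G2 and G3 here has 4 nodes and 4 edges,
+ # hence a distance of 4 + 4 = 8.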
+ assert graph_edit_distance(G1, G0) == 8 + assert graph_edit_distance(G0, G2) == 8 + assert graph_edit_distance(G2, G0) == 8 + assert graph_edit_distance(G0, G3) == 8 + assert graph_edit_distance(G3, G0) == 8 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 2 + assert graph_edit_distance(G2, G1) == 2 + assert graph_edit_distance(G1, G3) == 4 + assert graph_edit_distance(G3, G1) == 4 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 2 + assert graph_edit_distance(G3, G2) == 2 + + assert graph_edit_distance(G3, G3) == 0 + + def test_multigraph(self): + G0 = nx.MultiGraph() + G1 = nx.MultiGraph() + G1.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"))) + G2 = nx.MultiGraph() + G2.add_edges_from((("A", "B"), ("B", "C"), ("B", "C"), ("A", "C"))) + G3 = nx.MultiGraph() + G3.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"), ("A", "C"), ("A", "C"))) + + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 6 + assert graph_edit_distance(G1, G0) == 6 + assert graph_edit_distance(G0, G2) == 7 + assert graph_edit_distance(G2, G0) == 7 + assert graph_edit_distance(G0, G3) == 8 + assert graph_edit_distance(G3, G0) == 8 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 1 + assert graph_edit_distance(G2, G1) == 1 + assert graph_edit_distance(G1, G3) == 2 + assert graph_edit_distance(G3, G1) == 2 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 1 + assert graph_edit_distance(G3, G2) == 1 + + assert graph_edit_distance(G3, G3) == 0 + + def test_multidigraph(self): + G1 = nx.MultiDiGraph() + G1.add_edges_from( + ( + ("hardware", "kernel"), + ("kernel", "hardware"), + ("kernel", "userspace"), + ("userspace", "kernel"), + ) + ) + G2 = nx.MultiDiGraph() + G2.add_edges_from( + ( + ("winter", "spring"), + ("spring", "summer"), + ("summer", "autumn"), + ("autumn", "winter"), + ) + ) + + assert graph_edit_distance(G1, G2) == 5 + assert graph_edit_distance(G2, G1) == 5 + + # by https://github.com/jfbeaumont + def testCopy(self): + G = nx.Graph() + G.add_node("A", label="A") + G.add_node("B", label="B") + G.add_edge("A", "B", label="a-b") + assert ( + graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch) == 0 + ) + + def testSame(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 0 + + def testOneEdgeLabelDiff(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="bad") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneNodeLabelDiff(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="Z") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneExtraNode(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", 
label="B") + G2.add_edge("A", "B", label="a-b") + G2.add_node("C", label="C") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneExtraEdge(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_node("C", label="C") + G1.add_node("C", label="C") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("A", "C", label="a-c") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneExtraNodeAndEdge(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("A", "C", label="a-c") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 + + def testGraph1(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "D", label="b-d") + G2.add_edge("D", "E", label="d-e") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 3 + + def testGraph2(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("C", "D", label="c-d") + G2.add_edge("C", "E", label="c-e") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 4 + + def testGraph3(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_node("F", label="F") + G2.add_node("G", label="G") + G2.add_edge("A", "C", label="a-c") + G2.add_edge("A", "D", label="a-d") + G2.add_edge("D", "E", label="d-e") + G2.add_edge("D", "F", label="d-f") + G2.add_edge("D", "G", label="d-g") + G2.add_edge("E", "B", label="e-b") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 12 + + def testGraph4(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("C", "D", label="c-d") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 + + def testGraph4_a(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("A", "D", label="a-d") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 + + def testGraph4_b(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("B", "D", label="bad") + assert graph_edit_distance(G1, G2, node_match=nmatch, 
edge_match=ematch) == 1 + + def test_simrank_no_source_no_target(self): + G = nx.cycle_graph(5) + expected = { + 0: { + 0: 1, + 1: 0.3951219505902448, + 2: 0.5707317069281646, + 3: 0.5707317069281646, + 4: 0.3951219505902449, + }, + 1: { + 0: 0.3951219505902448, + 1: 1, + 2: 0.3951219505902449, + 3: 0.5707317069281646, + 4: 0.5707317069281646, + }, + 2: { + 0: 0.5707317069281646, + 1: 0.3951219505902449, + 2: 1, + 3: 0.3951219505902449, + 4: 0.5707317069281646, + }, + 3: { + 0: 0.5707317069281646, + 1: 0.5707317069281646, + 2: 0.3951219505902449, + 3: 1, + 4: 0.3951219505902449, + }, + 4: { + 0: 0.3951219505902449, + 1: 0.5707317069281646, + 2: 0.5707317069281646, + 3: 0.3951219505902449, + 4: 1, + }, + } + actual = nx.simrank_similarity(G) + assert expected == actual + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + + expected = { + 0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443}, + 1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, 4: 0.10586911930126384}, + 2: { + 0: 0.1323363991265798, + 1: 0.4135512472705618, + 2: 1, + 3: 0.04234764772050554, + 4: 0.08822426608438655, + }, + 3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, 4: 0.3308409978164495}, + 4: { + 0: 0.03387811817640443, + 1: 0.10586911930126384, + 2: 0.08822426608438655, + 3: 0.3308409978164495, + 4: 1, + }, + } + # Use the importance_factor from the paper to get the same numbers. + actual = nx.algorithms.similarity.simrank_similarity(G, importance_factor=0.8) + assert expected == actual + + def test_simrank_source_no_target(self): + G = nx.cycle_graph(5) + expected = { + 0: 1, + 1: 0.3951219505902448, + 2: 0.5707317069281646, + 3: 0.5707317069281646, + 4: 0.3951219505902449, + } + actual = nx.simrank_similarity(G, source=0) + assert expected == actual + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + + expected = {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443} + # Use the importance_factor from the paper to get the same numbers. + actual = nx.algorithms.similarity.simrank_similarity( + G, importance_factor=0.8, source=0 + ) + assert expected == actual + + def test_simrank_source_and_target(self): + G = nx.cycle_graph(5) + expected = 1 + actual = nx.simrank_similarity(G, source=0, target=0) + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + + expected = 0.1323363991265798 + # Use the importance_factor from the paper to get the same numbers. + # Use the pair (0,2) because (0,0) and (0,1) have trivial results. 
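+ # SimRank (Jeh & Widom 2002) solves s(a, b) = C / (|I(a)| * |I(b)|) *
+ # sum(s(i, j) for i in I(a) for j in I(b)) with s(a, a) = 1, where I(x)
+ # is the set of in-neighbors of x and C is the importance_factor.
+ # Here I(0) = {3} and I(2) = {0, 4}, so at the fixed point
+ # s(0, 2) = 0.8 / (1 * 2) * (s(3, 0) + s(3, 4))
+ #         = 0.4 * (0.0 + 0.3308409978164495) = 0.1323363991265798.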
+ actual = nx.algorithms.similarity.simrank_similarity( + G, importance_factor=0.8, source=0, target=2 + ) + assert expected == actual + + def test_simrank_numpy_no_source_no_target(self): + G = nx.cycle_graph(5) + expected = numpy.array( + [ + [ + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + ], + [ + 0.3947180735764555, + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + ], + [ + 0.570482097206368, + 0.3947180735764555, + 1.0, + 0.3947180735764555, + 0.570482097206368, + ], + [ + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + 1.0, + 0.3947180735764555, + ], + [ + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + 1.0, + ], + ] + ) + actual = nx.simrank_similarity_numpy(G) + numpy.testing.assert_allclose(expected, actual, atol=1e-7) + + def test_simrank_numpy_source_no_target(self): + G = nx.cycle_graph(5) + expected = numpy.array( + [ + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + ] + ) + actual = nx.simrank_similarity_numpy(G, source=0) + numpy.testing.assert_allclose(expected, actual, atol=1e-7) + + def test_simrank_numpy_source_and_target(self): + G = nx.cycle_graph(5) + expected = 1.0 + actual = nx.simrank_similarity_numpy(G, source=0, target=0) + numpy.testing.assert_allclose(expected, actual, atol=1e-7) diff --git a/networkx/algorithms/tests/test_simple_paths.py b/networkx/algorithms/tests/test_simple_paths.py index 92de987..57e9d82 100644 --- a/networkx/algorithms/tests/test_simple_paths.py +++ b/networkx/algorithms/tests/test_simple_paths.py @@ -1,19 +1,16 @@ import random -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_raises -from nose.tools import assert_true -from nose.tools import raises +import pytest import networkx as nx from networkx import convert_node_labels_to_integers as cnlti -from networkx.algorithms.simple_paths import _bidirectional_shortest_path from networkx.algorithms.simple_paths import _bidirectional_dijkstra +from networkx.algorithms.simple_paths import _bidirectional_shortest_path from networkx.utils import arbitrary_element +from networkx.utils import pairwise -class TestIsSimplePath(object): +class TestIsSimplePath: """Unit tests for the :func:`networkx.algorithms.simple_paths.is_simple_path` function. 
@@ -26,7 +23,7 @@ def test_empty_list(self): """ G = nx.trivial_graph() - assert_false(nx.is_simple_path(G, [])) + assert not nx.is_simple_path(G, []) def test_trivial_path(self): """Tests that the trivial path, a path of length one, is @@ -34,7 +31,7 @@ def test_trivial_path(self): """ G = nx.trivial_graph() - assert_true(nx.is_simple_path(G, [0])) + assert nx.is_simple_path(G, [0]) def test_trivial_nonpath(self): """Tests that a list whose sole element is an object not in the @@ -42,76 +39,156 @@ def test_trivial_nonpath(self): """ G = nx.trivial_graph() - assert_false(nx.is_simple_path(G, ['not a node'])) + assert not nx.is_simple_path(G, ["not a node"]) def test_simple_path(self): G = nx.path_graph(2) - assert_true(nx.is_simple_path(G, [0, 1])) + assert nx.is_simple_path(G, [0, 1]) def test_non_simple_path(self): G = nx.path_graph(2) - assert_false(nx.is_simple_path(G, [0, 1, 0])) + assert not nx.is_simple_path(G, [0, 1, 0]) def test_cycle(self): G = nx.cycle_graph(3) - assert_false(nx.is_simple_path(G, [0, 1, 2, 0])) + assert not nx.is_simple_path(G, [0, 1, 2, 0]) def test_missing_node(self): G = nx.path_graph(2) - assert_false(nx.is_simple_path(G, [0, 2])) + assert not nx.is_simple_path(G, [0, 2]) def test_directed_path(self): G = nx.DiGraph([(0, 1), (1, 2)]) - assert_true(nx.is_simple_path(G, [0, 1, 2])) + assert nx.is_simple_path(G, [0, 1, 2]) def test_directed_non_path(self): G = nx.DiGraph([(0, 1), (1, 2)]) - assert_false(nx.is_simple_path(G, [2, 1, 0])) + assert not nx.is_simple_path(G, [2, 1, 0]) def test_directed_cycle(self): G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) - assert_false(nx.is_simple_path(G, [0, 1, 2, 0])) + assert not nx.is_simple_path(G, [0, 1, 2, 0]) def test_multigraph(self): G = nx.MultiGraph([(0, 1), (0, 1)]) - assert_true(nx.is_simple_path(G, [0, 1])) + assert nx.is_simple_path(G, [0, 1]) def test_multidigraph(self): G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 0), (1, 0)]) - assert_true(nx.is_simple_path(G, [0, 1])) + assert nx.is_simple_path(G, [0, 1]) # Tests for all_simple_paths def test_all_simple_paths(): G = nx.path_graph(4) paths = nx.all_simple_paths(G, 0, 3) - assert_equal(set(tuple(p) for p in paths), {(0, 1, 2, 3)}) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3)} + + +def test_all_simple_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_digraph_all_simple_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_all_simple_paths_with_two_targets_cutoff(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_digraph_all_simple_paths_with_two_targets_cutoff(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_all_simple_paths_with_two_targets_in_line_emits_two_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {(0, 1, 2), (0, 1, 2, 3)} + + +def test_all_simple_paths_ignores_cycle(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_paths(G, 
0, 3) + assert {tuple(p) for p in paths} == {(0, 1, 3)} + + +def test_all_simple_paths_with_two_targets_inside_cycle_emits_two_paths(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {(0, 1, 2), (0, 1, 3)} def test_all_simple_paths_source_target(): G = nx.path_graph(4) paths = nx.all_simple_paths(G, 1, 1) - assert_equal(paths, []) + assert list(paths) == [] def test_all_simple_paths_cutoff(): G = nx.complete_graph(4) paths = nx.all_simple_paths(G, 0, 1, cutoff=1) - assert_equal(set(tuple(p) for p in paths), {(0, 1)}) + assert {tuple(p) for p in paths} == {(0, 1)} paths = nx.all_simple_paths(G, 0, 1, cutoff=2) - assert_equal(set(tuple(p) for p in paths), {(0, 1), (0, 2, 1), (0, 3, 1)}) + assert {tuple(p) for p in paths} == {(0, 1), (0, 2, 1), (0, 3, 1)} + + +def test_all_simple_paths_on_non_trivial_graph(): + """ you may need to draw this graph to make sure it is reasonable """ + G = nx.path_graph(5, create_using=nx.DiGraph()) + G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)]) + paths = nx.all_simple_paths(G, 1, [2, 3]) + assert {tuple(p) for p in paths} == { + (1, 2), + (1, 3, 4, 2), + (1, 5, 4, 2), + (1, 3), + (1, 2, 3), + (1, 5, 4, 3), + (1, 5, 4, 2, 3), + } + paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=3) + assert {tuple(p) for p in paths} == { + (1, 2), + (1, 3, 4, 2), + (1, 5, 4, 2), + (1, 3), + (1, 2, 3), + (1, 5, 4, 3), + } + paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=2) + assert {tuple(p) for p in paths} == {(1, 2), (1, 3), (1, 2, 3)} def test_all_simple_paths_multigraph(): G = nx.MultiGraph([(1, 2), (1, 2)]) - paths = nx.all_simple_paths(G, 1, 2) - assert_equal(set(tuple(p) for p in paths), {(1, 2), (1, 2)}) + paths = nx.all_simple_paths(G, 1, 1) + assert list(paths) == [] + nx.add_path(G, [3, 1, 10, 2]) + paths = list(nx.all_simple_paths(G, 1, 2)) + assert len(paths) == 3 + assert {tuple(p) for p in paths} == {(1, 2), (1, 2), (1, 10, 2)} def test_all_simple_paths_multigraph_with_cutoff(): G = nx.MultiGraph([(1, 2), (1, 2), (1, 10), (10, 2)]) - paths = nx.all_simple_paths(G, 1, 2, cutoff=1) - assert_equal(set(tuple(p) for p in paths), {(1, 2), (1, 2)}) + paths = list(nx.all_simple_paths(G, 1, 2, cutoff=1)) + assert len(paths) == 2 + assert {tuple(p) for p in paths} == {(1, 2), (1, 2)} def test_all_simple_paths_directed(): @@ -119,18 +196,24 @@ def test_all_simple_paths_directed(): nx.add_path(G, [1, 2, 3]) nx.add_path(G, [3, 2, 1]) paths = nx.all_simple_paths(G, 1, 3) - assert_equal(set(tuple(p) for p in paths), {(1, 2, 3)}) + assert {tuple(p) for p in paths} == {(1, 2, 3)} def test_all_simple_paths_empty(): G = nx.path_graph(4) paths = nx.all_simple_paths(G, 0, 3, cutoff=2) - assert_equal(list(list(p) for p in paths), []) + assert list(paths) == [] + + +def test_all_simple_paths_corner_cases(): + assert list(nx.all_simple_paths(nx.empty_graph(2), 0, 0)) == [] + assert list(nx.all_simple_paths(nx.empty_graph(2), 0, 1)) == [] + assert list(nx.all_simple_paths(nx.path_graph(9), 0, 8, 0)) == [] def hamiltonian_path(G, source): source = arbitrary_element(G) - neighbors = set(G[source]) - set([source]) + neighbors = set(G[source]) - {source} n = len(G) for target in neighbors: for path in nx.all_simple_paths(G, source, target): @@ -140,49 +223,262 @@ def hamiltonian_path(G, source): def test_hamiltonian_path(): from itertools import permutations + G = nx.complete_graph(4) paths = [list(p) for p in hamiltonian_path(G, 0)] exact = [[0] + list(p) for p in 
permutations([1, 2, 3], 3)] - assert_equal(sorted(paths), sorted(exact)) + assert sorted(paths) == sorted(exact) def test_cutoff_zero(): G = nx.complete_graph(4) paths = nx.all_simple_paths(G, 0, 3, cutoff=0) - assert_equal(list(list(p) for p in paths), []) + assert list(list(p) for p in paths) == [] paths = nx.all_simple_paths(nx.MultiGraph(G), 0, 3, cutoff=0) - assert_equal(list(list(p) for p in paths), []) + assert list(list(p) for p in paths) == [] -@raises(nx.NodeNotFound) def test_source_missing(): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - paths = list(nx.all_simple_paths(nx.MultiGraph(G), 0, 3)) + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_paths(nx.MultiGraph(G), 0, 3)) -@raises(nx.NodeNotFound) def test_target_missing(): - G = nx.Graph() + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_paths(nx.MultiGraph(G), 1, 4)) + + +# Tests for all_simple_edge_paths +def test_all_simple_edge_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 3) + assert {tuple(p) for p in paths} == {((0, 1), (1, 2), (2, 3))} + + +def test_all_simple_edge_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_digraph_all_simple_edge_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_all_simple_edge_paths_with_two_targets_cutoff(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_digraph_all_simple_edge_paths_with_two_targets_cutoff(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_all_simple_edge_paths_with_two_targets_in_line_emits_two_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {((0, 1), (1, 2)), ((0, 1), (1, 2), (2, 3))} + + +def test_all_simple_edge_paths_ignores_cycle(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_edge_paths(G, 0, 3) + assert {tuple(p) for p in paths} == {((0, 1), (1, 3))} + + +def test_all_simple_edge_paths_with_two_targets_inside_cycle_emits_two_paths(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_edge_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {((0, 1), (1, 2)), ((0, 1), (1, 3))} + + +def test_all_simple_edge_paths_source_target(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 1, 1) + assert list(paths) == [] + + +def test_all_simple_edge_paths_cutoff(): + G = nx.complete_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 1, cutoff=1) + assert {tuple(p) for p in paths} == {((0, 1),)} + paths = nx.all_simple_edge_paths(G, 0, 1, cutoff=2) + assert {tuple(p) for p in paths} == {((0, 1),), ((0, 2), (2, 1)), ((0, 3), (3, 1))} + + +def test_all_simple_edge_paths_on_non_trivial_graph(): + """ 
you may need to draw this graph to make sure it is reasonable """ + G = nx.path_graph(5, create_using=nx.DiGraph()) + G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)]) + paths = nx.all_simple_edge_paths(G, 1, [2, 3]) + assert {tuple(p) for p in paths} == { + ((1, 2),), + ((1, 3), (3, 4), (4, 2)), + ((1, 5), (5, 4), (4, 2)), + ((1, 3),), + ((1, 2), (2, 3)), + ((1, 5), (5, 4), (4, 3)), + ((1, 5), (5, 4), (4, 2), (2, 3)), + } + paths = nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=3) + assert {tuple(p) for p in paths} == { + ((1, 2),), + ((1, 3), (3, 4), (4, 2)), + ((1, 5), (5, 4), (4, 2)), + ((1, 3),), + ((1, 2), (2, 3)), + ((1, 5), (5, 4), (4, 3)), + } + paths = nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=2) + assert {tuple(p) for p in paths} == {((1, 2),), ((1, 3),), ((1, 2), (2, 3))} + + +def test_all_simple_edge_paths_multigraph(): + G = nx.MultiGraph([(1, 2), (1, 2)]) + paths = nx.all_simple_edge_paths(G, 1, 1) + assert list(paths) == [] + nx.add_path(G, [3, 1, 10, 2]) + paths = list(nx.all_simple_edge_paths(G, 1, 2)) + assert len(paths) == 3 + assert {tuple(p) for p in paths} == { + ((1, 2, 0),), + ((1, 2, 1),), + ((1, 10, 0), (10, 2, 0)), + } + + +def test_all_simple_edge_paths_multigraph_with_cutoff(): + G = nx.MultiGraph([(1, 2), (1, 2), (1, 10), (10, 2)]) + paths = list(nx.all_simple_edge_paths(G, 1, 2, cutoff=1)) + assert len(paths) == 2 + assert {tuple(p) for p in paths} == {((1, 2, 0),), ((1, 2, 1),)} + + +def test_all_simple_edge_paths_directed(): + G = nx.DiGraph() nx.add_path(G, [1, 2, 3]) - paths = list(nx.all_simple_paths(nx.MultiGraph(G), 1, 4)) + nx.add_path(G, [3, 2, 1]) + paths = nx.all_simple_edge_paths(G, 1, 3) + assert {tuple(p) for p in paths} == {((1, 2), (2, 3))} + + +def test_all_simple_edge_paths_empty(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 3, cutoff=2) + assert list(paths) == [] + + +def test_all_simple_edge_paths_corner_cases(): + assert list(nx.all_simple_edge_paths(nx.empty_graph(2), 0, 0)) == [] + assert list(nx.all_simple_edge_paths(nx.empty_graph(2), 0, 1)) == [] + assert list(nx.all_simple_edge_paths(nx.path_graph(9), 0, 8, 0)) == [] -# Tests for shortest_simple_paths + +def hamiltonian_edge_path(G, source): + source = arbitrary_element(G) + neighbors = set(G[source]) - {source} + n = len(G) + for target in neighbors: + for path in nx.all_simple_edge_paths(G, source, target): + if len(path) == n - 1: + yield path + + +def test_hamiltonian__edge_path(): + from itertools import permutations + + G = nx.complete_graph(4) + paths = hamiltonian_edge_path(G, 0) + exact = [list(pairwise([0] + list(p))) for p in permutations([1, 2, 3], 3)] + assert sorted(exact) == [p for p in sorted(paths)] + + +def test_edge_cutoff_zero(): + G = nx.complete_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 3, cutoff=0) + assert list(list(p) for p in paths) == [] + paths = nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3, cutoff=0) + assert list(list(p) for p in paths) == [] + + +def test_edge_source_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3)) + + +def test_edge_target_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_edge_paths(nx.MultiGraph(G), 1, 4)) +# Tests for shortest_simple_paths def test_shortest_simple_paths(): G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") paths = nx.shortest_simple_paths(G, 1, 12) - assert_equal(next(paths), [1, 
2, 3, 4, 8, 12]) - assert_equal(next(paths), [1, 5, 6, 7, 8, 12]) - assert_equal([len(path) for path in nx.shortest_simple_paths(G, 1, 12)], - sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)])) + assert next(paths) == [1, 2, 3, 4, 8, 12] + assert next(paths) == [1, 5, 6, 7, 8, 12] + assert [len(path) for path in nx.shortest_simple_paths(G, 1, 12)] == sorted( + [len(path) for path in nx.all_simple_paths(G, 1, 12)] + ) def test_shortest_simple_paths_directed(): G = nx.cycle_graph(7, create_using=nx.DiGraph()) paths = nx.shortest_simple_paths(G, 0, 3) - assert_equal([path for path in paths], [[0, 1, 2, 3]]) + assert [path for path in paths] == [[0, 1, 2, 3]] + + +def test_shortest_simple_paths_directed_with_weight_function(): + def cost(u, v, x): + return 1 + + G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + paths = nx.shortest_simple_paths(G, 1, 12) + assert next(paths) == [1, 2, 3, 4, 8, 12] + assert next(paths) == [1, 5, 6, 7, 8, 12] + assert [ + len(path) for path in nx.shortest_simple_paths(G, 1, 12, weight=cost) + ] == sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)]) + + +def test_shortest_simple_paths_with_weight_function(): + def cost(u, v, x): + return 1 + + G = nx.cycle_graph(7, create_using=nx.DiGraph()) + paths = nx.shortest_simple_paths(G, 0, 3, weight=cost) + assert [path for path in paths] == [[0, 1, 2, 3]] def test_Greg_Bernstein(): @@ -194,151 +490,165 @@ def test_Greg_Bernstein(): g1.add_edge("N3", "N0", weight=10.0, capacity=50, name="L0") g1.add_edge("N2", "N3", weight=12.0, capacity=30, name="L2") g1.add_edge("N1", "N2", weight=15.0, capacity=42, name="L3") - solution = [['N1', 'N0', 'N3'], ['N1', 'N2', 'N3'], ['N1', 'N4', 'N0', 'N3']] - result = list(nx.shortest_simple_paths(g1, 'N1', 'N3', weight='weight')) - assert_equal(result, solution) + solution = [["N1", "N0", "N3"], ["N1", "N2", "N3"], ["N1", "N4", "N0", "N3"]] + result = list(nx.shortest_simple_paths(g1, "N1", "N3", weight="weight")) + assert result == solution def test_weighted_shortest_simple_path(): def cost_func(path): - return sum(G.adj[u][v]['weight'] for (u, v) in zip(path, path[1:])) + return sum(G.adj[u][v]["weight"] for (u, v) in zip(path, path[1:])) + G = nx.complete_graph(5) weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()} - nx.set_edge_attributes(G, weight, 'weight') + nx.set_edge_attributes(G, weight, "weight") cost = 0 - for path in nx.shortest_simple_paths(G, 0, 3, weight='weight'): + for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"): this_cost = cost_func(path) - assert_true(cost <= this_cost) + assert cost <= this_cost cost = this_cost def test_directed_weighted_shortest_simple_path(): def cost_func(path): - return sum(G.adj[u][v]['weight'] for (u, v) in zip(path, path[1:])) + return sum(G.adj[u][v]["weight"] for (u, v) in zip(path, path[1:])) + G = nx.complete_graph(5) G = G.to_directed() weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()} - nx.set_edge_attributes(G, weight, 'weight') + nx.set_edge_attributes(G, weight, "weight") cost = 0 - for path in nx.shortest_simple_paths(G, 0, 3, weight='weight'): + for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"): this_cost = cost_func(path) - assert_true(cost <= this_cost) + assert cost <= this_cost cost = this_cost def test_weighted_shortest_simple_path_issue2427(): G = nx.Graph() - G.add_edge('IN', 'OUT', weight=2) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=2) - G.add_edge('B', 'OUT', weight=2) -
assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")), - [['IN', 'OUT'], ['IN', 'B', 'OUT']]) + G.add_edge("IN", "OUT", weight=2) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=2) + G.add_edge("B", "OUT", weight=2) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "OUT"], + ["IN", "B", "OUT"], + ] G = nx.Graph() - G.add_edge('IN', 'OUT', weight=10) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=1) - G.add_edge('B', 'OUT', weight=1) - assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")), - [['IN', 'B', 'OUT'], ['IN', 'OUT']]) + G.add_edge("IN", "OUT", weight=10) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=1) + G.add_edge("B", "OUT", weight=1) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "B", "OUT"], + ["IN", "OUT"], + ] def test_directed_weighted_shortest_simple_path_issue2427(): G = nx.DiGraph() - G.add_edge('IN', 'OUT', weight=2) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=2) - G.add_edge('B', 'OUT', weight=2) - assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")), - [['IN', 'OUT'], ['IN', 'B', 'OUT']]) + G.add_edge("IN", "OUT", weight=2) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=2) + G.add_edge("B", "OUT", weight=2) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "OUT"], + ["IN", "B", "OUT"], + ] G = nx.DiGraph() - G.add_edge('IN', 'OUT', weight=10) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=1) - G.add_edge('B', 'OUT', weight=1) - assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")), - [['IN', 'B', 'OUT'], ['IN', 'OUT']]) + G.add_edge("IN", "OUT", weight=10) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=1) + G.add_edge("B", "OUT", weight=1) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "B", "OUT"], + ["IN", "OUT"], + ] def test_weight_name(): G = nx.cycle_graph(7) - nx.set_edge_attributes(G, 1, 'weight') - nx.set_edge_attributes(G, 1, 'foo') - G.adj[1][2]['foo'] = 7 - paths = list(nx.shortest_simple_paths(G, 0, 3, weight='foo')) + nx.set_edge_attributes(G, 1, "weight") + nx.set_edge_attributes(G, 1, "foo") + G.adj[1][2]["foo"] = 7 + paths = list(nx.shortest_simple_paths(G, 0, 3, weight="foo")) solution = [[0, 6, 5, 4, 3], [0, 1, 2, 3]] - assert_equal(paths, solution) + assert paths == solution -@raises(nx.NodeNotFound) def test_ssp_source_missing(): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - paths = list(nx.shortest_simple_paths(G, 0, 3)) + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.shortest_simple_paths(G, 0, 3)) -@raises(nx.NodeNotFound) def test_ssp_target_missing(): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - paths = list(nx.shortest_simple_paths(G, 1, 4)) + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.shortest_simple_paths(G, 1, 4)) -@raises(nx.NetworkXNotImplemented) def test_ssp_multigraph(): - G = nx.MultiGraph() - nx.add_path(G, [1, 2, 3]) - paths = list(nx.shortest_simple_paths(G, 1, 4)) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.MultiGraph() + nx.add_path(G, [1, 2, 3]) + list(nx.shortest_simple_paths(G, 1, 4)) -@raises(nx.NetworkXNoPath) -def test_ssp_source_missing(): - G = nx.Graph() - nx.add_path(G, [0, 1, 2]) - nx.add_path(G, [3, 4, 5]) - paths = 
list(nx.shortest_simple_paths(G, 0, 3))
+def test_ssp_source_missing2():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2])
+        nx.add_path(G, [3, 4, 5])
+        list(nx.shortest_simple_paths(G, 0, 3))


 def test_bidirectional_shortest_path_restricted_cycle():
     cycle = nx.cycle_graph(7)
     length, path = _bidirectional_shortest_path(cycle, 0, 3)
-    assert_equal(path, [0, 1, 2, 3])
+    assert path == [0, 1, 2, 3]
     length, path = _bidirectional_shortest_path(cycle, 0, 3, ignore_nodes=[1])
-    assert_equal(path, [0, 6, 5, 4, 3])
+    assert path == [0, 6, 5, 4, 3]


 def test_bidirectional_shortest_path_restricted_wheel():
     wheel = nx.wheel_graph(6)
     length, path = _bidirectional_shortest_path(wheel, 1, 3)
-    assert_true(path in [[1, 0, 3], [1, 2, 3]])
+    assert path in [[1, 0, 3], [1, 2, 3]]
     length, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0])
-    assert_equal(path, [1, 2, 3])
+    assert path == [1, 2, 3]
     length, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0, 2])
-    assert_equal(path, [1, 5, 4, 3])
-    length, path = _bidirectional_shortest_path(wheel, 1, 3,
-                                                ignore_edges=[(1, 0), (5, 0), (2, 3)])
-    assert_true(path in [[1, 2, 0, 3], [1, 5, 4, 3]])
+    assert path == [1, 5, 4, 3]
+    length, path = _bidirectional_shortest_path(
+        wheel, 1, 3, ignore_edges=[(1, 0), (5, 0), (2, 3)]
+    )
+    assert path in [[1, 2, 0, 3], [1, 5, 4, 3]]


 def test_bidirectional_shortest_path_restricted_directed_cycle():
     directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
     length, path = _bidirectional_shortest_path(directed_cycle, 0, 3)
-    assert_equal(path, [0, 1, 2, 3])
-    assert_raises(
+    assert path == [0, 1, 2, 3]
+    pytest.raises(
         nx.NetworkXNoPath,
         _bidirectional_shortest_path,
         directed_cycle,
-        0, 3,
+        0,
+        3,
         ignore_nodes=[1],
     )
-    length, path = _bidirectional_shortest_path(directed_cycle, 0, 3,
-                                                ignore_edges=[(2, 1)])
-    assert_equal(path, [0, 1, 2, 3])
-    assert_raises(
+    length, path = _bidirectional_shortest_path(
+        directed_cycle, 0, 3, ignore_edges=[(2, 1)]
+    )
+    assert path == [0, 1, 2, 3]
+    pytest.raises(
         nx.NetworkXNoPath,
         _bidirectional_shortest_path,
         directed_cycle,
-        0, 3,
+        0,
+        3,
         ignore_edges=[(1, 2)],
     )

@@ -348,116 +658,107 @@ def test_bidirectional_shortest_path_ignore():
     nx.add_path(G, [1, 2])
     nx.add_path(G, [1, 3])
     nx.add_path(G, [1, 4])
-    assert_raises(
-        nx.NetworkXNoPath,
-        _bidirectional_shortest_path,
-        G,
-        1, 2,
-        ignore_nodes=[1],
+    pytest.raises(
+        nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[1]
     )
-    assert_raises(
-        nx.NetworkXNoPath,
-        _bidirectional_shortest_path,
-        G,
-        1, 2,
-        ignore_nodes=[2],
+    pytest.raises(
+        nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[2]
     )
     G = nx.Graph()
     nx.add_path(G, [1, 3])
     nx.add_path(G, [1, 4])
     nx.add_path(G, [3, 2])
-    assert_raises(
-        nx.NetworkXNoPath,
-        _bidirectional_shortest_path,
-        G,
-        1, 2,
-        ignore_nodes=[1, 2],
+    pytest.raises(
+        nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[1, 2]
     )


 def validate_path(G, s, t, soln_len, path):
-    assert_equal(path[0], s)
-    assert_equal(path[-1], t)
-    assert_equal(soln_len, sum(G[u][v].get('weight', 1)
-                               for u, v in zip(path[:-1], path[1:])))
+    assert path[0] == s
+    assert path[-1] == t
+    assert soln_len == sum(
+        G[u][v].get("weight", 1) for u, v in zip(path[:-1], path[1:])
+    )


 def validate_length_path(G, s, t, soln_len, length, path):
-    assert_equal(soln_len, length)
+    assert soln_len == length
     validate_path(G, s, t, length, path)


 def test_bidirectional_dijkstra_restricted():
     XG =
nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) XG3 = nx.Graph() - XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], - [2, 3, 1], [3, 4, 5], - [4, 5, 1], [5, 0, 10]]) - validate_length_path(XG, 's', 'v', 9, - *_bidirectional_dijkstra(XG, 's', 'v')) - validate_length_path(XG, 's', 'v', 10, - *_bidirectional_dijkstra(XG, 's', 'v', ignore_nodes=['u'])) - validate_length_path(XG, 's', 'v', 11, - *_bidirectional_dijkstra(XG, 's', 'v', ignore_edges=[('s', 'x')])) - assert_raises( + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) + validate_length_path(XG, "s", "v", 9, *_bidirectional_dijkstra(XG, "s", "v")) + validate_length_path( + XG, "s", "v", 10, *_bidirectional_dijkstra(XG, "s", "v", ignore_nodes=["u"]) + ) + validate_length_path( + XG, + "s", + "v", + 11, + *_bidirectional_dijkstra(XG, "s", "v", ignore_edges=[("s", "x")]) + ) + pytest.raises( nx.NetworkXNoPath, _bidirectional_dijkstra, XG, - 's', 'v', - ignore_nodes=['u'], - ignore_edges=[('s', 'x')], + "s", + "v", + ignore_nodes=["u"], + ignore_edges=[("s", "x")], ) validate_length_path(XG3, 0, 3, 15, *_bidirectional_dijkstra(XG3, 0, 3)) - validate_length_path(XG3, 0, 3, 16, - *_bidirectional_dijkstra(XG3, 0, 3, ignore_nodes=[1])) - validate_length_path(XG3, 0, 3, 16, - *_bidirectional_dijkstra(XG3, 0, 3, ignore_edges=[(2, 3)])) - assert_raises( + validate_length_path( + XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_nodes=[1]) + ) + validate_length_path( + XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_edges=[(2, 3)]) + ) + pytest.raises( nx.NetworkXNoPath, _bidirectional_dijkstra, XG3, - 0, 3, + 0, + 3, ignore_nodes=[1], ignore_edges=[(5, 4)], ) -@raises(nx.NetworkXNoPath) def test_bidirectional_dijkstra_no_path(): - G = nx.Graph() - nx.add_path(G, [1, 2, 3]) - nx.add_path(G, [4, 5, 6]) - path = _bidirectional_dijkstra(G, 1, 6) + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5, 6]) + _bidirectional_dijkstra(G, 1, 6) def test_bidirectional_dijkstra_ignore(): G = nx.Graph() nx.add_path(G, [1, 2, 10]) nx.add_path(G, [1, 3, 10]) - assert_raises( - nx.NetworkXNoPath, - _bidirectional_dijkstra, - G, - 1, 2, - ignore_nodes=[1], - ) - assert_raises( - nx.NetworkXNoPath, - _bidirectional_dijkstra, - G, - 1, 2, - ignore_nodes=[2], - ) - assert_raises( - nx.NetworkXNoPath, - _bidirectional_dijkstra, - G, - 1, 2, - ignore_nodes=[1, 2], + pytest.raises(nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[1]) + pytest.raises(nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[2]) + pytest.raises( + nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[1, 2] ) diff --git a/networkx/algorithms/tests/test_smallworld.py b/networkx/algorithms/tests/test_smallworld.py new file mode 100644 index 0000000..8b1b537 --- /dev/null +++ b/networkx/algorithms/tests/test_smallworld.py @@ -0,0 +1,59 @@ +import pytest + +numpy = pytest.importorskip("numpy") + +import random + +from networkx import random_reference, lattice_reference, sigma, omega +import networkx as nx + +rng = random.Random(0) +rng = 42 + + +def 
test_random_reference(): + G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng) + Gr = random_reference(G, niter=1, seed=rng) + C = nx.average_clustering(G) + Cr = nx.average_clustering(Gr) + assert C > Cr + + pytest.raises(nx.NetworkXError, random_reference, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, random_reference, nx.DiGraph()) + + H = nx.Graph(((0, 1), (2, 3))) + Hl = random_reference(H, niter=1, seed=rng) + + +def test_lattice_reference(): + G = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng) + Gl = lattice_reference(G, niter=1, seed=rng) + L = nx.average_shortest_path_length(G) + Ll = nx.average_shortest_path_length(Gl) + assert Ll > L + + pytest.raises(nx.NetworkXError, lattice_reference, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, lattice_reference, nx.DiGraph()) + + H = nx.Graph(((0, 1), (2, 3))) + Hl = lattice_reference(H, niter=1) + + +def test_sigma(): + Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng) + Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng) + sigmas = sigma(Gs, niter=1, nrand=2, seed=rng) + sigmar = sigma(Gr, niter=1, nrand=2, seed=rng) + assert sigmar < sigmas + + +def test_omega(): + Gl = nx.connected_watts_strogatz_graph(50, 6, 0, seed=rng) + Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng) + Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng) + omegal = omega(Gl, niter=1, nrand=1, seed=rng) + omegar = omega(Gr, niter=1, nrand=1, seed=rng) + omegas = omega(Gs, niter=1, nrand=1, seed=rng) + print("omegas, omegal, omegar") + print(omegas, omegal, omegar) + assert omegal < omegas and omegas < omegar diff --git a/networkx/algorithms/tests/test_smetric.py b/networkx/algorithms/tests/test_smetric.py index 06b33a5..b6c4570 100644 --- a/networkx/algorithms/tests/test_smetric.py +++ b/networkx/algorithms/tests/test_smetric.py @@ -1,5 +1,4 @@ - -from nose.tools import assert_equal, raises +import pytest import networkx as nx @@ -11,11 +10,13 @@ def test_smetric(): g.add_edge(2, 4) g.add_edge(1, 4) sm = nx.s_metric(g, normalized=False) - assert_equal(sm, 19.0) + assert sm == 19.0 + + # smNorm = nx.s_metric(g,normalized=True) # assert_equal(smNorm, 0.95) -@raises(nx.NetworkXError) def test_normalized(): - sm = nx.s_metric(nx.Graph(), normalized=True) + with pytest.raises(nx.NetworkXError): + sm = nx.s_metric(nx.Graph(), normalized=True) diff --git a/networkx/algorithms/tests/test_sparsifiers.py b/networkx/algorithms/tests/test_sparsifiers.py new file mode 100644 index 0000000..09e934d --- /dev/null +++ b/networkx/algorithms/tests/test_sparsifiers.py @@ -0,0 +1,137 @@ +"""Unit tests for the sparsifier computation functions.""" +import pytest +import networkx as nx +from networkx.utils import py_random_state + + +_seed = 2 + + +def _test_spanner(G, spanner, stretch, weight=None): + """Test whether a spanner is valid. + + This function tests whether the given spanner is a subgraph of the + given graph G with the same node set. It also tests for all shortest + paths whether they adhere to the given stretch. + + Parameters + ---------- + G : NetworkX graph + The original graph for which the spanner was constructed. + + spanner : NetworkX graph + The spanner to be tested. + + stretch : float + The proclaimed stretch of the spanner. + + weight : object + The edge attribute to use as distance. 
+    """
+    # check node set
+    assert set(G.nodes()) == set(spanner.nodes())
+
+    # check edge set and weights
+    for u, v in spanner.edges():
+        assert G.has_edge(u, v)
+        if weight:
+            assert spanner[u][v][weight] == G[u][v][weight]
+
+    # check connectivity and stretch
+    original_length = dict(nx.shortest_path_length(G, weight=weight))
+    spanner_length = dict(nx.shortest_path_length(spanner, weight=weight))
+    for u in G.nodes():
+        for v in G.nodes():
+            if u in original_length and v in original_length[u]:
+                assert spanner_length[u][v] <= stretch * original_length[u][v]
+
+
+@py_random_state(1)
+def _assign_random_weights(G, seed=None):
+    """Assigns random weights to the edges of a graph.
+
+    Parameters
+    ----------
+
+    G : NetworkX graph
+        The original graph for which the spanner was constructed.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    """
+    for u, v in G.edges():
+        G[u][v]["weight"] = seed.random()
+
+
+def test_spanner_trivial():
+    """Test a trivial spanner with stretch 1."""
+    G = nx.complete_graph(20)
+    spanner = nx.spanner(G, 1, seed=_seed)
+
+    for u, v in G.edges:
+        assert spanner.has_edge(u, v)
+
+
+def test_spanner_unweighted_complete_graph():
+    """Test spanner construction on a complete unweighted graph."""
+    G = nx.complete_graph(20)
+
+    spanner = nx.spanner(G, 4, seed=_seed)
+    _test_spanner(G, spanner, 4)
+
+    spanner = nx.spanner(G, 10, seed=_seed)
+    _test_spanner(G, spanner, 10)
+
+
+def test_spanner_weighted_complete_graph():
+    """Test spanner construction on a complete weighted graph."""
+    G = nx.complete_graph(20)
+    _assign_random_weights(G, seed=_seed)
+
+    spanner = nx.spanner(G, 4, weight="weight", seed=_seed)
+    _test_spanner(G, spanner, 4, weight="weight")
+
+    spanner = nx.spanner(G, 10, weight="weight", seed=_seed)
+    _test_spanner(G, spanner, 10, weight="weight")
+
+
+def test_spanner_unweighted_gnp_graph():
+    """Test spanner construction on an unweighted gnp graph."""
+    G = nx.gnp_random_graph(20, 0.4, seed=_seed)
+
+    spanner = nx.spanner(G, 4, seed=_seed)
+    _test_spanner(G, spanner, 4)
+
+    spanner = nx.spanner(G, 10, seed=_seed)
+    _test_spanner(G, spanner, 10)
+
+
+def test_spanner_weighted_gnp_graph():
+    """Test spanner construction on a weighted gnp graph."""
+    G = nx.gnp_random_graph(20, 0.4, seed=_seed)
+    _assign_random_weights(G, seed=_seed)
+
+    spanner = nx.spanner(G, 4, weight="weight", seed=_seed)
+    _test_spanner(G, spanner, 4, weight="weight")
+
+    spanner = nx.spanner(G, 10, weight="weight", seed=_seed)
+    _test_spanner(G, spanner, 10, weight="weight")
+
+
+def test_spanner_unweighted_disconnected_graph():
+    """Test spanner construction on a disconnected graph."""
+    G = nx.disjoint_union(nx.complete_graph(10), nx.complete_graph(10))
+
+    spanner = nx.spanner(G, 4, seed=_seed)
+    _test_spanner(G, spanner, 4)
+
+    spanner = nx.spanner(G, 10, seed=_seed)
+    _test_spanner(G, spanner, 10)
+
+
+def test_spanner_invalid_stretch():
+    """Check whether an invalid stretch is caught."""
+    with pytest.raises(ValueError):
+        G = nx.empty_graph()
+        nx.spanner(G, 0)
diff --git a/networkx/algorithms/tests/test_structuralholes.py b/networkx/algorithms/tests/test_structuralholes.py
index f419532..a0499e6 100644
--- a/networkx/algorithms/tests/test_structuralholes.py
+++ b/networkx/algorithms/tests/test_structuralholes.py
@@ -1,19 +1,10 @@
-# test_structuralholes.py - unit tests for the structuralholes module
-#
-# Copyright 2017 NetworkX developers.
-#
-# This file is part of NetworkX.
-# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.structuralholes` module.""" -from nose.tools import assert_almost_equal, assert_true - import math import networkx as nx +from networkx.testing import almost_equal -class TestStructuralHoles(object): +class TestStructuralHoles: """Unit tests for computing measures of structural holes. The expected values for these functions were originally computed using the @@ -31,95 +22,112 @@ def setup(self): self.D_weights = {(0, 1): 2, (0, 2): 2, (1, 0): 1, (2, 1): 1} # Example from http://www.analytictech.com/connections/v20(1)/holes.htm self.G = nx.Graph() - self.G.add_edges_from([ - ('A', 'B'), ('A', 'F'), ('A', 'G'), ('A', 'E'), ('E', 'G'), - ('F', 'G'), ('B', 'G'), ('B', 'D'), ('D', 'G'), ('G', 'C'), - ]) + self.G.add_edges_from( + [ + ("A", "B"), + ("A", "F"), + ("A", "G"), + ("A", "E"), + ("E", "G"), + ("F", "G"), + ("B", "G"), + ("B", "D"), + ("D", "G"), + ("G", "C"), + ] + ) self.G_weights = { - ('A', 'B'): 2, ('A', 'F'): 3, ('A', 'G'): 5, ('A', 'E'): 2, - ('E', 'G'): 8, ('F', 'G'): 3, ('B', 'G'): 4, ('B', 'D'): 1, - ('D', 'G'): 3, ('G', 'C'): 10, + ("A", "B"): 2, + ("A", "F"): 3, + ("A", "G"): 5, + ("A", "E"): 2, + ("E", "G"): 8, + ("F", "G"): 3, + ("B", "G"): 4, + ("B", "D"): 1, + ("D", "G"): 3, + ("G", "C"): 10, } def test_constraint_directed(self): constraint = nx.constraint(self.D) - assert_almost_equal(round(constraint[0], 3), 1.003) - assert_almost_equal(round(constraint[1], 3), 1.003) - assert_almost_equal(round(constraint[2], 3), 1.389) + assert almost_equal(constraint[0], 1.003, places=3) + assert almost_equal(constraint[1], 1.003, places=3) + assert almost_equal(constraint[2], 1.389, places=3) def test_effective_size_directed(self): effective_size = nx.effective_size(self.D) - assert_almost_equal(round(effective_size[0], 3), 1.167) - assert_almost_equal(round(effective_size[1], 3), 1.167) - assert_almost_equal(round(effective_size[2], 3), 1) + assert almost_equal(effective_size[0], 1.167, places=3) + assert almost_equal(effective_size[1], 1.167, places=3) + assert almost_equal(effective_size[2], 1, places=3) def test_constraint_weighted_directed(self): D = self.D.copy() - nx.set_edge_attributes(D, self.D_weights, 'weight') - constraint = nx.constraint(D, weight='weight') - assert_almost_equal(round(constraint[0], 3), 0.840) - assert_almost_equal(round(constraint[1], 3), 1.143) - assert_almost_equal(round(constraint[2], 3), 1.378) + nx.set_edge_attributes(D, self.D_weights, "weight") + constraint = nx.constraint(D, weight="weight") + assert almost_equal(constraint[0], 0.840, places=3) + assert almost_equal(constraint[1], 1.143, places=3) + assert almost_equal(constraint[2], 1.378, places=3) def test_effective_size_weighted_directed(self): D = self.D.copy() - nx.set_edge_attributes(D, self.D_weights, 'weight') - effective_size = nx.effective_size(D, weight='weight') - assert_almost_equal(round(effective_size[0], 3), 1.567) - assert_almost_equal(round(effective_size[1], 3), 1.083) - assert_almost_equal(round(effective_size[2], 3), 1) + nx.set_edge_attributes(D, self.D_weights, "weight") + effective_size = nx.effective_size(D, weight="weight") + assert almost_equal(effective_size[0], 1.567, places=3) + assert almost_equal(effective_size[1], 1.083, places=3) + assert almost_equal(effective_size[2], 1, places=3) def test_constraint_undirected(self): constraint = nx.constraint(self.G) - assert_almost_equal(round(constraint['G'], 3), 0.400) - 
assert_almost_equal(round(constraint['A'], 3), 0.595) - assert_almost_equal(round(constraint['C'], 3), 1) + assert almost_equal(constraint["G"], 0.400, places=3) + assert almost_equal(constraint["A"], 0.595, places=3) + assert almost_equal(constraint["C"], 1, places=3) def test_effective_size_undirected_borgatti(self): effective_size = nx.effective_size(self.G) - assert_almost_equal(round(effective_size['G'], 2), 4.67) - assert_almost_equal(round(effective_size['A'], 2), 2.50) - assert_almost_equal(round(effective_size['C'], 2), 1) + assert almost_equal(effective_size["G"], 4.67, places=2) + assert almost_equal(effective_size["A"], 2.50, places=2) + assert almost_equal(effective_size["C"], 1, places=2) def test_effective_size_undirected(self): G = self.G.copy() - nx.set_edge_attributes(G, 1, 'weight') - effective_size = nx.effective_size(G, weight='weight') - assert_almost_equal(round(effective_size['G'], 2), 4.67) - assert_almost_equal(round(effective_size['A'], 2), 2.50) - assert_almost_equal(round(effective_size['C'], 2), 1) + nx.set_edge_attributes(G, 1, "weight") + effective_size = nx.effective_size(G, weight="weight") + assert almost_equal(effective_size["G"], 4.67, places=2) + assert almost_equal(effective_size["A"], 2.50, places=2) + assert almost_equal(effective_size["C"], 1, places=2) def test_constraint_weighted_undirected(self): G = self.G.copy() - nx.set_edge_attributes(G, self.G_weights, 'weight') - constraint = nx.constraint(G, weight='weight') - assert_almost_equal(round(constraint['G'], 3), 0.299) - assert_almost_equal(round(constraint['A'], 3), 0.795) - assert_almost_equal(round(constraint['C'], 3), 1) + nx.set_edge_attributes(G, self.G_weights, "weight") + constraint = nx.constraint(G, weight="weight") + assert almost_equal(constraint["G"], 0.299, places=3) + assert almost_equal(constraint["A"], 0.795, places=3) + assert almost_equal(constraint["C"], 1, places=3) def test_effective_size_weighted_undirected(self): G = self.G.copy() - nx.set_edge_attributes(G, self.G_weights, 'weight') - effective_size = nx.effective_size(G, weight='weight') - assert_almost_equal(round(effective_size['G'], 2), 5.47) - assert_almost_equal(round(effective_size['A'], 2), 2.47) - assert_almost_equal(round(effective_size['C'], 2), 1) + nx.set_edge_attributes(G, self.G_weights, "weight") + effective_size = nx.effective_size(G, weight="weight") + assert almost_equal(effective_size["G"], 5.47, places=2) + assert almost_equal(effective_size["A"], 2.47, places=2) + assert almost_equal(effective_size["C"], 1, places=2) def test_constraint_isolated(self): G = self.G.copy() G.add_node(1) constraint = nx.constraint(G) - assert_true(math.isnan(constraint[1])) + assert math.isnan(constraint[1]) def test_effective_size_isolated(self): G = self.G.copy() G.add_node(1) - nx.set_edge_attributes(G, self.G_weights, 'weight') - effective_size = nx.effective_size(G, weight='weight') - assert_true(math.isnan(effective_size[1])) + nx.set_edge_attributes(G, self.G_weights, "weight") + effective_size = nx.effective_size(G, weight="weight") + assert math.isnan(effective_size[1]) def test_effective_size_borgatti_isolated(self): G = self.G.copy() G.add_node(1) effective_size = nx.effective_size(G) - assert_true(math.isnan(effective_size[1])) + assert math.isnan(effective_size[1]) diff --git a/networkx/algorithms/tests/test_swap.py b/networkx/algorithms/tests/test_swap.py index 807c306..e052a3b 100644 --- a/networkx/algorithms/tests/test_swap.py +++ b/networkx/algorithms/tests/test_swap.py @@ -1,50 +1,56 @@ 
-#!/usr/bin/env python -from nose.tools import * -from networkx import * +import pytest +import networkx as nx -import random -random.seed(0) +# import random +# random.seed(0) def test_double_edge_swap(): - graph = barabasi_albert_graph(200, 1) + graph = nx.barabasi_albert_graph(200, 1) degrees = sorted(d for n, d in graph.degree()) - G = double_edge_swap(graph, 40) - assert_equal(degrees, sorted(d for n, d in graph.degree())) + G = nx.double_edge_swap(graph, 40) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_double_edge_swap_seed(): + graph = nx.barabasi_albert_graph(200, 1) + degrees = sorted(d for n, d in graph.degree()) + G = nx.double_edge_swap(graph, 40, seed=1) + assert degrees == sorted(d for n, d in graph.degree()) def test_connected_double_edge_swap(): - graph = barabasi_albert_graph(200, 1) + graph = nx.barabasi_albert_graph(200, 1) degrees = sorted(d for n, d in graph.degree()) - G = connected_double_edge_swap(graph, 40) - assert_true(is_connected(graph)) - assert_equal(degrees, sorted(d for n, d in graph.degree())) + G = nx.connected_double_edge_swap(graph, 40, seed=1) + assert nx.is_connected(graph) + assert degrees == sorted(d for n, d in graph.degree()) -@raises(NetworkXError) def test_double_edge_swap_small(): - G = nx.double_edge_swap(nx.path_graph(3)) + with pytest.raises(nx.NetworkXError): + G = nx.double_edge_swap(nx.path_graph(3)) -@raises(NetworkXError) def test_double_edge_swap_tries(): - G = nx.double_edge_swap(nx.path_graph(10), nswap=1, max_tries=0) + with pytest.raises(nx.NetworkXError): + G = nx.double_edge_swap(nx.path_graph(10), nswap=1, max_tries=0) -@raises(NetworkXError) def test_connected_double_edge_swap_small(): - G = nx.connected_double_edge_swap(nx.path_graph(3)) + with pytest.raises(nx.NetworkXError): + G = nx.connected_double_edge_swap(nx.path_graph(3)) -@raises(NetworkXError) def test_connected_double_edge_swap_not_connected(): - G = nx.path_graph(3) - nx.add_path(G, [10, 11, 12]) - G = nx.connected_double_edge_swap(G) + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(3) + nx.add_path(G, [10, 11, 12]) + G = nx.connected_double_edge_swap(G) def test_degree_seq_c4(): - G = cycle_graph(4) + G = nx.cycle_graph(4) degrees = sorted(d for n, d in G.degree()) - G = double_edge_swap(G, 1, 100) - assert_equal(degrees, sorted(d for n, d in G.degree())) + G = nx.double_edge_swap(G, 1, 100) + assert degrees == sorted(d for n, d in G.degree()) diff --git a/networkx/algorithms/tests/test_threshold.py b/networkx/algorithms/tests/test_threshold.py index b41bb74..ff3a5ab 100644 --- a/networkx/algorithms/tests/test_threshold.py +++ b/networkx/algorithms/tests/test_threshold.py @@ -1,164 +1,197 @@ -#!/usr/bin/env python """ Threshold Graphs ================ """ -from nose.tools import assert_true, assert_false, assert_equal, assert_almost_equal, assert_raises -from nose import SkipTest -from nose.plugins.attrib import attr +import pytest + import networkx as nx import networkx.algorithms.threshold as nxt from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic +from networkx.testing import almost_equal cnlti = nx.convert_node_labels_to_integers -class TestGeneratorThreshold(): +class TestGeneratorThreshold: def test_threshold_sequence_graph_test(self): G = nx.star_graph(10) - assert_true(nxt.is_threshold_graph(G)) - assert_true(nxt.is_threshold_sequence(list(d for n, d in G.degree()))) + assert nxt.is_threshold_graph(G) + assert nxt.is_threshold_sequence(list(d for n, d in G.degree())) G = nx.complete_graph(10) 
- assert_true(nxt.is_threshold_graph(G)) - assert_true(nxt.is_threshold_sequence(list(d for n, d in G.degree()))) + assert nxt.is_threshold_graph(G) + assert nxt.is_threshold_sequence(list(d for n, d in G.degree())) deg = [3, 2, 2, 1, 1, 1] - assert_false(nxt.is_threshold_sequence(deg)) + assert not nxt.is_threshold_sequence(deg) deg = [3, 2, 2, 1] - assert_true(nxt.is_threshold_sequence(deg)) + assert nxt.is_threshold_sequence(deg) G = nx.generators.havel_hakimi_graph(deg) - assert_true(nxt.is_threshold_graph(G)) + assert nxt.is_threshold_graph(G) def test_creation_sequences(self): deg = [3, 2, 2, 1] G = nx.generators.havel_hakimi_graph(deg) - with assert_raises(ValueError): + with pytest.raises(ValueError): nxt.creation_sequence(deg, with_labels=True, compact=True) cs0 = nxt.creation_sequence(deg) H0 = nxt.threshold_graph(cs0) - assert_equal(''.join(cs0), 'ddid') + assert "".join(cs0) == "ddid" cs1 = nxt.creation_sequence(deg, with_labels=True) H1 = nxt.threshold_graph(cs1) - assert_equal(cs1, [(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')]) + assert cs1 == [(1, "d"), (2, "d"), (3, "i"), (0, "d")] cs2 = nxt.creation_sequence(deg, compact=True) H2 = nxt.threshold_graph(cs2) - assert_equal(cs2, [2, 1, 1]) - assert_equal(''.join(nxt.uncompact(cs2)), 'ddid') - assert_true(graph_could_be_isomorphic(H0, G)) - assert_true(graph_could_be_isomorphic(H0, H1)) - assert_true(graph_could_be_isomorphic(H0, H2)) + assert cs2 == [2, 1, 1] + assert "".join(nxt.uncompact(cs2)) == "ddid" + assert graph_could_be_isomorphic(H0, G) + assert graph_could_be_isomorphic(H0, H1) + assert graph_could_be_isomorphic(H0, H2) def test_make_compact(self): - assert_equal(nxt.make_compact(['d', 'd', 'd', 'i', 'd', 'd']), [3, 1, 2]) - assert_equal(nxt.make_compact([3, 1, 2]), [3, 1, 2]) - assert_raises(TypeError, nxt.make_compact, [3., 1., 2.]) + assert nxt.make_compact(["d", "d", "d", "i", "d", "d"]) == [3, 1, 2] + assert nxt.make_compact([3, 1, 2]) == [3, 1, 2] + assert pytest.raises(TypeError, nxt.make_compact, [3.0, 1.0, 2.0]) def test_uncompact(self): - assert_equal(nxt.uncompact([3, 1, 2]), ['d', 'd', 'd', 'i', 'd', 'd']) - assert_equal(nxt.uncompact(['d', 'd', 'i', 'd']), ['d', 'd', 'i', 'd']) - assert_equal(nxt.uncompact(nxt.uncompact([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])), - nxt.uncompact([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])) - assert_raises(TypeError, nxt.uncompact, [3., 1., 2.]) + assert nxt.uncompact([3, 1, 2]) == ["d", "d", "d", "i", "d", "d"] + assert nxt.uncompact(["d", "d", "i", "d"]) == ["d", "d", "i", "d"] + assert nxt.uncompact( + nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + ) == nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + assert pytest.raises(TypeError, nxt.uncompact, [3.0, 1.0, 2.0]) def test_creation_sequence_to_weights(self): - assert_equal(nxt.creation_sequence_to_weights([3, 1, 2]), [0.5, 0.5, 0.5, 0.25, 0.75, 0.75]) - assert_raises(TypeError, nxt.creation_sequence_to_weights, [3., 1., 2.]) + assert nxt.creation_sequence_to_weights([3, 1, 2]) == [ + 0.5, + 0.5, + 0.5, + 0.25, + 0.75, + 0.75, + ] + assert pytest.raises( + TypeError, nxt.creation_sequence_to_weights, [3.0, 1.0, 2.0] + ) def test_weights_to_creation_sequence(self): deg = [3, 2, 2, 1] - with assert_raises(ValueError): + with pytest.raises(ValueError): nxt.weights_to_creation_sequence(deg, with_labels=True, compact=True) - assert_equal(nxt.weights_to_creation_sequence(deg, with_labels=True), - [(3, 'd'), (1, 'd'), (2, 'd'), (0, 'd')]) - assert_equal(nxt.weights_to_creation_sequence(deg, compact=True), [4]) + 
assert nxt.weights_to_creation_sequence(deg, with_labels=True) == [ + (3, "d"), + (1, "d"), + (2, "d"), + (0, "d"), + ] + assert nxt.weights_to_creation_sequence(deg, compact=True) == [4] def test_find_alternating_4_cycle(self): G = nx.Graph() G.add_edge(1, 2) - assert_false(nxt.find_alternating_4_cycle(G)) + assert not nxt.find_alternating_4_cycle(G) def test_shortest_path(self): deg = [3, 2, 2, 1] G = nx.generators.havel_hakimi_graph(deg) cs1 = nxt.creation_sequence(deg, with_labels=True) - for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), - (3, 1), (1, 2), (2, 3)]: - assert_equal(nxt.shortest_path(cs1, n, m), - nx.shortest_path(G, n, m)) + for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), (3, 1), (1, 2), (2, 3)]: + assert nxt.shortest_path(cs1, n, m) == nx.shortest_path(G, n, m) spl = nxt.shortest_path_length(cs1, 3) spl2 = nxt.shortest_path_length([t for v, t in cs1], 2) - assert_equal(spl, spl2) + assert spl == spl2 spld = {} for j, pl in enumerate(spl): n = cs1[j][0] spld[n] = pl - assert_equal(spld, nx.single_source_shortest_path_length(G, 3)) + assert spld == nx.single_source_shortest_path_length(G, 3) - assert_equal(nxt.shortest_path(['d', 'd', 'd', 'i', 'd', 'd'], 1, 2), [1, 2]) - assert_equal(nxt.shortest_path([3, 1, 2], 1, 2), [1, 2]) - assert_raises(TypeError, nxt.shortest_path, [3., 1., 2.], 1, 2) - assert_raises(ValueError, nxt.shortest_path, [3, 1, 2], 'a', 2) - assert_raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, 'b') - assert_equal(nxt.shortest_path([3, 1, 2], 1, 1), [1]) + assert nxt.shortest_path(["d", "d", "d", "i", "d", "d"], 1, 2) == [1, 2] + assert nxt.shortest_path([3, 1, 2], 1, 2) == [1, 2] + assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1, 2) + assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], "a", 2) + assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, "b") + assert nxt.shortest_path([3, 1, 2], 1, 1) == [1] def test_shortest_path_length(self): - assert_equal(nxt.shortest_path_length([3, 1, 2], 1), [1, 0, 1, 2, 1, 1]) - assert_equal(nxt.shortest_path_length(['d', 'd', 'd', 'i', 'd', 'd'], 1), - [1, 0, 1, 2, 1, 1]) - assert_equal(nxt.shortest_path_length(('d', 'd', 'd', 'i', 'd', 'd'), 1), - [1, 0, 1, 2, 1, 1]) - assert_raises(TypeError, nxt.shortest_path, [3., 1., 2.], 1) + assert nxt.shortest_path_length([3, 1, 2], 1) == [1, 0, 1, 2, 1, 1] + assert nxt.shortest_path_length(["d", "d", "d", "i", "d", "d"], 1) == [ + 1, + 0, + 1, + 2, + 1, + 1, + ] + assert nxt.shortest_path_length(("d", "d", "d", "i", "d", "d"), 1) == [ + 1, + 0, + 1, + 2, + 1, + 1, + ] + assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1) def random_threshold_sequence(self): - assert_equal(len(nxt.random_threshold_sequence(10, 0.5)), 10) - assert_equal(nxt.random_threshold_sequence(10, 0.5, seed=42), - ['d', 'i', 'd', 'd', 'd', 'i', 'i', 'i', 'd', 'd']) - assert_raises(ValueError, nxt.random_threshold_sequence, 10, 1.5) + assert len(nxt.random_threshold_sequence(10, 0.5)) == 10 + assert nxt.random_threshold_sequence(10, 0.5, seed=42) == [ + "d", + "i", + "d", + "d", + "d", + "i", + "i", + "i", + "d", + "d", + ] + assert pytest.raises(ValueError, nxt.random_threshold_sequence, 10, 1.5) def test_right_d_threshold_sequence(self): - assert_equal(nxt.right_d_threshold_sequence(3, 2), ['d', 'i', 'd']) - assert_raises(ValueError, nxt.right_d_threshold_sequence, 2, 3) + assert nxt.right_d_threshold_sequence(3, 2) == ["d", "i", "d"] + assert pytest.raises(ValueError, nxt.right_d_threshold_sequence, 2, 3) def 
test_left_d_threshold_sequence(self): - assert_equal(nxt.left_d_threshold_sequence(3, 2), ['d', 'i', 'd']) - assert_raises(ValueError, nxt.left_d_threshold_sequence, 2, 3) + assert nxt.left_d_threshold_sequence(3, 2) == ["d", "i", "d"] + assert pytest.raises(ValueError, nxt.left_d_threshold_sequence, 2, 3) def test_weights_thresholds(self): wseq = [3, 4, 3, 3, 5, 6, 5, 4, 5, 6] cs = nxt.weights_to_creation_sequence(wseq, threshold=10) wseq = nxt.creation_sequence_to_weights(cs) cs2 = nxt.weights_to_creation_sequence(wseq) - assert_equal(cs, cs2) + assert cs == cs2 wseq = nxt.creation_sequence_to_weights(nxt.uncompact([3, 1, 2, 3, 3, 2, 3])) - assert_equal(wseq, - [s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]]) + assert wseq == [ + s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7] + ] wseq = nxt.creation_sequence_to_weights([3, 1, 2, 3, 3, 2, 3]) - assert_equal(wseq, - [s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]]) + assert wseq == [ + s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7] + ] - wseq = nxt.creation_sequence_to_weights(list(enumerate('ddidiiidididi'))) - assert_equal(wseq, - [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]) + wseq = nxt.creation_sequence_to_weights(list(enumerate("ddidiiidididi"))) + assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]] - wseq = nxt.creation_sequence_to_weights('ddidiiidididi') - assert_equal(wseq, - [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]) + wseq = nxt.creation_sequence_to_weights("ddidiiidididi") + assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]] - wseq = nxt.creation_sequence_to_weights('ddidiiidididid') + wseq = nxt.creation_sequence_to_weights("ddidiiidididid") ws = [s / float(12) for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]] - assert_true(sum([abs(c - d) for c, d in zip(wseq, ws)]) < 1e-14) + assert sum([abs(c - d) for c, d in zip(wseq, ws)]) < 1e-14 def test_finding_routines(self): G = nx.Graph({1: [2], 2: [3], 3: [4], 4: [5], 5: [6]}) @@ -169,76 +202,77 @@ def test_finding_routines(self): G.add_edge(4, 6) # Alternating 4 cycle - assert_equal(nxt.find_alternating_4_cycle(G), [1, 2, 3, 6]) + assert nxt.find_alternating_4_cycle(G) == [1, 2, 3, 6] # Threshold graph TG = nxt.find_threshold_graph(G) - assert_true(nxt.is_threshold_graph(TG)) - assert_equal(sorted(TG.nodes()), [1, 2, 3, 4, 5, 7]) + assert nxt.is_threshold_graph(TG) + assert sorted(TG.nodes()) == [1, 2, 3, 4, 5, 7] cs = nxt.creation_sequence(dict(TG.degree()), with_labels=True) - assert_equal(nxt.find_creation_sequence(G), cs) + assert nxt.find_creation_sequence(G) == cs def test_fast_versions_properties_threshold_graphs(self): - cs = 'ddiiddid' + cs = "ddiiddid" G = nxt.threshold_graph(cs) - assert_equal(nxt.density('ddiiddid'), nx.density(G)) - assert_equal(sorted(nxt.degree_sequence(cs)), - sorted(d for n, d in G.degree())) + assert nxt.density("ddiiddid") == nx.density(G) + assert sorted(nxt.degree_sequence(cs)) == sorted(d for n, d in G.degree()) ts = nxt.triangle_sequence(cs) - assert_equal(ts, list(nx.triangles(G).values())) - assert_equal(sum(ts) // 3, nxt.triangles(cs)) + assert ts == list(nx.triangles(G).values()) + assert sum(ts) // 3 == nxt.triangles(cs) c1 = nxt.cluster_sequence(cs) c2 = list(nx.clustering(G).values()) - assert_almost_equal(sum([abs(c - d) for c, d in zip(c1, c2)]), 0) + assert almost_equal(sum([abs(c - d) for c, d in zip(c1, c2)]), 0) b1 = 
nx.betweenness_centrality(G).values() b2 = nxt.betweenness_sequence(cs) - assert_true(sum([abs(c - d) for c, d in zip(b1, b2)]) < 1e-14) + assert sum([abs(c - d) for c, d in zip(b1, b2)]) < 1e-14 - assert_equal(nxt.eigenvalues(cs), [0, 1, 3, 3, 5, 7, 7, 8]) + assert nxt.eigenvalues(cs) == [0, 1, 3, 3, 5, 7, 7, 8] # Degree Correlation - assert_true(abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12) - assert_equal(nxt.degree_correlation('diiiddi'), -0.8) - assert_equal(nxt.degree_correlation('did'), -1.0) - assert_equal(nxt.degree_correlation('ddd'), 1.0) - assert_equal(nxt.eigenvalues('dddiii'), [0, 0, 0, 0, 3, 3]) - assert_equal(nxt.eigenvalues('dddiiid'), [0, 1, 1, 1, 4, 4, 7]) + assert abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12 + assert nxt.degree_correlation("diiiddi") == -0.8 + assert nxt.degree_correlation("did") == -1.0 + assert nxt.degree_correlation("ddd") == 1.0 + assert nxt.eigenvalues("dddiii") == [0, 0, 0, 0, 3, 3] + assert nxt.eigenvalues("dddiiid") == [0, 1, 1, 1, 4, 4, 7] def test_tg_creation_routines(self): s = nxt.left_d_threshold_sequence(5, 7) s = nxt.right_d_threshold_sequence(5, 7) s1 = nxt.swap_d(s, 1.0, 1.0) + s1 = nxt.swap_d(s, 1.0, 1.0, seed=1) - @attr('numpy') def test_eigenvectors(self): - try: - import numpy as N - eigenval = N.linalg.eigvals - import scipy - except ImportError: - raise SkipTest('SciPy not available.') - - cs = 'ddiiddid' + np = pytest.importorskip("numpy") + eigenval = np.linalg.eigvals + scipy = pytest.importorskip("scipy") + + cs = "ddiiddid" G = nxt.threshold_graph(cs) (tgeval, tgevec) = nxt.eigenvectors(cs) - dot = N.dot - assert_equal([abs(dot(lv, lv) - 1.0) < 1e-9 for lv in tgevec], [True] * 8) + dot = np.dot + assert [abs(dot(lv, lv) - 1.0) < 1e-9 for lv in tgevec] == [True] * 8 lapl = nx.laplacian_matrix(G) -# tgev=[ dot(lv,dot(lapl,lv)) for lv in tgevec ] -# assert_true(sum([abs(c-d) for c,d in zip(tgev,tgeval)]) < 1e-9) -# tgev.sort() -# lev=list(eigenval(lapl)) -# lev.sort() -# assert_true(sum([abs(c-d) for c,d in zip(tgev,lev)]) < 1e-9) + + # tgev=[ dot(lv,dot(lapl,lv)) for lv in tgevec ] + # assert_true(sum([abs(c-d) for c,d in zip(tgev,tgeval)]) < 1e-9) + # tgev.sort() + # lev=list(eigenval(lapl)) + # lev.sort() + # assert_true(sum([abs(c-d) for c,d in zip(tgev,lev)]) < 1e-9) def test_create_using(self): - cs = 'ddiiddid' + cs = "ddiiddid" G = nxt.threshold_graph(cs) - assert_raises(nx.exception.NetworkXError, - nxt.threshold_graph, cs, create_using=nx.DiGraph()) + assert pytest.raises( + nx.exception.NetworkXError, + nxt.threshold_graph, + cs, + create_using=nx.DiGraph(), + ) MG = nxt.threshold_graph(cs, create_using=nx.MultiGraph()) - assert_equal(sorted(MG.edges()), sorted(G.edges())) + assert sorted(MG.edges()) == sorted(G.edges()) diff --git a/networkx/algorithms/tests/test_tournament.py b/networkx/algorithms/tests/test_tournament.py index 71c3d7c..8de6f7c 100644 --- a/networkx/algorithms/tests/test_tournament.py +++ b/networkx/algorithms/tests/test_tournament.py @@ -1,17 +1,6 @@ -# test_tournament.py - unit tests for the tournament module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
"""Unit tests for the :mod:`networkx.algorithms.tournament` module.""" from itertools import combinations -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true from networkx import DiGraph from networkx.algorithms.tournament import is_reachable @@ -21,7 +10,7 @@ from networkx.algorithms.tournament import hamiltonian_path -class TestIsTournament(object): +class TestIsTournament: """Unit tests for the :func:`networkx.tournament.is_tournament` function. @@ -30,14 +19,14 @@ class TestIsTournament(object): def test_is_tournament(self): G = DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) - assert_true(is_tournament(G)) + assert is_tournament(G) def test_self_loops(self): """A tournament must have no self-loops.""" G = DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) G.add_edge(0, 0) - assert_false(is_tournament(G)) + assert not is_tournament(G) def test_missing_edges(self): """A tournament must not have any pair of nodes without at least @@ -46,7 +35,7 @@ def test_missing_edges(self): """ G = DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3)]) - assert_false(is_tournament(G)) + assert not is_tournament(G) def test_bidirectional_edges(self): """A tournament must not have any pair of nodes with greater @@ -56,10 +45,10 @@ def test_bidirectional_edges(self): G = DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) G.add_edge(1, 0) - assert_false(is_tournament(G)) + assert not is_tournament(G) -class TestRandomTournament(object): +class TestRandomTournament: """Unit tests for the :func:`networkx.tournament.random_tournament` function. @@ -68,10 +57,15 @@ class TestRandomTournament(object): def test_graph_is_tournament(self): for n in range(10): G = random_tournament(5) - assert_true(is_tournament(G)) + assert is_tournament(G) + + def test_graph_is_tournament_seed(self): + for n in range(10): + G = random_tournament(5, seed=1) + assert is_tournament(G) -class TestHamiltonianPath(object): +class TestHamiltonianPath: """Unit tests for the :func:`networkx.tournament.hamiltonian_path` function. @@ -81,8 +75,8 @@ def test_path_is_hamiltonian(self): G = DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) path = hamiltonian_path(G) - assert_equal(len(path), 4) - assert_true(all(v in G[u] for u, v in zip(path, path[1:]))) + assert len(path) == 4 + assert all(v in G[u] for u, v in zip(path, path[1:])) def test_hamiltonian_cycle(self): """Tests that :func:`networkx.tournament.hamiltonian_path` @@ -93,12 +87,12 @@ def test_hamiltonian_cycle(self): G = DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) path = hamiltonian_path(G) - assert_equal(len(path), 4) - assert_true(all(v in G[u] for u, v in zip(path, path[1:]))) - assert_true(path[0] in G[path[-1]]) + assert len(path) == 4 + assert all(v in G[u] for u, v in zip(path, path[1:])) + assert path[0] in G[path[-1]] -class TestReachability(object): +class TestReachability: """Unit tests for the :func:`networkx.tournament.is_reachable` function. @@ -107,21 +101,21 @@ class TestReachability(object): def test_reachable_pair(self): """Tests for a reachable pair of nodes.""" G = DiGraph([(0, 1), (1, 2), (2, 0)]) - assert_true(is_reachable(G, 0, 2)) + assert is_reachable(G, 0, 2) def test_same_node_is_reachable(self): """Tests that a node is always reachable from itself.""" # G is an arbitrary tournament on ten nodes. 
G = DiGraph(sorted(p) for p in combinations(range(10), 2))
-        assert_true(all(is_reachable(G, v, v) for v in G))
+        assert all(is_reachable(G, v, v) for v in G)

     def test_unreachable_pair(self):
         """Tests for an unreachable pair of nodes."""
         G = DiGraph([(0, 1), (0, 2), (1, 2)])
-        assert_false(is_reachable(G, 1, 0))
+        assert not is_reachable(G, 1, 0)


-class TestStronglyConnected(object):
+class TestStronglyConnected:
     """Unit tests for the
     :func:`networkx.tournament.is_strongly_connected` function.

@@ -130,9 +124,9 @@ class TestStronglyConnected:
     def test_is_strongly_connected(self):
         """Tests for a strongly connected tournament."""
         G = DiGraph([(0, 1), (1, 2), (2, 0)])
-        assert_true(is_strongly_connected(G))
+        assert is_strongly_connected(G)

     def test_not_strongly_connected(self):
         """Tests for a tournament that is not strongly connected."""
         G = DiGraph([(0, 1), (0, 2), (1, 2)])
-        assert_false(is_strongly_connected(G))
+        assert not is_strongly_connected(G)
diff --git a/networkx/algorithms/tests/test_triads.py b/networkx/algorithms/tests/test_triads.py
index 160c534..9ad198d 100644
--- a/networkx/algorithms/tests/test_triads.py
+++ b/networkx/algorithms/tests/test_triads.py
@@ -1,25 +1,139 @@
-# test_triads.py - unit tests for the triads module
-#
-# Copyright 2015 NetworkX developers.
-# Copyright 2009 Diederik van Liere .
-#
-# This file is part of NetworkX.
-#
-# NetworkX is distributed under a BSD license; see LICENSE.txt for more
-# information.
 """Unit tests for the :mod:`networkx.algorithms.triads` module."""
-from nose.tools import assert_equal
 import networkx as nx
+from collections import defaultdict
+from random import sample


 def test_triadic_census():
-    """Tests the triadic census function."""
+    """Tests the triadic_census function."""
     G = nx.DiGraph()
-    G.add_edges_from(['01', '02', '03', '04', '05', '12', '16', '51', '56',
-                      '65'])
-    expected = {'030T': 2, '120C': 1, '210': 0, '120U': 0, '012': 9, '102': 3,
-                '021U': 0, '111U': 0, '003': 8, '030C': 0, '021D': 9, '201': 0,
-                '111D': 1, '300': 0, '120D': 0, '021C': 2}
+    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
+    expected = {
+        "030T": 2,
+        "120C": 1,
+        "210": 0,
+        "120U": 0,
+        "012": 9,
+        "102": 3,
+        "021U": 0,
+        "111U": 0,
+        "003": 8,
+        "030C": 0,
+        "021D": 9,
+        "201": 0,
+        "111D": 1,
+        "300": 0,
+        "120D": 0,
+        "021C": 2,
+    }
     actual = nx.triadic_census(G)
-    assert_equal(expected, actual)
+    assert expected == actual
+
+
+def test_is_triad():
+    """Tests the is_triad function"""
+    G = nx.karate_club_graph()
+    G = G.to_directed()
+    for i in range(100):
+        nodes = sample(G.nodes(), 3)
+        G2 = G.subgraph(nodes)
+        assert nx.is_triad(G2)
+
+
+def test_all_triplets():
+    """Tests the all_triplets function."""
+    G = nx.DiGraph()
+    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
+    expected = [
+        f"{i},{j},{k}"
+        for i in range(7)
+        for j in range(i + 1, 7)
+        for k in range(j + 1, 7)
+    ]
+    expected = [set(x.split(",")) for x in expected]
+    actual = list(set(x) for x in nx.all_triplets(G))
+    assert all([any([s1 == s2 for s1 in expected]) for s2 in actual])
+
+
+def test_all_triads():
+    """Tests the all_triads function."""
+    G = nx.DiGraph()
+    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
+    expected = [
+        f"{i},{j},{k}"
+        for i in range(7)
+        for j in range(i + 1, 7)
+        for k in range(j + 1, 7)
+    ]
+    expected = [G.subgraph(x.split(",")) for x in expected]
+    actual = list(nx.all_triads(G))
+    assert all(any([nx.is_isomorphic(G1, G2) for G1 in expected]) for G2 in actual)
+
+
+def test_triad_type():
+    """Tests the triad_type function."""
+    # 0 edges (1 type)
+    G = nx.DiGraph({0: [], 1: [], 2: []})
+    assert nx.triad_type(G) == "003"
+    # 1 edge (1 type)
+    G = nx.DiGraph({0: [1], 1: [], 2: []})
+    assert nx.triad_type(G) == "012"
+    # 2 edges (4 types)
+    G = nx.DiGraph([(0, 1), (0, 2)])
+    assert nx.triad_type(G) == "021D"
+    G = nx.DiGraph({0: [1], 1: [0], 2: []})
+    assert nx.triad_type(G) == "102"
+    G = nx.DiGraph([(0, 1), (2, 1)])
+    assert nx.triad_type(G) == "021U"
+    G = nx.DiGraph([(0, 1), (1, 2)])
+    assert nx.triad_type(G) == "021C"
+    # 3 edges (4 types)
+    G = nx.DiGraph([(0, 1), (1, 0), (2, 1)])
+    assert nx.triad_type(G) == "111D"
+    G = nx.DiGraph([(0, 1), (1, 0), (1, 2)])
+    assert nx.triad_type(G) == "111U"
+    G = nx.DiGraph([(0, 1), (1, 2), (0, 2)])
+    assert nx.triad_type(G) == "030T"
+    G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    assert nx.triad_type(G) == "030C"
+    # 4 edges (4 types)
+    G = nx.DiGraph([(0, 1), (1, 0), (2, 0), (0, 2)])
+    assert nx.triad_type(G) == "201"
+    G = nx.DiGraph([(0, 1), (1, 0), (2, 0), (2, 1)])
+    assert nx.triad_type(G) == "120D"
+    G = nx.DiGraph([(0, 1), (1, 0), (0, 2), (1, 2)])
+    assert nx.triad_type(G) == "120U"
+    G = nx.DiGraph([(0, 1), (1, 0), (0, 2), (2, 1)])
+    assert nx.triad_type(G) == "120C"
+    # 5 edges (1 type)
+    G = nx.DiGraph([(0, 1), (1, 0), (2, 1), (1, 2), (0, 2)])
+    assert nx.triad_type(G) == "210"
+    # 6 edges (1 type)
+    G = nx.DiGraph([(0, 1), (1, 0), (1, 2), (2, 1), (0, 2), (2, 0)])
+    assert nx.triad_type(G) == "300"
+
+
+def test_triads_by_type():
+    """Tests the triads_by_type function."""
+    G = nx.DiGraph()
+    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
+    all_triads = nx.all_triads(G)
+    expected = defaultdict(list)
+    for triad in all_triads:
+        name = nx.triad_type(triad)
+        expected[name].append(triad)
+    actual = nx.triads_by_type(G)
+    assert set(actual.keys()) == set(expected.keys())
+    for tri_type, actual_Gs in actual.items():
+        expected_Gs = expected[tri_type]
+        for a in actual_Gs:
+            assert any(nx.is_isomorphic(a, e) for e in expected_Gs)
+
+
+def test_random_triad():
+    """Tests the random_triad function"""
+    G = nx.karate_club_graph()
+    G = G.to_directed()
+    for i in range(100):
+        assert nx.is_triad(nx.random_triad(G))
diff --git a/networkx/algorithms/tests/test_vitality.py b/networkx/algorithms/tests/test_vitality.py
index db08367..248206e 100644
--- a/networkx/algorithms/tests/test_vitality.py
+++ b/networkx/algorithms/tests/test_vitality.py
@@ -1,39 +1,36 @@
-from nose.tools import assert_equal
-
 import networkx as nx


-class TestClosenessVitality(object):
-
+class TestClosenessVitality:
     def test_unweighted(self):
         G = nx.cycle_graph(3)
         vitality = nx.closeness_vitality(G)
-        assert_equal(vitality, {0: 2, 1: 2, 2: 2})
+        assert vitality == {0: 2, 1: 2, 2: 2}

     def test_weighted(self):
         G = nx.Graph()
         nx.add_cycle(G, [0, 1, 2], weight=2)
-        vitality = nx.closeness_vitality(G, weight='weight')
-        assert_equal(vitality, {0: 4, 1: 4, 2: 4})
+        vitality = nx.closeness_vitality(G, weight="weight")
+        assert vitality == {0: 4, 1: 4, 2: 4}

     def test_unweighted_digraph(self):
         G = nx.DiGraph(nx.cycle_graph(3))
         vitality = nx.closeness_vitality(G)
-        assert_equal(vitality, {0: 4, 1: 4, 2: 4})
+        assert vitality == {0: 4, 1: 4, 2: 4}

     def test_weighted_digraph(self):
         G = nx.DiGraph()
         nx.add_cycle(G, [0, 1, 2], weight=2)
         nx.add_cycle(G, [2, 1, 0], weight=2)
-        vitality = nx.closeness_vitality(G, weight='weight')
-        assert_equal(vitality, {0: 8, 1: 8, 2: 8})
+        vitality =
nx.closeness_vitality(G, weight="weight") + assert vitality == {0: 8, 1: 8, 2: 8} def test_weighted_multidigraph(self): G = nx.MultiDiGraph() nx.add_cycle(G, [0, 1, 2], weight=2) nx.add_cycle(G, [2, 1, 0], weight=2) - vitality = nx.closeness_vitality(G, weight='weight') - assert_equal(vitality, {0: 8, 1: 8, 2: 8}) + vitality = nx.closeness_vitality(G, weight="weight") + assert vitality == {0: 8, 1: 8, 2: 8} def test_disconnecting_graph(self): """Tests that the closeness vitality of a node whose removal @@ -41,4 +38,4 @@ def test_disconnecting_graph(self): """ G = nx.path_graph(3) - assert_equal(nx.closeness_vitality(G, node=1), -float('inf')) + assert nx.closeness_vitality(G, node=1) == -float("inf") diff --git a/networkx/algorithms/tests/test_voronoi.py b/networkx/algorithms/tests/test_voronoi.py index aab1265..3269ae6 100644 --- a/networkx/algorithms/tests/test_voronoi.py +++ b/networkx/algorithms/tests/test_voronoi.py @@ -1,18 +1,8 @@ -# test_voronoi.py - unit tests for the networkx.algorithms.voronoi module -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -from nose.tools import assert_equal - import networkx as nx from networkx.utils import pairwise -class TestVoronoiCells(object): +class TestVoronoiCells: """Unit tests for the Voronoi cells function.""" def test_isolates(self): @@ -22,21 +12,21 @@ def test_isolates(self): """ G = nx.empty_graph(5) cells = nx.voronoi_cells(G, {0, 2, 4}) - expected = {0: {0}, 2: {2}, 4: {4}, 'unreachable': {1, 3}} - assert_equal(expected, cells) + expected = {0: {0}, 2: {2}, 4: {4}, "unreachable": {1, 3}} + assert expected == cells def test_undirected_unweighted(self): G = nx.cycle_graph(6) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0, 1, 5}, 3: {2, 3, 4}} - assert_equal(expected, cells) + assert expected == cells def test_directed_unweighted(self): # This is the singly-linked directed cycle graph on six nodes. G = nx.DiGraph(pairwise(range(6), cyclic=True)) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0, 1, 2}, 3: {3, 4, 5}} - assert_equal(expected, cells) + assert expected == cells def test_directed_inward(self): """Tests that reversing the graph gives the "inward" Voronoi @@ -48,7 +38,7 @@ def test_directed_inward(self): G = G.reverse(copy=False) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0, 4, 5}, 3: {1, 2, 3}} - assert_equal(expected, cells) + assert expected == cells def test_undirected_weighted(self): edges = [(0, 1, 10), (1, 2, 1), (2, 3, 1)] @@ -56,7 +46,7 @@ def test_undirected_weighted(self): G.add_weighted_edges_from(edges) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0}, 3: {1, 2, 3}} - assert_equal(expected, cells) + assert expected == cells def test_directed_weighted(self): edges = [(0, 1, 10), (1, 2, 1), (2, 3, 1), (3, 2, 1), (2, 1, 1)] @@ -64,7 +54,7 @@ def test_directed_weighted(self): G.add_weighted_edges_from(edges) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0}, 3: {1, 2, 3}} - assert_equal(expected, cells) + assert expected == cells def test_multigraph_unweighted(self): """Tests that the Voronoi cells for a multigraph are the same as @@ -76,7 +66,7 @@ def test_multigraph_unweighted(self): H = nx.Graph(G) G_cells = nx.voronoi_cells(G, {0, 3}) H_cells = nx.voronoi_cells(H, {0, 3}) - assert_equal(G_cells, H_cells) + assert G_cells == H_cells def test_multidigraph_unweighted(self): # This is the twice-singly-linked directed cycle graph on six nodes. 
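For context, a minimal sketch (not part of the imported networkx source) of the
behaviour the weighted Voronoi tests above pin down: voronoi_cells assigns each
node to the center with the smallest shortest-path distance, using the "weight"
edge attribute by default.

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 10), (1, 2, 1), (2, 3, 1)])
# From node 1: distance 10 to center 0 but only 2 to center 3,
# so every node except 0 falls into center 3's cell.
assert nx.voronoi_cells(G, {0, 3}) == {0: {0}, 3: {1, 2, 3}}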
@@ -85,22 +75,29 @@ def test_multidigraph_unweighted(self): H = nx.DiGraph(G) G_cells = nx.voronoi_cells(G, {0, 3}) H_cells = nx.voronoi_cells(H, {0, 3}) - assert_equal(G_cells, H_cells) + assert G_cells == H_cells def test_multigraph_weighted(self): - edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (1, 2, 100), (2, 3, 1), - (2, 3, 100)] + edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (1, 2, 100), (2, 3, 1), (2, 3, 100)] G = nx.MultiGraph() G.add_weighted_edges_from(edges) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0}, 3: {1, 2, 3}} - assert_equal(expected, cells) + assert expected == cells def test_multidigraph_weighted(self): - edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (2, 3, 1), (3, 2, 10), - (3, 2, 1), (2, 1, 10), (2, 1, 1)] + edges = [ + (0, 1, 10), + (0, 1, 10), + (1, 2, 1), + (2, 3, 1), + (3, 2, 10), + (3, 2, 1), + (2, 1, 10), + (2, 1, 1), + ] G = nx.MultiDiGraph() G.add_weighted_edges_from(edges) cells = nx.voronoi_cells(G, {0, 3}) expected = {0: {0}, 3: {1, 2, 3}} - assert_equal(expected, cells) + assert expected == cells diff --git a/networkx/algorithms/tests/test_wiener.py b/networkx/algorithms/tests/test_wiener.py index b011a43..5402a40 100644 --- a/networkx/algorithms/tests/test_wiener.py +++ b/networkx/algorithms/tests/test_wiener.py @@ -1,15 +1,5 @@ -# test_wiener.py - unit tests for the wiener module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.wiener` module.""" -from __future__ import division -from nose.tools import eq_ from networkx import complete_graph from networkx import DiGraph @@ -18,7 +8,7 @@ from networkx import wiener_index -class TestWienerIndex(object): +class TestWienerIndex: """Unit tests for computing the Wiener index of a graph.""" def test_disconnected_graph(self): @@ -26,7 +16,7 @@ def test_disconnected_graph(self): positive infinity. """ - eq_(wiener_index(empty_graph(2)), float('inf')) + assert wiener_index(empty_graph(2)) == float("inf") def test_directed(self): """Tests that each pair of nodes in the directed graph is @@ -35,7 +25,7 @@ def test_directed(self): """ G = complete_graph(3) H = DiGraph(G) - eq_(2 * wiener_index(G), wiener_index(H)) + assert (2 * wiener_index(G)) == wiener_index(H) def test_complete_graph(self): """Tests that the Wiener index of the complete graph is simply @@ -44,7 +34,7 @@ def test_complete_graph(self): """ n = 10 G = complete_graph(n) - eq_(wiener_index(G), n * (n - 1) / 2) + assert wiener_index(G) == (n * (n - 1) / 2) def test_path_graph(self): """Tests that the Wiener index of the path graph is correctly @@ -77,4 +67,4 @@ def test_path_graph(self): G = path_graph(n) expected = 2 * sum(i * (n - i) for i in range(1, (n // 2) + 1)) actual = wiener_index(G) - eq_(expected, actual) + assert expected == actual diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py index f58e4f9..5767772 100644 --- a/networkx/algorithms/threshold.py +++ b/networkx/algorithms/threshold.py @@ -1,27 +1,40 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Pieter Swart (swart@lanl.gov) -# Dan Schult (dschult@colgate.edu) """ Threshold Graphs - Creation, manipulation and identification. 
""" - -import random # for swap_d from math import sqrt -import networkx +import networkx as nx +from networkx.utils import py_random_state -__all__ = ['is_threshold_graph', 'find_threshold_graph'] +__all__ = ["is_threshold_graph", "find_threshold_graph"] def is_threshold_graph(G): """ - Returns True if G is a threshold graph. + Returns `True` if `G` is a threshold graph. + + Parameters + ---------- + G : NetworkX graph instance + An instance of `Graph`, `DiGraph`, `MultiGraph` or `MultiDiGraph` + + Returns + ------- + bool + `True` if `G` is a threshold graph, `False` otherwise. + + Examples + -------- + >>> from networkx.algorithms.threshold import is_threshold_graph + >>> G = nx.path_graph(3) + >>> is_threshold_graph(G) + True + >>> G = nx.barbell_graph(3, 3) + >>> is_threshold_graph(G) + False + + References + ---------- + .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph """ return is_threshold_sequence(list(d for n, d in G.degree())) @@ -39,12 +52,12 @@ def is_threshold_sequence(degree_sequence): ds = degree_sequence[:] # get a copy so we don't destroy original ds.sort() while ds: - if ds[0] == 0: # if isolated node - ds.pop(0) # remove it + if ds[0] == 0: # if isolated node + ds.pop(0) # remove it continue if ds[-1] != len(ds) - 1: # is the largest degree node dominating? - return False # no, not a threshold degree sequence - ds.pop() # yes, largest is the dominating node + return False # no, not a threshold degree sequence + ds.pop() # yes, largest is the dominating node ds = [d - 1 for d in ds] # remove it and decrement all degrees return True @@ -81,31 +94,31 @@ def creation_sequence(degree_sequence, with_labels=False, compact=False): raise ValueError("compact sequences cannot be labeled") # make an indexed copy - if isinstance(degree_sequence, dict): # labeled degree seqeunce + if isinstance(degree_sequence, dict): # labeled degree seqeunce ds = [[degree, label] for (label, degree) in degree_sequence.items()] else: ds = [[d, i] for i, d in enumerate(degree_sequence)] ds.sort() cs = [] # creation sequence while ds: - if ds[0][0] == 0: # isolated node + if ds[0][0] == 0: # isolated node (d, v) = ds.pop(0) - if len(ds) > 0: # make sure we start with a d - cs.insert(0, (v, 'i')) + if len(ds) > 0: # make sure we start with a d + cs.insert(0, (v, "i")) else: - cs.insert(0, (v, 'd')) + cs.insert(0, (v, "d")) continue - if ds[-1][0] != len(ds) - 1: # Not dominating node + if ds[-1][0] != len(ds) - 1: # Not dominating node return None # not a threshold degree sequence (d, v) = ds.pop() - cs.insert(0, (v, 'd')) - ds = [[d[0] - 1, d[1]] for d in ds] # decrement due to removing node + cs.insert(0, (v, "d")) + ds = [[d[0] - 1, d[1]] for d in ds] # decrement due to removing node if with_labels: return cs if compact: return make_compact(cs) - return [v[1] for v in cs] # not labeled + return [v[1] for v in cs] # not labeled def make_compact(creation_sequence): @@ -116,9 +129,9 @@ def make_compact(creation_sequence): Examples -------- >>> from networkx.algorithms.threshold import make_compact - >>> make_compact(['d', 'i', 'i', 'd', 'd', 'i', 'i', 'i']) + >>> make_compact(["d", "i", "i", "d", "d", "i", "i", "i"]) [1, 2, 2, 3] - >>> make_compact(['d', 'd', 'd', 'i', 'd', 'd']) + >>> make_compact(["d", "d", "d", "i", "d", "d"]) [3, 1, 2] Notice that the first number is the first vertex @@ -131,11 +144,11 @@ def make_compact(creation_sequence): [3, 1, 2] """ first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation 
sequence cs = creation_sequence[:] - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence cs = [s[1] for s in creation_sequence] - elif isinstance(first, int): # compact creation sequence + elif isinstance(first, int): # compact creation sequence return creation_sequence else: raise TypeError("Not a valid creation sequence type") @@ -160,19 +173,19 @@ def uncompact(creation_sequence): See creation_sequence. """ first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence return creation_sequence - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence return creation_sequence - elif isinstance(first, int): # compact creation sequence + elif isinstance(first, int): # compact creation sequence ccscopy = creation_sequence[:] else: raise TypeError("Not a valid creation sequence type") cs = [] while ccscopy: - cs.extend(ccscopy.pop(0) * ['d']) + cs.extend(ccscopy.pop(0) * ["d"]) if ccscopy: - cs.extend(ccscopy.pop(0) * ['i']) + cs.extend(ccscopy.pop(0) * ["i"]) return cs @@ -185,12 +198,12 @@ def creation_sequence_to_weights(creation_sequence): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence if isinstance(creation_sequence, list): wseq = creation_sequence[:] else: wseq = list(creation_sequence) # string like 'ddidid' - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence wseq = [v[1] for v in creation_sequence] elif isinstance(first, int): # compact creation sequence wseq = uncompact(creation_sequence) @@ -199,31 +212,33 @@ def creation_sequence_to_weights(creation_sequence): # pass through twice--first backwards wseq.reverse() w = 0 - prev = 'i' + prev = "i" for j, s in enumerate(wseq): - if s == 'i': + if s == "i": wseq[j] = w prev = s - elif prev == 'i': + elif prev == "i": prev = s w += 1 wseq.reverse() # now pass through forwards for j, s in enumerate(wseq): - if s == 'd': + if s == "d": wseq[j] = w prev = s - elif prev == 'd': + elif prev == "d": prev = s w += 1 # Now scale weights - if prev == 'd': + if prev == "d": w += 1 - wscale = 1. / float(w) + wscale = 1.0 / float(w) return [ww * wscale for ww in wseq] # return wseq -def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compact=False): +def weights_to_creation_sequence( + weights, threshold=1, with_labels=False, compact=False +): """ Returns a creation sequence for a threshold graph determined by the weights and threshold given as input. 
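# Editor's note: a minimal round-trip sketch of the compact encoding used in
# this module (alternating run lengths of 'd' and 'i', starting with 'd').
# Assumes the networkx 2.5 API imported by this patch; not part of the diff.
from networkx.algorithms.threshold import make_compact, uncompact

cs = ["d", "i", "i", "d", "d", "i", "i", "i"]
compact = make_compact(cs)       # [1, 2, 2, 3]
assert uncompact(compact) == cs  # uncompact inverts make_compact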
@@ -255,7 +270,7 @@ def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compac raise ValueError("compact sequences cannot be labeled") # make an indexed copy - if isinstance(weights, dict): # labeled weights + if isinstance(weights, dict): # labeled weights wseq = [[w, label] for (label, w) in weights.items()] else: wseq = [[w, i] for i, w in enumerate(weights)] @@ -263,16 +278,16 @@ def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compac cs = [] # creation sequence cutoff = threshold - wseq[-1][0] while wseq: - if wseq[0][0] < cutoff: # isolated node + if wseq[0][0] < cutoff: # isolated node (w, label) = wseq.pop(0) - cs.append((label, 'i')) + cs.append((label, "i")) else: (w, label) = wseq.pop() - cs.append((label, 'd')) + cs.append((label, "d")) cutoff = threshold - wseq[-1][0] - if len(wseq) == 1: # make sure we start with a d + if len(wseq) == 1: # make sure we start with a d (w, label) = wseq.pop() - cs.append((label, 'd')) + cs.append((label, "d")) # put in correct order cs.reverse() @@ -280,7 +295,7 @@ def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compac return cs if compact: return make_compact(cs) - return [v[1] for v in cs] # not labeled + return [v[1] for v in cs] # not labeled # Manipulating NetworkX.Graphs in context of threshold graphs @@ -302,9 +317,9 @@ def threshold_graph(creation_sequence, create_using=None): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence ci = list(enumerate(creation_sequence)) - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence ci = creation_sequence[:] elif isinstance(first, int): # compact creation sequence cs = uncompact(creation_sequence) @@ -313,13 +328,9 @@ def threshold_graph(creation_sequence, create_using=None): print("not a valid creation sequence type") return None - if create_using is None: - G = networkx.Graph() - elif create_using.is_directed(): - raise networkx.NetworkXError("Directed Graph not supported") - else: - G = create_using - G.clear() + G = nx.empty_graph(0, create_using) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") G.name = "Threshold Graph" @@ -328,7 +339,7 @@ def threshold_graph(creation_sequence, create_using=None): # if type is a d connect to everything previous while ci: (v, node_type) = ci.pop(0) - if node_type == 'd': # dominating type, connect to all existing nodes + if node_type == "d": # dominating type, connect to all existing nodes # We use `for u in list(G):` instead of # `for u in G:` because we edit the graph `G` in # the loop. Hence using an iterator will result in @@ -356,9 +367,34 @@ def find_alternating_4_cycle(G): def find_threshold_graph(G, create_using=None): """ - Return a threshold subgraph that is close to largest in G. + Returns a threshold subgraph that is close to largest in `G`. + The threshold graph will contain the largest degree node in G. + Parameters + ---------- + G : NetworkX graph instance + An instance of `Graph`, or `MultiDiGraph` + create_using : NetworkX graph class or `None` (default), optional + Type of graph to use when constructing the threshold graph. + If `None`, infer the appropriate graph type from the input. 
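# Editor's note: usage sketch for the rewritten constructor above, where
# nx.empty_graph(0, create_using) replaces the old create_using.clear()
# dance. Assumes networkx 2.5; illustrative, not part of the diff.
import networkx as nx
from networkx.algorithms.threshold import threshold_graph, is_threshold_graph

G = threshold_graph("ddid")  # each 'd' joins all prior nodes, 'i' stays isolated
assert G.number_of_edges() == 4
assert is_threshold_graph(G)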
+ + Returns + ------- + graph : + A graph instance representing the threshold graph + + Examples + -------- + >>> from networkx.algorithms.threshold import find_threshold_graph + >>> G = nx.barbell_graph(3, 3) + >>> T = find_threshold_graph(G) + >>> T.nodes # may vary + NodeView((7, 8, 5, 6)) + + References + ---------- + .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph """ return threshold_graph(find_creation_sequence(G), create_using) @@ -378,16 +414,16 @@ def find_creation_sequence(G): ds.sort() # Update threshold graph nodes if ds[-1][0] == 0: # all are isolated - cs.extend(zip(dsdict, ['i'] * (len(ds) - 1) + ['d'])) - break # Done! + cs.extend(zip(dsdict, ["i"] * (len(ds) - 1) + ["d"])) + break # Done! # pull off isolated nodes while ds[0][0] == 0: (d, iso) = ds.pop(0) - cs.append((iso, 'i')) + cs.append((iso, "i")) # find new biggest node (d, bigv) = ds.pop() # add edges of star to t_g - cs.append((bigv, 'd')) + cs.append((bigv, "d")) # form subgraph of neighbors of big node H = H.subgraph(H.neighbors(bigv)) cs.reverse() @@ -402,8 +438,8 @@ def triangles(creation_sequence): """ # shortcut algorithm that doesn't require computing number # of triangles at each node. - cs = creation_sequence # alias - dr = cs.count("d") # number of d's in sequence + cs = creation_sequence # alias + dr = cs.count("d") # number of d's in sequence ntri = dr * (dr - 1) * (dr - 2) / 6 # number of triangles in clique of nd d's # now add dr choose 2 triangles for every 'i' in sequence where # dr is the number of d's to the right of the current i @@ -422,22 +458,22 @@ def triangle_sequence(creation_sequence): """ cs = creation_sequence seq = [] - dr = cs.count("d") # number of d's to the right of the current pos + dr = cs.count("d") # number of d's to the right of the current pos dcur = (dr - 1) * (dr - 2) // 2 # number of triangles through a node of clique dr - irun = 0 # number of i's in the last run - drun = 0 # number of d's in the last run + irun = 0 # number of i's in the last run + drun = 0 # number of d's in the last run for i, sym in enumerate(cs): if sym == "d": drun += 1 - tri = dcur + (dr - 1) * irun # new triangles at this d + tri = dcur + (dr - 1) * irun # new triangles at this d else: # cs[i]="i": - if prevsym == "d": # new string of i's - dcur += (dr - 1) * irun # accumulate shared shortest paths - irun = 0 # reset i run counter - dr -= drun # reduce number of d's to right - drun = 0 # reset d run counter + if prevsym == "d": # new string of i's + dcur += (dr - 1) * irun # accumulate shared shortest paths + irun = 0 # reset i run counter + dr -= drun # reduce number of d's to right + drun = 0 # reset d run counter irun += 1 - tri = dr * (dr - 1) // 2 # new triangles at this i + tri = dr * (dr - 1) // 2 # new triangles at this i seq.append(tri) prevsym = sym return seq @@ -452,7 +488,7 @@ def cluster_sequence(creation_sequence): cseq = [] for i, deg in enumerate(degseq): tri = triseq[i] - if deg <= 1: # isolated vertex or single pair gets cc 0 + if deg <= 1: # isolated vertex or single pair gets cc 0 cseq.append(0) continue max_size = (deg * (deg - 1)) // 2 @@ -497,7 +533,7 @@ def degree_correlation(creation_sequence): s1 = 0 # deg_i*deg_j s2 = 0 # deg_i^2+deg_j^2 s3 = 0 # deg_i+deg_j - m = 0 # number of edges + m = 0 # number of edges rd = cs.count("d") # number of d nodes to the right rdi = [i for i, sym in enumerate(cs) if sym == "d"] # index of "d"s ds = degree_sequence(cs) @@ -511,15 +547,15 @@ def degree_correlation(creation_sequence): for dj in rdi: degj = ds[dj] s1 
+= degj * degi - s2 += degi**2 + degj**2 + s2 += degi ** 2 + degj ** 2 s3 += degi + degj m += 1 - denom = (2 * m * s2 - s3 * s3) - numer = (4 * m * s1 - s3 * s3) + denom = 2 * m * s2 - s3 * s3 + numer = 4 * m * s1 - s3 * s3 if denom == 0: if numer == 0: return 1 - raise ValueError("Zero Denominator but Numerator is %s" % numer) + raise ValueError(f"Zero Denominator but Numerator is {numer}") return numer / float(denom) @@ -542,9 +578,9 @@ def shortest_path(creation_sequence, u, v): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence cs = [(i, creation_sequence[i]) for i in range(len(creation_sequence))] - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence cs = creation_sequence[:] elif isinstance(first, int): # compact creation sequence ci = uncompact(creation_sequence) @@ -554,9 +590,9 @@ def shortest_path(creation_sequence, u, v): verts = [s[0] for s in cs] if v not in verts: - raise ValueError("Vertex %s not in graph from creation_sequence" % v) + raise ValueError(f"Vertex {v} not in graph from creation_sequence") if u not in verts: - raise ValueError("Vertex %s not in graph from creation_sequence" % u) + raise ValueError(f"Vertex {u} not in graph from creation_sequence") # Done checking if u == v: return [u] @@ -564,13 +600,13 @@ def shortest_path(creation_sequence, u, v): uindex = verts.index(u) vindex = verts.index(v) bigind = max(uindex, vindex) - if cs[bigind][1] == 'd': + if cs[bigind][1] == "d": return [u, v] # must be that cs[bigind][1]=='i' cs = cs[bigind:] while cs: vert = cs.pop() - if vert[1] == 'd': + if vert[1] == "d": return [u, vert[0], v] # All after u are type 'i' so no connection return -1 @@ -590,12 +626,12 @@ def shortest_path_length(creation_sequence, i): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence if isinstance(creation_sequence, list): cs = creation_sequence[:] else: cs = list(creation_sequence) - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence cs = [v[1] for v in creation_sequence] i = [v[0] for v in creation_sequence].index(i) elif isinstance(first, int): # compact creation sequence @@ -605,13 +641,13 @@ def shortest_path_length(creation_sequence, i): # Compute N = len(cs) - spl = [2] * N # length 2 to every node - spl[i] = 0 # except self which is 0 + spl = [2] * N # length 2 to every node + spl[i] = 0 # except self which is 0 # 1 for all d's to the right for j in range(i + 1, N): if cs[j] == "d": spl[j] = 1 - if cs[i] == 'd': # 1 for all nodes to the left + if cs[i] == "d": # 1 for all nodes to the left for j in range(i): spl[j] = 1 # and -1 for any trailing i to indicate unreachable @@ -629,26 +665,26 @@ def betweenness_sequence(creation_sequence, normalized=True): to the iterval [0,1] divide by (n-1)*(n-2). 
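# Editor's note: threshold graphs have diameter at most 2, which is why
# shortest_path above works on the creation sequence alone. A quick check
# (assumes networkx 2.5; shortest_path is module-level, not in __all__):
from networkx.algorithms.threshold import shortest_path

assert shortest_path("ddid", 0, 1) == [0, 1]     # 1 is dominating: direct edge
assert shortest_path("ddid", 2, 0) == [2, 3, 0]  # isolated 2 routes via dominating 3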
""" cs = creation_sequence - seq = [] # betweenness - lastchar = 'd' # first node is always a 'd' + seq = [] # betweenness + lastchar = "d" # first node is always a 'd' dr = float(cs.count("d")) # number of d's to the right of curren pos - irun = 0 # number of i's in the last run - drun = 0 # number of d's in the last run - dlast = 0.0 # betweenness of last d + irun = 0 # number of i's in the last run + drun = 0 # number of d's in the last run + dlast = 0.0 # betweenness of last d for i, c in enumerate(cs): - if c == 'd': # cs[i]=="d": + if c == "d": # cs[i]=="d": # betweennees = amt shared with eariler d's and i's # + new isolated nodes covered # + new paths to all previous nodes b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr - drun += 1 # update counter - else: # cs[i]="i": - if lastchar == 'd': # if this is a new run of i's - dlast = b # accumulate betweenness - dr -= drun # update number of d's to the right - drun = 0 # reset d counter - irun = 0 # reset i counter - b = 0 # isolated nodes have zero betweenness + drun += 1 # update counter + else: # cs[i]="i": + if lastchar == "d": # if this is a new run of i's + dlast = b # accumulate betweenness + dr -= drun # update number of d's to the right + drun = 0 # reset d counter + irun = 0 # reset i counter + b = 0 # isolated nodes have zero betweenness irun += 1 # add another i to the run seq.append(float(b)) lastchar = c @@ -682,7 +718,7 @@ def eigenvectors(creation_sequence): dr = sum(ccs[::2]) nn = ccs[0] - vec[0] = [1. / sqrt(N)] * N + vec[0] = [1.0 / sqrt(N)] * N val[0] = 0 e = dr dr -= nn @@ -690,7 +726,7 @@ def eigenvectors(creation_sequence): i = 1 dd = 1 while dd < nn: - scale = 1. / sqrt(dd * dd + i) + scale = 1.0 / sqrt(dd * dd + i) vec[i] = i * [-scale] + [dd * scale] + [0] * (N - i - 1) val[i] = e i += 1 @@ -698,7 +734,7 @@ def eigenvectors(creation_sequence): if len(ccs) == 1: return (val, vec) for nn in ccs[1:]: - scale = 1. / sqrt(nn * i * (i + nn)) + scale = 1.0 / sqrt(nn * i * (i + nn)) vec[i] = i * [-nn * scale] + nn * [i * scale] + [0] * (N - i - nn) # find eigenvalue type_d = not type_d @@ -712,7 +748,7 @@ def eigenvectors(creation_sequence): i += 1 dd = 1 while dd < nn: - scale = 1. / sqrt(i - st + dd * dd) + scale = 1.0 / sqrt(i - st + dd * dd) vec[i] = [0] * st + (i - st) * [-scale] + [dd * scale] + [0] * (N - i - 1) val[i] = e i += 1 @@ -752,18 +788,18 @@ def eigenvalues(creation_sequence): See:: @Article{degree-merris-1994, - author = {Russel Merris}, - title = {Degree maximal graphs are Laplacian integral}, - journal = {Linear Algebra Appl.}, - year = {1994}, - volume = {199}, - pages = {381--389}, + author = {Russel Merris}, + title = {Degree maximal graphs are Laplacian integral}, + journal = {Linear Algebra Appl.}, + year = {1994}, + volume = {199}, + pages = {381--389}, } """ degseq = degree_sequence(creation_sequence) degseq.sort() - eiglist = [] # zero is always one eigenvalue + eiglist = [] # zero is always one eigenvalue eig = 0 row = len(degseq) bigdeg = degseq.pop() @@ -782,6 +818,8 @@ def eigenvalues(creation_sequence): # Threshold graph creation routines + +@py_random_state(2) def random_threshold_sequence(n, p, seed=None): """ Create a random threshold sequence of size n. @@ -797,19 +835,19 @@ def random_threshold_sequence(n, p, seed=None): G=nx.threshold_graph(s) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
""" - if seed is not None: - random.seed(seed) - if not (0 <= p <= 1): raise ValueError("p must be in [0,1]") - cs = ['d'] # threshold sequences always start with a d + cs = ["d"] # threshold sequences always start with a d for i in range(1, n): - if random.random() < p: - cs.append('d') + if seed.random() < p: + cs.append("d") else: - cs.append('i') + cs.append("i") return cs @@ -827,11 +865,11 @@ def right_d_threshold_sequence(n, m): FIXME: describe algorithm """ - cs = ['d'] + ['i'] * (n - 1) # create sequence with n insolated nodes + cs = ["d"] + ["i"] * (n - 1) # create sequence with n insolated nodes # m N-1 - cs[n - 1] = 'd' + cs[n - 1] = "d" sum = n - 1 ind = 1 while sum < m: - cs[ind] = 'd' + cs[ind] = "d" sum += ind ind += 1 - if sum > m: # be sure not to change the first vertex - cs[sum - m] = 'i' + if sum > m: # be sure not to change the first vertex + cs[sum - m] = "i" return cs +@py_random_state(3) def swap_d(cs, p_split=1.0, p_combine=1.0, seed=None): """ Perform a "swap" operation on a threshold sequence. @@ -898,36 +937,37 @@ def swap_d(cs, p_split=1.0, p_combine=1.0, seed=None): This operation maintains the number of nodes and edges in the graph, but shifts the edges from node to node maintaining the threshold quality of the graph. - """ - if seed is not None: - random.seed(seed) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + """ # preprocess the creation sequence - dlist = [i for (i, node_type) in enumerate(cs[1:-1]) if node_type == 'd'] + dlist = [i for (i, node_type) in enumerate(cs[1:-1]) if node_type == "d"] # split - if random.random() < p_split: - choice = random.choice(dlist) - split_to = random.choice(range(choice)) + if seed.random() < p_split: + choice = seed.choice(dlist) + split_to = seed.choice(range(choice)) flip_side = choice - split_to - if split_to != flip_side and cs[split_to] == 'i' and cs[flip_side] == 'i': - cs[choice] = 'i' - cs[split_to] = 'd' - cs[flip_side] = 'd' + if split_to != flip_side and cs[split_to] == "i" and cs[flip_side] == "i": + cs[choice] = "i" + cs[split_to] = "d" + cs[flip_side] = "d" dlist.remove(choice) # don't add or combine may reverse this action # dlist.extend([split_to,flip_side]) -# print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side) + # print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side) # combine - if random.random() < p_combine and dlist: - first_choice = random.choice(dlist) - second_choice = random.choice(dlist) + if seed.random() < p_combine and dlist: + first_choice = seed.choice(dlist) + second_choice = seed.choice(dlist) target = first_choice + second_choice - if target >= len(cs) or cs[target] == 'd' or first_choice == second_choice: + if target >= len(cs) or cs[target] == "d" or first_choice == second_choice: return cs # OK to combine - cs[first_choice] = 'i' - cs[second_choice] = 'i' - cs[target] = 'd' -# print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target) + cs[first_choice] = "i" + cs[second_choice] = "i" + cs[target] = "d" + # print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target) return cs diff --git a/networkx/algorithms/tournament.py b/networkx/algorithms/tournament.py index 20fa4af..db2957d 100644 --- a/networkx/algorithms/tournament.py +++ b/networkx/algorithms/tournament.py @@ -1,11 +1,3 @@ -# tournament.py - functions for tournament graphs -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. 
-# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions concerning tournament graphs. A `tournament graph`_ is a complete oriented graph. In other words, it @@ -18,7 +10,6 @@ To access the functions in this module, you must access them through the :mod:`networkx.algorithms.tournament` module:: - >>> import networkx as nx >>> from networkx.algorithms import tournament >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) >>> tournament.is_tournament(G) @@ -28,15 +19,21 @@ """ from itertools import combinations -import random import networkx as nx from networkx.algorithms.simple_paths import is_simple_path as is_path from networkx.utils import arbitrary_element from networkx.utils import not_implemented_for +from networkx.utils import py_random_state -__all__ = ['hamiltonian_path', 'is_reachable', 'is_strongly_connected', - 'is_tournament', 'random_tournament', 'score_sequence'] +__all__ = [ + "hamiltonian_path", + "is_reachable", + "is_strongly_connected", + "is_tournament", + "random_tournament", + "score_sequence", +] def index_satisfying(iterable, condition): @@ -62,12 +59,12 @@ def index_satisfying(iterable, condition): # exception. try: return i + 1 - except NameError: - raise ValueError('iterable must be non-empty') + except NameError as e: + raise ValueError("iterable must be non-empty") from e -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def is_tournament(G): """Returns True if and only if `G` is a tournament. @@ -92,12 +89,14 @@ def is_tournament(G): """ # In a tournament, there is exactly one directed edge joining each pair. - return (all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2)) and - nx.number_of_selfloops(G) == 0) + return ( + all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2)) + and nx.number_of_selfloops(G) == 0 + ) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def hamiltonian_path(G): """Returns a Hamiltonian path in the given tournament graph. @@ -135,13 +134,17 @@ def hamiltonian_path(G): return hampath -def random_tournament(n): +@py_random_state(1) +def random_tournament(n, seed=None): r"""Returns a random tournament graph on `n` nodes. Parameters ---------- n : int The number of nodes in the returned graph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -157,14 +160,14 @@ def random_tournament(n): """ # Flip an unbiased coin for each pair of distinct nodes. - coins = (random.random() for i in range((n * (n - 1)) // 2)) + coins = (seed.random() for i in range((n * (n - 1)) // 2)) pairs = combinations(range(n), 2) edges = ((u, v) if r < 0.5 else (v, u) for (u, v), r in zip(pairs, coins)) return nx.DiGraph(edges) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def score_sequence(G): """Returns the score sequence for the given tournament graph. @@ -185,8 +188,8 @@ def score_sequence(G): return sorted(d for v, d in G.out_degree()) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def tournament_matrix(G): r"""Returns the tournament matrix for the given tournament graph. 
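# Editor's note: quick usage sketch for the tournament helpers touched above,
# including the new seed parameter (assumes networkx 2.5):
import networkx as nx
from networkx.algorithms import tournament

G = tournament.random_tournament(5, seed=1)
assert tournament.is_tournament(G)               # exactly one arc per node pair
assert len(tournament.hamiltonian_path(G)) == 5  # every tournament has one
assert sum(tournament.score_sequence(G)) == 10   # out-degrees sum to C(5, 2)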
@@ -227,8 +230,8 @@ def tournament_matrix(G): return A - A.T -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def is_reachable(G, s, t): """Decides whether there is a path from `s` to `t` in the tournament. @@ -286,9 +289,9 @@ def two_neighborhood(G, v): """ # TODO This is trivially parallelizable. - return {x for x in G - if x == v or x in G[v] or - any(is_path(G, [v, z, x]) for z in G)} + return { + x for x in G if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G) + } def is_closed(G, nodes): """Decides whether the given set of nodes is closed. @@ -303,12 +306,11 @@ def is_closed(G, nodes): # TODO This is trivially parallelizable. neighborhoods = [two_neighborhood(G, v) for v in G] - return all(not (is_closed(G, S) and s in S and t not in S) - for S in neighborhoods) + return all(not (is_closed(G, S) and s in S and t not in S) for S in neighborhoods) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def is_strongly_connected(G): """Decides whether the given tournament is strongly connected. diff --git a/networkx/algorithms/traversal/__init__.py b/networkx/algorithms/traversal/__init__.py index 3ef7d2a..93e6cdd 100644 --- a/networkx/algorithms/traversal/__init__.py +++ b/networkx/algorithms/traversal/__init__.py @@ -2,3 +2,4 @@ from .breadth_first_search import * from .depth_first_search import * from .edgedfs import * +from .edgebfs import * diff --git a/networkx/algorithms/traversal/beamsearch.py b/networkx/algorithms/traversal/beamsearch.py index 509a061..101d68e 100644 --- a/networkx/algorithms/traversal/beamsearch.py +++ b/networkx/algorithms/traversal/beamsearch.py @@ -1,17 +1,8 @@ -# beamsearch.py - breadth-first search with limited queueing -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Basic algorithms for breadth-first searching the nodes of a graph.""" -import networkx as nx from .breadth_first_search import generic_bfs_edges -__all__ = ['bfs_beam_edges'] +__all__ = ["bfs_beam_edges"] def bfs_beam_edges(G, source, value, width=None): @@ -93,6 +84,4 @@ def successors(v): # `bfs_edges(G, source)` but with a sorted enqueue step. return iter(sorted(G.neighbors(v), key=value, reverse=True)[:width]) - # TODO In Python 3.3+, this should be `yield from ...` - for e in generic_bfs_edges(G, source, successors): - yield e + yield from generic_bfs_edges(G, source, successors) diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py index 37c1f44..3432ddc 100644 --- a/networkx/algorithms/traversal/breadth_first_search.py +++ b/networkx/algorithms/traversal/breadth_first_search.py @@ -1,26 +1,17 @@ -# breadth_first_search.py - breadth-first traversal of a graph -# -# Copyright (C) 2004-2018 NetworkX Developers -# Aric Hagberg -# Dan Schult -# Pieter Swart -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
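# Editor's note: beam search is ordinary BFS with a sorted, width-limited
# enqueue step; the `yield from` above replaces the Python-2-era loop.
# Usage sketch (assumes networkx 2.5):
import networkx as nx

G = nx.cycle_graph(4)
# keep only the single highest-valued neighbor at each expansion (width=1)
edges = nx.bfs_beam_edges(G, source=0, value=lambda n: n, width=1)
assert list(edges) == [(0, 3), (3, 2)]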
-# -# Authors: -# Aric Hagberg -# """Basic algorithms for breadth-first searching the nodes of a graph.""" import networkx as nx from collections import deque -__all__ = ['bfs_edges', 'bfs_tree', 'bfs_predecessors', 'bfs_successors'] +__all__ = [ + "bfs_edges", + "bfs_tree", + "bfs_predecessors", + "bfs_successors", + "descendants_at_distance", +] -def generic_bfs_edges(G, source, neighbors=None): +def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbors=None): """Iterate over edges in a breadth-first search. The breadth-first search begins at `source` and enqueues the @@ -44,6 +35,13 @@ def generic_bfs_edges(G, source, neighbors=None): that returns an iterator over some or all of the neighbors of a given node, in any order. + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function + A function that takes the list of neighbors of given node as input, and + returns an *iterator* over these neighbors but with custom ordering. + Yields ------ edge @@ -52,32 +50,43 @@ def generic_bfs_edges(G, source, neighbors=None): Examples -------- >>> G = nx.path_graph(3) - >>> print(list(nx.bfs_edges(G,0))) + >>> print(list(nx.bfs_edges(G, 0))) [(0, 1), (1, 2)] + >>> print(list(nx.bfs_edges(G, source=0, depth_limit=1))) + [(0, 1)] Notes ----- This implementation is from `PADS`_, which was in the public domain - when it was first accessed in July, 2004. + when it was first accessed in July, 2004. The modifications + to allow depth limits are based on the Wikipedia article + "`Depth-limited-search`_". .. _PADS: http://www.ics.uci.edu/~eppstein/PADS/BFS.py - + .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search """ + if callable(sort_neighbors): + _neighbors = neighbors + neighbors = lambda node: iter(sort_neighbors(_neighbors(node))) + visited = {source} - queue = deque([(source, neighbors(source))]) + if depth_limit is None: + depth_limit = len(G) + queue = deque([(source, depth_limit, neighbors(source))]) while queue: - parent, children = queue[0] + parent, depth_now, children = queue[0] try: child = next(children) if child not in visited: yield parent, child visited.add(child) - queue.append((child, neighbors(child))) + if depth_now > 1: + queue.append((child, depth_now - 1, neighbors(child))) except StopIteration: queue.popleft() -def bfs_edges(G, source, reverse=False): +def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """Iterate over edges in a breadth-first-search starting at source. Parameters @@ -85,12 +94,20 @@ def bfs_edges(G, source, reverse=False): G : NetworkX graph source : node - Specify starting node for breadth-first search and return edges in - the component reachable from source. + Specify starting node for breadth-first search; this function + iterates over only those edges in the component reachable from + this node. reverse : bool, optional If True traverse a directed graph in the reverse direction + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function + A function that takes the list of neighbors of given node as input, and + returns an *iterator* over these neighbors but with custom ordering. 
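# Editor's note: the depth limit rides on the queue next to each node's
# neighbor iterator, so children stop being enqueued once depth_now reaches 1.
# Usage sketch for both new keywords (assumes networkx 2.5):
import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4
assert list(nx.bfs_edges(G, source=0, depth_limit=2)) == [(0, 1), (1, 2)]
# sort_neighbors pins the traversal order when neighbor order is arbitrary
assert list(nx.bfs_edges(G, source=2, sort_neighbors=sorted)) == [
    (2, 1), (2, 3), (1, 0), (3, 4)
]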
+ Returns ------- edges: generator @@ -103,6 +120,8 @@ def bfs_edges(G, source, reverse=False): >>> G = nx.path_graph(3) >>> list(nx.bfs_edges(G, 0)) [(0, 1), (1, 2)] + >>> list(nx.bfs_edges(G, source=0, depth_limit=1)) + [(0, 1)] To get the nodes in a breadth-first search order:: @@ -115,20 +134,38 @@ def bfs_edges(G, source, reverse=False): Notes ----- - Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py - by D. Eppstein, July 2004. + The naming of this function is very similar to edge_bfs. The difference + is that 'edge_bfs' yields edges even if they extend back to an already + explored node while 'bfs_edges' yields the edges of the tree that results + from a breadth-first-search (BFS) so no edges are reported if they extend + to already explored nodes. That means 'edge_bfs' reports all edges while + 'bfs_edges' only reports those traversed by a node-based BFS. Yet another + description is that 'bfs_edges' reports the edges traversed during BFS + while 'edge_bfs' reports all edges in the order they are explored. + + Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py. + by D. Eppstein, July 2004. The modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited-search`_". + + .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + bfs_tree + dfs_edges + edge_bfs + """ if reverse and G.is_directed(): successors = G.predecessors else: successors = G.neighbors - # TODO In Python 3.3+, this should be `yield from ...` - for e in generic_bfs_edges(G, source, successors): - yield e + yield from generic_bfs_edges(G, source, successors, depth_limit, sort_neighbors) -def bfs_tree(G, source, reverse=False): - """Return an oriented tree constructed from of a breadth-first-search +def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): + """Returns an oriented tree constructed from of a breadth-first-search starting at source. Parameters @@ -136,12 +173,18 @@ def bfs_tree(G, source, reverse=False): G : NetworkX graph source : node - Specify starting node for breadth-first search and return edges in - the component reachable from source. + Specify starting node for breadth-first search reverse : bool, optional If True traverse a directed graph in the reverse direction + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function + A function that takes the list of neighbors of given node as input, and + returns an *iterator* over these neighbors but with custom ordering. + Returns ------- T: NetworkX DiGraph @@ -150,21 +193,44 @@ def bfs_tree(G, source, reverse=False): Examples -------- >>> G = nx.path_graph(3) - >>> print(list(nx.bfs_tree(G,1).edges())) + >>> print(list(nx.bfs_tree(G, 1).edges())) [(1, 0), (1, 2)] + >>> H = nx.Graph() + >>> nx.add_path(H, [0, 1, 2, 3, 4, 5, 6]) + >>> nx.add_path(H, [2, 7, 8, 9, 10]) + >>> print(sorted(list(nx.bfs_tree(H, source=3, depth_limit=3).edges()))) + [(1, 0), (2, 1), (2, 7), (3, 2), (3, 4), (4, 5), (5, 6), (7, 8)] + Notes ----- Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py - by D. Eppstein, July 2004. + by D. Eppstein, July 2004. The modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited-search`_". + + .. 
_Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_tree + bfs_edges + edge_bfs """ T = nx.DiGraph() T.add_node(source) - T.add_edges_from(bfs_edges(G, source, reverse=reverse)) + edges_gen = bfs_edges( + G, + source, + reverse=reverse, + depth_limit=depth_limit, + sort_neighbors=sort_neighbors, + ) + T.add_edges_from(edges_gen) return T -def bfs_predecessors(G, source): +def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): """Returns an iterator of predecessors in breadth-first-search from source. Parameters @@ -172,8 +238,14 @@ def bfs_predecessors(G, source): G : NetworkX graph source : node - Specify starting node for breadth-first search and return edges in - the component reachable from source. + Specify starting node for breadth-first search + + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function + A function that takes the list of neighbors of given node as input, and + returns an *iterator* over these neighbors but with custom ordering. Returns ------- @@ -188,19 +260,37 @@ def bfs_predecessors(G, source): {1: 0, 2: 1} >>> H = nx.Graph() >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]) - >>> dict(nx.bfs_predecessors(H, 0)) + >>> print(dict(nx.bfs_predecessors(H, 0))) {1: 0, 2: 0, 3: 1, 4: 1, 5: 2, 6: 2} + >>> M = nx.Graph() + >>> nx.add_path(M, [0, 1, 2, 3, 4, 5, 6]) + >>> nx.add_path(M, [2, 7, 8, 9, 10]) + >>> print(sorted(nx.bfs_predecessors(M, source=1, depth_limit=3))) + [(0, 1), (2, 1), (3, 2), (4, 3), (7, 2), (8, 7)] + Notes ----- Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py - by D. Eppstein, July 2004. + by D. Eppstein, July 2004. The modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited-search`_". + + .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + bfs_tree + bfs_edges + edge_bfs """ - for s, t in bfs_edges(G, source): + for s, t in bfs_edges( + G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ): yield (t, s) -def bfs_successors(G, source): +def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): """Returns an iterator of successors in breadth-first-search from source. Parameters @@ -208,8 +298,14 @@ def bfs_successors(G, source): G : NetworkX graph source : node - Specify starting node for breadth-first search and return edges in - the component reachable from source. + Specify starting node for breadth-first search + + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function + A function that takes the list of neighbors of given node as input, and + returns an *iterator* over these neighbors but with custom ordering. Returns ------- @@ -220,22 +316,39 @@ def bfs_successors(G, source): Examples -------- >>> G = nx.path_graph(3) - >>> print(dict(nx.bfs_successors(G,0))) + >>> print(dict(nx.bfs_successors(G, 0))) {0: [1], 1: [2]} >>> H = nx.Graph() >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]) - >>> dict(nx.bfs_successors(H, 0)) + >>> print(dict(nx.bfs_successors(H, 0))) {0: [1, 2], 1: [3, 4], 2: [5, 6]} + >>> G = nx.Graph() + >>> nx.add_path(G, [0, 1, 2, 3, 4, 5, 6]) + >>> nx.add_path(G, [2, 7, 8, 9, 10]) + >>> print(dict(nx.bfs_successors(G, source=1, depth_limit=3))) + {1: [0, 2], 2: [3, 7], 3: [4], 7: [8]} Notes ----- Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py - by D. Eppstein, July 2004. + by D. 
Eppstein, July 2004.The modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited-search`_". + + .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + bfs_tree + bfs_edges + edge_bfs """ parent = source children = [] - for p, c in bfs_edges(G, source): + for p, c in bfs_edges( + G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ): if p == parent: children.append(c) continue @@ -243,3 +356,44 @@ def bfs_successors(G, source): children = [c] parent = p yield (parent, children) + + +def descendants_at_distance(G, source, distance): + """Returns all nodes at a fixed `distance` from `source` in `G`. + + Parameters + ---------- + G : NetworkX DiGraph + A directed graph + source : node in `G` + distance : the distance of the wanted nodes from `source` + + Returns + ------- + set() + The descendants of `source` in `G` at the given `distance` from `source` + """ + if not G.has_node(source): + raise nx.NetworkXError(f"The node {source} is not in the graph.") + current_distance = 0 + queue = {source} + visited = {source} + + # this is basically BFS, except that the queue only stores the nodes at + # current_distance from source at each iteration + while queue: + if current_distance == distance: + return queue + + current_distance += 1 + + next_vertices = set() + for vertex in queue: + for child in G[vertex]: + if child not in visited: + visited.add(child) + next_vertices.add(child) + + queue = next_vertices + + return set() diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py index 75cc261..0a5b987 100644 --- a/networkx/algorithms/traversal/depth_first_search.py +++ b/networkx/algorithms/traversal/depth_first_search.py @@ -1,27 +1,24 @@ -# depth_first_search.py - depth-first traversals of a graph -# -# Copyright 2004-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Author: -# Aric Hagberg """Basic algorithms for depth-first searching the nodes of a graph.""" import networkx as nx from collections import defaultdict -__all__ = ['dfs_edges', 'dfs_tree', - 'dfs_predecessors', 'dfs_successors', - 'dfs_preorder_nodes', 'dfs_postorder_nodes', - 'dfs_labeled_edges'] +__all__ = [ + "dfs_edges", + "dfs_tree", + "dfs_predecessors", + "dfs_successors", + "dfs_preorder_nodes", + "dfs_postorder_nodes", + "dfs_labeled_edges", +] def dfs_edges(G, source=None, depth_limit=None): """Iterate over edges in a depth-first-search (DFS). + Perform a depth-first-search over the nodes of G and yield + the edges in order. This may not generate all edges in G (see edge_dfs). + Parameters ---------- G : NetworkX graph @@ -64,6 +61,8 @@ def dfs_edges(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges + edge_dfs + bfs_edges """ if source is None: # edges for all components @@ -93,7 +92,7 @@ def dfs_edges(G, source=None, depth_limit=None): def dfs_tree(G, source=None, depth_limit=None): - """Return oriented tree constructed from a depth-first-search from source. + """Returns oriented tree constructed from a depth-first-search from source. 
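# Editor's note: descendants_at_distance (new above) is a BFS that keeps only
# the current frontier and returns it once the target distance is reached.
# Sketch (assumes networkx 2.5; works on undirected graphs too, since it only
# uses G[vertex]):
import networkx as nx

G = nx.path_graph(6)  # 0-1-2-3-4-5
assert nx.descendants_at_distance(G, 2, 2) == {0, 4}
assert nx.descendants_at_distance(G, 0, 10) == set()  # nothing that far away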
Parameters ---------- @@ -120,6 +119,13 @@ def dfs_tree(G, source=None, depth_limit=None): >>> list(T.edges()) [(0, 1), (1, 2), (2, 3), (3, 4)] + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + edge_dfs + bfs_tree """ T = nx.DiGraph() if source is None: @@ -131,15 +137,14 @@ def dfs_tree(G, source=None, depth_limit=None): def dfs_predecessors(G, source=None, depth_limit=None): - """Return dictionary of predecessors in depth-first-search from source. + """Returns dictionary of predecessors in depth-first-search from source. Parameters ---------- G : NetworkX graph source : node, optional - Specify starting node for depth-first search and return edges in - the component reachable from source. + Specify starting node for depth-first search. depth_limit : int, optional (default=len(G)) Specify the maximum search depth. @@ -169,20 +174,27 @@ def dfs_predecessors(G, source=None, depth_limit=None): .. _PADS: http://www.ics.uci.edu/~eppstein/PADS .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + edge_dfs + bfs_tree """ return {t: s for s, t in dfs_edges(G, source, depth_limit)} def dfs_successors(G, source=None, depth_limit=None): - """Return dictionary of successors in depth-first-search from source. + """Returns dictionary of successors in depth-first-search from source. Parameters ---------- G : NetworkX graph source : node, optional - Specify starting node for depth-first search and return edges in - the component reachable from source. + Specify starting node for depth-first search. depth_limit : int, optional (default=len(G)) Specify the maximum search depth. @@ -212,6 +224,14 @@ def dfs_successors(G, source=None, depth_limit=None): .. _PADS: http://www.ics.uci.edu/~eppstein/PADS .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + edge_dfs + bfs_tree """ d = defaultdict(list) for s, t in dfs_edges(G, source=source, depth_limit=depth_limit): @@ -227,8 +247,7 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): G : NetworkX graph source : node, optional - Specify starting node for depth-first search and return edges in - the component reachable from source. + Specify starting node for depth-first search. depth_limit : int, optional (default=len(G)) Specify the maximum search depth. @@ -264,9 +283,11 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): dfs_edges dfs_preorder_nodes dfs_labeled_edges + edge_dfs + bfs_tree """ edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) - return (v for u, v, d in edges if d == 'reverse') + return (v for u, v, d in edges if d == "reverse") def dfs_preorder_nodes(G, source=None, depth_limit=None): @@ -277,7 +298,7 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): G : NetworkX graph source : node, optional - Specify starting node for depth-first search and return edges in + Specify starting node for depth-first search and return nodes in the component reachable from source. 
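# Editor's note: the preorder/postorder generators documented above are thin
# filters over dfs_labeled_edges 'forward'/'reverse' events. Sketch
# (assumes networkx 2.5):
import networkx as nx

G = nx.path_graph(4)
assert list(nx.dfs_preorder_nodes(G, source=0)) == [0, 1, 2, 3]
assert list(nx.dfs_postorder_nodes(G, source=0)) == [3, 2, 1, 0]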
depth_limit : int, optional (default=len(G)) @@ -314,9 +335,10 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): dfs_edges dfs_postorder_nodes dfs_labeled_edges + bfs_edges """ edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) - return (v for u, v, d in edges if d == 'forward') + return (v for u, v, d in edges if d == "forward") def dfs_labeled_edges(G, source=None, depth_limit=None): @@ -396,7 +418,7 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): for start in nodes: if start in visited: continue - yield start, start, 'forward' + yield start, start, "forward" visited.add(start) stack = [(start, depth_limit, iter(G[start]))] while stack: @@ -404,14 +426,14 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): try: child = next(children) if child in visited: - yield parent, child, 'nontree' + yield parent, child, "nontree" else: - yield parent, child, 'forward' + yield parent, child, "forward" visited.add(child) if depth_now > 1: stack.append((child, depth_now - 1, iter(G[child]))) except StopIteration: stack.pop() if stack: - yield stack[-1][0], parent, 'reverse' - yield start, start, 'reverse' + yield stack[-1][0], parent, "reverse" + yield start, start, "reverse" diff --git a/networkx/algorithms/traversal/edgebfs.py b/networkx/algorithms/traversal/edgebfs.py new file mode 100644 index 0000000..0c0784c --- /dev/null +++ b/networkx/algorithms/traversal/edgebfs.py @@ -0,0 +1,175 @@ +""" +============================= +Breadth First Search on Edges +============================= + +Algorithms for a breadth-first traversal of edges in a graph. + +""" +from collections import deque +import networkx as nx + +FORWARD = "forward" +REVERSE = "reverse" + +__all__ = ["edge_bfs"] + + +def edge_bfs(G, source=None, orientation=None): + """A directed, breadth-first-search of edges in `G`, beginning at `source`. + + Yield the edges of G in a breadth-first-search order continuing until + all edges are generated. + + Parameters + ---------- + G : graph + A directed/undirected graph/multigraph. + + source : node, list of nodes + The node from which the traversal begins. If None, then a source + is chosen arbitrarily and repeatedly until all edges from each node in + the graph are searched. + + orientation : None | 'original' | 'reverse' | 'ignore' (default: None) + For directed graphs and directed multigraphs, edge traversals need not + respect the original orientation of the edges. + When set to 'reverse' every edge is traversed in the reverse direction. + When set to 'ignore', every edge is treated as undirected. + When set to 'original', every edge is treated as directed. + In all three cases, the yielded edge tuples add a last entry to + indicate the direction in which that edge was traversed. + If orientation is None, the yielded edge has no direction indicated. + The direction is respected, but not reported. + + Yields + ------ + edge : directed edge + A directed edge indicating the path taken by the breadth-first-search. + For graphs, `edge` is of the form `(u, v)` where `u` and `v` + are the tail and head of the edge as determined by the traversal. + For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is + the key of the edge. When the graph is directed, then `u` and `v` + are always in the order of the actual directed edge. + If orientation is not None then the edge tuple is extended to include + the direction of traversal ('forward' or 'reverse') on that edge. 
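# Editor's note: dfs_labeled_edges (rewritten above) reports an event per
# edge -- 'forward' on first descent, 'nontree' for edges to already-visited
# nodes, 'reverse' on unwind -- and brackets the run with (start, start)
# events. Trace on a path graph (assumes networkx 2.5; order follows
# adjacency insertion order):
import networkx as nx

G = nx.path_graph(3)
assert list(nx.dfs_labeled_edges(G, source=0)) == [
    (0, 0, "forward"), (0, 1, "forward"), (1, 0, "nontree"),
    (1, 2, "forward"), (2, 1, "nontree"), (1, 2, "reverse"),
    (0, 1, "reverse"), (0, 0, "reverse"),
]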
+ + Examples + -------- + >>> nodes = [0, 1, 2, 3] + >>> edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)] + + >>> list(nx.edge_bfs(nx.Graph(edges), nodes)) + [(0, 1), (0, 2), (1, 2), (1, 3)] + + >>> list(nx.edge_bfs(nx.DiGraph(edges), nodes)) + [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)] + + >>> list(nx.edge_bfs(nx.MultiGraph(edges), nodes)) + [(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)] + + >>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes)) + [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)] + + >>> list(nx.edge_bfs(nx.DiGraph(edges), nodes, orientation="ignore")) + [(0, 1, 'forward'), (1, 0, 'reverse'), (2, 0, 'reverse'), (2, 1, 'reverse'), (3, 1, 'reverse')] + + >>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes, orientation="ignore")) + [(0, 1, 0, 'forward'), (1, 0, 0, 'reverse'), (1, 0, 1, 'reverse'), (2, 0, 0, 'reverse'), (2, 1, 0, 'reverse'), (3, 1, 0, 'reverse')] + + Notes + ----- + The goal of this function is to visit edges. It differs from the more + familiar breadth-first-search of nodes, as provided by + :func:`networkx.algorithms.traversal.breadth_first_search.bfs_edges`, in + that it does not stop once every node has been visited. In a directed graph + with edges [(0, 1), (1, 2), (2, 1)], the edge (2, 1) would not be visited + if not for the functionality provided by this function. + + The naming of this function is very similar to bfs_edges. The difference + is that 'edge_bfs' yields edges even if they extend back to an already + explored node while 'bfs_edges' yields the edges of the tree that results + from a breadth-first-search (BFS) so no edges are reported if they extend + to already explored nodes. That means 'edge_bfs' reports all edges while + 'bfs_edges' only report those traversed by a node-based BFS. Yet another + description is that 'bfs_edges' reports the edges traversed during BFS + while 'edge_bfs' reports all edges in the order they are explored. 
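# Editor's note: a contrast sketch for the Notes above -- bfs_edges yields
# only tree edges, edge_bfs keeps going until every edge is reported
# (assumes networkx 2.5):
import networkx as nx

G = nx.cycle_graph(3)
assert list(nx.bfs_edges(G, 0)) == [(0, 1), (0, 2)]         # tree edges only
assert list(nx.edge_bfs(G, 0)) == [(0, 1), (0, 2), (1, 2)]  # all edges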
+ + See Also + -------- + bfs_edges + bfs_tree + edge_dfs + + """ + nodes = list(G.nbunch_iter(source)) + if not nodes: + return + + directed = G.is_directed() + kwds = {"data": False} + if G.is_multigraph() is True: + kwds["keys"] = True + + # set up edge lookup + if orientation is None: + + def edges_from(node): + return iter(G.edges(node, **kwds)) + + elif not directed or orientation == "original": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + + elif orientation == "reverse": + + def edges_from(node): + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + elif orientation == "ignore": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + else: + raise nx.NetworkXError("invalid orientation argument.") + + if directed: + neighbors = G.successors + + def edge_id(edge): + # remove direction indicator + return edge[:-1] if orientation is not None else edge + + else: + neighbors = G.neighbors + + def edge_id(edge): + return (frozenset(edge[:2]),) + edge[2:] + + check_reverse = directed and orientation in ("reverse", "ignore") + + # start BFS + visited_nodes = {n for n in nodes} + visited_edges = set() + queue = deque([(n, edges_from(n)) for n in nodes]) + while queue: + parent, children_edges = queue.popleft() + for edge in children_edges: + if check_reverse and edge[-1] == REVERSE: + child = edge[0] + else: + child = edge[1] + if child not in visited_nodes: + visited_nodes.add(child) + queue.append((child, edges_from(child))) + edgeid = edge_id(edge) + if edgeid not in visited_edges: + visited_edges.add(edgeid) + yield edge diff --git a/networkx/algorithms/traversal/edgedfs.py b/networkx/algorithms/traversal/edgedfs.py index 5434057..d26b41e 100644 --- a/networkx/algorithms/traversal/edgedfs.py +++ b/networkx/algorithms/traversal/edgedfs.py @@ -6,78 +6,19 @@ Algorithms for a depth-first traversal of edges in a graph. """ +import networkx as nx -FORWARD = 'forward' -REVERSE = 'reverse' +FORWARD = "forward" +REVERSE = "reverse" -__all__ = ['edge_dfs'] +__all__ = ["edge_dfs"] -def helper_funcs(G, orientation): - """ - These are various G-specific functions that help us implement the algorithm - for all graph types: graph, multigraph, directed or not. - - """ - ignore_orientation = G.is_directed() and orientation == 'ignore' - reverse_orientation = G.is_directed() and orientation == 'reverse' - - if ignore_orientation: - # When we ignore the orientation, we still need to know how the edge - # was traversed, so we add an object representing the direction. - def out_edges(u_for_edges, **kwds): - for edge in G.out_edges(u_for_edges, **kwds): - yield edge + (FORWARD,) - for edge in G.in_edges(u_for_edges, **kwds): - yield edge + (REVERSE,) - elif reverse_orientation: - def out_edges(u_for_edges, **kwds): - for edge in G.in_edges(u_for_edges, **kwds): - yield edge + (REVERSE,) - else: - # If "yield from" were an option, we could pass kwds automatically. - out_edges = G.edges - - # If every edge had a unique key, then it would be easier to track which - # edges had been visited. Since that is not available, we will form a - # unique identifier from the edge and key (if present). If the graph - # is undirected, then the head and tail need to be stored as a frozenset. - if ignore_orientation or reverse_orientation: - # edge is a 4-tuple: (u, v, key, direction) - # u and v always represent the true tail and head of the edge. 
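# Editor's note: for undirected graphs an edge can be reached from either
# endpoint, so edge_id above hashes the endpoint pair as a frozenset to make
# (u, v) and (v, u) collide while keeping multigraph keys distinct. Sketch:
def edge_id(edge):
    return (frozenset(edge[:2]),) + edge[2:]

assert edge_id((0, 1)) == edge_id((1, 0))        # same undirected edge
assert edge_id((0, 1, 0)) != edge_id((0, 1, 1))  # distinct parallel edges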
- def key(edge): - # We want everything but the direction. - return edge[:-1] - else: - if G.is_directed(): - def key(edge): - return edge - else: - # edge is a 3-tuple: (u, v, key) - def key(edge): - new_edge = (frozenset(edge[:2]),) + edge[2:] - return new_edge - - def traversed_tailhead(edge): - """ - Returns the tail and head of an edge, as it was traversed. - - So in general, this is different from the true tail and head. - (Also, undirected edges have no true tail or head.) +def edge_dfs(G, source=None, orientation=None): + """A directed, depth-first-search of edges in `G`, beginning at `source`. - """ - if (ignore_orientation or reverse_orientation) and edge[-1] == REVERSE: - tail, head = edge[1], edge[0] - else: - tail, head = edge[0], edge[1] - return tail, head - - return out_edges, key, traversed_tailhead - - -def edge_dfs(G, source=None, orientation='original'): - """ - A directed, depth-first traversal of edges in `G`, beginning at `source`. + Yield the edges of G in a depth-first-search order continuing until + all edges are generated. Parameters ---------- @@ -89,32 +30,31 @@ def edge_dfs(G, source=None, orientation='original'): is chosen arbitrarily and repeatedly until all edges from each node in the graph are searched. - orientation : 'original' | 'reverse' | 'ignore' + orientation : None | 'original' | 'reverse' | 'ignore' (default: None) For directed graphs and directed multigraphs, edge traversals need not - respect the original orientation of the edges. When set to 'reverse', - then every edge will be traversed in the reverse direction. When set to - 'ignore', then each directed edge is treated as a single undirected - edge that can be traversed in either direction. For undirected graphs - and undirected multigraphs, this parameter is meaningless and is not - consulted by the algorithm. + respect the original orientation of the edges. + When set to 'reverse' every edge is traversed in the reverse direction. + When set to 'ignore', every edge is treated as undirected. + When set to 'original', every edge is treated as directed. + In all three cases, the yielded edge tuples add a last entry to + indicate the direction in which that edge was traversed. + If orientation is None, the yielded edge has no direction indicated. + The direction is respected, but not reported. Yields ------ edge : directed edge A directed edge indicating the path taken by the depth-first traversal. For graphs, `edge` is of the form `(u, v)` where `u` and `v` - are the tail and head of the edge as determined by the traversal. For - multigraphs, `edge` is of the form `(u, v, key)`, where `key` is + are the tail and head of the edge as determined by the traversal. + For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is the key of the edge. When the graph is directed, then `u` and `v` - are always in the order of the actual directed edge. If orientation is - 'reverse' or 'ignore', then `edge` takes the form - `(u, v, key, direction)` where direction is a string, 'forward' or - 'reverse', that indicates if the edge was traversed in the forward - (tail to head) or reverse (head to tail) direction, respectively. + are always in the order of the actual directed edge. + If orientation is not None then the edge tuple is extended to include + the direction of traversal ('forward' or 'reverse') on that edge. 
Examples -------- - >>> import networkx as nx >>> nodes = [0, 1, 2, 3] >>> edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] @@ -130,10 +70,10 @@ def edge_dfs(G, source=None, orientation='original'): >>> list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes)) [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 0)] - >>> list(nx.edge_dfs(nx.DiGraph(edges), nodes, orientation='ignore')) + >>> list(nx.edge_dfs(nx.DiGraph(edges), nodes, orientation="ignore")) [(0, 1, 'forward'), (1, 0, 'forward'), (2, 1, 'reverse'), (3, 1, 'reverse')] - >>> list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes, orientation='ignore')) + >>> list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes, orientation="ignore")) [(0, 1, 0, 'forward'), (1, 0, 0, 'forward'), (1, 0, 1, 'reverse'), (2, 1, 0, 'reverse'), (3, 1, 0, 'reverse')] Notes @@ -152,24 +92,69 @@ def edge_dfs(G, source=None, orientation='original'): """ nodes = list(G.nbunch_iter(source)) if not nodes: - raise StopIteration + return + + directed = G.is_directed() + kwds = {"data": False} + if G.is_multigraph() is True: + kwds["keys"] = True + + # set up edge lookup + if orientation is None: + + def edges_from(node): + return iter(G.edges(node, **kwds)) + + elif not directed or orientation == "original": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + + elif orientation == "reverse": + + def edges_from(node): + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + elif orientation == "ignore": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + else: + raise nx.NetworkXError("invalid orientation argument.") + + # set up formation of edge_id to easily look up if edge already returned + if directed: + + def edge_id(edge): + # remove direction indicator + return edge[:-1] if orientation is not None else edge + + else: - kwds = {'data': False} - if G.is_multigraph(): - kwds['keys'] = True + def edge_id(edge): + # single id for undirected requires frozenset on nodes + return (frozenset(edge[:2]),) + edge[2:] - out_edges, key, tailhead = helper_funcs(G, orientation) + # Basic setup + check_reverse = directed and orientation in ("reverse", "ignore") visited_edges = set() visited_nodes = set() edges = {} + # start DFS for start_node in nodes: stack = [start_node] while stack: current_node = stack[-1] if current_node not in visited_nodes: - edges[current_node] = iter(out_edges(current_node, **kwds)) + edges[current_node] = edges_from(current_node) visited_nodes.add(current_node) try: @@ -178,9 +163,12 @@ def edge_dfs(G, source=None, orientation='original'): # No more edges from the current node. stack.pop() else: - edge_key = key(edge) - if edge_key not in visited_edges: - visited_edges.add(edge_key) + edgeid = edge_id(edge) + if edgeid not in visited_edges: + visited_edges.add(edgeid) # Mark the traversed "to" node as to-be-explored. 
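# Editor's note: the `raise StopIteration` dropped above is a PEP 479 fix --
# from Python 3.7 on, a StopIteration raised inside a generator becomes a
# RuntimeError, so an empty nbunch must end the generator with `return`.
# Minimal sketch of the pattern:
def edges_or_nothing(items):
    if not items:
        return  # correct way to finish a generator early (was: raise StopIteration)
    yield from items

assert list(edges_or_nothing([])) == []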
- stack.append(tailhead(edge)[1]) + if check_reverse and edge[-1] == REVERSE: + stack.append(edge[0]) + else: + stack.append(edge[1]) yield edge diff --git a/networkx/algorithms/traversal/tests/__init__.py b/networkx/algorithms/traversal/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/traversal/tests/test_beamsearch.py b/networkx/algorithms/traversal/tests/test_beamsearch.py index ec68e96..249cc2f 100644 --- a/networkx/algorithms/traversal/tests/test_beamsearch.py +++ b/networkx/algorithms/traversal/tests/test_beamsearch.py @@ -1,15 +1,4 @@ -# test_beamsearch.py - unit tests for the beamsearch module -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the beam search functions.""" -from unittest import TestCase - -from nose.tools import assert_equal import networkx as nx @@ -18,7 +7,7 @@ def identity(x): return x -class TestBeamSearch(TestCase): +class TestBeamSearch: """Unit tests for the beam search function.""" def test_narrow(self): @@ -30,9 +19,9 @@ def test_narrow(self): # search terminates. G = nx.cycle_graph(4) edges = nx.bfs_beam_edges(G, 0, identity, width=1) - assert_equal(list(edges), [(0, 3), (3, 2)]) + assert list(edges) == [(0, 3), (3, 2)] def test_wide(self): G = nx.cycle_graph(4) edges = nx.bfs_beam_edges(G, 0, identity, width=2) - assert_equal(list(edges), [(0, 3), (0, 1), (3, 2)]) + assert list(edges) == [(0, 3), (0, 1), (3, 2)] diff --git a/networkx/algorithms/traversal/tests/test_bfs.py b/networkx/algorithms/traversal/tests/test_bfs.py index 09cc2b6..b450de2 100644 --- a/networkx/algorithms/traversal/tests/test_bfs.py +++ b/networkx/algorithms/traversal/tests/test_bfs.py @@ -1,42 +1,100 @@ -from nose.tools import assert_equal +from functools import partial import networkx as nx class TestBFS: - - def setUp(self): + @classmethod + def setup_class(cls): # simple graph G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)]) - self.G = G + cls.G = G def test_successor(self): - assert_equal(dict(nx.bfs_successors(self.G, source=0)), - {0: [1], 1: [2, 3], 2: [4]}) + assert dict(nx.bfs_successors(self.G, source=0)) == {0: [1], 1: [2, 3], 2: [4]} def test_predecessor(self): - assert_equal(dict(nx.bfs_predecessors(self.G, source=0)), - {1: 0, 2: 1, 3: 1, 4: 2}) + assert dict(nx.bfs_predecessors(self.G, source=0)) == {1: 0, 2: 1, 3: 1, 4: 2} def test_bfs_tree(self): T = nx.bfs_tree(self.G, source=0) - assert_equal(sorted(T.nodes()), sorted(self.G.nodes())) - assert_equal(sorted(T.edges()), [(0, 1), (1, 2), (1, 3), (2, 4)]) + assert sorted(T.nodes()) == sorted(self.G.nodes()) + assert sorted(T.edges()) == [(0, 1), (1, 2), (1, 3), (2, 4)] def test_bfs_edges(self): edges = nx.bfs_edges(self.G, source=0) - assert_equal(list(edges), [(0, 1), (1, 2), (1, 3), (2, 4)]) + assert list(edges) == [(0, 1), (1, 2), (1, 3), (2, 4)] def test_bfs_edges_reverse(self): D = nx.DiGraph() D.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)]) edges = nx.bfs_edges(D, source=4, reverse=True) - assert_equal(list(edges), [(4, 2), (4, 3), (2, 1), (1, 0)]) + assert list(edges) == [(4, 2), (4, 3), (2, 1), (1, 0)] + + def test_bfs_edges_sorting(self): + D = nx.DiGraph() + D.add_edges_from([(0, 1), (0, 2), (1, 4), (1, 3), (2, 5)]) + sort_desc = partial(sorted, reverse=True) + edges_asc = nx.bfs_edges(D, source=0, sort_neighbors=sorted) + edges_desc = nx.bfs_edges(D, source=0, sort_neighbors=sort_desc) 
+ assert list(edges_asc) == [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5)] + assert list(edges_desc) == [(0, 2), (0, 1), (2, 5), (1, 4), (1, 3)] def test_bfs_tree_isolates(self): G = nx.Graph() G.add_node(1) G.add_node(2) T = nx.bfs_tree(G, source=1) - assert_equal(sorted(T.nodes()), [1]) - assert_equal(sorted(T.edges()), []) + assert sorted(T.nodes()) == [1] + assert sorted(T.edges()) == [] + + +class TestBreadthLimitedSearch: + @classmethod + def setup_class(cls): + # a tree + G = nx.Graph() + nx.add_path(G, [0, 1, 2, 3, 4, 5, 6]) + nx.add_path(G, [2, 7, 8, 9, 10]) + cls.G = G + # a disconnected graph + D = nx.Graph() + D.add_edges_from([(0, 1), (2, 3)]) + nx.add_path(D, [2, 7, 8, 9, 10]) + cls.D = D + + def test_limited_bfs_successor(self): + assert dict(nx.bfs_successors(self.G, source=1, depth_limit=3)) == { + 1: [0, 2], + 2: [3, 7], + 3: [4], + 7: [8], + } + result = { + n: sorted(s) for n, s in nx.bfs_successors(self.D, source=7, depth_limit=2) + } + assert result == {8: [9], 2: [3], 7: [2, 8]} + + def test_limited_bfs_predecessor(self): + assert dict(nx.bfs_predecessors(self.G, source=1, depth_limit=3)) == { + 0: 1, + 2: 1, + 3: 2, + 4: 3, + 7: 2, + 8: 7, + } + assert dict(nx.bfs_predecessors(self.D, source=7, depth_limit=2)) == { + 2: 7, + 3: 2, + 8: 7, + 9: 8, + } + + def test_limited_bfs_tree(self): + T = nx.bfs_tree(self.G, source=3, depth_limit=1) + assert sorted(T.edges()) == [(3, 2), (3, 4)] + + def test_limited_bfs_edges(self): + edges = nx.bfs_edges(self.G, source=9, depth_limit=4) + assert list(edges) == [(9, 8), (9, 10), (8, 7), (7, 2), (2, 1), (2, 3)] diff --git a/networkx/algorithms/traversal/tests/test_dfs.py b/networkx/algorithms/traversal/tests/test_dfs.py index 7d7bc42..1be3b4a 100644 --- a/networkx/algorithms/traversal/tests/test_dfs.py +++ b/networkx/algorithms/traversal/tests/test_dfs.py @@ -1,139 +1,148 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx class TestDFS: - - def setUp(self): + @classmethod + def setup_class(cls): # simple graph G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)]) - self.G = G + cls.G = G # simple graph, disconnected D = nx.Graph() D.add_edges_from([(0, 1), (2, 3)]) - self.D = D + cls.D = D def test_preorder_nodes(self): - assert_equal(list(nx.dfs_preorder_nodes(self.G, source=0)), - [0, 1, 2, 4, 3]) - assert_equal(list(nx.dfs_preorder_nodes(self.D)), [0, 1, 2, 3]) + assert list(nx.dfs_preorder_nodes(self.G, source=0)) == [0, 1, 2, 4, 3] + assert list(nx.dfs_preorder_nodes(self.D)) == [0, 1, 2, 3] def test_postorder_nodes(self): - assert_equal(list(nx.dfs_postorder_nodes(self.G, source=0)), - [3, 4, 2, 1, 0]) - assert_equal(list(nx.dfs_postorder_nodes(self.D)), [1, 0, 3, 2]) + assert list(nx.dfs_postorder_nodes(self.G, source=0)) == [3, 4, 2, 1, 0] + assert list(nx.dfs_postorder_nodes(self.D)) == [1, 0, 3, 2] def test_successor(self): - assert_equal(nx.dfs_successors(self.G, source=0), - {0: [1], 1: [2], 2: [4], 4: [3]}) - assert_equal(nx.dfs_successors(self.D), {0: [1], 2: [3]}) + assert nx.dfs_successors(self.G, source=0) == {0: [1], 1: [2], 2: [4], 4: [3]} + assert nx.dfs_successors(self.D) == {0: [1], 2: [3]} def test_predecessor(self): - assert_equal(nx.dfs_predecessors(self.G, source=0), - {1: 0, 2: 1, 3: 4, 4: 2}) - assert_equal(nx.dfs_predecessors(self.D), {1: 0, 3: 2}) + assert nx.dfs_predecessors(self.G, source=0) == {1: 0, 2: 1, 3: 4, 4: 2} + assert nx.dfs_predecessors(self.D) == {1: 0, 3: 2} def test_dfs_tree(self): exp_nodes = sorted(self.G.nodes()) exp_edges = [(0, 1), (1, 2), (2, 
4), (4, 3)] # Search from first node T = nx.dfs_tree(self.G, source=0) - assert_equal(sorted(T.nodes()), exp_nodes) - assert_equal(sorted(T.edges()), exp_edges) + assert sorted(T.nodes()) == exp_nodes + assert sorted(T.edges()) == exp_edges # Check source=None T = nx.dfs_tree(self.G, source=None) - assert_equal(sorted(T.nodes()), exp_nodes) - assert_equal(sorted(T.edges()), exp_edges) + assert sorted(T.nodes()) == exp_nodes + assert sorted(T.edges()) == exp_edges # Check source=None is the default T = nx.dfs_tree(self.G) - assert_equal(sorted(T.nodes()), exp_nodes) - assert_equal(sorted(T.edges()), exp_edges) + assert sorted(T.nodes()) == exp_nodes + assert sorted(T.edges()) == exp_edges def test_dfs_edges(self): edges = nx.dfs_edges(self.G, source=0) - assert_equal(list(edges), [(0, 1), (1, 2), (2, 4), (4, 3)]) + assert list(edges) == [(0, 1), (1, 2), (2, 4), (4, 3)] edges = nx.dfs_edges(self.D) - assert_equal(list(edges), [(0, 1), (2, 3)]) + assert list(edges) == [(0, 1), (2, 3)] def test_dfs_labeled_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=0)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] - assert_equal(forward, [(0, 0), (0, 1), (1, 2), (2, 4), (4, 3)]) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(0, 0), (0, 1), (1, 2), (2, 4), (4, 3)] def test_dfs_labeled_disconnected_edges(self): edges = list(nx.dfs_labeled_edges(self.D)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] - assert_equal(forward, [(0, 0), (0, 1), (2, 2), (2, 3)]) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(0, 0), (0, 1), (2, 2), (2, 3)] def test_dfs_tree_isolates(self): G = nx.Graph() G.add_node(1) G.add_node(2) T = nx.dfs_tree(G, source=1) - assert_equal(sorted(T.nodes()), [1]) - assert_equal(sorted(T.edges()), []) + assert sorted(T.nodes()) == [1] + assert sorted(T.edges()) == [] T = nx.dfs_tree(G, source=None) - assert_equal(sorted(T.nodes()), [1, 2]) - assert_equal(sorted(T.edges()), []) + assert sorted(T.nodes()) == [1, 2] + assert sorted(T.edges()) == [] class TestDepthLimitedSearch: - - def setUp(self): + @classmethod + def setup_class(cls): # a tree G = nx.Graph() nx.add_path(G, [0, 1, 2, 3, 4, 5, 6]) nx.add_path(G, [2, 7, 8, 9, 10]) - self.G = G + cls.G = G # a disconnected graph D = nx.Graph() D.add_edges_from([(0, 1), (2, 3)]) nx.add_path(D, [2, 7, 8, 9, 10]) - self.D = D - - def dls_test_preorder_nodes(self): - assert_equal(list(nx.dfs_preorder_nodes(self.G, source=0, - depth_limit=2)), [0, 1, 2]) - assert_equal(list(nx.dfs_preorder_nodes(self.D, source=1, - depth_limit=2)), ([1, 0])) - - def dls_test_postorder_nodes(self): - assert_equal(list(nx.dfs_postorder_nodes(self.G, - source=3, depth_limit=3)), [1, 7, 2, 5, 4, 3]) - assert_equal(list(nx.dfs_postorder_nodes(self.D, - source=2, depth_limit=2)), ([3, 7, 2])) - - def dls_test_successor(self): + cls.D = D + + def test_dls_preorder_nodes(self): + assert list(nx.dfs_preorder_nodes(self.G, source=0, depth_limit=2)) == [0, 1, 2] + assert list(nx.dfs_preorder_nodes(self.D, source=1, depth_limit=2)) == ([1, 0]) + + def test_dls_postorder_nodes(self): + assert list(nx.dfs_postorder_nodes(self.G, source=3, depth_limit=3)) == [ + 1, + 7, + 2, + 5, + 4, + 3, + ] + assert list(nx.dfs_postorder_nodes(self.D, source=2, depth_limit=2)) == ( + [3, 7, 2] + ) + + def test_dls_successor(self): result = nx.dfs_successors(self.G, source=4, depth_limit=3) - assert_equal({n: set(v) for n, v in result.items()}, - {2: {1, 7}, 3: {2}, 4: {3, 5}, 5: {6}}) 
+ assert {n: set(v) for n, v in result.items()} == { + 2: {1, 7}, + 3: {2}, + 4: {3, 5}, + 5: {6}, + } result = nx.dfs_successors(self.D, source=7, depth_limit=2) - assert_equal({n: set(v) for n, v in result.items()}, - {8: {9}, 2: {3}, 7: {8, 2}}) - - def dls_test_predecessor(self): - assert_equal(nx.dfs_predecessors(self.G, source=0, depth_limit=3), - {1: 0, 2: 1, 3: 2, 7: 2}) - assert_equal(nx.dfs_predecessors(self.D, source=2, depth_limit=3), - {8: 7, 9: 8, 3: 2, 7: 2}) + assert {n: set(v) for n, v in result.items()} == {8: {9}, 2: {3}, 7: {8, 2}} + + def test_dls_predecessor(self): + assert nx.dfs_predecessors(self.G, source=0, depth_limit=3) == { + 1: 0, + 2: 1, + 3: 2, + 7: 2, + } + assert nx.dfs_predecessors(self.D, source=2, depth_limit=3) == { + 8: 7, + 9: 8, + 3: 2, + 7: 2, + } def test_dls_tree(self): T = nx.dfs_tree(self.G, source=3, depth_limit=1) - assert_equal(sorted(T.edges()), [(3, 2), (3, 4)]) + assert sorted(T.edges()) == [(3, 2), (3, 4)] def test_dls_edges(self): edges = nx.dfs_edges(self.G, source=9, depth_limit=4) - assert_equal(list(edges), [(9, 8), (8, 7), - (7, 2), (2, 1), (2, 3), (9, 10)]) + assert list(edges) == [(9, 8), (8, 7), (7, 2), (2, 1), (2, 3), (9, 10)] def test_dls_labeled_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=5, depth_limit=1)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] - assert_equal(forward, [(5, 5), (5, 4), (5, 6)]) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(5, 5), (5, 4), (5, 6)] def test_dls_labeled_disconnected_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=6, depth_limit=2)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] - assert_equal(forward, [(6, 6), (6, 5), (5, 4)]) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(6, 6), (6, 5), (5, 4)] diff --git a/networkx/algorithms/traversal/tests/test_edgebfs.py b/networkx/algorithms/traversal/tests/test_edgebfs.py new file mode 100644 index 0000000..170be25 --- /dev/null +++ b/networkx/algorithms/traversal/tests/test_edgebfs.py @@ -0,0 +1,151 @@ +import pytest + +import networkx as nx + +edge_bfs = nx.edge_bfs + +FORWARD = nx.algorithms.edgedfs.FORWARD +REVERSE = nx.algorithms.edgedfs.REVERSE + + +class TestEdgeBFS: + @classmethod + def setup_class(cls): + cls.nodes = [0, 1, 2, 3] + cls.edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)] + + def test_empty(self): + G = nx.Graph() + edges = list(edge_bfs(G)) + assert edges == [] + + def test_graph_single_source(self): + G = nx.Graph(self.edges) + G.add_edge(4, 5) + x = list(edge_bfs(G, [0])) + x_ = [(0, 1), (0, 2), (1, 2), (1, 3)] + assert x == x_ + + def test_graph(self): + G = nx.Graph(self.edges) + x = list(edge_bfs(G, self.nodes)) + x_ = [(0, 1), (0, 2), (1, 2), (1, 3)] + assert x == x_ + + def test_digraph(self): + G = nx.DiGraph(self.edges) + x = list(edge_bfs(G, self.nodes)) + x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_invalid(self): + G = nx.DiGraph(self.edges) + edge_iterator = edge_bfs(G, self.nodes, orientation="hello") + pytest.raises(nx.NetworkXError, list, edge_iterator) + + def test_digraph_orientation_none(self): + G = nx.DiGraph(self.edges) + x = list(edge_bfs(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_original(self): + G = nx.DiGraph(self.edges) + x = list(edge_bfs(G, self.nodes, orientation="original")) + x_ = [ + (0, 1, FORWARD), + (1, 0, 
FORWARD), + (2, 0, FORWARD), + (2, 1, FORWARD), + (3, 1, FORWARD), + ] + assert x == x_ + + def test_digraph2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(edge_bfs(G, [0])) + x_ = [(0, 1), (1, 2), (2, 3)] + assert x == x_ + + def test_digraph_rev(self): + G = nx.DiGraph(self.edges) + x = list(edge_bfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, REVERSE), + (2, 0, REVERSE), + (0, 1, REVERSE), + (2, 1, REVERSE), + (3, 1, REVERSE), + ] + assert x == x_ + + def test_digraph_rev2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(edge_bfs(G, [3], orientation="reverse")) + x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)] + assert x == x_ + + def test_multigraph(self): + G = nx.MultiGraph(self.edges) + x = list(edge_bfs(G, self.nodes)) + x_ = [(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)] + # This is an example of where hash randomization can break. + # There are 3! * 2 alternative outputs, such as: + # [(0, 1, 1), (1, 0, 0), (0, 1, 2), (1, 3, 0), (1, 2, 0)] + # But note, the edges (1,2,0) and (1,3,0) always follow the (0,1,k) + # edges. So the algorithm only guarantees a partial order. A total + # order is guaranteed only if the graph data structures are ordered. + assert x == x_ + + def test_multidigraph(self): + G = nx.MultiDiGraph(self.edges) + x = list(edge_bfs(G, self.nodes)) + x_ = [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)] + assert x == x_ + + def test_multidigraph_rev(self): + G = nx.MultiDiGraph(self.edges) + x = list(edge_bfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 0, 0, REVERSE), + (0, 1, 0, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ + + def test_digraph_ignore(self): + G = nx.DiGraph(self.edges) + x = list(edge_bfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, FORWARD), + (1, 0, REVERSE), + (2, 0, REVERSE), + (2, 1, REVERSE), + (3, 1, REVERSE), + ] + assert x == x_ + + def test_digraph_ignore2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(edge_bfs(G, [0], orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)] + assert x == x_ + + def test_multidigraph_ignore(self): + G = nx.MultiDiGraph(self.edges) + x = list(edge_bfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, 0, FORWARD), + (1, 0, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 0, 0, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ diff --git a/networkx/algorithms/traversal/tests/test_edgedfs.py b/networkx/algorithms/traversal/tests/test_edgedfs.py index 2d92eaa..6c12ae2 100644 --- a/networkx/algorithms/traversal/tests/test_edgedfs.py +++ b/networkx/algorithms/traversal/tests/test_edgedfs.py @@ -1,7 +1,9 @@ -from nose.tools import * +import pytest import networkx as nx + edge_dfs = nx.algorithms.edge_dfs + FORWARD = nx.algorithms.edgedfs.FORWARD REVERSE = nx.algorithms.edgedfs.REVERSE @@ -16,48 +18,65 @@ # this can fail, see TestEdgeDFS.test_multigraph. 
-class TestEdgeDFS(object): - def setUp(self): - self.nodes = [0, 1, 2, 3] - self.edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] +class TestEdgeDFS: + @classmethod + def setup_class(cls): + cls.nodes = [0, 1, 2, 3] + cls.edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] def test_empty(self): G = nx.Graph() edges = list(edge_dfs(G)) - assert_equal(edges, []) + assert edges == [] def test_graph(self): G = nx.Graph(self.edges) x = list(edge_dfs(G, self.nodes)) x_ = [(0, 1), (1, 2), (1, 3)] - assert_equal(x, x_) + assert x == x_ def test_digraph(self): G = nx.DiGraph(self.edges) x = list(edge_dfs(G, self.nodes)) x_ = [(0, 1), (1, 0), (2, 1), (3, 1)] - assert_equal(x, x_) + assert x == x_ + + def test_digraph_orientation_invalid(self): + G = nx.DiGraph(self.edges) + edge_iterator = edge_dfs(G, self.nodes, orientation="hello") + pytest.raises(nx.NetworkXError, list, edge_iterator) + + def test_digraph_orientation_none(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_original(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, FORWARD), (3, 1, FORWARD)] + assert x == x_ def test_digraph2(self): G = nx.DiGraph() nx.add_path(G, range(4)) x = list(edge_dfs(G, [0])) x_ = [(0, 1), (1, 2), (2, 3)] - assert_equal(x, x_) + assert x == x_ def test_digraph_rev(self): G = nx.DiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='reverse')) - x_ = [(1, 0, REVERSE), (0, 1, REVERSE), - (2, 1, REVERSE), (3, 1, REVERSE)] - assert_equal(x, x_) + x = list(edge_dfs(G, self.nodes, orientation="reverse")) + x_ = [(1, 0, REVERSE), (0, 1, REVERSE), (2, 1, REVERSE), (3, 1, REVERSE)] + assert x == x_ def test_digraph_rev2(self): G = nx.DiGraph() nx.add_path(G, range(4)) - x = list(edge_dfs(G, [3], orientation='reverse')) + x = list(edge_dfs(G, [3], orientation="reverse")) x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)] - assert_equal(x, x_) + assert x == x_ def test_multigraph(self): G = nx.MultiGraph(self.edges) @@ -69,42 +88,47 @@ def test_multigraph(self): # But note, the edges (1,2,0) and (1,3,0) always follow the (0,1,k) # edges. So the algorithm only guarantees a partial order. A total # order is guaranteed only if the graph data structures are ordered. 
- assert_equal(x, x_) + assert x == x_ def test_multidigraph(self): G = nx.MultiDiGraph(self.edges) x = list(edge_dfs(G, self.nodes)) x_ = [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 0)] - assert_equal(x, x_) + assert x == x_ def test_multidigraph_rev(self): G = nx.MultiDiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='reverse')) - x_ = [(1, 0, 0, REVERSE), - (0, 1, 0, REVERSE), - (1, 0, 1, REVERSE), - (2, 1, 0, REVERSE), - (3, 1, 0, REVERSE)] - assert_equal(x, x_) + x = list(edge_dfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, 0, REVERSE), + (0, 1, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ def test_digraph_ignore(self): G = nx.DiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='ignore')) - x_ = [(0, 1, FORWARD), (1, 0, FORWARD), - (2, 1, REVERSE), (3, 1, REVERSE)] - assert_equal(x, x_) + x = list(edge_dfs(G, self.nodes, orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, REVERSE), (3, 1, REVERSE)] + assert x == x_ def test_digraph_ignore2(self): G = nx.DiGraph() nx.add_path(G, range(4)) - x = list(edge_dfs(G, [0], orientation='ignore')) + x = list(edge_dfs(G, [0], orientation="ignore")) x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)] - assert_equal(x, x_) + assert x == x_ def test_multidigraph_ignore(self): G = nx.MultiDiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='ignore')) - x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD), - (1, 0, 1, REVERSE), (2, 1, 0, REVERSE), - (3, 1, 0, REVERSE)] - assert_equal(x, x_) + x = list(edge_dfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, 0, FORWARD), + (1, 0, 0, FORWARD), + (1, 0, 1, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ diff --git a/networkx/algorithms/tree/__init__.py b/networkx/algorithms/tree/__init__.py index 0bb8fd4..7120d4b 100644 --- a/networkx/algorithms/tree/__init__.py +++ b/networkx/algorithms/tree/__init__.py @@ -3,3 +3,4 @@ from .mst import * from .recognition import * from .operations import * +from .decomposition import * diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py index e7e2d5f..c68564e 100644 --- a/networkx/algorithms/tree/branchings.py +++ b/networkx/algorithms/tree/branchings.py @@ -1,4 +1,3 @@ -# encoding: utf-8 """ Algorithms for finding optimum branchings and spanning arborescences. 
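For context, the reworked API in this module is normally driven through the
module-level wrappers rather than the Edmonds class directly. A minimal sketch
of the behaviour the hunks below add (the graph, the weights, and the "label"
attribute here are illustrative, not taken from the patch):

    import networkx as nx

    G = nx.DiGraph()
    G.add_edge(0, 1, weight=5, label="a")  # "label" is an arbitrary extra attribute
    G.add_edge(0, 2, weight=1, label="b")
    G.add_edge(2, 1, weight=2, label="c")

    # preserve_attrs=True (added below) keeps "label" on the returned edges.
    B = nx.maximum_spanning_arborescence(G, preserve_attrs=True)
    sorted(B.edges(data=True))
    # [(0, 1, {'weight': 5, 'label': 'a'}), (0, 2, {'weight': 1, 'label': 'b'})]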
@@ -10,7 +9,7 @@ """ # TODO: Implement method from Gabow, Galil, Spence and Tarjan: # -#@article{ +# @article{ # year={1986}, # issn={0209-9683}, # journal={Combinatorica}, @@ -26,40 +25,42 @@ # Robert E.}, # pages={109-122}, # language={English} -#} +# } -from __future__ import division -from __future__ import print_function import string -import random from operator import itemgetter import networkx as nx +from networkx.utils import py_random_state + +from .recognition import is_arborescence, is_branching -from .recognition import * __all__ = [ - 'branching_weight', 'greedy_branching', - 'maximum_branching', 'minimum_branching', - 'maximum_spanning_arborescence', 'minimum_spanning_arborescence', - 'Edmonds' + "branching_weight", + "greedy_branching", + "maximum_branching", + "minimum_branching", + "maximum_spanning_arborescence", + "minimum_spanning_arborescence", + "Edmonds", ] -KINDS = set(['max', 'min']) +KINDS = {"max", "min"} STYLES = { - 'branching': 'branching', - 'arborescence': 'arborescence', - 'spanning arborescence': 'arborescence' + "branching": "branching", + "arborescence": "arborescence", + "spanning arborescence": "arborescence", } -INF = float('inf') +INF = float("inf") +@py_random_state(1) def random_string(L=15, seed=None): - random.seed(seed) - return ''.join([random.choice(string.ascii_letters) for n in range(L)]) + return "".join([seed.choice(string.ascii_letters) for n in range(L)]) def _min_weight(weight): @@ -70,7 +71,7 @@ def _max_weight(weight): return weight -def branching_weight(G, attr='weight', default=1): +def branching_weight(G, attr="weight", default=1): """ Returns the total weight of a branching. @@ -78,7 +79,8 @@ def branching_weight(G, attr='weight', default=1): return sum(edge[2].get(attr, default) for edge in G.edges(data=True)) -def greedy_branching(G, attr='weight', default=1, kind='max'): +@py_random_state(4) +def greedy_branching(G, attr="weight", default=1, kind="max", seed=None): """ Returns a branching obtained through a greedy algorithm. @@ -101,6 +103,9 @@ def greedy_branching(G, attr='weight', default=1, kind='max'): `default` specifies what value it should take. kind : str The type of optimum to search for: 'min' or 'max' greedy branching. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -111,17 +116,16 @@ def greedy_branching(G, attr='weight', default=1, kind='max'): if kind not in KINDS: raise nx.NetworkXException("Unknown value for `kind`.") - if kind == 'min': + if kind == "min": reverse = False else: reverse = True if attr is None: # Generate a random string the graph probably won't have. - attr = random_string() + attr = random_string(seed=seed) - edges = [(u, v, data.get(attr, default)) - for (u, v, data) in G.edges(data=True)] + edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)] # We sort by weight, but also by nodes to normalize behavior across runs. 
try: @@ -173,14 +177,14 @@ class MultiDiGraph_EdgeKey(nx.MultiDiGraph): """ def __init__(self, incoming_graph_data=None, **attr): - cls = super(MultiDiGraph_EdgeKey, self) + cls = super() cls.__init__(incoming_graph_data=incoming_graph_data, **attr) self._cls = cls self.edge_index = {} def remove_node(self, n): - keys = set([]) + keys = set() for keydict in self.pred[n].values(): keys.update(keydict) for keydict in self.succ[n].values(): @@ -195,10 +199,6 @@ def remove_nodes_from(self, nbunch): for n in nbunch: self.remove_node(n) - def fresh_copy(self): - # Needed to make .copy() work - return MultiDiGraph_EdgeKey() - def add_edge(self, u_for_edge, v_for_edge, key_for_edge, **attr): """ Key is now required. @@ -208,7 +208,7 @@ def add_edge(self, u_for_edge, v_for_edge, key_for_edge, **attr): if key in self.edge_index: uu, vv, _ = self.edge_index[key] if (u != uu) or (v != vv): - raise Exception("Key {0!r} is already in use.".format(key)) + raise Exception(f"Key {key!r} is already in use.") self._cls.add_edge(u, v, key, **attr) self.edge_index[key] = (u, v, self.succ[u][v][key]) @@ -220,8 +220,8 @@ def add_edges_from(self, ebunch_to_add, **attr): def remove_edge_with_key(self, key): try: u, v, _ = self.edge_index[key] - except KeyError: - raise KeyError('Invalid edge key {0!r}'.format(key)) + except KeyError as e: + raise KeyError(f"Invalid edge key {key!r}") from e else: del self.edge_index[key] self._cls.remove_edge(u, v, key) @@ -253,7 +253,7 @@ def first_key(i, vv): return nodes, edges -class Edmonds(object): +class Edmonds: """ Edmonds algorithm for finding optimal branchings and spanning arborescences. @@ -270,9 +270,9 @@ def __init__(self, G, seed=None): # Since we will be creating graphs with new nodes, we need to make # sure that our node names do not conflict with the real node names. - self.template = random_string(seed=seed) + '_{0}' + self.template = random_string(seed=seed) + "_{0}" - def _init(self, attr, default, kind, style): + def _init(self, attr, default, kind, style, preserve_attrs, seed): if kind not in KINDS: raise nx.NetworkXException("Unknown value for `kind`.") @@ -283,22 +283,33 @@ def _init(self, attr, default, kind, style): self.style = style # Determine how we are going to transform the weights. - if kind == 'min': + if kind == "min": self.trans = trans = _min_weight else: self.trans = trans = _max_weight if attr is None: # Generate a random attr the graph probably won't have. - attr = random_string() + attr = random_string(seed=seed) # This is the actual attribute used by the algorithm. self._attr = attr + # This attribute is used to store whether a particular edge is still + # a candidate. We generate a random attr to remove clashes with + # preserved edges + self.candidate_attr = "candidate_" + random_string(seed=seed) + # The object we manipulate at each step is a multidigraph. self.G = G = MultiDiGraph_EdgeKey() for key, (u, v, data) in enumerate(self.G_original.edges(data=True)): d = {attr: trans(data.get(attr, default))} + + if preserve_attrs: + for (d_k, d_v) in data.items(): + if d_k != attr: + d[d_k] = d_v + G.add_edge(u, v, key, **d) self.level = 0 @@ -312,8 +323,8 @@ def _init(self, attr, default, kind, style): # graph B^i. So we will have strictly more B^i than the paper does. self.B = MultiDiGraph_EdgeKey() self.B.edge_index = {} - self.graphs = [] # G^i - self.branchings = [] # B^i + self.graphs = [] # G^i + self.branchings = [] # B^i self.uf = nx.utils.UnionFind() # A list of lists of edge indexes. Each list is a circuit for graph G^i. 
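The helper class above differs from a plain MultiDiGraph in that edge keys are
mandatory and must be unique across the whole graph, so an edge can be looked
up and removed by key alone. A small illustrative sketch (internal helper, not
public API; node and key names are made up):

    from networkx.algorithms.tree.branchings import MultiDiGraph_EdgeKey

    G = MultiDiGraph_EdgeKey()
    G.add_edge("u", "v", "e0", weight=3)  # the key argument is required
    G.add_edge("v", "w", "e1", weight=1)
    G.edge_index["e1"]                    # ('v', 'w', {'weight': 1})
    G.remove_edge_with_key("e0")          # edges are addressed purely by key
    # G.add_edge("x", "y", "e1")          # would raise: Key 'e1' is already in use.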
@@ -326,7 +337,15 @@ def _init(self, attr, default, kind, style):
         # in circuit G^0 (despite their weights being different).
         self.minedge_circuit = []
 
-    def find_optimum(self, attr='weight', default=1, kind='max', style='branching'):
+    def find_optimum(
+        self,
+        attr="weight",
+        default=1,
+        kind="max",
+        style="branching",
+        preserve_attrs=False,
+        seed=None,
+    ):
         """
         Returns a branching from G.
 
@@ -345,6 +364,12 @@ def find_optimum(self, attr='weight', default=1, kind='max', style='branching'):
             branching is also an arborescence, then the branching is an
             optimal spanning arborescence. A given graph G need not have
             an optimal spanning arborescence.
+        preserve_attrs : bool
+            If True, preserve the other edge attributes of the original
+            graph (i.e., those other than the one passed to `attr`).
+        seed : integer, random_state, or None (default)
+            Indicator of random number generation state.
+            See :ref:`Randomness`.
 
         Returns
         -------
@@ -352,13 +377,13 @@ def find_optimum(self, attr='weight', default=1, kind='max', style='branching'):
             The branching.
 
         """
-        self._init(attr, default, kind, style)
+        self._init(attr, default, kind, style, preserve_attrs, seed)
         uf = self.uf
 
         # This enormous while loop could use some refactoring...
 
         G, B = self.G, self.B
-        D = set([])
+        D = set()
         nodes = iter(list(G.nodes()))
         attr = self._attr
         G_pred = G.pred
@@ -387,9 +412,9 @@ def desired_edge(v):
             # meet the break condition (b) from the paper:
             #   (b) every node of G^i is in D^i and E^i is a branching
             # Construction guarantees that it's a branching.
-            assert(len(G) == len(B))
+            assert len(G) == len(B)
             if len(B):
-                assert(is_branching(B))
+                assert is_branching(B)
 
             if self.store:
                 self.graphs.append(G.copy())
@@ -403,16 +428,16 @@ def desired_edge(v):
                     break
             else:
                 if v in D:
-                    #print("v in D", v)
+                    # print("v in D", v)
                     continue
 
             # Put v into bucket D^i.
-            #print("Adding node {0}".format(v))
+            # print(f"Adding node {v}")
             D.add(v)
             B.add_node(v)
 
             edge, weight = desired_edge(v)
-            #print("Max edge is {0!r}".format(edge))
+            # print(f"Max edge is {edge!r}")
             if edge is None:
                 # If there is no edge, continue with a new node at (I1).
                 continue
@@ -435,20 +460,20 @@ def desired_edge(v):
 
             # Conditions for adding the edge.
             # If weight < 0, then it cannot help in finding a maximum branching.
-            if self.style == 'branching' and weight <= 0:
+            if self.style == "branching" and weight <= 0:
                 acceptable = False
             else:
                 acceptable = True
 
-            #print("Edge is acceptable: {0}".format(acceptable))
+            # print(f"Edge is acceptable: {acceptable}")
             if acceptable:
                 dd = {attr: weight}
                 B.add_edge(u, v, edge[2], **dd)
-                G[u][v][edge[2]]['candidate'] = True
+                G[u][v][edge[2]][self.candidate_attr] = True
                 uf.union(u, v)
 
                 if Q_edges is not None:
-                    #print("Edge introduced a simple cycle:")
-                    #print(Q_nodes, Q_edges)
+                    # print("Edge introduced a simple cycle:")
+                    # print(Q_nodes, Q_edges)
 
                     # Move to method
                     # Previous meaning of u and v is no longer important.
@@ -478,7 +503,7 @@ def desired_edge(v):
 
                     # Now we mutate it.
                     new_node = self.template.format(self.level)
 
-                    #print(minweight, minedge, Q_incoming_weight)
+                    # print(minweight, minedge, Q_incoming_weight)
 
                     G.add_node(new_node)
                     new_edges = []
@@ -510,8 +535,8 @@ def desired_edge(v):
 
                     for u, v, key, data in new_edges:
                         G.add_edge(u, v, key, **data)
-                        if 'candidate' in data:
-                            del data['candidate']
+                        if self.candidate_attr in data:
+                            del data[self.candidate_attr]
                             B.add_edge(u, v, key, **data)
                             uf.union(u, v)
 
@@ -520,7 +545,7 @@ def desired_edge(v):
         # (I3) Branch construction.
# print(self.level) - H = self.G_original.fresh_copy() + H = self.G_original.__class__() def is_root(G, u, edgekeys): """ @@ -531,8 +556,8 @@ def is_root(G, u, edgekeys): """ if u not in G: - #print(G.nodes(), u) - raise Exception('{0!r} not in G'.format(u)) + # print(G.nodes(), u) + raise Exception(f"{u!r} not in G") for v in G.pred[u]: for edgekey in G.pred[u][v]: if edgekey in edgekeys: @@ -557,13 +582,12 @@ def is_root(G, u, edgekeys): # at level i+1. circuit = self.circuits[self.level] # print - #print(merged_node, self.level, circuit) - #print("before", edges) + # print(merged_node, self.level, circuit) + # print("before", edges) # Note, we ask if it is a root in the full graph, not the branching. # The branching alone doesn't have all the edges. - isroot, edgekey = is_root(self.graphs[self.level + 1], - merged_node, edges) + isroot, edgekey = is_root(self.graphs[self.level + 1], merged_node, edges) edges.update(circuit) if isroot: minedge = self.minedge_circuit[self.level] @@ -578,8 +602,8 @@ def is_root(G, u, edgekeys): # transitions to some corresponding node at the current level. # We want to remove an edge from the cycle that transitions # into the corresponding node. - #print("edgekey is: ", edgekey) - #print("circuit is: ", circuit) + # print("edgekey is: ", edgekey) + # print("circuit is: ", circuit) # The branching at level i G = self.graphs[self.level] # print(G.edge_index) @@ -590,7 +614,7 @@ def is_root(G, u, edgekeys): break else: raise Exception("Couldn't find edge incoming to merged node.") - #print("not a root. removing {0}".format(edgekey)) + # print(f"not a root. removing {edgekey}") edges.remove(edgekey) @@ -600,39 +624,54 @@ def is_root(G, u, edgekeys): for edgekey in edges: u, v, d = self.graphs[0].edge_index[edgekey] dd = {self.attr: self.trans(d[self.attr])} - # TODO: make this preserve the key. In fact, make this use the - # same edge attributes as the original graph. + + # Optionally, preserve the other edge attributes of the original + # graph + if preserve_attrs: + for (key, value) in d.items(): + if key not in [self.attr, self.candidate_attr]: + dd[key] = value + + # TODO: make this preserve the key. H.add_edge(u, v, **dd) return H -def maximum_branching(G, attr='weight', default=1): +def maximum_branching(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='max', style='branching') + B = ed.find_optimum( + attr, default, kind="max", style="branching", preserve_attrs=preserve_attrs + ) return B -def minimum_branching(G, attr='weight', default=1): +def minimum_branching(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='min', style='branching') + B = ed.find_optimum( + attr, default, kind="min", style="branching", preserve_attrs=preserve_attrs + ) return B -def maximum_spanning_arborescence(G, attr='weight', default=1): +def maximum_spanning_arborescence(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='max', style='arborescence') + B = ed.find_optimum( + attr, default, kind="max", style="arborescence", preserve_attrs=preserve_attrs + ) if not is_arborescence(B): - msg = 'No maximum spanning arborescence in G.' + msg = "No maximum spanning arborescence in G." 
raise nx.exception.NetworkXException(msg) return B -def minimum_spanning_arborescence(G, attr='weight', default=1): +def minimum_spanning_arborescence(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='min', style='arborescence') + B = ed.find_optimum( + attr, default, kind="min", style="arborescence", preserve_attrs=preserve_attrs + ) if not is_arborescence(B): - msg = 'No minimum spanning arborescence in G.' + msg = "No minimum spanning arborescence in G." raise nx.exception.NetworkXException(msg) return B @@ -649,6 +688,9 @@ def minimum_spanning_arborescence(G, attr='weight', default=1): default : float The value of the edge attribute used if an edge does not have the attribute `attr`. +preserve_attrs : bool + If True, preserve the other attributes of the original graph (that are not + passed to `attr`) Returns ------- @@ -656,22 +698,29 @@ def minimum_spanning_arborescence(G, attr='weight', default=1): A {kind} {style}. """ -docstring_arborescence = docstring_branching + """ +docstring_arborescence = ( + docstring_branching + + """ Raises ------ NetworkXException If the graph does not contain a {kind} {style}. """ +) -maximum_branching.__doc__ = \ - docstring_branching.format(kind='maximum', style='branching') +maximum_branching.__doc__ = docstring_branching.format( + kind="maximum", style="branching" +) -minimum_branching.__doc__ = \ - docstring_branching.format(kind='minimum', style='branching') +minimum_branching.__doc__ = docstring_branching.format( + kind="minimum", style="branching" +) -maximum_spanning_arborescence.__doc__ = \ - docstring_arborescence.format(kind='maximum', style='spanning arborescence') +maximum_spanning_arborescence.__doc__ = docstring_arborescence.format( + kind="maximum", style="spanning arborescence" +) -minimum_spanning_arborescence.__doc__ = \ - docstring_arborescence.format(kind='minimum', style='spanning arborescence') +minimum_spanning_arborescence.__doc__ = docstring_arborescence.format( + kind="minimum", style="spanning arborescence" +) diff --git a/networkx/algorithms/tree/coding.py b/networkx/algorithms/tree/coding.py index 5122171..0147e7e 100644 --- a/networkx/algorithms/tree/coding.py +++ b/networkx/algorithms/tree/coding.py @@ -1,13 +1,3 @@ -# -*- encoding: utf-8 -*- -# -# coding.py - functions for encoding and decoding trees as sequences -# -# Copyright 2015-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for encoding and decoding trees. Since a tree is a highly restricted form of graph, it can be represented @@ -24,8 +14,13 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['from_nested_tuple', 'from_prufer_sequence', 'NotATree', - 'to_nested_tuple', 'to_prufer_sequence'] +__all__ = [ + "from_nested_tuple", + "from_prufer_sequence", + "NotATree", + "to_nested_tuple", + "to_prufer_sequence", +] class NotATree(nx.NetworkXException): @@ -36,7 +31,7 @@ class NotATree(nx.NetworkXException): """ -@not_implemented_for('directed') +@not_implemented_for("directed") def to_nested_tuple(T, root, canonical_form=False): """Returns a nested tuple representation of the given tree. @@ -125,9 +120,9 @@ def _make_tuple(T, root, _parent): # Do some sanity checks on the input. 
     if not nx.is_tree(T):
-        raise nx.NotATree('provided graph is not a tree')
+        raise nx.NotATree("provided graph is not a tree")
     if root not in T:
-        raise nx.NodeNotFound('Graph {} contains no node {}'.format(T, root))
+        raise nx.NodeNotFound(f"Graph {T} contains no node {root}")
 
     return _make_tuple(T, root, None)
 
@@ -215,7 +210,7 @@ def _make_tree(sequence):
     return T
 
-@not_implemented_for('directed')
+@not_implemented_for("directed")
 def to_prufer_sequence(T):
     r"""Returns the Prüfer sequence of the given tree.
 
@@ -258,7 +253,7 @@ def to_prufer_sequence(T):
     relabel the nodes of your tree to the appropriate format.
 
     This implementation is from [1]_ and has a running time of
-    $O(n \log n)$.
+    $O(n)$.
 
     See also
     --------
@@ -291,19 +286,19 @@ def to_prufer_sequence(T):
     # Perform some sanity checks on the input.
     n = len(T)
     if n < 2:
-        msg = 'Prüfer sequence undefined for trees with fewer than two nodes'
+        msg = "Prüfer sequence undefined for trees with fewer than two nodes"
         raise nx.NetworkXPointlessConcept(msg)
     if not nx.is_tree(T):
-        raise nx.NotATree('provided graph is not a tree')
+        raise nx.NotATree("provided graph is not a tree")
     if set(T) != set(range(n)):
-        raise KeyError('tree must have node labels {0, ..., n - 1}')
+        raise KeyError("tree must have node labels {0, ..., n - 1}")
 
     degree = dict(T.degree())
 
     def parents(u):
         return next(v for v in T[u] if degree[v] > 1)
 
-    index = u = min(k for k in range(n) if degree[k] == 1)
+    index = u = next(k for k in range(n) if degree[k] == 1)
     result = []
     for i in range(n - 2):
         v = parents(u)
@@ -312,7 +307,7 @@ def parents(u):
         if v < index and degree[v] == 1:
             u = v
         else:
-            index = u = min(k for k in range(index + 1, n) if degree[k] == 1)
+            index = u = next(k for k in range(index + 1, n) if degree[k] == 1)
     return result
 
@@ -347,7 +342,7 @@ def from_prufer_sequence(sequence):
     relabel the nodes of your tree to the appropriate format.
 
     This implementation is from [1]_ and has a running time of
-    $O(n \log n)$.
+    $O(n)$.
 
     References
     ----------
@@ -387,7 +382,7 @@ def from_prufer_sequence(sequence):
     # tree. After the loop, there should be exactly two nodes that are
    # not in this set.
     not_orphaned = set()
-    index = u = min(k for k in range(n) if degree[k] == 1)
+    index = u = next(k for k in range(n) if degree[k] == 1)
     for v in sequence:
         T.add_edge(u, v)
         not_orphaned.add(u)
@@ -395,7 +390,7 @@ def from_prufer_sequence(sequence):
         if v < index and degree[v] == 1:
             u = v
         else:
-            index = u = min(k for k in range(index + 1, n) if degree[k] == 1)
+            index = u = next(k for k in range(index + 1, n) if degree[k] == 1)
 
     # At this point, there must be exactly two orphaned nodes; join them.
     orphans = set(T) - not_orphaned
     u, v = orphans
diff --git a/networkx/algorithms/tree/decomposition.py b/networkx/algorithms/tree/decomposition.py
new file mode 100644
index 0000000..b795307
--- /dev/null
+++ b/networkx/algorithms/tree/decomposition.py
@@ -0,0 +1,86 @@
+r"""Function for computing a junction tree of a graph."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+from networkx.algorithms import moral, complete_to_chordal_graph, chordal_graph_cliques
+from itertools import combinations
+
+__all__ = ["junction_tree"]
+
+
+@not_implemented_for("multigraph")
+def junction_tree(G):
+    r"""Returns a junction tree of a given graph.
+
+    A junction tree (or clique tree) is constructed from an (un)directed graph G.
+    The tree is constructed based on a moralized and triangulated version of G.
+    The tree's nodes consist of maximal cliques and sepsets of the revised
+    graph. The sepset of two cliques is the intersection of the nodes of
+    these cliques, e.g. the sepset of (A,B,C) and (A,C,E,F) is (A,C). These
+    nodes are often called "variables" in the literature. The tree is
+    bipartite, with each sepset connected to its two cliques.
+
+    Junction trees are not unique, as the order of clique consideration
+    determines which sepsets are included.
+
+    The junction tree algorithm consists of five steps [1]_:
+
+    1. Moralize the graph
+    2. Triangulate the graph
+    3. Find maximal cliques
+    4. Build the tree from cliques, connecting cliques with shared
+       nodes and setting the edge weight to the number of shared variables
+    5. Find a maximum spanning tree
+
+
+    Parameters
+    ----------
+    G : networkx.Graph
+        Directed or undirected graph.
+
+    Returns
+    -------
+    junction_tree : networkx.Graph
+        The corresponding junction tree of `G`.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        Raised if `G` is an instance of `MultiGraph` or `MultiDiGraph`.
+
+    References
+    ----------
+    .. [1] Junction tree algorithm:
+       https://en.wikipedia.org/wiki/Junction_tree_algorithm
+
+    .. [2] Finn V. Jensen and Frank Jensen. 1994. Optimal
+       junction trees. In Proceedings of the Tenth international
+       conference on Uncertainty in artificial intelligence (UAI’94).
+       Morgan Kaufmann Publishers Inc., San Francisco, CA, USA, 360–366.
+    """
+
+    clique_graph = nx.Graph()
+
+    if G.is_directed():
+        G = moral.moral_graph(G)
+    chordal_graph, _ = complete_to_chordal_graph(G)
+
+    cliques = [tuple(sorted(i)) for i in chordal_graph_cliques(chordal_graph)]
+    clique_graph.add_nodes_from(cliques, type="clique")
+
+    for edge in combinations(cliques, 2):
+        set_edge_0 = set(edge[0])
+        set_edge_1 = set(edge[1])
+        if not set_edge_0.isdisjoint(set_edge_1):
+            sepset = tuple(sorted(set_edge_0.intersection(set_edge_1)))
+            clique_graph.add_edge(edge[0], edge[1], weight=len(sepset), sepset=sepset)
+
+    junction_tree = nx.maximum_spanning_tree(clique_graph)
+
+    for edge in list(junction_tree.edges(data=True)):
+        junction_tree.add_node(edge[2]["sepset"], type="sepset")
+        junction_tree.add_edge(edge[0], edge[2]["sepset"])
+        junction_tree.add_edge(edge[1], edge[2]["sepset"])
+        junction_tree.remove_edge(edge[0], edge[1])
+
+    return junction_tree
diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py
index 79e81f8..bad8c13 100644
--- a/networkx/algorithms/tree/mst.py
+++ b/networkx/algorithms/tree/mst.py
@@ -1,11 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2017 NetworkX Developers
-# Aric Hagberg
-# Dan Schult
-# Pieter Swart
-# Loïc Séguin-C.
-# All rights reserved.
-# BSD license.
 """
 Algorithms for calculating min/max spanning trees/forests.
 
@@ -19,14 +11,17 @@
 from networkx.utils import UnionFind, not_implemented_for
 
 __all__ = [
-    'minimum_spanning_edges', 'maximum_spanning_edges',
-    'minimum_spanning_tree', 'maximum_spanning_tree',
+    "minimum_spanning_edges",
+    "maximum_spanning_edges",
+    "minimum_spanning_tree",
+    "maximum_spanning_tree",
 ]
 
 
-@not_implemented_for('multigraph')
-def boruvka_mst_edges(G, minimum=True, weight='weight',
-                      keys=False, data=True, ignore_nan=False):
+@not_implemented_for("multigraph")
+def boruvka_mst_edges(
+    G, minimum=True, weight="weight", keys=False, data=True, ignore_nan=False
+):
     """Iterate over edges of a Borůvka's algorithm min/max spanning tree.
Parameters @@ -68,15 +63,15 @@ def best_edge(component): """ sign = 1 if minimum else -1 - minwt = float('inf') + minwt = float("inf") boundary = None for e in nx.edge_boundary(G, component, data=True): wt = e[-1].get(weight, 1) * sign if isnan(wt): if ignore_nan: continue - msg = "NaN found as an edge weight. Edge %s" - raise ValueError(msg % (e,)) + msg = f"NaN found as an edge weight. Edge {e}" + raise ValueError(msg) if wt < minwt: minwt = wt boundary = e @@ -120,8 +115,9 @@ def best_edge(component): forest.union(u, v) -def kruskal_mst_edges(G, minimum, weight='weight', - keys=True, data=True, ignore_nan=False): +def kruskal_mst_edges( + G, minimum, weight="weight", keys=True, data=True, ignore_nan=False +): """Iterate over edges of a Kruskal's algorithm min/max spanning tree. Parameters @@ -160,9 +156,10 @@ def filter_nan_edges(edges=edges, weight=weight): if isnan(wt): if ignore_nan: continue - msg = "NaN found as an edge weight. Edge %s" - raise ValueError(msg % ((u, v, k, d),)) + msg = f"NaN found as an edge weight. Edge {(u, v, k, d)}" + raise ValueError(msg) yield wt, u, v, k, d + else: edges = G.edges(data=True) @@ -173,9 +170,10 @@ def filter_nan_edges(edges=edges, weight=weight): if isnan(wt): if ignore_nan: continue - msg = "NaN found as an edge weight. Edge %s" - raise ValueError(msg % ((u, v, d),)) + msg = f"NaN found as an edge weight. Edge {(u, v, d)}" + raise ValueError(msg) yield wt, u, v, d + edges = sorted(filter_nan_edges(), key=itemgetter(0)) # Multigraphs need to handle edge keys in addition to edge data. if G.is_multigraph(): @@ -202,8 +200,7 @@ def filter_nan_edges(edges=edges, weight=weight): subtrees.union(u, v) -def prim_mst_edges(G, minimum, weight='weight', - keys=True, data=True, ignore_nan=False): +def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan=False): """Iterate over edges of Prim's algorithm min/max spanning tree. Parameters @@ -235,15 +232,15 @@ def prim_mst_edges(G, minimum, weight='weight', push = heappush pop = heappop - nodes = list(G) + nodes = set(G) c = count() sign = 1 if minimum else -1 while nodes: - u = nodes.pop(0) + u = nodes.pop() frontier = [] - visited = [u] + visited = {u} if is_multigraph: for v, keydict in G.adj[u].items(): for k, d in keydict.items(): @@ -251,8 +248,8 @@ def prim_mst_edges(G, minimum, weight='weight', if isnan(wt): if ignore_nan: continue - msg = "NaN found as an edge weight. Edge %s" - raise ValueError(msg % ((u, v, k, d),)) + msg = f"NaN found as an edge weight. Edge {(u, v, k, d)}" + raise ValueError(msg) push(frontier, (wt, next(c), u, v, k, d)) else: for v, d in G.adj[u].items(): @@ -260,15 +257,15 @@ def prim_mst_edges(G, minimum, weight='weight', if isnan(wt): if ignore_nan: continue - msg = "NaN found as an edge weight. Edge %s" - raise ValueError(msg % ((u, v, d),)) + msg = f"NaN found as an edge weight. Edge {(u, v, d)}" + raise ValueError(msg) push(frontier, (wt, next(c), u, v, d)) while frontier: if is_multigraph: W, _, u, v, k, d = pop(frontier) else: W, _, u, v, d = pop(frontier) - if v in visited: + if v in visited or v not in nodes: continue # Multigraphs need to handle edge keys in addition to edge data. 
if is_multigraph and keys: @@ -282,8 +279,8 @@ def prim_mst_edges(G, minimum, weight='weight', else: yield u, v # update frontier - visited.append(v) - nodes.remove(v) + visited.add(v) + nodes.discard(v) if is_multigraph: for w, keydict in G.adj[v].items(): if w in visited: @@ -300,16 +297,17 @@ def prim_mst_edges(G, minimum, weight='weight', ALGORITHMS = { - 'boruvka': boruvka_mst_edges, - u'borůvka': boruvka_mst_edges, - 'kruskal': kruskal_mst_edges, - 'prim': prim_mst_edges + "boruvka": boruvka_mst_edges, + "borůvka": boruvka_mst_edges, + "kruskal": kruskal_mst_edges, + "prim": prim_mst_edges, } -@not_implemented_for('directed') -def minimum_spanning_edges(G, algorithm='kruskal', weight='weight', - keys=True, data=True, ignore_nan=False): +@not_implemented_for("directed") +def minimum_spanning_edges( + G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False +): """Generate edges in a minimum spanning forest of an undirected weighted graph. @@ -363,19 +361,19 @@ def minimum_spanning_edges(G, algorithm='kruskal', weight='weight', >>> G = nx.cycle_graph(4) >>> G.add_edge(0, 3, weight=2) - >>> mst = tree.minimum_spanning_edges(G, algorithm='kruskal', data=False) + >>> mst = tree.minimum_spanning_edges(G, algorithm="kruskal", data=False) >>> edgelist = list(mst) - >>> sorted(edgelist) - [(0, 1), (1, 2), (2, 3)] + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [1, 2], [2, 3]] Find minimum spanning edges by Prim's algorithm >>> G = nx.cycle_graph(4) >>> G.add_edge(0, 3, weight=2) - >>> mst = tree.minimum_spanning_edges(G, algorithm='prim', data=False) + >>> mst = tree.minimum_spanning_edges(G, algorithm="prim", data=False) >>> edgelist = list(mst) - >>> sorted(edgelist) - [(0, 1), (1, 2), (2, 3)] + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [1, 2], [2, 3]] Notes ----- @@ -391,17 +389,19 @@ def minimum_spanning_edges(G, algorithm='kruskal', weight='weight', """ try: algo = ALGORITHMS[algorithm] - except KeyError: - msg = '{} is not a valid choice for an algorithm.'.format(algorithm) - raise ValueError(msg) + except KeyError as e: + msg = f"{algorithm} is not a valid choice for an algorithm." + raise ValueError(msg) from e - return algo(G, minimum=True, weight=weight, keys=keys, data=data, - ignore_nan=ignore_nan) + return algo( + G, minimum=True, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan + ) -@not_implemented_for('directed') -def maximum_spanning_edges(G, algorithm='kruskal', weight='weight', - keys=True, data=True, ignore_nan=False): +@not_implemented_for("directed") +def maximum_spanning_edges( + G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False +): """Generate edges in a maximum spanning forest of an undirected weighted graph. 
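The doctest change above is deliberate: with `nodes` and `visited` now sets,
the Prim implementation no longer guarantees which endpoint of an edge is
reported first, so the examples normalize each edge before sorting. Restated
as a runnable check (taken directly from the updated doctest):

    import networkx as nx
    from networkx.algorithms import tree

    G = nx.cycle_graph(4)
    G.add_edge(0, 3, weight=2)
    mst = tree.minimum_spanning_edges(G, algorithm="prim", data=False)
    # Each edge may come out as (u, v) or (v, u); normalize before comparing.
    assert sorted(sorted(e) for e in mst) == [[0, 1], [1, 2], [2, 3]]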
@@ -455,19 +455,19 @@ def maximum_spanning_edges(G, algorithm='kruskal', weight='weight', >>> G = nx.cycle_graph(4) >>> G.add_edge(0, 3, weight=2) - >>> mst = tree.maximum_spanning_edges(G, algorithm='kruskal', data=False) + >>> mst = tree.maximum_spanning_edges(G, algorithm="kruskal", data=False) >>> edgelist = list(mst) - >>> sorted(edgelist) - [(0, 1), (0, 3), (1, 2)] + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [0, 3], [1, 2]] Find maximum spanning edges by Prim's algorithm >>> G = nx.cycle_graph(4) - >>> G.add_edge(0, 3, weight=2) # assign weight 2 to edge 0-3 - >>> mst = tree.maximum_spanning_edges(G, algorithm='prim', data=False) + >>> G.add_edge(0, 3, weight=2) # assign weight 2 to edge 0-3 + >>> mst = tree.maximum_spanning_edges(G, algorithm="prim", data=False) >>> edgelist = list(mst) - >>> sorted(edgelist) - [(0, 1), (0, 3), (3, 2)] + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [0, 3], [2, 3]] Notes ----- @@ -482,16 +482,16 @@ def maximum_spanning_edges(G, algorithm='kruskal', weight='weight', """ try: algo = ALGORITHMS[algorithm] - except KeyError: - msg = '{} is not a valid choice for an algorithm.'.format(algorithm) - raise ValueError(msg) + except KeyError as e: + msg = f"{algorithm} is not a valid choice for an algorithm." + raise ValueError(msg) from e - return algo(G, minimum=False, weight=weight, keys=keys, data=data, - ignore_nan=ignore_nan) + return algo( + G, minimum=False, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan + ) -def minimum_spanning_tree(G, weight='weight', algorithm='kruskal', - ignore_nan=False): +def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a minimum spanning tree or forest on an undirected graph `G`. Parameters @@ -540,17 +540,17 @@ def minimum_spanning_tree(G, weight='weight', algorithm='kruskal', Isolated nodes with self-loops are in the tree as edgeless isolated nodes. """ - edges = minimum_spanning_edges(G, algorithm, weight, keys=True, - data=True, ignore_nan=ignore_nan) - T = G.fresh_copy() # Same graph class as G + edges = minimum_spanning_edges( + G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan + ) + T = G.__class__() # Same graph class as G T.graph.update(G.graph) T.add_nodes_from(G.nodes.items()) T.add_edges_from(edges) return T -def maximum_spanning_tree(G, weight='weight', algorithm='kruskal', - ignore_nan=False): +def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a maximum spanning tree or forest on an undirected graph `G`. Parameters @@ -563,7 +563,7 @@ def maximum_spanning_tree(G, weight='weight', algorithm='kruskal', Data key to use for edge weights. algorithm : string - The algorithm to use when finding a minimum spanning tree. Valid + The algorithm to use when finding a maximum spanning tree. Valid choices are 'kruskal', 'prim', or 'boruvka'. The default is 'kruskal'. @@ -575,7 +575,7 @@ def maximum_spanning_tree(G, weight='weight', algorithm='kruskal', Returns ------- G : NetworkX Graph - A minimum spanning tree or forest. + A maximum spanning tree or forest. Examples @@ -601,10 +601,11 @@ def maximum_spanning_tree(G, weight='weight', algorithm='kruskal', Isolated nodes with self-loops are in the tree as edgeless isolated nodes. 
""" - edges = maximum_spanning_edges(G, algorithm, weight, keys=True, - data=True, ignore_nan=ignore_nan) + edges = maximum_spanning_edges( + G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan + ) edges = list(edges) - T = G.fresh_copy() # Same graph class as G + T = G.__class__() # Same graph class as G T.graph.update(G.graph) T.add_nodes_from(G.nodes.items()) T.add_edges_from(edges) diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py index a65f7ff..1a017a5 100644 --- a/networkx/algorithms/tree/operations.py +++ b/networkx/algorithms/tree/operations.py @@ -1,19 +1,11 @@ -# operations.py - binary operations on trees -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Operations on trees.""" from functools import partial from itertools import chain import networkx as nx -from networkx.utils import accumulate +from itertools import accumulate -__all__ = ['join'] +__all__ = ["join"] def join(rooted_trees, label_attribute=None): @@ -74,22 +66,27 @@ def join(rooted_trees, label_attribute=None): # Relabel the nodes so that their union is the integers starting at 1. if label_attribute is None: - label_attribute = '_old' - relabel = partial(nx.convert_node_labels_to_integers, - label_attribute=label_attribute) + label_attribute = "_old" + relabel = partial( + nx.convert_node_labels_to_integers, label_attribute=label_attribute + ) lengths = (len(tree) for tree in trees[:-1]) first_labels = chain([0], accumulate(lengths)) - trees = [relabel(tree, first_label=first_label + 1) - for tree, first_label in zip(trees, first_labels)] + trees = [ + relabel(tree, first_label=first_label + 1) + for tree, first_label in zip(trees, first_labels) + ] # Get the relabeled roots. - roots = [next(v for v, d in tree.nodes(data=True) if d.get('_old') == root) - for tree, root in zip(trees, roots)] + roots = [ + next(v for v, d in tree.nodes(data=True) if d.get("_old") == root) + for tree, root in zip(trees, roots) + ] # Remove the old node labels. for tree in trees: for v in tree: - tree.nodes[v].pop('_old') + tree.nodes[v].pop("_old") # Add all sets of nodes and edges, with data. nodes = (tree.nodes(data=True) for tree in trees) diff --git a/networkx/algorithms/tree/recognition.py b/networkx/algorithms/tree/recognition.py index e9d9dc5..5fbff54 100644 --- a/networkx/algorithms/tree/recognition.py +++ b/networkx/algorithms/tree/recognition.py @@ -1,4 +1,3 @@ -#-*- coding: utf-8 -*- """ Recognition Tests ================= @@ -76,16 +75,11 @@ import networkx as nx -__author__ = """\n""".join([ - 'Ferdinando Papale ', - 'chebee7i ', -]) +__all__ = ["is_arborescence", "is_branching", "is_forest", "is_tree"] -__all__ = ['is_arborescence', 'is_branching', 'is_forest', 'is_tree'] - -@nx.utils.not_implemented_for('undirected') +@nx.utils.not_implemented_for("undirected") def is_arborescence(G): """ Returns True if `G` is an arborescence. @@ -114,7 +108,7 @@ def is_arborescence(G): return is_tree(G) and max(d for n, d in G.in_degree()) <= 1 -@nx.utils.not_implemented_for('undirected') +@nx.utils.not_implemented_for("undirected") def is_branching(G): """ Returns True if `G` is a branching. 
@@ -174,14 +168,14 @@ def is_forest(G): """ if len(G) == 0: - raise nx.exception.NetworkXPointlessConcept('G has no nodes.') + raise nx.exception.NetworkXPointlessConcept("G has no nodes.") if G.is_directed(): - components = nx.weakly_connected_component_subgraphs + components = (G.subgraph(c) for c in nx.weakly_connected_components(G)) else: - components = nx.connected_component_subgraphs + components = (G.subgraph(c) for c in nx.connected_components(G)) - return all(len(c) - 1 == c.number_of_edges() for c in components(G)) + return all(len(c) - 1 == c.number_of_edges() for c in components) def is_tree(G): @@ -215,7 +209,7 @@ def is_tree(G): """ if len(G) == 0: - raise nx.exception.NetworkXPointlessConcept('G has no nodes.') + raise nx.exception.NetworkXPointlessConcept("G has no nodes.") if G.is_directed(): is_connected = nx.is_weakly_connected diff --git a/networkx/algorithms/tree/tests/__init__.py b/networkx/algorithms/tree/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/algorithms/tree/tests/test_branchings.py b/networkx/algorithms/tree/tests/test_branchings.py index 3121566..7f5e9cc 100644 --- a/networkx/algorithms/tree/tests/test_branchings.py +++ b/networkx/algorithms/tree/tests/test_branchings.py @@ -1,16 +1,12 @@ -from nose import SkipTest -from nose.tools import * +import pytest + +np = pytest.importorskip("numpy") import networkx as nx -try: - import numpy as np -except: - raise SkipTest('NumPy not available.') from networkx.algorithms.tree import branchings from networkx.algorithms.tree import recognition -from networkx.testing import * # # Explicitly discussed examples from Edmonds paper. @@ -18,6 +14,7 @@ # Used in Figures A-F. # +# fmt: off G_array = np.array([ # 0 1 2 3 4 5 6 7 8 [0, 0, 12, 0, 12, 0, 0, 0, 0], # 0 @@ -30,26 +27,20 @@ [0, 0, 0, 19, 0, 0, 15, 0, 0], # 7 [0, 0, 0, 0, 0, 0, 0, 18, 0], # 8 ], dtype=int) - -# We convert to MultiDiGraph after using from_numpy_matrix -# https://github.com/networkx/networkx/pull/1305 +# fmt: on def G1(): - G = nx.DiGraph() - G = nx.from_numpy_matrix(G_array, create_using=G) - G = nx.MultiDiGraph(G) + G = nx.from_numpy_array(G_array, create_using=nx.MultiDiGraph) return G def G2(): # Now we shift all the weights by -10. # Should not affect optimal arborescence, but does affect optimal branching. - G = nx.DiGraph() Garr = G_array.copy() Garr[np.nonzero(Garr)] -= 10 - G = nx.from_numpy_matrix(Garr, create_using=G) - G = nx.MultiDiGraph(G) + G = nx.from_numpy_array(Garr, create_using=nx.MultiDiGraph) return G @@ -57,8 +48,14 @@ def G2(): # also an optimal spanning arborescence. # optimal_arborescence_1 = [ - (0, 2, 12), (2, 1, 17), (2, 3, 21), (1, 5, 13), - (3, 4, 17), (3, 6, 18), (6, 8, 15), (8, 7, 18), + (0, 2, 12), + (2, 1, 17), + (2, 3, 21), + (1, 5, 13), + (3, 4, 17), + (3, 6, 18), + (6, 8, 15), + (8, 7, 18), ] # For G2, the optimal branching of G1 (with shifted weights) is no longer @@ -72,27 +69,55 @@ def G2(): # # These are maximal branchings or arborescences. 
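Two migrations recur in the hunks above: `nx.from_numpy_matrix` plus a manual `MultiDiGraph` conversion collapses into a single `nx.from_numpy_array` call (whose `create_using` argument now takes the graph class itself), and the removed `connected_component_subgraphs` helper gives way to a generator of subgraph views. A short sketch, assuming networkx >= 2.5 with numpy installed:

import networkx as nx
import numpy as np

# Adjacency-matrix construction, directly into the desired class.
A = np.array([[0, 12], [0, 0]])
G = nx.from_numpy_array(A, create_using=nx.MultiDiGraph)

# The documented replacement for connected_component_subgraphs(H):
# subgraph views built over the component node sets.
H = nx.Graph([(0, 1), (2, 3), (3, 4)])
components = (H.subgraph(c) for c in nx.connected_components(H))
assert all(len(c) - 1 == c.number_of_edges() for c in components)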
optimal_branching_2a = [ - (5, 6, 4), (6, 2, 11), (6, 8, 5), (8, 7, 8), - (2, 1, 7), (2, 3, 11), (3, 4, 7), + (5, 6, 4), + (6, 2, 11), + (6, 8, 5), + (8, 7, 8), + (2, 1, 7), + (2, 3, 11), + (3, 4, 7), ] optimal_branching_2b = [ - (8, 7, 8), (7, 3, 9), (3, 4, 7), (3, 6, 8), - (6, 2, 11), (2, 1, 7), (1, 5, 3), + (8, 7, 8), + (7, 3, 9), + (3, 4, 7), + (3, 6, 8), + (6, 2, 11), + (2, 1, 7), + (1, 5, 3), ] optimal_arborescence_2 = [ - (0, 2, 2), (2, 1, 7), (2, 3, 11), (1, 5, 3), - (3, 4, 7), (3, 6, 8), (6, 8, 5), (8, 7, 8), + (0, 2, 2), + (2, 1, 7), + (2, 3, 11), + (1, 5, 3), + (3, 4, 7), + (3, 6, 8), + (6, 8, 5), + (8, 7, 8), ] # Two suboptimal maximal branchings on G1 obtained from a greedy algorithm. # 1a matches what is shown in Figure G in Edmonds's paper. greedy_subopt_branching_1a = [ - (5, 6, 14), (6, 2, 21), (6, 8, 15), (8, 7, 18), - (2, 1, 17), (2, 3, 21), (3, 0, 5), (3, 4, 17), + (5, 6, 14), + (6, 2, 21), + (6, 8, 15), + (8, 7, 18), + (2, 1, 17), + (2, 3, 21), + (3, 0, 5), + (3, 4, 17), ] greedy_subopt_branching_1b = [ - (8, 7, 18), (7, 6, 15), (6, 2, 21), (2, 1, 17), - (2, 3, 21), (1, 5, 13), (3, 0, 5), (3, 4, 17), + (8, 7, 18), + (7, 6, 15), + (6, 2, 21), + (2, 1, 17), + (2, 3, 21), + (1, 5, 13), + (3, 0, 5), + (3, 4, 17), ] @@ -103,16 +128,16 @@ def build_branching(edges): return G -def sorted_edges(G, attr='weight', default=1): +def sorted_edges(G, attr="weight", default=1): edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)] edges = sorted(edges, key=lambda x: (x[2], x[1], x[0])) return edges -def assert_equal_branchings(G1, G2, attr='weight', default=1): +def assert_equal_branchings(G1, G2, attr="weight", default=1): edges1 = list(G1.edges(data=True)) edges2 = list(G2.edges(data=True)) - assert_equal(len(edges1), len(edges2)) + assert len(edges1) == len(edges2) # Grab the weights only. 
e1 = sorted_edges(G1, attr, default) @@ -124,46 +149,47 @@ def assert_equal_branchings(G1, G2, attr='weight', default=1): print for a, b in zip(e1, e2): - assert_equal(a[:2], b[:2]) + assert a[:2] == b[:2] np.testing.assert_almost_equal(a[2], b[2]) ################ + def test_optimal_branching1(): G = build_branching(optimal_arborescence_1) - assert_true(recognition.is_arborescence(G), True) - assert_equal(branchings.branching_weight(G), 131) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 131 def test_optimal_branching2a(): G = build_branching(optimal_branching_2a) - assert_true(recognition.is_arborescence(G), True) - assert_equal(branchings.branching_weight(G), 53) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 53 def test_optimal_branching2b(): G = build_branching(optimal_branching_2b) - assert_true(recognition.is_arborescence(G), True) - assert_equal(branchings.branching_weight(G), 53) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 53 def test_optimal_arborescence2(): G = build_branching(optimal_arborescence_2) - assert_true(recognition.is_arborescence(G), True) - assert_equal(branchings.branching_weight(G), 51) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 51 def test_greedy_suboptimal_branching1a(): G = build_branching(greedy_subopt_branching_1a) - assert_true(recognition.is_arborescence(G), True) - assert_equal(branchings.branching_weight(G), 128) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 128 def test_greedy_suboptimal_branching1b(): G = build_branching(greedy_subopt_branching_1b) - assert_true(recognition.is_arborescence(G), True) - assert_equal(branchings.branching_weight(G), 127) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 127 def test_greedy_max1(): @@ -181,13 +207,19 @@ def test_greedy_max2(): # Different default weight. # G = G1() - del G[1][0][0]['weight'] + del G[1][0][0]["weight"] B = branchings.greedy_branching(G, default=6) # Chosen so that edge (3,0,5) is not selected and (1,0,6) is instead. edges = [ - (1, 0, 6), (1, 5, 13), (7, 6, 15), (2, 1, 17), - (3, 4, 17), (8, 7, 18), (2, 3, 21), (6, 2, 21), + (1, 0, 6), + (1, 5, 13), + (7, 6, 15), + (2, 1, 17), + (3, 4, 17), + (8, 7, 18), + (2, 3, 21), + (6, 2, 21), ] B_ = build_branching(edges) assert_equal_branchings(B, B_) @@ -201,8 +233,14 @@ def test_greedy_max3(): # This is mostly arbitrary...the output was generated by running the algo. edges = [ - (2, 1, 1), (3, 0, 1), (3, 4, 1), (5, 8, 1), - (6, 2, 1), (7, 3, 1), (7, 6, 1), (8, 7, 1), + (2, 1, 1), + (3, 0, 1), + (3, 4, 1), + (5, 8, 1), + (6, 2, 1), + (7, 3, 1), + (7, 6, 1), + (8, 7, 1), ] B_ = build_branching(edges) assert_equal_branchings(B, B_, default=1) @@ -210,11 +248,17 @@ def test_greedy_max3(): def test_greedy_min(): G = G1() - B = branchings.greedy_branching(G, kind='min') + B = branchings.greedy_branching(G, kind="min") edges = [ - (1, 0, 4), (0, 2, 12), (0, 4, 12), (2, 5, 12), - (4, 7, 12), (5, 8, 12), (5, 6, 14), (7, 3, 19) + (1, 0, 4), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (7, 3, 19), ] B_ = build_branching(edges) assert_equal_branchings(B, B_) @@ -254,8 +298,14 @@ def test_edmonds2_minarbor(): # This was obtained from algorithm. Need to verify it independently. 
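The assertion rewrites in these tests follow a mechanical nose-to-pytest mapping, with one vestige worth flagging: `assert_true(x, True)` became `assert x, True`, so the trailing `True` now survives only as an unused assertion message. A sketch of the fully idiomatic form, reusing this module's `build_branching` helper and edge-list constants:

# nose helper                          -> pytest idiom
# assert_true(x) / assert_equal(a, b)  -> assert x / assert a == b
# assert_true(x, True)                 -> assert x   (message not needed)
def test_optimal_branching1_idiomatic():
    G = build_branching(optimal_arborescence_1)
    assert recognition.is_arborescence(G)
    assert branchings.branching_weight(G) == 131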
# Branch weight is: 96 edges = [ - (3, 0, 5), (0, 2, 12), (0, 4, 12), (2, 5, 12), - (4, 7, 12), (5, 8, 12), (5, 6, 14), (2, 1, 17) + (3, 0, 5), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (2, 1, 17), ] x_ = build_branching(edges) assert_equal_branchings(x, x_) @@ -277,6 +327,7 @@ def test_edmonds3_minbranch2(): x_ = build_branching(edges) assert_equal_branchings(x, x_) + # Need more tests @@ -284,34 +335,41 @@ def test_mst(): # Make sure we get the same results for undirected graphs. # Example from: https://en.wikipedia.org/wiki/Kruskal's_algorithm G = nx.Graph() - edgelist = [(0, 3, [('weight', 5)]), - (0, 1, [('weight', 7)]), - (1, 3, [('weight', 9)]), - (1, 2, [('weight', 8)]), - (1, 4, [('weight', 7)]), - (3, 4, [('weight', 15)]), - (3, 5, [('weight', 6)]), - (2, 4, [('weight', 5)]), - (4, 5, [('weight', 8)]), - (4, 6, [('weight', 9)]), - (5, 6, [('weight', 11)])] + edgelist = [ + (0, 3, [("weight", 5)]), + (0, 1, [("weight", 7)]), + (1, 3, [("weight", 9)]), + (1, 2, [("weight", 8)]), + (1, 4, [("weight", 7)]), + (3, 4, [("weight", 15)]), + (3, 5, [("weight", 6)]), + (2, 4, [("weight", 5)]), + (4, 5, [("weight", 8)]), + (4, 6, [("weight", 9)]), + (5, 6, [("weight", 11)]), + ] G.add_edges_from(edgelist) G = G.to_directed() x = branchings.minimum_spanning_arborescence(G) - edges = [(set([0, 1]), 7), (set([0, 3]), 5), (set([3, 5]), 6), - (set([1, 4]), 7), (set([4, 2]), 5), (set([4, 6]), 9)] + edges = [ + ({0, 1}, 7), + ({0, 3}, 5), + ({3, 5}, 6), + ({1, 4}, 7), + ({4, 2}, 5), + ({4, 6}, 9), + ] - assert_equal(x.number_of_edges(), len(edges)) + assert x.number_of_edges() == len(edges) for u, v, d in x.edges(data=True): - assert_true((set([u, v]), d['weight']) in edges) + assert ({u, v}, d["weight"]) in edges def test_mixed_nodetypes(): # Smoke test to make sure no TypeError is raised for mixed node types. G = nx.Graph() - edgelist = [(0, 3, [('weight', 5)]), - (0, '1', [('weight', 5)])] + edgelist = [(0, 3, [("weight", 5)]), (0, "1", [("weight", 5)])] G.add_edges_from(edgelist) G = G.to_directed() x = branchings.minimum_spanning_arborescence(G) @@ -322,8 +380,7 @@ def test_edmonds1_minbranch(): # but with all edges negative. edges = [(u, v, -w) for (u, v, w) in optimal_arborescence_1] - G = nx.DiGraph() - G = nx.from_numpy_matrix(-G_array, create_using=G) + G = nx.from_numpy_array(-G_array, create_using=nx.DiGraph) # Quickly make sure max branching is empty. x = branchings.maximum_branching(G) @@ -334,3 +391,61 @@ def test_edmonds1_minbranch(): x = branchings.minimum_branching(G) x_ = build_branching(edges) assert_equal_branchings(x, x_) + + +def test_edge_attribute_preservation_normal_graph(): + # Test that edge attributes are preserved when finding an optimum graph + # using the Edmonds class for normal graphs. + G = nx.Graph() + + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] + G.add_edges_from(edgelist) + + ed = branchings.Edmonds(G) + B = ed.find_optimum("weight", preserve_attrs=True, seed=1) + + assert B[0][1]["otherattr"] == 1 + assert B[0][1]["otherattr2"] == 3 + + +def test_edge_attribute_preservation_multigraph(): + + # Test that edge attributes are preserved when finding an optimum graph + # using the Edmonds class for multigraphs. 
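These attribute-preservation tests (the multigraph variant continues below) drive the `Edmonds` class directly; under the networkx 2.5 API, `find_optimum` accepts `preserve_attrs` and an optional `seed`. A condensed sketch of the pattern being exercised:

import networkx as nx
from networkx.algorithms.tree import branchings

G = nx.Graph()
G.add_edge(0, 1, weight=5, otherattr=1)
G.add_edge(0, 2, weight=5, otherattr=2)
G.add_edge(1, 2, weight=6, otherattr=3)

# With preserve_attrs=True, non-weight edge data rides along into the
# optimum branching...
B = branchings.Edmonds(G).find_optimum("weight", preserve_attrs=True, seed=1)
assert B[0][1]["otherattr"] == 1

# ...and with the default preserve_attrs=False it is dropped.
B2 = branchings.Edmonds(G).find_optimum("weight", preserve_attrs=False)
assert "otherattr" not in B2[0][1]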
+ G = nx.MultiGraph() + + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] + G.add_edges_from(edgelist * 2) # Make sure we have duplicate edge paths + + ed = branchings.Edmonds(G) + B = ed.find_optimum("weight", preserve_attrs=True) + + assert B[0][1][0]["otherattr"] == 1 + assert B[0][1][0]["otherattr2"] == 3 + + +def test_edge_attribute_discard(): + # Test that edge attributes are discarded if we do not specify to keep them + G = nx.Graph() + + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] + G.add_edges_from(edgelist) + + ed = branchings.Edmonds(G) + B = ed.find_optimum("weight", preserve_attrs=False) + + edge_dict = B[0][1] + with pytest.raises(KeyError): + _ = edge_dict["otherattr"] diff --git a/networkx/algorithms/tree/tests/test_coding.py b/networkx/algorithms/tree/tests/test_coding.py index 2909d6f..0bc2ce9 100644 --- a/networkx/algorithms/tree/tests/test_coding.py +++ b/networkx/algorithms/tree/tests/test_coding.py @@ -1,47 +1,35 @@ -# -*- encoding: utf-8 -*- -# test_coding.py - unit tests for the coding module -# -# Copyright 2015-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`~networkx.algorithms.tree.coding` module.""" from itertools import product -from nose.tools import assert_equal -from nose.tools import assert_true -from nose.tools import raises - +import pytest import networkx as nx from networkx.testing import assert_nodes_equal from networkx.testing import assert_edges_equal -class TestPruferSequence(object): +class TestPruferSequence: """Unit tests for the Prüfer sequence encoding and decoding functions. """ - @raises(nx.NotATree) def test_nontree(self): - G = nx.cycle_graph(3) - nx.to_prufer_sequence(G) + with pytest.raises(nx.NotATree): + G = nx.cycle_graph(3) + nx.to_prufer_sequence(G) - @raises(nx.NetworkXPointlessConcept) def test_null_graph(self): - nx.to_prufer_sequence(nx.null_graph()) + with pytest.raises(nx.NetworkXPointlessConcept): + nx.to_prufer_sequence(nx.null_graph()) - @raises(nx.NetworkXPointlessConcept) def test_trivial_graph(self): - nx.to_prufer_sequence(nx.trivial_graph()) + with pytest.raises(nx.NetworkXPointlessConcept): + nx.to_prufer_sequence(nx.trivial_graph()) - @raises(KeyError) def test_bad_integer_labels(self): - T = nx.Graph(nx.utils.pairwise('abc')) - nx.to_prufer_sequence(T) + with pytest.raises(KeyError): + T = nx.Graph(nx.utils.pairwise("abc")) + nx.to_prufer_sequence(T) def test_encoding(self): """Tests for encoding a tree as a Prüfer sequence using the @@ -51,7 +39,7 @@ def test_encoding(self): # Example from Wikipedia. 
tree = nx.Graph([(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)]) sequence = nx.to_prufer_sequence(tree) - assert_equal(sequence, [3, 3, 3, 4]) + assert sequence == [3, 3, 3, 4] def test_decoding(self): """Tests for decoding a tree from a Prüfer sequence.""" @@ -81,23 +69,23 @@ def test_inverse(self): for seq in product(range(4), repeat=2): seq2 = nx.to_prufer_sequence(nx.from_prufer_sequence(seq)) - assert_equal(list(seq), seq2) + assert list(seq) == seq2 -class TestNestedTuple(object): +class TestNestedTuple: """Unit tests for the nested tuple encoding and decoding functions. """ - @raises(nx.NotATree) def test_nontree(self): - G = nx.cycle_graph(3) - nx.to_nested_tuple(G, 0) + with pytest.raises(nx.NotATree): + G = nx.cycle_graph(3) + nx.to_nested_tuple(G, 0) - @raises(nx.NodeNotFound) def test_unknown_root(self): - G = nx.path_graph(2) - nx.to_nested_tuple(G, 'bogus') + with pytest.raises(nx.NodeNotFound): + G = nx.path_graph(2) + nx.to_nested_tuple(G, "bogus") def test_encoding(self): T = nx.full_rary_tree(2, 2 ** 3 - 1) @@ -113,13 +101,13 @@ def test_canonical_form(self): root = 0 actual = nx.to_nested_tuple(T, root, canonical_form=True) expected = ((), ((), ()), ((), ())) - assert_equal(actual, expected) + assert actual == expected def test_decoding(self): balanced = (((), ()), ((), ())) expected = nx.full_rary_tree(2, 2 ** 3 - 1) actual = nx.from_nested_tuple(balanced) - assert_true(nx.is_isomorphic(expected, actual)) + assert nx.is_isomorphic(expected, actual) def test_sensible_relabeling(self): balanced = (((), ()), ((), ())) diff --git a/networkx/algorithms/tree/tests/test_decomposition.py b/networkx/algorithms/tree/tests/test_decomposition.py new file mode 100644 index 0000000..8c37605 --- /dev/null +++ b/networkx/algorithms/tree/tests/test_decomposition.py @@ -0,0 +1,79 @@ +import networkx as nx +from networkx.algorithms.tree.decomposition import junction_tree + + +def test_junction_tree_directed_confounders(): + B = nx.DiGraph() + B.add_edges_from([("A", "C"), ("B", "C"), ("C", "D"), ("C", "E")]) + + G = junction_tree(B) + J = nx.Graph() + J.add_edges_from( + [ + (("C", "E"), ("C",)), + (("C",), ("A", "B", "C")), + (("A", "B", "C"), ("C",)), + (("C",), ("C", "D")), + ] + ) + + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_directed_unconnected_nodes(): + B = nx.DiGraph() + B.add_nodes_from([("A", "B", "C", "D")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_nodes_from([("A", "B", "C", "D")]) + + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_directed_cascade(): + B = nx.DiGraph() + B.add_edges_from([("A", "B"), ("B", "C"), ("C", "D")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_edges_from( + [ + (("A", "B"), ("B",)), + (("B",), ("B", "C")), + (("B", "C"), ("C",)), + (("C",), ("C", "D")), + ] + ) + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_directed_unconnected_edges(): + B = nx.DiGraph() + B.add_edges_from([("A", "B"), ("C", "D"), ("E", "F")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_nodes_from([("A", "B"), ("C", "D"), ("E", "F")]) + + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_undirected(): + B = nx.Graph() + B.add_edges_from([("A", "C"), ("A", "D"), ("B", "C"), ("C", "E")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_edges_from( + [ + (("A", "D"), ("A",)), + (("A",), ("A", "C")), + (("A", "C"), ("C",)), + (("C",), ("B", "C")), + (("B", "C"), ("C",)), + (("C",), ("C", "E")), + ] + ) + + assert nx.is_isomorphic(G, J) diff --git a/networkx/algorithms/tree/tests/test_mst.py 
b/networkx/algorithms/tree/tests/test_mst.py index 4c17a83..cc042e0 100644 --- a/networkx/algorithms/tree/tests/test_mst.py +++ b/networkx/algorithms/tree/tests/test_mst.py @@ -1,29 +1,17 @@ -# -*- encoding: utf-8 -*- -# test_mst.py - unit tests for minimum spanning tree functions -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.tree.mst` module.""" -from unittest import TestCase -from nose.tools import assert_equal -from nose.tools import raises, assert_raises +import pytest import networkx as nx -from networkx.testing import (assert_graphs_equal, assert_nodes_equal, - assert_edges_equal) +from networkx.testing import assert_nodes_equal, assert_edges_equal -@raises(ValueError) def test_unknown_algorithm(): - nx.minimum_spanning_tree(nx.Graph(), algorithm='random') + with pytest.raises(ValueError): + nx.minimum_spanning_tree(nx.Graph(), algorithm="random") -class MinimumSpanningTreeTestBase(object): +class MinimumSpanningTreeTestBase: """Base class for test classes for minimum spanning tree algorithms. This class contains some common tests that will be inherited by @@ -35,7 +23,7 @@ class MinimumSpanningTreeTestBase(object): """ - def setUp(self): + def setup_method(self, method): """Creates an example graph and stores the expected minimum and maximum spanning tree edges. @@ -44,23 +32,37 @@ def setUp(self): self.algo = self.algorithm # This example graph comes from Wikipedia: # https://en.wikipedia.org/wiki/Kruskal's_algorithm - edges = [(0, 1, 7), (0, 3, 5), (1, 2, 8), (1, 3, 9), (1, 4, 7), - (2, 4, 5), (3, 4, 15), (3, 5, 6), (4, 5, 8), (4, 6, 9), - (5, 6, 11)] + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] self.G = nx.Graph() self.G.add_weighted_edges_from(edges) - self.minimum_spanning_edgelist = [(0, 1, {'weight': 7}), - (0, 3, {'weight': 5}), - (1, 4, {'weight': 7}), - (2, 4, {'weight': 5}), - (3, 5, {'weight': 6}), - (4, 6, {'weight': 9})] - self.maximum_spanning_edgelist = [(0, 1, {'weight': 7}), - (1, 2, {'weight': 8}), - (1, 3, {'weight': 9}), - (3, 4, {'weight': 15}), - (4, 6, {'weight': 9}), - (5, 6, {'weight': 11})] + self.minimum_spanning_edgelist = [ + (0, 1, {"weight": 7}), + (0, 3, {"weight": 5}), + (1, 4, {"weight": 7}), + (2, 4, {"weight": 5}), + (3, 5, {"weight": 6}), + (4, 6, {"weight": 9}), + ] + self.maximum_spanning_edgelist = [ + (0, 1, {"weight": 7}), + (1, 2, {"weight": 8}), + (1, 3, {"weight": 9}), + (3, 4, {"weight": 15}), + (4, 6, {"weight": 9}), + (5, 6, {"weight": 11}), + ] def test_minimum_edges(self): edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo) @@ -77,8 +79,7 @@ def test_maximum_edges(self): assert_edges_equal(actual, self.maximum_spanning_edgelist) def test_without_data(self): - edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo, - data=False) + edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo, data=False) # Edges from the spanning edges functions don't come in sorted # orientation, so we need to sort each edge individually. actual = sorted((min(u, v), max(u, v)) for u, v in edges) @@ -88,19 +89,73 @@ def test_without_data(self): def test_nan_weights(self): # Edge weights NaN never appear in the spanning tree. 
see #2164 G = self.G - G.add_edge(0, 12, weight=float('nan')) - edges = nx.minimum_spanning_edges(G, algorithm=self.algo, - data=False, ignore_nan=True) + G.add_edge(0, 12, weight=float("nan")) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=True + ) actual = sorted((min(u, v), max(u, v)) for u, v in edges) expected = [(u, v) for u, v, d in self.minimum_spanning_edgelist] assert_edges_equal(actual, expected) # Now test for raising exception - edges = nx.minimum_spanning_edges(G, algorithm=self.algo, - data=False, ignore_nan=False) - assert_raises(ValueError, list, edges) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=False + ) + with pytest.raises(ValueError): + list(edges) # test default for ignore_nan as False edges = nx.minimum_spanning_edges(G, algorithm=self.algo, data=False) - assert_raises(ValueError, list, edges) + with pytest.raises(ValueError): + list(edges) + + def test_nan_weights_order(self): + # now try again with a nan edge at the beginning of G.nodes + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] + G = nx.Graph() + G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges]) + G.add_edge(0, 7, weight=float("nan")) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=True + ) + actual = sorted((min(u, v), max(u, v)) for u, v in edges) + shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist] + assert_edges_equal(actual, shift) + + def test_isolated_node(self): + # now try again with an isolated node + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] + G = nx.Graph() + G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges]) + G.add_node(0) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=True + ) + actual = sorted((min(u, v), max(u, v)) for u, v in edges) + shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist] + assert_edges_equal(actual, shift) def test_minimum_tree(self): T = nx.minimum_spanning_tree(self.G, algorithm=self.algo) @@ -122,19 +177,19 @@ def test_empty_graph(self): G = nx.empty_graph(3) T = nx.minimum_spanning_tree(G, algorithm=self.algo) assert_nodes_equal(sorted(T), list(range(3))) - assert_equal(T.number_of_edges(), 0) + assert T.number_of_edges() == 0 def test_attributes(self): G = nx.Graph() - G.add_edge(1, 2, weight=1, color='red', distance=7) - G.add_edge(2, 3, weight=1, color='green', distance=2) - G.add_edge(1, 3, weight=10, color='blue', distance=1) - G.graph['foo'] = 'bar' + G.add_edge(1, 2, weight=1, color="red", distance=7) + G.add_edge(2, 3, weight=1, color="green", distance=2) + G.add_edge(1, 3, weight=10, color="blue", distance=1) + G.graph["foo"] = "bar" T = nx.minimum_spanning_tree(G, algorithm=self.algo) - assert_equal(T.graph, G.graph) + assert T.graph == G.graph assert_nodes_equal(T, G) for u, v in T.edges(): - assert_equal(T.adj[u][v], G.adj[u][v]) + assert T.adj[u][v] == G.adj[u][v] def test_weight_attribute(self): G = nx.Graph() @@ -142,27 +197,28 @@ def test_weight_attribute(self): G.add_edge(0, 2, weight=30, distance=1) G.add_edge(1, 2, weight=1, distance=1) G.add_node(3) - T = nx.minimum_spanning_tree(G, algorithm=self.algo, weight='distance') + T = nx.minimum_spanning_tree(G, algorithm=self.algo, weight="distance") 
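The NaN tests above and below pin down the `ignore_nan` contract: with the default `ignore_nan=False`, a NaN edge weight raises `ValueError` lazily, once the edge generator is consumed, while `ignore_nan=True` simply skips such edges. A small sketch, assuming networkx >= 2.5:

import networkx as nx
import pytest

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 2.0), (1, 2, 1.0)])
G.add_edge(0, 9, weight=float("nan"))

# The generator is lazy, so the error surfaces only on consumption.
with pytest.raises(ValueError):
    list(nx.minimum_spanning_edges(G, data=False))

# With ignore_nan=True the NaN edge is skipped outright.
edges = nx.minimum_spanning_edges(G, data=False, ignore_nan=True)
assert sorted(sorted(e) for e in edges) == [[0, 1], [1, 2]]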
assert_nodes_equal(sorted(T), list(range(4))) assert_edges_equal(sorted(T.edges()), [(0, 2), (1, 2)]) - T = nx.maximum_spanning_tree(G, algorithm=self.algo, weight='distance') + T = nx.maximum_spanning_tree(G, algorithm=self.algo, weight="distance") assert_nodes_equal(sorted(T), list(range(4))) assert_edges_equal(sorted(T.edges()), [(0, 1), (0, 2)]) -class TestBoruvka(MinimumSpanningTreeTestBase, TestCase): +class TestBoruvka(MinimumSpanningTreeTestBase): """Unit tests for computing a minimum (or maximum) spanning tree using Borůvka's algorithm. """ - algorithm = 'boruvka' + + algorithm = "boruvka" def test_unicode_name(self): """Tests that using a Unicode string can correctly indicate Borůvka's algorithm. """ - edges = nx.minimum_spanning_edges(self.G, algorithm=u'borůvka') + edges = nx.minimum_spanning_edges(self.G, algorithm="borůvka") # Edges from the spanning edges functions don't come in sorted # orientation, so we need to sort each edge individually. actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges) @@ -178,11 +234,11 @@ def test_multigraph_keys_min(self): """ G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) min_edges = nx.minimum_spanning_edges mst_edges = min_edges(G, algorithm=self.algo, data=False) - assert_edges_equal([(0, 1, 'b')], list(mst_edges)) + assert_edges_equal([(0, 1, "b")], list(mst_edges)) def test_multigraph_keys_max(self): """Tests that the maximum spanning edges of a multigraph @@ -190,38 +246,40 @@ def test_multigraph_keys_max(self): """ G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) max_edges = nx.maximum_spanning_edges mst_edges = max_edges(G, algorithm=self.algo, data=False) - assert_edges_equal([(0, 1, 'a')], list(mst_edges)) + assert_edges_equal([(0, 1, "a")], list(mst_edges)) -class TestKruskal(MultigraphMSTTestBase, TestCase): +class TestKruskal(MultigraphMSTTestBase): """Unit tests for computing a minimum (or maximum) spanning tree using Kruskal's algorithm. """ - algorithm = 'kruskal' + + algorithm = "kruskal" -class TestPrim(MultigraphMSTTestBase, TestCase): +class TestPrim(MultigraphMSTTestBase): """Unit tests for computing a minimum (or maximum) spanning tree using Prim's algorithm. 
""" - algorithm = 'prim' + + algorithm = "prim" def test_multigraph_keys_tree(self): G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) T = nx.minimum_spanning_tree(G) - assert_edges_equal([(0, 1, 1)], list(T.edges(data='weight'))) + assert_edges_equal([(0, 1, 1)], list(T.edges(data="weight"))) def test_multigraph_keys_tree_max(self): G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) T = nx.maximum_spanning_tree(G) - assert_edges_equal([(0, 1, 2)], list(T.edges(data='weight'))) + assert_edges_equal([(0, 1, 2)], list(T.edges(data="weight"))) diff --git a/networkx/algorithms/tree/tests/test_operations.py b/networkx/algorithms/tree/tests/test_operations.py index 3d99835..21b2afd 100644 --- a/networkx/algorithms/tree/tests/test_operations.py +++ b/networkx/algorithms/tree/tests/test_operations.py @@ -1,23 +1,13 @@ -# test_operations.py - unit tests for the operations module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.algorithms.tree.operations` module. """ -from nose.tools import assert_equal -from nose.tools import assert_true import networkx as nx from networkx.testing import assert_nodes_equal from networkx.testing import assert_edges_equal -class TestJoin(object): +class TestJoin: """Unit tests for the :func:`networkx.tree.join` function.""" def test_empty_sequence(self): @@ -26,8 +16,8 @@ def test_empty_sequence(self): """ T = nx.join([]) - assert_equal(len(T), 1) - assert_equal(T.number_of_edges(), 0) + assert len(T) == 1 + assert T.number_of_edges() == 0 def test_single(self): """Tests that joining just one tree yields a tree with one more @@ -45,4 +35,4 @@ def test_basic(self): trees = [(nx.full_rary_tree(2, 2 ** 2 - 1), 0) for i in range(2)] actual = nx.join(trees) expected = nx.full_rary_tree(2, 2 ** 3 - 1) - assert_true(nx.is_isomorphic(actual, expected)) + assert nx.is_isomorphic(actual, expected) diff --git a/networkx/algorithms/tree/tests/test_recognition.py b/networkx/algorithms/tree/tests/test_recognition.py index 08d289e..e664e8a 100644 --- a/networkx/algorithms/tree/tests/test_recognition.py +++ b/networkx/algorithms/tree/tests/test_recognition.py @@ -1,80 +1,86 @@ - -from nose.tools import * +import pytest import networkx as nx -class TestTreeRecognition(object): +class TestTreeRecognition: graph = nx.Graph multigraph = nx.MultiGraph - def setUp(self): + @classmethod + def setup_class(cls): - self.T1 = self.graph() + cls.T1 = cls.graph() - self.T2 = self.graph() - self.T2.add_node(1) + cls.T2 = cls.graph() + cls.T2.add_node(1) - self.T3 = self.graph() - self.T3.add_nodes_from(range(5)) + cls.T3 = cls.graph() + cls.T3.add_nodes_from(range(5)) edges = [(i, i + 1) for i in range(4)] - self.T3.add_edges_from(edges) + cls.T3.add_edges_from(edges) - self.T5 = self.multigraph() - self.T5.add_nodes_from(range(5)) + cls.T5 = cls.multigraph() + cls.T5.add_nodes_from(range(5)) edges = [(i, i + 1) for i in range(4)] - self.T5.add_edges_from(edges) + cls.T5.add_edges_from(edges) - self.T6 = self.graph() - self.T6.add_nodes_from([6, 7]) - self.T6.add_edge(6, 7) + cls.T6 = cls.graph() + cls.T6.add_nodes_from([6, 7]) + cls.T6.add_edge(6, 7) - self.F1 = nx.compose(self.T6, self.T3) + cls.F1 = 
nx.compose(cls.T6, cls.T3) - self.N4 = self.graph() - self.N4.add_node(1) - self.N4.add_edge(1, 1) + cls.N4 = cls.graph() + cls.N4.add_node(1) + cls.N4.add_edge(1, 1) - self.N5 = self.graph() - self.N5.add_nodes_from(range(5)) + cls.N5 = cls.graph() + cls.N5.add_nodes_from(range(5)) - self.N6 = self.graph() - self.N6.add_nodes_from(range(3)) - self.N6.add_edges_from([(0, 1), (1, 2), (2, 0)]) + cls.N6 = cls.graph() + cls.N6.add_nodes_from(range(3)) + cls.N6.add_edges_from([(0, 1), (1, 2), (2, 0)]) - self.NF1 = nx.compose(self.T6, self.N6) + cls.NF1 = nx.compose(cls.T6, cls.N6) - @raises(nx.NetworkXPointlessConcept) def test_null_tree(self): - nx.is_tree(self.graph()) - nx.is_tree(self.multigraph()) + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_tree(self.graph()) + + def test_null_tree2(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_tree(self.multigraph()) - @raises(nx.NetworkXPointlessConcept) def test_null_forest(self): - nx.is_forest(self.graph()) - nx.is_forest(self.multigraph()) + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_forest(self.graph()) + + def test_null_forest2(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_forest(self.multigraph()) def test_is_tree(self): - assert_true(nx.is_tree(self.T2)) - assert_true(nx.is_tree(self.T3)) - assert_true(nx.is_tree(self.T5)) + assert nx.is_tree(self.T2) + assert nx.is_tree(self.T3) + assert nx.is_tree(self.T5) def test_is_not_tree(self): - assert_false(nx.is_tree(self.N4)) - assert_false(nx.is_tree(self.N5)) - assert_false(nx.is_tree(self.N6)) + assert not nx.is_tree(self.N4) + assert not nx.is_tree(self.N5) + assert not nx.is_tree(self.N6) def test_is_forest(self): - assert_true(nx.is_forest(self.T2)) - assert_true(nx.is_forest(self.T3)) - assert_true(nx.is_forest(self.T5)) - assert_true(nx.is_forest(self.F1)) - assert_true(nx.is_forest(self.N5)) + assert nx.is_forest(self.T2) + assert nx.is_forest(self.T3) + assert nx.is_forest(self.T5) + assert nx.is_forest(self.F1) + assert nx.is_forest(self.N5) def test_is_not_forest(self): - assert_false(nx.is_forest(self.N4)) - assert_false(nx.is_forest(self.N6)) - assert_false(nx.is_forest(self.NF1)) + assert not nx.is_forest(self.N4) + assert not nx.is_forest(self.N6) + assert not nx.is_forest(self.NF1) class TestDirectedTreeRecognition(TestTreeRecognition): @@ -86,39 +92,39 @@ def test_disconnected_graph(): # https://github.com/networkx/networkx/issues/1144 G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 0), (3, 4)]) - assert_false(nx.is_tree(G)) + assert not nx.is_tree(G) G = nx.DiGraph() G.add_edges_from([(0, 1), (1, 2), (2, 0), (3, 4)]) - assert_false(nx.is_tree(G)) + assert not nx.is_tree(G) def test_dag_nontree(): G = nx.DiGraph() G.add_edges_from([(0, 1), (0, 2), (1, 2)]) - assert_false(nx.is_tree(G)) - assert_true(nx.is_directed_acyclic_graph(G)) + assert not nx.is_tree(G) + assert nx.is_directed_acyclic_graph(G) def test_multicycle(): G = nx.MultiDiGraph() G.add_edges_from([(0, 1), (0, 1)]) - assert_false(nx.is_tree(G)) - assert_true(nx.is_directed_acyclic_graph(G)) + assert not nx.is_tree(G) + assert nx.is_directed_acyclic_graph(G) def test_emptybranch(): G = nx.DiGraph() G.add_nodes_from(range(10)) - assert_true(nx.is_branching(G)) - assert_false(nx.is_arborescence(G)) + assert nx.is_branching(G) + assert not nx.is_arborescence(G) def test_path(): G = nx.DiGraph() nx.add_path(G, range(5)) - assert_true(nx.is_branching(G)) - assert_true(nx.is_arborescence(G)) + assert nx.is_branching(G) + assert nx.is_arborescence(G) def 
test_notbranching1(): @@ -126,8 +132,8 @@ def test_notbranching1(): G = nx.MultiDiGraph() G.add_nodes_from(range(10)) G.add_edges_from([(0, 1), (1, 0)]) - assert_false(nx.is_branching(G)) - assert_false(nx.is_arborescence(G)) + assert not nx.is_branching(G) + assert not nx.is_arborescence(G) def test_notbranching2(): @@ -135,8 +141,8 @@ def test_notbranching2(): G = nx.MultiDiGraph() G.add_nodes_from(range(10)) G.add_edges_from([(0, 1), (0, 2), (3, 2)]) - assert_false(nx.is_branching(G)) - assert_false(nx.is_arborescence(G)) + assert not nx.is_branching(G) + assert not nx.is_arborescence(G) def test_notarborescence1(): @@ -144,8 +150,8 @@ def test_notarborescence1(): G = nx.MultiDiGraph() G.add_nodes_from(range(10)) G.add_edges_from([(0, 1), (0, 2), (1, 3), (5, 6)]) - assert_true(nx.is_branching(G)) - assert_false(nx.is_arborescence(G)) + assert nx.is_branching(G) + assert not nx.is_arborescence(G) def test_notarborescence2(): @@ -153,5 +159,5 @@ def test_notarborescence2(): G = nx.MultiDiGraph() nx.add_path(G, range(5)) G.add_edge(6, 4) - assert_false(nx.is_branching(G)) - assert_false(nx.is_arborescence(G)) + assert not nx.is_branching(G) + assert not nx.is_arborescence(G) diff --git a/networkx/algorithms/triads.py b/networkx/algorithms/triads.py index 82fac01..cf48932 100644 --- a/networkx/algorithms/triads.py +++ b/networkx/algorithms/triads.py @@ -1,36 +1,116 @@ -# triads.py - functions for analyzing triads of a graph -# -# Copyright 2015 NetworkX developers. +# See https://github.com/networkx/networkx/pull/1474 # Copyright 2011 Reya Group # Copyright 2011 Alex Levenson # Copyright 2011 Diederik van Liere -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for analyzing triads of a graph.""" -from __future__ import division -from networkx.utils import not_implemented_for +from itertools import combinations, permutations +from collections import defaultdict +from random import sample -__author__ = '\n'.join(['Alex Levenson (alex@isnontinvain.com)', - 'Diederik van Liere (diederik.vanliere@rotman.utoronto.ca)']) +import networkx as nx +from networkx.utils import not_implemented_for -__all__ = ['triadic_census'] +__all__ = [ + "triadic_census", + "is_triad", + "all_triplets", + "all_triads", + "triads_by_type", + "triad_type", + "random_triad", +] #: The integer codes representing each type of triad. #: #: Triads that are the same up to symmetry have the same code. -TRICODES = (1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11, 2, 6, 4, 8, 5, 9, - 9, 13, 6, 10, 9, 14, 7, 14, 12, 15, 2, 5, 6, 7, 6, 9, 10, 14, 4, 9, - 9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15, - 11, 15, 15, 16) +TRICODES = ( + 1, + 2, + 2, + 3, + 2, + 4, + 6, + 8, + 2, + 6, + 5, + 7, + 3, + 8, + 7, + 11, + 2, + 6, + 4, + 8, + 5, + 9, + 9, + 13, + 6, + 10, + 9, + 14, + 7, + 14, + 12, + 15, + 2, + 5, + 6, + 7, + 6, + 9, + 10, + 14, + 4, + 9, + 9, + 12, + 8, + 13, + 14, + 15, + 3, + 7, + 8, + 11, + 7, + 12, + 14, + 15, + 8, + 14, + 13, + 15, + 11, + 15, + 15, + 16, +) #: The names of each type of triad. The order of the elements is #: important: it corresponds to the tricodes given in :data:`TRICODES`. 
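The `TRICODES` table above pairs with `_tricode`, which assigns one bit to each of the six possible directed edges among an ordered triple (v, u, w); the resulting 6-bit code in [0, 63] is mapped onto the 16 triad types, and the new `triad_type` function (further down in this file) labels an order-3 subgraph with the same `{m}{a}{n}{type}` notation. A worked sketch against the networkx 2.5 API:

import networkx as nx
from networkx.algorithms.triads import TRICODES, TRIAD_NAMES, _tricode

# A single asymmetric tie u -> v sets only the "2" bit, so the code is
# 2 and the table maps it to "012" (0 mutual, 1 asymmetric, 2 null).
G = nx.DiGraph([(1, 0)])
G.add_node(2)
code = _tricode(G, 0, 1, 2)  # arguments are the ordered triple (v, u, w)
assert code == 2
assert TRIAD_NAMES[TRICODES[code] - 1] == "012"
assert nx.triadic_census(G)["012"] == 1

# triad_type spells out the notation: a mutual dyad 0 <-> 1 plus the
# asymmetric tie 0 -> 2 leaves dyad {1, 2} null, hence 1/1/1 -> "111U".
T = nx.DiGraph([(0, 1), (1, 0), (0, 2)])
assert nx.triad_type(T) == "111U"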
-TRIAD_NAMES = ('003', '012', '102', '021D', '021U', '021C', '111D', '111U', - '030T', '030C', '201', '120D', '120U', '120C', '210', '300') +TRIAD_NAMES = ( + "003", + "012", + "102", + "021D", + "021U", + "021C", + "111D", + "111U", + "030T", + "030C", + "201", + "120D", + "120U", + "120C", + "210", + "300", +) #: A dictionary mapping triad code to triad name. @@ -45,12 +125,11 @@ def _tricode(G, v, u, w): the binary representation of an integer. """ - combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), - (w, u, 32)) + combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32)) return sum(x for u, v, x in combos if v in G[u]) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def triadic_census(G): """Determines the triadic census of a directed graph. @@ -65,7 +144,7 @@ def triadic_census(G): Returns ------- census : dict - Dictionary with triad names as keys and number of occurrences as values. + Dictionary with triad type as keys and number of occurrences as values. Notes ----- @@ -97,20 +176,265 @@ def triadic_census(G): neighbors = (vnbrs | set(G.succ[u]) | set(G.pred[u])) - {u, v} # Calculate dyadic triads instead of counting them. if v in G[u] and u in G[v]: - census['102'] += n - len(neighbors) - 2 + census["102"] += n - len(neighbors) - 2 else: - census['012'] += n - len(neighbors) - 2 + census["012"] += n - len(neighbors) - 2 # Count connected triads. for w in neighbors: - if m[u] < m[w] or (m[v] < m[w] < m[u] and - v not in G.pred[w] and - v not in G.succ[w]): + if m[u] < m[w] or ( + m[v] < m[w] < m[u] and v not in G.pred[w] and v not in G.succ[w] + ): code = _tricode(G, v, u, w) census[TRICODE_TO_NAME[code]] += 1 - # null triads = total number of possible triads - all found triads # # Use integer division here, since we know this formula guarantees an # integral value. - census['003'] = ((n * (n - 1) * (n - 2)) // 6) - sum(census.values()) + census["003"] = ((n * (n - 1) * (n - 2)) // 6) - sum(census.values()) return census + + +def is_triad(G): + """Returns True if the graph G is a triad, else False. + + Parameters + ---------- + G : graph + A NetworkX Graph + + Returns + ------- + istriad : boolean + Whether G is a valid triad + """ + if isinstance(G, nx.Graph): + if G.order() == 3 and nx.is_directed(G): + if not any((n, n) in G.edges() for n in G.nodes()): + return True + return False + + +@not_implemented_for("undirected") +def all_triplets(G): + """Returns a generator of all possible sets of 3 nodes in a DiGraph. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + triplets : generator of 3-tuples + Generator of tuples of 3 nodes + """ + triplets = combinations(G.nodes(), 3) + return triplets + + +@not_implemented_for("undirected") +def all_triads(G): + """A generator of all possible triads in G. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + all_triads : generator of DiGraphs + Generator of triads (order-3 DiGraphs) + """ + triplets = combinations(G.nodes(), 3) + for triplet in triplets: + yield G.subgraph(triplet).copy() + + +@not_implemented_for("undirected") +def triads_by_type(G): + """Returns a list of all triads for each triad type in a directed graph. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + tri_by_type : dict + Dictionary with triad types as keys and lists of triads as values. 
+ """ + # num_triads = o * (o - 1) * (o - 2) // 6 + # if num_triads > TRIAD_LIMIT: print(WARNING) + all_tri = all_triads(G) + tri_by_type = defaultdict(list) + for triad in all_tri: + name = triad_type(triad) + tri_by_type[name].append(triad) + return tri_by_type + + +@not_implemented_for("undirected") +def triad_type(G): + """Returns the sociological triad type for a triad. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph with 3 nodes + + Returns + ------- + triad_type : str + A string identifying the triad type + + Notes + ----- + There can be 6 unique edges in a triad (order-3 DiGraph) (so 2^^6=64 unique + triads given 3 nodes). These 64 triads each display exactly 1 of 16 + topologies of triads (topologies can be permuted). These topologies are + identified by the following notation: + + {m}{a}{n}{type} (for example: 111D, 210, 102) + + Here: + + {m} = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1) + AND (1,0) + {a} = number of assymmetric ties (takes 0, 1, 2, 3); an assymmetric tie + is (0,1) BUT NOT (1,0) or vice versa + {n} = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER + (0,1) NOR (1,0) + {type} = a letter (takes U, D, C, T) corresponding to up, down, cyclical + and transitive. This is only used for topologies that can have + more than one form (eg: 021D and 021U). + + References + ---------- + .. [1] Snijders, T. (2012). "Transitivity and triads." University of + Oxford. + http://www.stats.ox.ac.uk/snijders/Trans_Triads_ha.pdf + """ + if not is_triad(G): + raise nx.NetworkXAlgorithmError("G is not a triad (order-3 DiGraph)") + num_edges = len(G.edges()) + if num_edges == 0: + return "003" + elif num_edges == 1: + return "012" + elif num_edges == 2: + e1, e2 = G.edges() + if set(e1) == set(e2): + return "102" + elif e1[0] == e2[0]: + return "021D" + elif e1[1] == e2[1]: + return "021U" + elif e1[1] == e2[0] or e2[1] == e1[0]: + return "021C" + elif num_edges == 3: + for (e1, e2, e3) in permutations(G.edges(), 3): + if set(e1) == set(e2): + if e3[0] in e1: + return "111U" + # e3[1] in e1: + return "111D" + elif set(e1).symmetric_difference(set(e2)) == set(e3): + if {e1[0], e2[0], e3[0]} == {e1[0], e2[0], e3[0]} == set(G.nodes()): + return "030C" + # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]): + return "030T" + elif num_edges == 4: + for (e1, e2, e3, e4) in permutations(G.edges(), 4): + if set(e1) == set(e2): + # identify pair of symmetric edges (which necessarily exists) + if set(e3) == set(e4): + return "201" + if {e3[0]} == {e4[0]} == set(e3).intersection(set(e4)): + return "120D" + if {e3[1]} == {e4[1]} == set(e3).intersection(set(e4)): + return "120U" + if e3[1] == e4[0]: + return "120C" + elif num_edges == 5: + return "210" + elif num_edges == 6: + return "300" + + +@not_implemented_for("undirected") +def random_triad(G): + """Returns a random triad from a directed graph. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + G2 : subgraph + A randomly selected triad (order-3 NetworkX DiGraph) + """ + nodes = sample(G.nodes(), 3) + G2 = G.subgraph(nodes) + return G2 + + +""" +@not_implemented_for('undirected') +def triadic_closures(G): + '''Returns a list of order-3 subgraphs of G that are triadic closures. 
+ + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + closures : list + List of triads of G that are triadic closures + ''' + pass + + +@not_implemented_for('undirected') +def focal_closures(G, attr_name): + '''Returns a list of order-3 subgraphs of G that are focally closed. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + attr_name : str + An attribute name + + + Returns + ------- + closures : list + List of triads of G that are focally closed on attr_name + ''' + pass + + +@not_implemented_for('undirected') +def balanced_triads(G, crit_func): + '''Returns a list of order-3 subgraphs of G that are stable. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + crit_func : function + A function that determines if a triad (order-3 digraph) is stable + + Returns + ------- + triads : list + List of triads in G that are stable + ''' + pass +""" diff --git a/networkx/algorithms/vitality.py b/networkx/algorithms/vitality.py index 181e818..88e174d 100644 --- a/networkx/algorithms/vitality.py +++ b/networkx/algorithms/vitality.py @@ -1,14 +1,3 @@ -# Copyright (C) 2010 by -# Aric Hagberg (hagberg@lanl.gov) -# Renato Fabbri -# Copyright (C) 2012 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Copyright (C) 2016-2018 by NetworkX developers. -# -# All rights reserved. -# BSD license. """ Vitality measures. """ @@ -16,7 +5,7 @@ import networkx as nx -__all__ = ['closeness_vitality'] +__all__ = ["closeness_vitality"] def closeness_vitality(G, node=None, weight=None, wiener_index=None): @@ -81,7 +70,6 @@ def closeness_vitality(G, node=None, weight=None, wiener_index=None): if node is not None: after = nx.wiener_index(G.subgraph(set(G) - {node}), weight=weight) return wiener_index - after - vitality = partial(closeness_vitality, G, weight=weight, - wiener_index=wiener_index) + vitality = partial(closeness_vitality, G, weight=weight, wiener_index=wiener_index) # TODO This can be trivially parallelized. return {v: vitality(node=v) for v in G} diff --git a/networkx/algorithms/voronoi.py b/networkx/algorithms/voronoi.py index 9fc7b80..184afa2 100644 --- a/networkx/algorithms/voronoi.py +++ b/networkx/algorithms/voronoi.py @@ -1,19 +1,11 @@ -# voronoi.py - functions for computing the Voronoi partition of a graph -# -# Copyright 2016-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for computing the Voronoi cells of a graph.""" import networkx as nx from networkx.utils import groups -__all__ = ['voronoi_cells'] +__all__ = ["voronoi_cells"] -def voronoi_cells(G, center_nodes, weight='weight'): +def voronoi_cells(G, center_nodes, weight="weight"): """Returns the Voronoi cells centered at `center_nodes` with respect to the shortest-path distance metric. @@ -89,5 +81,5 @@ def voronoi_cells(G, center_nodes, weight='weight'): # We collect all unreachable nodes under a special key, if there are any. unreachable = set(G) - set(nearest) if unreachable: - cells['unreachable'] = unreachable + cells["unreachable"] = unreachable return cells diff --git a/networkx/algorithms/wiener.py b/networkx/algorithms/wiener.py index 66d380e..a574cd7 100644 --- a/networkx/algorithms/wiener.py +++ b/networkx/algorithms/wiener.py @@ -1,13 +1,4 @@ -# wiener.py - functions related to the Wiener index of a graph -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. 
-# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions related to the Wiener index of a graph.""" -from __future__ import division from itertools import chain @@ -15,7 +6,7 @@ from .components import is_strongly_connected from .shortest_paths import shortest_path_length as spl -__all__ = ['wiener_index'] +__all__ = ["wiener_index"] #: Rename the :func:`chain.from_iterable` function for the sake of #: brevity. @@ -64,7 +55,6 @@ def wiener_index(G, weight=None): equals the number of pairs of the *n* nodes, since each pair of nodes is at distance one:: - >>> import networkx as nx >>> n = 10 >>> G = nx.complete_graph(n) >>> nx.wiener_index(G) == n * (n - 1) / 2 @@ -78,9 +68,10 @@ def wiener_index(G, weight=None): """ is_directed = G.is_directed() - if (is_directed and not is_strongly_connected(G)) or \ - (not is_directed and not is_connected(G)): - return float('inf') + if (is_directed and not is_strongly_connected(G)) or ( + not is_directed and not is_connected(G) + ): + return float("inf") total = sum(chaini(p.values() for v, p in spl(G, weight=weight))) # Need to account for double counting pairs of nodes in undirected graphs. return total if is_directed else total / 2 diff --git a/networkx/classes/coreviews.py b/networkx/classes/coreviews.py index 6c49f08..e22f685 100644 --- a/networkx/classes/coreviews.py +++ b/networkx/classes/coreviews.py @@ -1,25 +1,20 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov), -# Pieter Swart (swart@lanl.gov), -# Dan Schult(dschult@colgate.edu) """ """ -from collections import Mapping -import networkx as nx - -__all__ = ['AtlasView', 'AdjacencyView', 'MultiAdjacencyView', - 'UnionAtlas', 'UnionAdjacency', - 'UnionMultiInner', 'UnionMultiAdjacency', - 'FilterAtlas', 'FilterAdjacency', - 'FilterMultiInner', 'FilterMultiAdjacency', - 'ReadOnlyGraph', - ] +from collections.abc import Mapping + +__all__ = [ + "AtlasView", + "AdjacencyView", + "MultiAdjacencyView", + "UnionAtlas", + "UnionAdjacency", + "UnionMultiInner", + "UnionMultiAdjacency", + "FilterAtlas", + "FilterAdjacency", + "FilterMultiInner", + "FilterMultiAdjacency", +] class AtlasView(Mapping): @@ -34,13 +29,14 @@ class AtlasView(Mapping): AdjacencyView - View into dict-of-dict-of-dict MultiAdjacencyView - View into dict-of-dict-of-dict-of-dict """ - __slots__ = ('_atlas',) + + __slots__ = ("_atlas",) def __getstate__(self): - return {'_atlas': self._atlas} + return {"_atlas": self._atlas} def __setstate__(self, state): - self._atlas = state['_atlas'] + self._atlas = state["_atlas"] def __init__(self, d): self._atlas = d @@ -61,7 +57,7 @@ def __str__(self): return str(self._atlas) # {nbr: self[nbr] for nbr in self}) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, self._atlas) + return f"{self.__class__.__name__}({self._atlas!r})" class AdjacencyView(AtlasView): @@ -76,7 +72,8 @@ class AdjacencyView(AtlasView): AtlasView - View into dict-of-dict MultiAdjacencyView - View into dict-of-dict-of-dict-of-dict """ - __slots__ = () # Still uses AtlasView slots names _atlas + + __slots__ = () # Still uses AtlasView slots names _atlas def __getitem__(self, name): return AtlasView(self._atlas[name]) @@ -97,7 +94,8 @@ class MultiAdjacencyView(AdjacencyView): AtlasView - View into dict-of-dict AdjacencyView - View into dict-of-dict-of-dict """ - __slots__ = () # Still uses AtlasView slots names _atlas + + __slots__ = () # Still uses 
AtlasView slots names _atlas def __getitem__(self, name): return AdjacencyView(self._atlas[name]) @@ -119,14 +117,15 @@ class UnionAtlas(Mapping): UnionAdjacency - View into dict-of-dict-of-dict UnionMultiAdjacency - View into dict-of-dict-of-dict-of-dict """ - __slots__ = ('_succ', '_pred') + + __slots__ = ("_succ", "_pred") def __getstate__(self): - return {'_succ': self._succ, '_pred': self._pred} + return {"_succ": self._succ, "_pred": self._pred} def __setstate__(self, state): - self._succ = state['_succ'] - self._pred = state['_pred'] + self._succ = state["_succ"] + self._pred = state["_pred"] def __init__(self, succ, pred): self._succ = succ @@ -157,7 +156,7 @@ def __str__(self): return str({nbr: self[nbr] for nbr in self}) def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self._succ, self._pred) + return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})" class UnionAdjacency(Mapping): @@ -177,18 +176,19 @@ class UnionAdjacency(Mapping): UnionAtlas - View into dict-of-dict UnionMultiAdjacency - View into dict-of-dict-of-dict-of-dict """ - __slots__ = ('_succ', '_pred') + + __slots__ = ("_succ", "_pred") def __getstate__(self): - return {'_succ': self._succ, '_pred': self._pred} + return {"_succ": self._succ, "_pred": self._pred} def __setstate__(self, state): - self._succ = state['_succ'] - self._pred = state['_pred'] + self._succ = state["_succ"] + self._pred = state["_pred"] def __init__(self, succ, pred): # keys must be the same for two input dicts - assert(len(set(succ.keys()) ^ set(pred.keys())) == 0) + assert len(set(succ.keys()) ^ set(pred.keys())) == 0 self._succ = succ self._pred = pred @@ -208,7 +208,7 @@ def __str__(self): return str({nbr: self[nbr] for nbr in self}) def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self._succ, self._pred) + return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})" class UnionMultiInner(UnionAtlas): @@ -225,7 +225,8 @@ class UnionMultiInner(UnionAtlas): UnionAdjacency - View into dict-of-dict-of-dict UnionMultiAdjacency - View into dict-of-dict-of-dict-of-dict """ - __slots__ = () # Still uses UnionAtlas slots names _succ, _pred + + __slots__ = () # Still uses UnionAtlas slots names _succ, _pred def __getitem__(self, node): in_succ = node in self._succ @@ -253,33 +254,13 @@ class UnionMultiAdjacency(UnionAdjacency): UnionAtlas - View into dict-of-dict UnionMultiInner - View into dict-of-dict-of-dict """ - __slots__ = () # Still uses UnionAdjacency slots names _succ, _pred + + __slots__ = () # Still uses UnionAdjacency slots names _succ, _pred def __getitem__(self, node): return UnionMultiInner(self._succ[node], self._pred[node]) -class ReadOnlyGraph(object): - """A Mixin Class to mask the write methods of a graph class.""" - - def not_allowed(self, *args, **kwds): - msg = "SubGraph Views are readonly. 
Mutations not allowed" - raise nx.NetworkXError(msg) - - add_node = not_allowed - remove_node = not_allowed - add_nodes_from = not_allowed - remove_nodes_from = not_allowed - - add_edge = not_allowed - remove_edge = not_allowed - add_edges_from = not_allowed - add_weighted_edges_from = not_allowed - remove_edges_from = not_allowed - - clear = not_allowed - - class FilterAtlas(Mapping): # nodedict, nbrdict, keydict def __init__(self, d, NODE_OK): self._atlas = d @@ -289,31 +270,36 @@ def __len__(self): return sum(1 for n in self) def __iter__(self): - if hasattr(self.NODE_OK, 'nodes'): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: return (n for n in self.NODE_OK.nodes if n in self._atlas) return (n for n in self._atlas if self.NODE_OK(n)) def __getitem__(self, key): if key in self._atlas and self.NODE_OK(key): return self._atlas[key] - raise KeyError("Key {} not found".format(key)) + raise KeyError(f"Key {key} not found") def copy(self): - if hasattr(self.NODE_OK, 'nodes'): - return {u: self._atlas[u] for u in self.NODE_OK.nodes - if u in self._atlas} - return {u: d for u, d in self._atlas.items() - if self.NODE_OK(u)} + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: + return {u: self._atlas[u] for u in self.NODE_OK.nodes if u in self._atlas} + return {u: d for u, d in self._atlas.items() if self.NODE_OK(u)} def __str__(self): return str({nbr: self[nbr] for nbr in self}) def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self._atlas, - self.NODE_OK) + return f"{self.__class__.__name__}({self._atlas!r}, {self.NODE_OK!r})" -class FilterAdjacency(Mapping): # edgedict +class FilterAdjacency(Mapping): # edgedict def __init__(self, d, NODE_OK, EDGE_OK): self._atlas = d self.NODE_OK = NODE_OK @@ -323,38 +309,60 @@ def __len__(self): return sum(1 for n in self) def __iter__(self): - if hasattr(self.NODE_OK, 'nodes'): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: return (n for n in self.NODE_OK.nodes if n in self._atlas) return (n for n in self._atlas if self.NODE_OK(n)) def __getitem__(self, node): if node in self._atlas and self.NODE_OK(node): + def new_node_ok(nbr): return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr) + return FilterAtlas(self._atlas[node], new_node_ok) - raise KeyError("Key {} not found".format(node)) + raise KeyError(f"Key {node} not found") def copy(self): - if hasattr(self.NODE_OK, 'nodes'): - return {u: {v: d for v, d in self._atlas[u].items() - if self.NODE_OK(v) if self.EDGE_OK(u, v)} - for u in self.NODE_OK.nodes if u in self._atlas} - return {u: {v: d for v, d in nbrs.items() if self.NODE_OK(v) - if self.EDGE_OK(u, v)} - for u, nbrs in self._atlas.items() - if self.NODE_OK(u)} + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: + return { + u: { + v: d + for v, d in self._atlas[u].items() + if self.NODE_OK(v) + if self.EDGE_OK(u, v) + } + for u in self.NODE_OK.nodes + if u in self._atlas + } + return { + u: {v: d for v, d in nbrs.items() if self.NODE_OK(v) if self.EDGE_OK(u, v)} + for u, nbrs in self._atlas.items() 
+ if self.NODE_OK(u) + } def __str__(self): return str({nbr: self[nbr] for nbr in self}) def __repr__(self): - return '%s(%r, %r, %r)' % (self.__class__.__name__, self._atlas, - self.NODE_OK, self.EDGE_OK) + name = self.__class__.__name__ + return f"{name}({self._atlas!r}, {self.NODE_OK!r}, {self.EDGE_OK!r})" class FilterMultiInner(FilterAdjacency): # muliedge_seconddict def __iter__(self): - if hasattr(self.NODE_OK, 'nodes'): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: my_nodes = (n for n in self.NODE_OK.nodes if n in self._atlas) else: my_nodes = (n for n in self._atlas if self.NODE_OK(n)) @@ -369,36 +377,63 @@ def __iter__(self): def __getitem__(self, nbr): if nbr in self._atlas and self.NODE_OK(nbr): + def new_node_ok(key): return self.EDGE_OK(nbr, key) + return FilterAtlas(self._atlas[nbr], new_node_ok) - raise KeyError("Key {} not found".format(nbr)) + raise KeyError(f"Key {nbr} not found") def copy(self): - if hasattr(self.NODE_OK, 'nodes'): - return {v: {k: d for k, d in self._atlas[v].items() - if self.EDGE_OK(v, k)} - for v in self.NODE_OK.nodes if v in self._atlas} - return {v: {k: d for k, d in nbrs.items() if self.EDGE_OK(v, k)} - for v, nbrs in self._atlas.items() if self.NODE_OK(v)} + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: + return { + v: {k: d for k, d in self._atlas[v].items() if self.EDGE_OK(v, k)} + for v in self.NODE_OK.nodes + if v in self._atlas + } + return { + v: {k: d for k, d in nbrs.items() if self.EDGE_OK(v, k)} + for v, nbrs in self._atlas.items() + if self.NODE_OK(v) + } class FilterMultiAdjacency(FilterAdjacency): # multiedgedict def __getitem__(self, node): if node in self._atlas and self.NODE_OK(node): + def edge_ok(nbr, key): return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr, key) + return FilterMultiInner(self._atlas[node], self.NODE_OK, edge_ok) - raise KeyError("Key {} not found".format(node)) + raise KeyError(f"Key {node} not found") def copy(self): - if hasattr(self.NODE_OK, 'nodes'): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: my_nodes = self.NODE_OK.nodes - return {u: {v: {k: d for k, d in kd.items() - if self.EDGE_OK(u, v, k)} - for v, kd in self._atlas[u].items() if v in my_nodes} - for u in my_nodes if u in self._atlas} - return {u: {v: {k: d for k, d in kd.items() - if self.EDGE_OK(u, v, k)} - for v, kd in nbrs.items() if self.NODE_OK(v)} - for u, nbrs in self._atlas.items() if self.NODE_OK(u)} + return { + u: { + v: {k: d for k, d in kd.items() if self.EDGE_OK(u, v, k)} + for v, kd in self._atlas[u].items() + if v in my_nodes + } + for u in my_nodes + if u in self._atlas + } + return { + u: { + v: {k: d for k, d in kd.items() if self.EDGE_OK(u, v, k)} + for v, kd in nbrs.items() + if self.NODE_OK(v) + } + for u, nbrs in self._atlas.items() + if self.NODE_OK(u) + } diff --git a/networkx/classes/digraph.py b/networkx/classes/digraph.py index e429ce1..7a2ea0d 100644 --- a/networkx/classes/digraph.py +++ b/networkx/classes/digraph.py @@ -1,21 +1,16 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Authors: Aric Hagberg -# Dan Schult -# Pieter Swart """Base class for directed graphs.""" from copy import deepcopy import networkx as nx from networkx.classes.graph import Graph from networkx.classes.coreviews import AdjacencyView -from networkx.classes.reportviews import OutEdgeView, InEdgeView, \ - DiDegreeView, InDegreeView, OutDegreeView +from networkx.classes.reportviews import ( + OutEdgeView, + InEdgeView, + DiDegreeView, + InDegreeView, + OutDegreeView, +) from networkx.exception import NetworkXError import networkx.convert as convert @@ -117,23 +112,23 @@ class DiGraph(Graph): Add node attributes using add_node(), add_nodes_from() or G.nodes - >>> G.add_node(1, time='5pm') - >>> G.add_nodes_from([3], time='2pm') + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") >>> G.nodes[1] {'time': '5pm'} - >>> G.nodes[1]['room'] = 714 - >>> del G.nodes[1]['room'] # remove attribute + >>> G.nodes[1]["room"] = 714 + >>> del G.nodes[1]["room"] # remove attribute >>> list(G.nodes(data=True)) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edges. - >>> G.add_edge(1, 2, weight=4.7 ) - >>> G.add_edges_from([(3, 4), (4, 5)], color='red') - >>> G.add_edges_from([(1, 2, {'color':'blue'}), (2, 3, {'weight':8})]) - >>> G[1][2]['weight'] = 4.7 - >>> G.edges[1, 2]['weight'] = 4 + >>> G.add_edge(1, 2, weight=4.7) + >>> G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2]["weight"] = 4.7 + >>> G.edges[1, 2]["weight"] = 4 Warning: we protect the graph data structure by making `G.edges[1, 2]` a read-only dict-like structure. However, you can assign to attributes @@ -145,7 +140,7 @@ class DiGraph(Graph): Many common graph features allow python syntax to speed reporting. - >>> 1 in G # check if node in graph + >>> 1 in G # check if node in graph True >>> [n for n in G if n < 3] # iterate through nodes [1, 2] @@ -153,17 +148,17 @@ class DiGraph(Graph): 5 Often the best way to traverse all edges of a graph is via the neighbors. - The neighbors are reported as an adjacency-dict `G.adj` or as `G.adjacency()` + The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()` >>> for n, nbrsdict in G.adjacency(): ... for nbr, eattr in nbrsdict.items(): - ... if 'weight' in eattr: - ... # Do something useful with the edges - ... pass + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass But the edges reporting object is often more convenient: - >>> for u, v, weight in G.edges(data='weight'): + >>> for u, v, weight in G.edges(data="weight"): ... if weight is not None: ... # Do something useful with the edges ... pass @@ -194,14 +189,19 @@ class DiGraph(Graph): maintained but extra features can be added. To replace one of the dicts create a new graph class by changing the class(!) variable holding the factory for that dict-like structure. The variable names are - node_dict_factory, adjlist_inner_dict_factory, adjlist_outer_dict_factory, - and edge_attr_dict_factory. + node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory, + adjlist_outer_dict_factory, edge_attr_dict_factory and graph_attr_dict_factory. node_dict_factory : function, (default: dict) Factory function to be used to create the dict containing node attributes, keyed by node id. 
It should require no arguments and return a dict-like object + node_attr_dict_factory: function, (default: dict) + Factory function to be used to create the node attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object + adjlist_outer_dict_factory : function, (default: dict) Factory function to be used to create the outer-most dict in the data structure that holds adjacency info keyed by node. @@ -214,9 +214,28 @@ class DiGraph(Graph): edge_attr_dict_factory : function, optional (default: dict) Factory function to be used to create the edge attribute - dict which holds attrbute values keyed by attribute name. + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object. + + graph_attr_dict_factory : function, (default: dict) + Factory function to be used to create the graph attribute + dict which holds attribute values keyed by attribute name. It should require no arguments and return a dict-like object. + + Typically, if your extension doesn't impact the data structure, all + methods will be inherited without issue except: `to_directed/to_undirected`. + By default these methods create a DiGraph/Graph class and you probably + want them to create your extension of a DiGraph/Graph. To facilitate + this we define two class variables that you can set in your subclass. + + to_directed_class : callable, (default: DiGraph or MultiDiGraph) + Class to create a new graph structure in the `to_directed` method. + If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used. + + to_undirected_class : callable, (default: Graph or MultiGraph) + Class to create a new graph structure in the `to_undirected` method. + If `None`, a NetworkX class (Graph or MultiGraph) is used. + Examples -------- @@ -225,9 +244,11 @@ class DiGraph(Graph): This reduces the memory used, but you lose edge attributes. >>> class ThinGraph(nx.Graph): - ... all_edge_dict = {'weight': 1} + ... all_edge_dict = {"weight": 1} + ... ... def single_edge_dict(self): ... return self.all_edge_dict + ... ... edge_attr_dict_factory = single_edge_dict >>> G = ThinGraph() >>> G.add_edge(2, 1) @@ -243,25 +264,6 @@ class DiGraph(Graph): a dictionary-like object. """ - def __getstate__(self): - attr = self.__dict__.copy() - # remove lazy property attributes - if 'nodes' in attr: - del attr['nodes'] - if 'edges' in attr: - del attr['edges'] - if 'out_edges' in attr: - del attr['out_edges'] - if 'in_edges' in attr: - del attr['in_edges'] - if 'degree' in attr: - del attr['degree'] - if 'in_degree' in attr: - del attr['in_degree'] - if 'out_degree' in attr: - del attr['out_degree'] - return attr - def __init__(self, incoming_graph_data=None, **attr): """Initialize a graph with edges, name, or graph attributes.
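Reviewer note (not part of the patch): the `__getstate__` hook deleted above was only needed because `edges`, `degree` and the other reporting views used to cache themselves in the instance `__dict__`; later hunks in this file change those properties to build a fresh view on every access, so instances pickle cleanly without it. A doctest-style sketch of the behaviour this relies on:

>>> import pickle
>>> G = nx.DiGraph([(0, 1)])
>>> H = pickle.loads(pickle.dumps(G))  # no lazy views left in __dict__ to strip
>>> list(H.edges)
[(0, 1)]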
@@ -283,9 +285,9 @@ def __init__(self, incoming_graph_data=None, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G = nx.Graph(name='my graph') - >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph(name="my graph") + >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges >>> G = nx.Graph(e) Arbitrary graph attribute pairs (key=value) may be assigned @@ -295,19 +297,20 @@ def __init__(self, incoming_graph_data=None, **attr): {'day': 'Friday'} """ - self.node_dict_factory = ndf = self.node_dict_factory + self.graph_attr_dict_factory = self.graph_attr_dict_factory + self.node_dict_factory = self.node_dict_factory + self.node_attr_dict_factory = self.node_attr_dict_factory self.adjlist_outer_dict_factory = self.adjlist_outer_dict_factory self.adjlist_inner_dict_factory = self.adjlist_inner_dict_factory self.edge_attr_dict_factory = self.edge_attr_dict_factory - self.root_graph = self - self.graph = {} # dictionary for graph attributes - self._node = ndf() # dictionary for node attributes + self.graph = self.graph_attr_dict_factory() # dictionary for graph attributes + self._node = self.node_dict_factory() # dictionary for node attr # We store two adjacency lists: - # the predecessors of node n are stored in the dict self._pred + # the predecessors of node n are stored in the dict self._pred # the successors of node n are stored in the dict self._succ=self._adj - self._adj = ndf() # empty adjacency dictionary - self._pred = ndf() # predecessor + self._adj = self.adjlist_outer_dict_factory() # empty adjacency dict + self._pred = self.adjlist_outer_dict_factory() # predecessor self._succ = self._adj # successor # attempt to load graph with data @@ -388,9 +391,9 @@ def add_node(self, node_for_adding, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_node(1) - >>> G.add_node('Hello') + >>> G.add_node("Hello") >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) >>> G.add_node(K3) >>> G.number_of_nodes() @@ -399,7 +402,7 @@ def add_node(self, node_for_adding, **attr): Use keywords set/change node attributes: >>> G.add_node(1, size=10) - >>> G.add_node(3, weight=0.4, UTM=('13S', 382871, 3972649)) + >>> G.add_node(3, weight=0.4, UTM=("13S", 382871, 3972649)) Notes ----- @@ -414,7 +417,8 @@ def add_node(self, node_for_adding, **attr): if node_for_adding not in self._succ: self._succ[node_for_adding] = self.adjlist_inner_dict_factory() self._pred[node_for_adding] = self.adjlist_inner_dict_factory() - self._node[node_for_adding] = attr + attr_dict = self._node[node_for_adding] = self.node_attr_dict_factory() + attr_dict.update(attr) else: # update attr even if node already exists self._node[node_for_adding].update(attr) @@ -439,8 +443,8 @@ def add_nodes_from(self, nodes_for_adding, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_nodes_from('Hello') + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_nodes_from("Hello") >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) >>> G.add_nodes_from(K3) >>> sorted(G.nodes(), key=str) @@ -453,12 +457,12 @@ def add_nodes_from(self, nodes_for_adding, **attr): Use (node, attrdict) tuples to update attributes for specific nodes. 
- >>> G.add_nodes_from([(1, dict(size=11)), (2, {'color':'blue'})]) - >>> G.nodes[1]['size'] + >>> G.add_nodes_from([(1, dict(size=11)), (2, {"color": "blue"})]) + >>> G.nodes[1]["size"] 11 >>> H = nx.Graph() >>> H.add_nodes_from(G.nodes(data=True)) - >>> H.nodes[1]['size'] + >>> H.nodes[1]["size"] 11 """ @@ -470,7 +474,8 @@ def add_nodes_from(self, nodes_for_adding, **attr): if n not in self._succ: self._succ[n] = self.adjlist_inner_dict_factory() self._pred[n] = self.adjlist_inner_dict_factory() - self._node[n] = attr.copy() + attr_dict = self._node[n] = self.node_attr_dict_factory() + attr_dict.update(attr) else: self._node[n].update(attr) except TypeError: @@ -480,7 +485,8 @@ def add_nodes_from(self, nodes_for_adding, **attr): self._pred[nn] = self.adjlist_inner_dict_factory() newdict = attr.copy() newdict.update(ndict) - self._node[nn] = newdict + attr_dict = self._node[nn] = self.node_attr_dict_factory() + attr_dict.update(newdict) else: olddict = self._node[nn] olddict.update(attr) @@ -519,14 +525,14 @@ def remove_node(self, n): try: nbrs = self._succ[n] del self._node[n] - except KeyError: # NetworkXError if n not in self - raise NetworkXError("The node %s is not in the digraph." % (n,)) + except KeyError as e: # NetworkXError if n not in self + raise NetworkXError(f"The node {n} is not in the digraph.") from e for u in nbrs: - del self._pred[u][n] # remove all edges n-u in digraph - del self._succ[n] # remove node from succ + del self._pred[u][n] # remove all edges n-u in digraph + del self._succ[n] # remove node from succ for u in self._pred[n]: - del self._succ[u][n] # remove all edges n-u in digraph - del self._pred[n] # remove node from pred + del self._succ[u][n] # remove all edges n-u in digraph + del self._pred[n] # remove node from pred def remove_nodes_from(self, nodes): """Remove multiple nodes. 
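Reviewer note (not part of the patch): a pattern repeated throughout these hunks is replacing a bare re-raise inside `except KeyError as e:` with `raise NetworkXError(...) from e`. That is PEP 3134 exception chaining: the triggering `KeyError` is preserved as `__cause__` instead of being swallowed. A minimal doctest-style sketch, assuming the networkx 2.5 behaviour introduced here:

>>> G = nx.DiGraph()
>>> try:
...     G.remove_node(0)  # node 0 is not in the graph, raises NetworkXError
... except nx.NetworkXError as err:
...     type(err.__cause__).__name__  # the original KeyError is chained
'KeyError'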
@@ -557,11 +563,11 @@ def remove_nodes_from(self, nodes): succs = self._succ[n] del self._node[n] for u in succs: - del self._pred[u][n] # remove all edges n-u in digraph - del self._succ[n] # now remove node + del self._pred[u][n] # remove all edges n-u in digraph + del self._succ[n] # now remove node for u in self._pred[n]: - del self._succ[u][n] # remove all edges n-u in digraph - del self._pred[n] # now remove node + del self._succ[u][n] # remove all edges n-u in digraph + del self._pred[n] # now remove node except KeyError: pass # silent failure on remove @@ -598,11 +604,11 @@ def add_edge(self, u_of_edge, v_of_edge, **attr): -------- The following all add the edge e=(1, 2) to graph G: - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> e = (1, 2) - >>> G.add_edge(1, 2) # explicit two-node form - >>> G.add_edge(*e) # single edge as tuple of two nodes - >>> G.add_edges_from( [(1, 2)] ) # add edges from iterable container + >>> G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container Associate data to edges using keywords: @@ -620,11 +626,11 @@ def add_edge(self, u_of_edge, v_of_edge, **attr): if u not in self._succ: self._succ[u] = self.adjlist_inner_dict_factory() self._pred[u] = self.adjlist_inner_dict_factory() - self._node[u] = {} + self._node[u] = self.node_attr_dict_factory() if v not in self._succ: self._succ[v] = self.adjlist_inner_dict_factory() self._pred[v] = self.adjlist_inner_dict_factory() - self._node[v] = {} + self._node[v] = self.node_attr_dict_factory() # add the edge datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) datadict.update(attr) @@ -659,15 +665,15 @@ def add_edges_from(self, ebunch_to_add, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples >>> e = zip(range(0, 3), range(1, 4)) - >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 + >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 Associate data to edges >>> G.add_edges_from([(1, 2), (2, 3)], weight=3) - >>> G.add_edges_from([(3, 4), (1, 4)], label='WN2898') + >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898") """ for e in ebunch_to_add: ne = len(e) @@ -677,16 +683,15 @@ def add_edges_from(self, ebunch_to_add, **attr): u, v = e dd = {} else: - raise NetworkXError( - "Edge tuple %s must be a 2-tuple or 3-tuple." 
% (e,)) + raise NetworkXError(f"Edge tuple {e} must be a 2-tuple or 3-tuple.") if u not in self._succ: self._succ[u] = self.adjlist_inner_dict_factory() self._pred[u] = self.adjlist_inner_dict_factory() - self._node[u] = {} + self._node[u] = self.node_attr_dict_factory() if v not in self._succ: self._succ[v] = self.adjlist_inner_dict_factory() self._pred[v] = self.adjlist_inner_dict_factory() - self._node[v] = {} + self._node[v] = self.node_attr_dict_factory() datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) datadict.update(attr) datadict.update(dd) @@ -712,19 +717,19 @@ def remove_edge(self, u, v): Examples -------- - >>> G = nx.Graph() # or DiGraph, etc + >>> G = nx.Graph() # or DiGraph, etc >>> nx.add_path(G, [0, 1, 2, 3]) >>> G.remove_edge(0, 1) >>> e = (1, 2) - >>> G.remove_edge(*e) # unpacks e from an edge tuple - >>> e = (2, 3, {'weight':7}) # an edge with attribute data - >>> G.remove_edge(*e[:2]) # select first part of edge tuple + >>> G.remove_edge(*e) # unpacks e from an edge tuple + >>> e = (2, 3, {"weight": 7}) # an edge with attribute data + >>> G.remove_edge(*e[:2]) # select first part of edge tuple """ try: del self._succ[u][v] del self._pred[v][u] - except KeyError: - raise NetworkXError("The edge %s-%s not in graph." % (u, v)) + except KeyError as e: + raise NetworkXError(f"The edge {u}-{v} not in graph.") from e def remove_edges_from(self, ebunch): """Remove all edges specified in ebunch. @@ -759,38 +764,75 @@ def remove_edges_from(self, ebunch): del self._pred[v][u] def has_successor(self, u, v): - """Return True if node u has successor v. + """Returns True if node u has successor v. This is true if graph has the edge u->v. """ - return (u in self._succ and v in self._succ[u]) + return u in self._succ and v in self._succ[u] def has_predecessor(self, u, v): - """Return True if node u has predecessor v. + """Returns True if node u has predecessor v. This is true if graph has the edge u<-v. """ - return (u in self._pred and v in self._pred[u]) + return u in self._pred and v in self._pred[u] def successors(self, n): - """Return an iterator over successor nodes of n. + """Returns an iterator over successor nodes of n. + + A successor of n is a node m such that there exists a directed + edge from n to m. + Parameters + ---------- + n : node + A node in the graph + + Raises + ------- + NetworkXError + If n is not in the graph. + + See Also + -------- + predecessors + + Notes + ----- neighbors() and successors() are the same. """ try: return iter(self._succ[n]) - except KeyError: - raise NetworkXError("The node %s is not in the digraph." % (n,)) + except KeyError as e: + raise NetworkXError(f"The node {n} is not in the digraph.") from e # digraph definitions neighbors = successors def predecessors(self, n): - """Return an iterator over predecessor nodes of n.""" + """Returns an iterator over predecessor nodes of n. + + A predecessor of n is a node m such that there exists a directed + edge from m to n. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------- + NetworkXError + If n is not in the graph. + + See Also + -------- + successors + """ try: return iter(self._pred[n]) - except KeyError: - raise NetworkXError("The node %s is not in the digraph." 
% (n,)) + except KeyError as e: + raise NetworkXError(f"The node {n} is not in the digraph.") from e @property def edges(self): @@ -838,14 +880,14 @@ def edges(self): Examples -------- - >>> G = nx.DiGraph() # or MultiDiGraph, etc + >>> G = nx.DiGraph() # or MultiDiGraph, etc >>> nx.add_path(G, [0, 1, 2]) >>> G.add_edge(2, 3, weight=5) >>> [e for e in G.edges] [(0, 1), (1, 2), (2, 3)] >>> G.edges.data() # default data is {} (empty dict) OutEdgeDataView([(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]) - >>> G.edges.data('weight', default=1) + >>> G.edges.data("weight", default=1) OutEdgeDataView([(0, 1, 1), (1, 2, 1), (2, 3, 5)]) >>> G.edges([0, 2]) # only edges incident to these nodes OutEdgeDataView([(0, 1), (2, 3)]) @@ -853,9 +895,7 @@ def edges(self): OutEdgeDataView([(0, 1)]) """ - self.__dict__['edges'] = edges = OutEdgeView(self) - self.__dict__['out_edges'] = edges - return edges + return OutEdgeView(self) # alias out_edges to edges out_edges = edges @@ -889,8 +929,7 @@ def in_edges(self): -------- edges """ - self.__dict__['in_edges'] = in_edges = InEdgeView(self) - return in_edges + return InEdgeView(self) @property def degree(self): @@ -929,16 +968,15 @@ def degree(self): Examples -------- - >>> G = nx.DiGraph() # or MultiDiGraph + >>> G = nx.DiGraph() # or MultiDiGraph >>> nx.add_path(G, [0, 1, 2, 3]) - >>> G.degree(0) # node 0 with degree 1 + >>> G.degree(0) # node 0 with degree 1 1 >>> list(G.degree([0, 1, 2])) [(0, 1), (1, 2), (2, 2)] """ - self.__dict__['degree'] = degree = DiDegreeView(self) - return degree + return DiDegreeView(self) @property def in_degree(self): @@ -979,14 +1017,13 @@ def in_degree(self): -------- >>> G = nx.DiGraph() >>> nx.add_path(G, [0, 1, 2, 3]) - >>> G.in_degree(0) # node 0 with degree 0 + >>> G.in_degree(0) # node 0 with degree 0 0 >>> list(G.in_degree([0, 1, 2])) [(0, 0), (1, 1), (2, 1)] """ - self.__dict__['in_degree'] = in_degree = InDegreeView(self) - return in_degree + return InDegreeView(self) @property def out_degree(self): @@ -1027,14 +1064,13 @@ def out_degree(self): -------- >>> G = nx.DiGraph() >>> nx.add_path(G, [0, 1, 2, 3]) - >>> G.out_degree(0) # node 0 with degree 1 + >>> G.out_degree(0) # node 0 with degree 1 1 >>> list(G.out_degree([0, 1, 2])) [(0, 1), (1, 1), (2, 1)] """ - self.__dict__['out_degree'] = out_degree = OutDegreeView(self) - return out_degree + return OutDegreeView(self) def clear(self): """Remove all nodes and edges from the graph. @@ -1049,124 +1085,41 @@ def clear(self): [] >>> list(G.edges) [] + """ self._succ.clear() self._pred.clear() self._node.clear() self.graph.clear() - def is_multigraph(self): - """Return True if graph is a multigraph, False otherwise.""" - return False - - def is_directed(self): - """Return True if graph is directed, False otherwise.""" - return True - - def fresh_copy(self): - """Return a fresh copy graph with the same data structure. - - A fresh copy has no nodes, edges or graph attributes. It is - the same data structure as the current graph. This method is - typically used to create an empty version of the graph. - - Notes - ----- - If you subclass the base class you should overwrite this method - to return your class of graph. - """ - return DiGraph() - - def copy(self, as_view=False): - """Return a copy of the graph. - - The copy method by default returns a shallow copy of the graph - and attributes. That is, if an attribute is a container, that - container is shared by the original an the copy. - Use Python's `copy.deepcopy` for new containers. 
- - If `as_view` is True then a view is returned instead of a copy. - - Notes - ----- - All copies reproduce the graph structure, but data attributes - may be handled in different ways. There are four types of copies - of a graph that people might want. - - Deepcopy -- The default behavior is a "deepcopy" where the graph - structure as well as all data attributes and any objects they might - contain are copied. The entire graph object is new so that changes - in the copy do not affect the original object. (see Python's - copy.deepcopy) - - Data Reference (Shallow) -- For a shallow copy the graph structure - is copied but the edge, node and graph attribute dicts are - references to those in the original graph. This saves - time and memory but could cause confusion if you change an attribute - in one graph and it changes the attribute in the other. - NetworkX does not provide this level of shallow copy. - - Independent Shallow -- This copy creates new independent attribute - dicts and then does a shallow copy of the attributes. That is, any - attributes that are containers are shared between the new graph - and the original. This is exactly what `dict.copy()` provides. - You can obtain this style copy using: - - >>> G = nx.path_graph(5) - >>> H = G.copy() - >>> H = G.copy(as_view=False) - >>> H = nx.Graph(G) - >>> H = G.fresh_copy().__class__(G) - - Fresh Data -- For fresh data, the graph structure is copied while - new empty data attribute dicts are created. The resulting graph - is independent of the original and it has no edge, node or graph - attributes. Fresh copies are not enabled. Instead use: - - >>> H = G.fresh_copy() - >>> H.add_nodes_from(G) - >>> H.add_edges_from(G.edges) - - View -- Inspired by dict-views, graph-views act like read-only - versions of the original graph, providing a copy of the original - structure without requiring any memory for copying the information. - - See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. - - Parameters - ---------- - as_view : bool, optional (default=False) - If True, the returned graph-view provides a read-only view - of the original graph without actually copying any data. - - Returns - ------- - G : Graph - A copy of the graph. - - See Also - -------- - to_directed: return a directed copy of the graph. + def clear_edges(self): + """Remove all edges from the graph without altering nodes. Examples -------- >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> H = G.copy() + >>> G.clear_edges() + >>> list(G.nodes) + [0, 1, 2, 3] + >>> list(G.edges) + [] """ - if as_view is True: - return nx.graphviews.DiGraphView(self) - G = self.fresh_copy() - G.graph.update(self.graph) - G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) - G.add_edges_from((u, v, datadict.copy()) - for u, nbrs in self._adj.items() - for v, datadict in nbrs.items()) - return G + for predecessor_dict in self._pred.values(): + predecessor_dict.clear() + for successor_dict in self._succ.values(): + successor_dict.clear() + + def is_multigraph(self): + """Returns True if graph is a multigraph, False otherwise.""" + return False + + def is_directed(self): + """Returns True if graph is directed, False otherwise.""" + return True def to_undirected(self, reciprocal=False, as_view=False): - """Return an undirected representation of the digraph. + """Returns an undirected representation of the digraph. 
Parameters ---------- @@ -1206,7 +1159,7 @@ def to_undirected(self, reciprocal=False, as_view=False): shallow copy of the data. See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. Warning: If you have subclassed DiGraph to use dict-like objects in the data structure, those changes do not transfer to the @@ -1214,7 +1167,7 @@ def to_undirected(self, reciprocal=False, as_view=False): Examples -------- - >>> G = nx.path_graph(2) # or MultiGraph, etc + >>> G = nx.path_graph(2) # or MultiGraph, etc >>> H = G.to_directed() >>> list(H.edges) [(0, 1), (1, 0)] @@ -1222,69 +1175,30 @@ def to_undirected(self, reciprocal=False, as_view=False): >>> list(G2.edges) [(0, 1)] """ + graph_class = self.to_undirected_class() if as_view is True: - return nx.graphviews.GraphView(self) + return nx.graphviews.generic_graph_view(self, Graph) # deepcopy when not a view G = Graph() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) if reciprocal is True: - G.add_edges_from((u, v, deepcopy(d)) - for u, nbrs in self._adj.items() - for v, d in nbrs.items() - if v in self._pred[u]) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + if v in self._pred[u] + ) else: - G.add_edges_from((u, v, deepcopy(d)) - for u, nbrs in self._adj.items() - for v, d in nbrs.items()) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) return G - def subgraph(self, nodes): - """Return a SubGraph view of the subgraph induced on `nodes`. - - The induced subgraph of the graph contains the nodes in `nodes` - and the edges between those nodes. - - Parameters - ---------- - nodes : list, iterable - A container of nodes which will be iterated through once. - - Returns - ------- - G : SubGraph View - A subgraph view of the graph. The graph structure cannot be - changed but node/edge attributes can and are shared with the - original graph. - - Notes - ----- - The graph, edge and node attributes are shared with the original graph. - Changes to the graph structure is ruled out by the view, but changes - to attributes are reflected in the original graph. - - To create a subgraph with its own copy of the edge/node attributes use: - G.subgraph(nodes).copy() - - For an inplace reduction of a graph to a subgraph you can remove nodes: - G.remove_nodes_from([n for n in G if n not in set(nodes)]) - - Examples - -------- - >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> H = G.subgraph([0, 1, 2]) - >>> list(H.edges) - [(0, 1), (1, 2)] - """ - induced_nodes = nx.filters.show_nodes(self.nbunch_iter(nodes)) - SubGraph = nx.graphviews.SubDiGraph - # if already a subgraph, don't make a chain - if hasattr(self, '_NODE_OK'): - return SubGraph(self._graph, induced_nodes, self._EDGE_OK) - return SubGraph(self, induced_nodes) - def reverse(self, copy=True): - """Return the reverse of the graph. + """Returns the reverse of the graph. The reverse is a graph with the same nodes and edges but with the directions of the edges reversed. @@ -1297,10 +1211,9 @@ def reverse(self, copy=True): the original graph. 
""" if copy: - H = self.fresh_copy() + H = self.__class__() H.graph.update(deepcopy(self.graph)) - H.add_nodes_from((n, deepcopy(d)) for n, d in self.node.items()) - H.add_edges_from((v, u, deepcopy(d)) for u, v, d - in self.edges(data=True)) + H.add_nodes_from((n, deepcopy(d)) for n, d in self.nodes.items()) + H.add_edges_from((v, u, deepcopy(d)) for u, v, d in self.edges(data=True)) return H - return nx.graphviews.ReverseView(self) + return nx.graphviews.reverse_view(self) diff --git a/networkx/classes/filters.py b/networkx/classes/filters.py index de8b61e..aefcbdf 100644 --- a/networkx/classes/filters.py +++ b/networkx/classes/filters.py @@ -1,24 +1,20 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov), -# Pieter Swart (swart@lanl.gov), -# Dan Schult(dschult@colgate.edu) """Filter factories to hide or show sets of nodes and edges. These filters return the function used when creating `SubGraph`. """ -__all__ = ['no_filter', 'hide_nodes', - 'hide_edges', 'hide_multiedges', - 'hide_diedges', 'hide_multidiedges', - 'show_nodes', - 'show_edges', 'show_multiedges', - 'show_diedges', 'show_multidiedges', - ] +__all__ = [ + "no_filter", + "hide_nodes", + "hide_edges", + "hide_multiedges", + "hide_diedges", + "hide_multidiedges", + "show_nodes", + "show_edges", + "show_multiedges", + "show_diedges", + "show_multidiedges", +] def no_filter(*items): @@ -51,7 +47,7 @@ def hide_multiedges(edges): # write show_nodes as a class to make SubGraph pickleable -class show_nodes(object): +class show_nodes: def __init__(self, nodes): self.nodes = set(nodes) diff --git a/networkx/classes/function.py b/networkx/classes/function.py index 50853ad..0f2c351 100644 --- a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -1,49 +1,65 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg -# Pieter Swart -# Dan Schult """Functional interface to graph methods and assorted utilities. 
""" -from __future__ import division from collections import Counter from itertools import chain -try: - from itertools import zip_longest -except ImportError: - from itertools import izip_longest as zip_longest import networkx as nx from networkx.utils import pairwise, not_implemented_for - -__all__ = ['nodes', 'edges', 'degree', 'degree_histogram', 'neighbors', - 'number_of_nodes', 'number_of_edges', 'density', - 'is_directed', 'info', 'freeze', 'is_frozen', 'subgraph', - 'induced_subgraph', 'edge_subgraph', 'restricted_view', - 'reverse_view', 'to_directed', 'to_undirected', - 'add_star', 'add_path', 'add_cycle', - 'create_empty_copy', 'set_node_attributes', - 'get_node_attributes', 'set_edge_attributes', - 'get_edge_attributes', 'all_neighbors', 'non_neighbors', - 'non_edges', 'common_neighbors', 'is_weighted', - 'is_negatively_weighted', 'is_empty', - 'selfloop_edges', 'nodes_with_selfloops', 'number_of_selfloops', - ] +from networkx.classes.graphviews import subgraph_view, reverse_view + + +__all__ = [ + "nodes", + "edges", + "degree", + "degree_histogram", + "neighbors", + "number_of_nodes", + "number_of_edges", + "density", + "is_directed", + "info", + "freeze", + "is_frozen", + "subgraph", + "subgraph_view", + "induced_subgraph", + "reverse_view", + "edge_subgraph", + "restricted_view", + "to_directed", + "to_undirected", + "add_star", + "add_path", + "add_cycle", + "create_empty_copy", + "set_node_attributes", + "get_node_attributes", + "set_edge_attributes", + "get_edge_attributes", + "all_neighbors", + "non_neighbors", + "non_edges", + "common_neighbors", + "is_weighted", + "is_negatively_weighted", + "is_empty", + "selfloop_edges", + "nodes_with_selfloops", + "number_of_selfloops", + "path_weight", + "is_path", +] def nodes(G): - """Return an iterator over the graph nodes.""" + """Returns an iterator over the graph nodes.""" return G.nodes() def edges(G, nbunch=None): - """Return an edge view of edges incident to nodes in nbunch. + """Returns an edge view of edges incident to nodes in nbunch. Return all edges if nbunch is unspecified or nbunch=None. @@ -53,29 +69,29 @@ def edges(G, nbunch=None): def degree(G, nbunch=None, weight=None): - """Return a degree view of single node or of nbunch of nodes. + """Returns a degree view of single node or of nbunch of nodes. If nbunch is omitted, then return degrees of *all* nodes. """ return G.degree(nbunch, weight) def neighbors(G, n): - """Return a list of nodes connected to node n. """ + """Returns a list of nodes connected to node n. """ return G.neighbors(n) def number_of_nodes(G): - """Return the number of nodes in the graph.""" + """Returns the number of nodes in the graph.""" return G.number_of_nodes() def number_of_edges(G): - """Return the number of edges in the graph. """ + """Returns the number of edges in the graph. """ return G.number_of_edges() def density(G): - r"""Return the density of a graph. + r"""Returns the density of a graph. The density for undirected graphs is @@ -110,7 +126,7 @@ def density(G): def degree_histogram(G): - """Return a list of the frequency of each degree value. + """Returns a list of the frequency of each degree value. Parameters ---------- @@ -137,7 +153,7 @@ def is_directed(G): return G.is_directed() -def frozen(*args): +def frozen(*args, **kwargs): """Dummy method for raising errors when trying to modify frozen graphs""" raise nx.NetworkXError("Frozen graph can't be modified") @@ -158,9 +174,9 @@ def freeze(G): >>> G = nx.path_graph(4) >>> G = nx.freeze(G) >>> try: - ... G.add_edge(4, 5) + ... 
G.add_edge(4, 5) ... except nx.NetworkXError as e: - ... print(str(e)) + ... print(str(e)) Frozen graph can't be modified Notes @@ -183,6 +199,7 @@ def freeze(G): G.remove_nodes_from = frozen G.add_edge = frozen G.add_edges_from = frozen + G.add_weighted_edges_from = frozen G.remove_edge = frozen G.remove_edges_from = frozen G.clear = frozen @@ -191,7 +208,7 @@ def freeze(G): def is_frozen(G): - """Return True if graph is frozen. + """Returns True if graph is frozen. Parameters ---------- @@ -234,7 +251,11 @@ def add_star(G_to_add_to, nodes_for_star, **attr): >>> nx.add_star(G, [10, 11, 12], weight=2) """ nlist = iter(nodes_for_star) - v = next(nlist) + try: + v = next(nlist) + except StopIteration: + return + G_to_add_to.add_node(v) edges = ((v, n) for n in nlist) G_to_add_to.add_edges_from(edges, **attr) @@ -290,15 +311,23 @@ def add_cycle(G_to_add_to, nodes_for_cycle, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> nx.add_cycle(G, [0, 1, 2, 3]) >>> nx.add_cycle(G, [10, 11, 12], weight=7) """ - G_to_add_to.add_edges_from(pairwise(nodes_for_cycle, cyclic=True), **attr) + nlist = iter(nodes_for_cycle) + try: + first_node = next(nlist) + except StopIteration: + return + G_to_add_to.add_node(first_node) + G_to_add_to.add_edges_from( + pairwise(chain((first_node,), nlist), cyclic=True), **attr + ) def subgraph(G, nbunch): - """Return the subgraph induced on nodes in nbunch. + """Returns the subgraph induced on nodes in nbunch. Parameters ---------- @@ -321,7 +350,7 @@ def subgraph(G, nbunch): def induced_subgraph(G, nbunch): - """Return a SubGraph view of `G` showing only nodes in nbunch. + """Returns a SubGraph view of `G` showing only nodes in nbunch. The induced subgraph of a graph on a set of nodes N is the graph with nodes N and edges from G which have both ends in N. @@ -355,20 +384,13 @@ def induced_subgraph(G, nbunch): Examples -------- - >>> import networkx as nx >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc >>> H = G.subgraph([0, 1, 2]) >>> list(H.edges) [(0, 1), (1, 2)] """ induced_nodes = nx.filters.show_nodes(G.nbunch_iter(nbunch)) - if G.is_multigraph(): - if G.is_directed(): - return nx.graphviews.SubMultiDiGraph(G, induced_nodes) - return nx.graphviews.SubMultiGraph(G, induced_nodes) - if G.is_directed(): - return nx.graphviews.SubDiGraph(G, induced_nodes) - return nx.graphviews.SubGraph(G, induced_nodes) + return nx.graphviews.subgraph_view(G, induced_nodes) def edge_subgraph(G, edges): @@ -397,14 +419,13 @@ def edge_subgraph(G, edges): If you create a subgraph of a subgraph recursively you can end up with a chain of subgraphs that becomes very slow with about 15 nested subgraph views. Luckily the edge_subgraph filter nests - nicely so you can use the original graph (`subgraph.root_graph`) - as G in this function to avoid chains. We do not rule out chains - programmatically so that odd cases like an `edge_subgraph` of a - `restricted_view` can be created. + nicely so you can use the original graph as G in this function + to avoid chains. We do not rule out chains programmatically so + that odd cases like an `edge_subgraph` of a `restricted_view` + can be created. 
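Reviewer note (not part of the patch): a sketch of the chaining pitfall described in the Notes above. Filtering a view of a view stacks one filter layer per level, while filtering the original graph keeps a single layer; both yield the same edges:

>>> G = nx.path_graph(5)
>>> H = G.edge_subgraph([(0, 1), (1, 2)])
>>> H2 = H.edge_subgraph([(0, 1)])  # a view over the view H
>>> H3 = G.edge_subgraph([(0, 1)])  # a single view over the original G
>>> list(H2.edges) == list(H3.edges)
True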
Examples -------- - >>> import networkx as nx >>> G = nx.path_graph(5) >>> H = G.edge_subgraph([(0, 1), (3, 4)]) >>> list(H.nodes) @@ -413,7 +434,6 @@ def edge_subgraph(G, edges): [(0, 1), (3, 4)] """ nxf = nx.filters - nxg = nx.graphviews edges = set(edges) nodes = set() for e in edges: @@ -422,14 +442,14 @@ def edge_subgraph(G, edges): if G.is_multigraph(): if G.is_directed(): induced_edges = nxf.show_multidiedges(edges) - return nxg.SubMultiDiGraph(G, induced_nodes, induced_edges) - induced_edges = nxf.show_multiedges(edges) - return nxg.SubMultiGraph(G, induced_nodes, induced_edges) - if G.is_directed(): - induced_edges = nxf.show_diedges(edges) - return nxg.SubDiGraph(G, induced_nodes, induced_edges) - induced_edges = nxf.show_edges(edges) - return nxg.SubGraph(G, induced_nodes, induced_edges) + else: + induced_edges = nxf.show_multiedges(edges) + else: + if G.is_directed(): + induced_edges = nxf.show_diedges(edges) + else: + induced_edges = nxf.show_edges(edges) + return nx.graphviews.subgraph_view(G, induced_nodes, induced_edges) def restricted_view(G, nodes, edges): @@ -460,13 +480,12 @@ def restricted_view(G, nodes, edges): If you create a subgraph of a subgraph recursively you may end up with a chain of subgraph views. Such chains can get quite slow for lengths near 15. To avoid long chains, try to make your subgraph - based on the original graph (`subgraph.root_graph`). We do not - rule out chains programmatically so that odd cases like an - `edge_subgraph` of a `restricted_view` can be created. + based on the original graph. We do not rule out chains programmatically + so that odd cases like an `edge_subgraph` of a `restricted_view` + can be created. Examples -------- - >>> import networkx as nx >>> G = nx.path_graph(5) >>> H = nx.restricted_view(G, [0], [(1, 2), (3, 4)]) >>> list(H.nodes) @@ -475,54 +494,42 @@ def restricted_view(G, nodes, edges): [(2, 3)] """ nxf = nx.filters - nxg = nx.graphviews - h_nodes = nxf.hide_nodes(nodes) + hide_nodes = nxf.hide_nodes(nodes) if G.is_multigraph(): if G.is_directed(): - h_edges = nxf.hide_multidiedges(edges) - return nxg.SubMultiDiGraph(G, h_nodes, h_edges) - h_edges = nxf.hide_multiedges(edges) - return nxg.SubMultiGraph(G, h_nodes, h_edges) - if G.is_directed(): - h_edges = nxf.hide_diedges(edges) - return nxg.SubDiGraph(G, h_nodes, h_edges) - h_edges = nxf.hide_edges(edges) - return nxg.SubGraph(G, h_nodes, h_edges) - - -@not_implemented_for('undirected') -def reverse_view(digraph): - """Provide a reverse view of the digraph with edges reversed. - - Identical to digraph.reverse(copy=False) - """ - if digraph.is_multigraph(): - return nx.graphviews.MultiReverseView(digraph) - return nx.graphviews.ReverseView(digraph) + hide_edges = nxf.hide_multidiedges(edges) + else: + hide_edges = nxf.hide_multiedges(edges) + else: + if G.is_directed(): + hide_edges = nxf.hide_diedges(edges) + else: + hide_edges = nxf.hide_edges(edges) + return nx.graphviews.subgraph_view(G, hide_nodes, hide_edges) def to_directed(graph): - """Return a directed view of the graph `graph`. + """Returns a directed view of the graph `graph`. Identical to graph.to_directed(as_view=True) + Note that graph.to_directed defaults to `as_view=False` + while this function always provides a view. """ - if graph.is_multigraph(): - return nx.graphviews.MultiDiGraphView(graph) - return nx.graphviews.DiGraphView(graph) + return graph.to_directed(as_view=True) def to_undirected(graph): - """Return an undirected view of the graph `graph`. 
+ """Returns an undirected view of the graph `graph`. Identical to graph.to_undirected(as_view=True) + Note that graph.to_undirected defaults to `as_view=False` + while this function always provides a view. """ - if graph.is_multigraph(): - return nx.graphviews.MultiGraphView(graph) - return nx.graphviews.GraphView(graph) + return graph.to_undirected(as_view=True) def create_empty_copy(G, with_data=True): - """Return a copy of the graph G with all of the edges removed. + """Returns a copy of the graph G with all of the edges removed. Parameters ---------- @@ -537,7 +544,7 @@ def create_empty_copy(G, with_data=True): empty_graph """ - H = G.fresh_copy() + H = G.__class__() H.add_nodes_from(G.nodes(data=with_data)) if with_data: H.graph.update(G.graph) @@ -545,7 +552,10 @@ def create_empty_copy(G, with_data=True): def info(G, n=None): - """Print short summary of information for the graph G or the node n. + """Return a summary of information for the graph G or a single node n. + + The summary includes the number of nodes and edges (or neighbours for a single + node), and their average degree. Parameters ---------- @@ -553,38 +563,51 @@ def info(G, n=None): A graph n : node (any hashable) A node in the graph G + + Returns + ------- + info : str + A string containing the short summary + + Raises + ------ + NetworkXError + If n is not in the graph G + """ - info = '' # append this all to a string + info = "" # append this all to a string if n is None: - info += "Name: %s\n" % G.name + info += f"Name: {G.name}\n" type_name = [type(G).__name__] - info += "Type: %s\n" % ",".join(type_name) - info += "Number of nodes: %d\n" % G.number_of_nodes() - info += "Number of edges: %d\n" % G.number_of_edges() + info += f"Type: {','.join(type_name)}\n" + info += f"Number of nodes: {G.number_of_nodes()}\n" + info += f"Number of edges: {G.number_of_edges()}\n" nnodes = G.number_of_nodes() if len(G) > 0: if G.is_directed(): deg = sum(d for n, d in G.in_degree()) / float(nnodes) - info += "Average in degree: %8.4f\n" % deg + info += f"Average in degree: {deg:8.4f}\n" deg = sum(d for n, d in G.out_degree()) / float(nnodes) - info += "Average out degree: %8.4f" % deg + info += f"Average out degree: {deg:8.4f}" else: s = sum(dict(G.degree()).values()) - info += "Average degree: %8.4f" % (float(s) / float(nnodes)) - + info += f"Average degree: {(float(s) / float(nnodes)):8.4f}" else: if n not in G: - raise nx.NetworkXError("node %s not in graph" % (n,)) - info += "Node % s has the following properties:\n" % n - info += "Degree: %d\n" % G.degree(n) + raise nx.NetworkXError(f"node {n} not in graph") + info += f"Node {n} has the following properties:\n" + info += f"Degree: {G.degree(n)}\n" info += "Neighbors: " - info += ' '.join(str(nbr) for nbr in G.neighbors(n)) + info += " ".join(str(nbr) for nbr in G.neighbors(n)) return info def set_node_attributes(G, values, name=None): """Sets node attributes from a given value or dictionary of values. + .. Warning:: The call order of arguments `values` and `name` + switched between v1.x & v2.x. + Parameters ---------- G : NetworkX Graph @@ -594,11 +617,12 @@ def set_node_attributes(G, values, name=None): not a dictionary, then it is treated as a single attribute value that is then applied to every node in `G`. This means that if you provide a mutable object, like a list, updates to that object - will be reflected in the node attribute for each edge. The attribute - name will be `name`. + will be reflected in the node attribute for every node. 
+ The attribute name will be `name`. - If `values` is a dict or a dict of dict, the corresponding node's - attributes will be updated to `values`. + If `values` is a dict or a dict of dict, it should be keyed + by node to either an attribute value or a dict of attribute key/value + pairs used to update the node's attributes. name : string (optional, default=None) Name of the node attribute to set if values is a scalar. @@ -613,8 +637,8 @@ def set_node_attributes(G, values, name=None): >>> bb = nx.betweenness_centrality(G) >>> isinstance(bb, dict) True - >>> nx.set_node_attributes(G, bb, 'betweenness') - >>> G.nodes[1]['betweenness'] + >>> nx.set_node_attributes(G, bb, "betweenness") + >>> G.nodes[1]["betweenness"] 1.0 If you provide a list as the second argument, updates to the list @@ -622,26 +646,27 @@ def set_node_attributes(G, values, name=None): >>> G = nx.path_graph(3) >>> labels = [] - >>> nx.set_node_attributes(G, labels, 'labels') - >>> labels.append('foo') - >>> G.nodes[0]['labels'] + >>> nx.set_node_attributes(G, labels, "labels") + >>> labels.append("foo") + >>> G.nodes[0]["labels"] ['foo'] - >>> G.nodes[1]['labels'] + >>> G.nodes[1]["labels"] ['foo'] - >>> G.nodes[2]['labels'] + >>> G.nodes[2]["labels"] ['foo'] If you provide a dictionary of dictionaries as the second argument, - the entire dictionary will be used to update node attributes:: + the outer dictionary is assumed to be keyed by node to an inner + dictionary of node attributes for that node:: >>> G = nx.path_graph(3) - >>> attrs = {0: {'attr1': 20, 'attr2': 'nothing'}, 1: {'attr2': 3}} + >>> attrs = {0: {"attr1": 20, "attr2": "nothing"}, 1: {"attr2": 3}} >>> nx.set_node_attributes(G, attrs) - >>> G.nodes[0]['attr1'] + >>> G.nodes[0]["attr1"] 20 - >>> G.nodes[0]['attr2'] + >>> G.nodes[0]["attr2"] 'nothing' - >>> G.nodes[1]['attr2'] + >>> G.nodes[1]["attr2"] 3 >>> G.nodes[2] {} @@ -683,8 +708,8 @@ def get_node_attributes(G, name): Examples -------- >>> G = nx.Graph() - >>> G.add_nodes_from([1, 2, 3], color='red') - >>> color = nx.get_node_attributes(G, 'color') + >>> G.add_nodes_from([1, 2, 3], color="red") + >>> color = nx.get_node_attributes(G, "color") >>> color[1] 'red' """ @@ -694,6 +719,9 @@ def get_node_attributes(G, name): def set_edge_attributes(G, values, name=None): """Sets edge attributes from a given value or dictionary of values. + .. Warning:: The call order of arguments `values` and `name` + switched between v1.x & v2.x. + Parameters ---------- G : NetworkX Graph @@ -706,11 +734,12 @@ def set_edge_attributes(G, values, name=None): will be reflected in the edge attribute for each edge. The attribute name will be `name`. - If `values` is a dict or a dict of dict, the corresponding edge' - attributes will be updated to `values`. For multigraphs, the tuples - must be of the form ``(u, v, key)``, where `u` and `v` are nodes - and `key` is the key corresponding to the edge. For non-multigraphs, - the keys must be tuples of the form ``(u, v)``. + If `values` is a dict or a dict of dict, it should be keyed + by edge tuple to either an attribute value or a dict of attribute + key/value pairs used to update the edge's attributes. + For multigraphs, the edge tuples must be of the form ``(u, v, key)``, + where `u` and `v` are nodes and `key` is the edge key. + For non-multigraphs, the keys must be tuples of the form ``(u, v)``. name : string (optional, default=None) Name of the edge attribute to set if values is a scalar. 
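Reviewer note (not part of the patch): given the v1.x/v2.x argument-order warning added above, a compact sketch of the three `values` forms `set_edge_attributes` accepts in the 2.x call order (`values` before `name`):

>>> G = nx.path_graph(3)
>>> nx.set_edge_attributes(G, 5, "capacity")  # scalar applied to every edge
>>> nx.set_edge_attributes(G, {(0, 1): 7}, "capacity")  # dict keyed by edge tuple
>>> nx.set_edge_attributes(G, {(1, 2): {"capacity": 9}})  # dict of attribute dicts
>>> G[0][1]["capacity"], G[1][2]["capacity"]
(7, 9)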
@@ -723,33 +752,32 @@ def set_edge_attributes(G, values, name=None): >>> G = nx.path_graph(3) >>> bb = nx.edge_betweenness_centrality(G, normalized=False) - >>> nx.set_edge_attributes(G, bb, 'betweenness') - >>> G.edges[1, 2]['betweenness'] + >>> nx.set_edge_attributes(G, bb, "betweenness") + >>> G.edges[1, 2]["betweenness"] 2.0 If you provide a list as the second argument, updates to the list will be reflected in the edge attribute for each edge:: >>> labels = [] - >>> nx.set_edge_attributes(G, labels, 'labels') - >>> labels.append('foo') - >>> G.edges[0, 1]['labels'] + >>> nx.set_edge_attributes(G, labels, "labels") + >>> labels.append("foo") + >>> G.edges[0, 1]["labels"] ['foo'] - >>> G.edges[1, 2]['labels'] + >>> G.edges[1, 2]["labels"] ['foo'] If you provide a dictionary of dictionaries as the second argument, the entire dictionary will be used to update edge attributes:: >>> G = nx.path_graph(3) - >>> attrs = {(0, 1): {'attr1': 20, 'attr2': 'nothing'}, - ... (1, 2): {'attr2': 3}} + >>> attrs = {(0, 1): {"attr1": 20, "attr2": "nothing"}, (1, 2): {"attr2": 3}} >>> nx.set_edge_attributes(G, attrs) - >>> G[0][1]['attr1'] + >>> G[0][1]["attr1"] 20 - >>> G[0][1]['attr2'] + >>> G[0][1]["attr2"] 'nothing' - >>> G[1][2]['attr2'] + >>> G[1][2]["attr2"] 3 """ @@ -808,8 +836,8 @@ def get_edge_attributes(G, name): Examples -------- >>> G = nx.Graph() - >>> nx.add_path(G, [1, 2, 3], color='red') - >>> color = nx.get_edge_attributes(G, 'color') + >>> nx.add_path(G, [1, 2, 3], color="red") + >>> color = nx.get_edge_attributes(G, "color") >>> color[(1, 2)] 'red' """ @@ -890,9 +918,9 @@ def non_edges(graph): yield (u, v) -@not_implemented_for('directed') +@not_implemented_for("directed") def common_neighbors(G, u, v): - """Return the common neighbors of two nodes in a graph. + """Returns the common neighbors of two nodes in a graph. Parameters ---------- @@ -919,16 +947,16 @@ def common_neighbors(G, u, v): [2, 3, 4] """ if u not in G: - raise nx.NetworkXError('u is not in the graph.') + raise nx.NetworkXError("u is not in the graph.") if v not in G: - raise nx.NetworkXError('v is not in the graph.') + raise nx.NetworkXError("v is not in the graph.") # Return a generator explicitly instead of yielding so that the above # checks are executed eagerly. return (w for w in G[u] if w in G[v] and w not in (u, v)) -def is_weighted(G, edge=None, weight='weight'): +def is_weighted(G, edge=None, weight="weight"): """Returns True if `G` has weighted edges. Parameters @@ -970,7 +998,7 @@ def is_weighted(G, edge=None, weight='weight'): if edge is not None: data = G.get_edge_data(*edge) if data is None: - msg = 'Edge {!r} does not exist.'.format(edge) + msg = f"Edge {edge!r} does not exist." raise nx.NetworkXError(msg) return weight in data @@ -981,7 +1009,7 @@ def is_weighted(G, edge=None, weight='weight'): return all(weight in data for u, v, data in G.edges(data=True)) -def is_negatively_weighted(G, edge=None, weight='weight'): +def is_negatively_weighted(G, edge=None, weight="weight"): """Returns True if `G` has negatively weighted edges. 
Parameters @@ -1014,11 +1042,11 @@ def is_negatively_weighted(G, edge=None, weight='weight'): >>> G.add_edge(1, 2, weight=4) >>> nx.is_negatively_weighted(G, (1, 2)) False - >>> G[2][4]['weight'] = -2 + >>> G[2][4]["weight"] = -2 >>> nx.is_negatively_weighted(G) True >>> G = nx.DiGraph() - >>> edges = [('0', '3', 3), ('0', '1', -5), ('1', '0', -2)] + >>> edges = [("0", "3", 3), ("0", "1", -5), ("1", "0", -2)] >>> G.add_weighted_edges_from(edges) >>> nx.is_negatively_weighted(G) True @@ -1027,12 +1055,11 @@ def is_negatively_weighted(G, edge=None, weight='weight'): if edge is not None: data = G.get_edge_data(*edge) if data is None: - msg = 'Edge {!r} does not exist.'.format(edge) + msg = f"Edge {edge!r} does not exist." raise nx.NetworkXError(msg) return weight in data and data[weight] < 0 - return any(weight in data and data[weight] < 0 - for u, v, data in G.edges(data=True)) + return any(weight in data and data[weight] < 0 for u, v, data in G.edges(data=True)) def is_empty(G): @@ -1075,7 +1102,7 @@ def nodes_with_selfloops(G): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge(1, 1) >>> G.add_edge(1, 2) >>> list(nx.nodes_with_selfloops(G)) @@ -1113,7 +1140,7 @@ def selfloop_edges(G, data=False, keys=False, default=None): Examples -------- - >>> G = nx.MultiGraph() # or Graph, DiGraph, MultiDiGraph, etc + >>> G = nx.MultiGraph() # or Graph, DiGraph, MultiDiGraph, etc >>> ekey = G.add_edge(1, 1) >>> ekey = G.add_edge(1, 2) >>> list(nx.selfloop_edges(G)) @@ -1128,44 +1155,62 @@ def selfloop_edges(G, data=False, keys=False, default=None): if data is True: if G.is_multigraph(): if keys is True: - return ((n, n, k, d) - for n, nbrs in G.adj.items() - if n in nbrs for k, d in nbrs[n].items()) + return ( + (n, n, k, d) + for n, nbrs in G.adj.items() + if n in nbrs + for k, d in nbrs[n].items() + ) else: - return ((n, n, d) - for n, nbrs in G.adj.items() - if n in nbrs for d in nbrs[n].values()) + return ( + (n, n, d) + for n, nbrs in G.adj.items() + if n in nbrs + for d in nbrs[n].values() + ) else: return ((n, n, nbrs[n]) for n, nbrs in G.adj.items() if n in nbrs) elif data is not False: if G.is_multigraph(): if keys is True: - return ((n, n, k, d.get(data, default)) - for n, nbrs in G.adj.items() - if n in nbrs for k, d in nbrs[n].items()) + return ( + (n, n, k, d.get(data, default)) + for n, nbrs in G.adj.items() + if n in nbrs + for k, d in nbrs[n].items() + ) else: - return ((n, n, d.get(data, default)) - for n, nbrs in G.adj.items() - if n in nbrs for d in nbrs[n].values()) + return ( + (n, n, d.get(data, default)) + for n, nbrs in G.adj.items() + if n in nbrs + for d in nbrs[n].values() + ) else: - return ((n, n, nbrs[n].get(data, default)) - for n, nbrs in G.adj.items() if n in nbrs) + return ( + (n, n, nbrs[n].get(data, default)) + for n, nbrs in G.adj.items() + if n in nbrs + ) else: if G.is_multigraph(): if keys is True: - return ((n, n, k) - for n, nbrs in G.adj.items() - if n in nbrs for k in nbrs[n]) + return ( + (n, n, k) for n, nbrs in G.adj.items() if n in nbrs for k in nbrs[n] + ) else: - return ((n, n) - for n, nbrs in G.adj.items() - if n in nbrs for d in nbrs[n].values()) + return ( + (n, n) + for n, nbrs in G.adj.items() + if n in nbrs + for i in range(len(nbrs[n])) # for easy edge removal (#4068) + ) else: return ((n, n) for n, nbrs in G.adj.items() if n in nbrs) def number_of_selfloops(G): - """Return the number of selfloop edges. 
+ """Returns the number of selfloop edges. A selfloop edge has the same node at both ends. @@ -1180,10 +1225,71 @@ def number_of_selfloops(G): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge(1, 1) >>> G.add_edge(1, 2) >>> nx.number_of_selfloops(G) 1 """ return sum(1 for _ in nx.selfloop_edges(G)) + + +def is_path(G, path): + """Returns whether or not the specified path exists + + Parameters + ---------- + G : graph + A NetworkX graph. + + path: list + A list of node labels which defines the path to traverse + + Returns + ------- + isPath: bool + A boolean representing whether or not the path exists + + """ + for node, nbr in nx.utils.pairwise(path): + if nbr not in G[node]: + return False + return True + + +def path_weight(G, path, weight): + """Returns total cost associated with specified path and weight + + Parameters + ---------- + G : graph + A NetworkX graph. + + path: list + A list of node labels which defines the path to traverse + + weight: string + A string indicating which edge attribute to use for path cost + + Returns + ------- + cost: int + A integer representing the total cost with respect to the + specified weight of the specified path + + Raises + ------ + NetworkXNoPath + If the specified edge does not exist. + """ + multigraph = G.is_multigraph() + cost = 0 + + if not nx.is_path(G, path): + raise nx.NetworkXNoPath("path does not exist") + for node, nbr in nx.utils.pairwise(path): + if multigraph: + cost += min([v[weight] for v in G[node][nbr].values()]) + else: + cost += G[node][nbr][weight] + return cost diff --git a/networkx/classes/graph.py b/networkx/classes/graph.py index d3d1753..f594e8e 100644 --- a/networkx/classes/graph.py +++ b/networkx/classes/graph.py @@ -1,13 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov), -# Pieter Swart (swart@lanl.gov), -# Dan Schult(dschult@colgate.edu) """Base class for undirected graphs. The Graph class allows any hashable object as a node @@ -17,20 +7,16 @@ For directed graphs see DiGraph and MultiDiGraph. """ -from __future__ import division -import warnings from copy import deepcopy -from collections import Mapping import networkx as nx -from networkx.classes.coreviews import AtlasView, AdjacencyView +from networkx.classes.coreviews import AdjacencyView from networkx.classes.reportviews import NodeView, EdgeView, DegreeView from networkx.exception import NetworkXError import networkx.convert as convert -from networkx.utils import pairwise -class Graph(object): +class Graph: """ Base class for undirected graphs. @@ -127,23 +113,23 @@ class Graph(object): Add node attributes using add_node(), add_nodes_from() or G.nodes - >>> G.add_node(1, time='5pm') - >>> G.add_nodes_from([3], time='2pm') + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") >>> G.nodes[1] {'time': '5pm'} - >>> G.nodes[1]['room'] = 714 # node must exist already to use G.nodes - >>> del G.nodes[1]['room'] # remove attribute + >>> G.nodes[1]["room"] = 714 # node must exist already to use G.nodes + >>> del G.nodes[1]["room"] # remove attribute >>> list(G.nodes(data=True)) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edges. 
- >>> G.add_edge(1, 2, weight=4.7 ) - >>> G.add_edges_from([(3, 4), (4, 5)], color='red') - >>> G.add_edges_from([(1, 2, {'color': 'blue'}), (2, 3, {'weight': 8})]) - >>> G[1][2]['weight'] = 4.7 - >>> G.edges[1, 2]['weight'] = 4 + >>> G.add_edge(1, 2, weight=4.7) + >>> G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2]["weight"] = 4.7 + >>> G.edges[1, 2]["weight"] = 4 Warning: we protect the graph data structure by making `G.edges` a read-only dict-like structure. However, you can assign to attributes @@ -155,7 +141,7 @@ class Graph(object): Many common graph features allow python syntax to speed reporting. - >>> 1 in G # check if node in graph + >>> 1 in G # check if node in graph True >>> [n for n in G if n < 3] # iterate through nodes [1, 2] @@ -163,17 +149,17 @@ class Graph(object): 5 Often the best way to traverse all edges of a graph is via the neighbors. - The neighbors are reported as an adjacency-dict `G.adj` or as `G.adjacency()` + The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()` >>> for n, nbrsdict in G.adjacency(): ... for nbr, eattr in nbrsdict.items(): - ... if 'weight' in eattr: - ... # Do something useful with the edges - ... pass + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass But the edges() method is often more convenient: - >>> for u, v, weight in G.edges.data('weight'): + >>> for u, v, weight in G.edges.data("weight"): ... if weight is not None: ... # Do something useful with the edges ... pass @@ -183,8 +169,8 @@ class Graph(object): Simple graph information is obtained using object-attributes and methods. Reporting typically provides views instead of containers to reduce memory usage. The views update as the graph is updated similarly to dict-views. - The objects `nodes, `edges` and `adj` provide access to data attributes - via lookup (e.g. `nodes[n], `edges[u, v]`, `adj[u][v]`) and iteration + The objects `nodes`, `edges` and `adj` provide access to data attributes + via lookup (e.g. `nodes[n]`, `edges[u, v]`, `adj[u][v]`) and iteration (e.g. `nodes.items()`, `nodes.data('color')`, `nodes.data('color', default='blue')` and similarly for `edges`) Views exist for `nodes`, `edges`, `neighbors()`/`adj` and `degree`. @@ -204,14 +190,19 @@ class Graph(object): maintained but extra features can be added. To replace one of the dicts create a new graph class by changing the class(!) variable holding the factory for that dict-like structure. The variable names are - node_dict_factory, adjlist_inner_dict_factory, adjlist_outer_dict_factory, - and edge_attr_dict_factory. + node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory, + adjlist_outer_dict_factory, edge_attr_dict_factory and graph_attr_dict_factory. node_dict_factory : function, (default: dict) Factory function to be used to create the dict containing node attributes, keyed by node id. It should require no arguments and return a dict-like object + node_attr_dict_factory: function, (default: dict) + Factory function to be used to create the node attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object + adjlist_outer_dict_factory : function, (default: dict) Factory function to be used to create the outer-most dict in the data structure that holds adjacency info keyed by node. 
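
The dict factories documented in the hunk above are class(!) variables, so swapping in a
custom container is a small subclass. A minimal sketch, assuming networkx 2.x; the
`LoggingDict` helper is hypothetical, not part of networkx::

    import networkx as nx


    class LoggingDict(dict):
        """Hypothetical dict-like container that reports attribute writes."""

        def __setitem__(self, key, value):
            print(f"set {key!r} = {value!r}")
            super().__setitem__(key, value)

        def update(self, *args, **kwargs):
            # dict.update would bypass __setitem__, so route writes through it;
            # add_node() populates the attribute dict via update().
            for key, value in dict(*args, **kwargs).items():
                self[key] = value


    class LoggingGraph(nx.Graph):
        node_attr_dict_factory = LoggingDict  # class(!) variable


    G = LoggingGraph()
    G.add_node(1, color="red")   # prints: set 'color' = 'red'
    G.nodes[1]["size"] = 10      # prints: set 'size' = 10
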
@@ -224,9 +215,28 @@ class Graph(object): edge_attr_dict_factory : function, (default: dict) Factory function to be used to create the edge attribute - dict which holds attrbute values keyed by attribute name. + dict which holds attribute values keyed by attribute name. It should require no arguments and return a dict-like object. + graph_attr_dict_factory : function, (default: dict) + Factory function to be used to create the graph attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object. + + Typically, if your extension doesn't impact the data structure all + methods will inherit without issue except: `to_directed/to_undirected`. + By default these methods create a DiGraph/Graph class and you probably + want them to create your extension of a DiGraph/Graph. To facilitate + this we define two class variables that you can set in your subclass. + + to_directed_class : callable, (default: DiGraph or MultiDiGraph) + Class to create a new graph structure in the `to_directed` method. + If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used. + + to_undirected_class : callable, (default: Graph or MultiGraph) + Class to create a new graph structure in the `to_undirected` method. + If `None`, a NetworkX class (Graph or MultiGraph) is used. + Examples -------- @@ -235,9 +245,11 @@ class Graph(object): This reduces the memory used, but you lose edge attributes. >>> class ThinGraph(nx.Graph): - ... all_edge_dict = {'weight': 1} + ... all_edge_dict = {"weight": 1} + ... ... def single_edge_dict(self): ... return self.all_edge_dict + ... ... edge_attr_dict_factory = single_edge_dict >>> G = ThinGraph() >>> G.add_edge(2, 1) @@ -251,21 +263,29 @@ class Graph(object): creating graph subclasses by overwriting the base class `dict` with a dictionary-like object. """ + node_dict_factory = dict + node_attr_dict_factory = dict adjlist_outer_dict_factory = dict adjlist_inner_dict_factory = dict edge_attr_dict_factory = dict + graph_attr_dict_factory = dict + + def to_directed_class(self): + """Returns the class to use for empty directed copies. - def __getstate__(self): - attr = self.__dict__.copy() - # remove lazy property attributes - if 'nodes' in attr: - del attr['nodes'] - if 'edges' in attr: - del attr['edges'] - if 'degree' in attr: - del attr['degree'] - return attr + If you subclass the base classes, use this to designate + what directed class to use for `to_directed()` copies. + """ + return nx.DiGraph + + def to_undirected_class(self): + """Returns the class to use for empty undirected copies. + + If you subclass the base classes, use this to designate + what directed class to use for `to_directed()` copies. + """ + return Graph def __init__(self, incoming_graph_data=None, **attr): """Initialize a graph with edges, name, or graph attributes. 
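
The `to_directed_class`/`to_undirected_class` hooks added above keep conversions inside
a subclass hierarchy instead of falling back to the plain NetworkX classes. A minimal
sketch, with illustrative class names::

    import networkx as nx


    class MyGraph(nx.Graph):
        def to_directed_class(self):
            return MyDiGraph


    class MyDiGraph(nx.DiGraph):
        def to_undirected_class(self):
            return MyGraph


    G = MyGraph([(1, 2), (2, 3)])
    D = G.to_directed()                      # a MyDiGraph, not a plain DiGraph
    print(type(D).__name__)                  # MyDiGraph
    print(type(D.to_undirected()).__name__)  # MyGraph
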
@@ -289,7 +309,7 @@ def __init__(self, incoming_graph_data=None, **attr): Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G = nx.Graph(name='my graph') + >>> G = nx.Graph(name="my graph") >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges >>> G = nx.Graph(e) @@ -300,14 +320,15 @@ def __init__(self, incoming_graph_data=None, **attr): {'day': 'Friday'} """ - self.node_dict_factory = ndf = self.node_dict_factory + self.graph_attr_dict_factory = self.graph_attr_dict_factory + self.node_dict_factory = self.node_dict_factory + self.node_attr_dict_factory = self.node_attr_dict_factory self.adjlist_outer_dict_factory = self.adjlist_outer_dict_factory self.adjlist_inner_dict_factory = self.adjlist_inner_dict_factory self.edge_attr_dict_factory = self.edge_attr_dict_factory - self.root_graph = self - self.graph = {} # dictionary for graph attributes - self._node = ndf() # empty node attribute dict + self.graph = self.graph_attr_dict_factory() # dictionary for graph attributes + self._node = self.node_dict_factory() # empty node attribute dict self._adj = self.adjlist_outer_dict_factory() # empty adjacency dict # attempt to load graph with data if incoming_graph_data is not None: @@ -342,14 +363,14 @@ def name(self): keyed by the string `"name"`. as well as an attribute (technically a property) `G.name`. This is entirely user controlled. """ - return self.graph.get('name', '') + return self.graph.get("name", "") @name.setter def name(self, s): - self.graph['name'] = s + self.graph["name"] = s def __str__(self): - """Return the graph name. + """Returns the graph name. Returns ------- @@ -358,7 +379,7 @@ def __str__(self): Examples -------- - >>> G = nx.Graph(name='foo') + >>> G = nx.Graph(name="foo") >>> str(G) 'foo' """ @@ -383,7 +404,7 @@ def __iter__(self): return iter(self._node) def __contains__(self, n): - """Return True if n is a node, False otherwise. Use: 'n in G'. + """Returns True if n is a node, False otherwise. Use: 'n in G'. Examples -------- @@ -397,13 +418,17 @@ def __contains__(self, n): return False def __len__(self): - """Return the number of nodes. Use: 'len(G)'. + """Returns the number of nodes in the graph. Use: 'len(G)'. Returns ------- nnodes : int The number of nodes in the graph. + See Also + -------- + number_of_nodes, order which are identical + Examples -------- >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc @@ -414,7 +439,7 @@ def __len__(self): return len(self._node) def __getitem__(self, n): - """Return a dict of neighbors of node n. Use: 'G[n]'. + """Returns a dict of neighbors of node n. Use: 'G[n]'. 
Parameters ---------- @@ -457,7 +482,7 @@ def add_node(self, node_for_adding, **attr): -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_node(1) - >>> G.add_node('Hello') + >>> G.add_node("Hello") >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) >>> G.add_node(K3) >>> G.number_of_nodes() @@ -466,7 +491,7 @@ def add_node(self, node_for_adding, **attr): Use keywords set/change node attributes: >>> G.add_node(1, size=10) - >>> G.add_node(3, weight=0.4, UTM=('13S', 382871, 3972649)) + >>> G.add_node(3, weight=0.4, UTM=("13S", 382871, 3972649)) Notes ----- @@ -480,7 +505,8 @@ def add_node(self, node_for_adding, **attr): """ if node_for_adding not in self._node: self._adj[node_for_adding] = self.adjlist_inner_dict_factory() - self._node[node_for_adding] = attr + attr_dict = self._node[node_for_adding] = self.node_attr_dict_factory() + attr_dict.update(attr) else: # update attr even if node already exists self._node[node_for_adding].update(attr) @@ -506,7 +532,7 @@ def add_nodes_from(self, nodes_for_adding, **attr): Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_nodes_from('Hello') + >>> G.add_nodes_from("Hello") >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) >>> G.add_nodes_from(K3) >>> sorted(G.nodes(), key=str) @@ -519,12 +545,12 @@ def add_nodes_from(self, nodes_for_adding, **attr): Use (node, attrdict) tuples to update attributes for specific nodes. - >>> G.add_nodes_from([(1, dict(size=11)), (2, {'color':'blue'})]) - >>> G.nodes[1]['size'] + >>> G.add_nodes_from([(1, dict(size=11)), (2, {"color": "blue"})]) + >>> G.nodes[1]["size"] 11 >>> H = nx.Graph() >>> H.add_nodes_from(G.nodes(data=True)) - >>> H.nodes[1]['size'] + >>> H.nodes[1]["size"] 11 """ @@ -535,7 +561,8 @@ def add_nodes_from(self, nodes_for_adding, **attr): try: if n not in self._node: self._adj[n] = self.adjlist_inner_dict_factory() - self._node[n] = attr.copy() + attr_dict = self._node[n] = self.node_attr_dict_factory() + attr_dict.update(attr) else: self._node[n].update(attr) except TypeError: @@ -544,7 +571,8 @@ def add_nodes_from(self, nodes_for_adding, **attr): self._adj[nn] = self.adjlist_inner_dict_factory() newdict = attr.copy() newdict.update(ndict) - self._node[nn] = newdict + attr_dict = self._node[nn] = self.node_attr_dict_factory() + attr_dict.update(newdict) else: olddict = self._node[nn] olddict.update(attr) @@ -584,11 +612,11 @@ def remove_node(self, n): try: nbrs = list(adj[n]) # list handles self-loops (allows mutation) del self._node[n] - except KeyError: # NetworkXError if n not in self - raise NetworkXError("The node %s is not in the graph." % (n,)) + except KeyError as e: # NetworkXError if n not in self + raise NetworkXError(f"The node {n} is not in the graph.") from e for u in nbrs: - del adj[u][n] # remove all edges n-u in graph - del adj[n] # now remove node + del adj[u][n] # remove all edges n-u in graph + del adj[n] # now remove node def remove_nodes_from(self, nodes): """Remove multiple nodes. 
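
One behaviour worth noting in the `add_nodes_from` hunk above: keyword attributes are
applied first and the per-node attribute dict is merged on top, so per-node values win.
A quick check::

    import networkx as nx

    G = nx.Graph()
    # the keyword attribute applies to every node in the bunch; a
    # (node, attrdict) tuple overrides it for that node
    G.add_nodes_from([3, (4, {"color": "blue"})], color="red")
    print(G.nodes[3]["color"])  # red
    print(G.nodes[4]["color"])  # blue -- the per-node dict wins
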
@@ -619,7 +647,7 @@ def remove_nodes_from(self, nodes): for n in nodes: try: del self._node[n] - for u in list(adj[n]): # list handles self-loops + for u in list(adj[n]): # list handles self-loops del adj[u][n] # (allows mutation of dict in loop) del adj[n] except KeyError: @@ -681,26 +709,26 @@ def nodes(self): To get the node data along with the nodes: - >>> G.add_node(1, time='5pm') - >>> G.nodes[0]['foo'] = 'bar' + >>> G.add_node(1, time="5pm") + >>> G.nodes[0]["foo"] = "bar" >>> list(G.nodes(data=True)) [(0, {'foo': 'bar'}), (1, {'time': '5pm'}), (2, {})] >>> list(G.nodes.data()) [(0, {'foo': 'bar'}), (1, {'time': '5pm'}), (2, {})] - >>> list(G.nodes(data='foo')) + >>> list(G.nodes(data="foo")) [(0, 'bar'), (1, None), (2, None)] - >>> list(G.nodes.data('foo')) + >>> list(G.nodes.data("foo")) [(0, 'bar'), (1, None), (2, None)] - >>> list(G.nodes(data='time')) + >>> list(G.nodes(data="time")) [(0, None), (1, '5pm'), (2, None)] - >>> list(G.nodes.data('time')) + >>> list(G.nodes.data("time")) [(0, None), (1, '5pm'), (2, None)] - >>> list(G.nodes(data='time', default='Not Available')) + >>> list(G.nodes(data="time", default="Not Available")) [(0, 'Not Available'), (1, '5pm'), (2, 'Not Available')] - >>> list(G.nodes.data('time', default='Not Available')) + >>> list(G.nodes.data("time", default="Not Available")) [(0, 'Not Available'), (1, '5pm'), (2, 'Not Available')] If some of your nodes have an attribute and the rest are assumed @@ -712,7 +740,7 @@ def nodes(self): >>> G.add_node(0) >>> G.add_node(1, weight=2) >>> G.add_node(2, weight=3) - >>> dict(G.nodes(data='weight', default=1)) + >>> dict(G.nodes(data="weight", default=1)) {0: 1, 1: 2, 2: 3} """ @@ -720,47 +748,11 @@ def nodes(self): # Lazy View creation: overload the (class) property on the instance # Then future G.nodes use the existing View # setattr doesn't work because attribute already exists - self.__dict__['nodes'] = nodes + self.__dict__["nodes"] = nodes return nodes - # for backwards compatibility with 1.x, will be removed for 3.x - node = nodes - - def add_path(self, nodes, **attr): - msg = "add_path is deprecated. Use nx.add_path instead." - warnings.warn(msg, DeprecationWarning) - return nx.add_path(self, nodes, **attr) - - def add_cycle(self, nodes, **attr): - msg = "add_cycle is deprecated. Use nx.add_cycle instead." - warnings.warn(msg, DeprecationWarning) - return nx.add_cycle(self, nodes, **attr) - - def add_star(self, nodes, **attr): - msg = "add_star is deprecated. Use nx.add_star instead." - warnings.warn(msg, DeprecationWarning) - return nx.add_star(self, nodes, **attr) - - def nodes_with_selfloops(self): - msg = "nodes_with_selfloops is deprecated." \ - "Use nx.nodes_with_selfloops instead." - warnings.warn(msg, DeprecationWarning) - return nx.nodes_with_selfloops(self) - - def number_of_selfloops(self): - msg = "number_of_selfloops is deprecated." \ - "Use nx.number_of_selfloops instead." - warnings.warn(msg, DeprecationWarning) - return nx.number_of_selfloops(self) - - def selfloop_edges(self, data=False, keys=False, default=None): - msg = "selfloop_edges is deprecated. Use nx.selfloop_edges instead." - warnings.warn(msg, DeprecationWarning) - return nx.selfloop_edges(self, data=False, keys=False, default=None) - # Done with backward compatibility methods for 1.x - def number_of_nodes(self): - """Return the number of nodes in the graph. + """Returns the number of nodes in the graph. 
Returns ------- @@ -774,13 +766,13 @@ def number_of_nodes(self): Examples -------- >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> len(G) + >>> G.number_of_nodes() 3 """ return len(self._node) def order(self): - """Return the number of nodes in the graph. + """Returns the number of nodes in the graph. Returns ------- @@ -791,11 +783,16 @@ def order(self): -------- number_of_nodes, __len__ which are identical + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.order() + 3 """ return len(self._node) def has_node(self, n): - """Return True if the graph contains the node n. + """Returns True if the graph contains the node n. Identical to `n in G` @@ -853,10 +850,10 @@ def add_edge(self, u_of_edge, v_of_edge, **attr): -------- The following all add the edge e=(1, 2) to graph G: - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> e = (1, 2) - >>> G.add_edge(1, 2) # explicit two-node form - >>> G.add_edge(*e) # single edge as tuple of two nodes + >>> G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes >>> G.add_edges_from([(1, 2)]) # add edges from iterable container Associate data to edges using keywords: @@ -874,10 +871,10 @@ def add_edge(self, u_of_edge, v_of_edge, **attr): # add nodes if u not in self._node: self._adj[u] = self.adjlist_inner_dict_factory() - self._node[u] = {} + self._node[u] = self.node_attr_dict_factory() if v not in self._node: self._adj[v] = self.adjlist_inner_dict_factory() - self._node[v] = {} + self._node[v] = self.node_attr_dict_factory() # add the edge datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) datadict.update(attr) @@ -912,15 +909,15 @@ def add_edges_from(self, ebunch_to_add, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples >>> e = zip(range(0, 3), range(1, 4)) - >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 + >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 Associate data to edges >>> G.add_edges_from([(1, 2), (2, 3)], weight=3) - >>> G.add_edges_from([(3, 4), (1, 4)], label='WN2898') + >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898") """ for e in ebunch_to_add: ne = len(e) @@ -930,21 +927,20 @@ def add_edges_from(self, ebunch_to_add, **attr): u, v = e dd = {} # doesn't need edge_attr_dict_factory else: - raise NetworkXError( - "Edge tuple %s must be a 2-tuple or 3-tuple." 
% (e,)) + raise NetworkXError(f"Edge tuple {e} must be a 2-tuple or 3-tuple.") if u not in self._node: self._adj[u] = self.adjlist_inner_dict_factory() - self._node[u] = {} + self._node[u] = self.node_attr_dict_factory() if v not in self._node: self._adj[v] = self.adjlist_inner_dict_factory() - self._node[v] = {} + self._node[v] = self.node_attr_dict_factory() datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) datadict.update(attr) datadict.update(dd) self._adj[u][v] = datadict self._adj[v][u] = datadict - def add_weighted_edges_from(self, ebunch_to_add, weight='weight', **attr): + def add_weighted_edges_from(self, ebunch_to_add, weight="weight", **attr): """Add weighted edges in `ebunch_to_add` with specified weight attr Parameters @@ -971,11 +967,10 @@ def add_weighted_edges_from(self, ebunch_to_add, weight='weight', **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_weighted_edges_from([(0, 1, 3.0), (1, 2, 7.5)]) """ - self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch_to_add), - **attr) + self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch_to_add), **attr) def remove_edge(self, u, v): """Remove the edge between u and v. @@ -999,16 +994,16 @@ def remove_edge(self, u, v): >>> G = nx.path_graph(4) # or DiGraph, etc >>> G.remove_edge(0, 1) >>> e = (1, 2) - >>> G.remove_edge(*e) # unpacks e from an edge tuple - >>> e = (2, 3, {'weight':7}) # an edge with attribute data - >>> G.remove_edge(*e[:2]) # select first part of edge tuple + >>> G.remove_edge(*e) # unpacks e from an edge tuple + >>> e = (2, 3, {"weight": 7}) # an edge with attribute data + >>> G.remove_edge(*e[:2]) # select first part of edge tuple """ try: del self._adj[u][v] if u != v: # self-loop needs only one entry removed del self._adj[v][u] - except KeyError: - raise NetworkXError("The edge %s-%s is not in the graph" % (u, v)) + except KeyError as e: + raise NetworkXError(f"The edge {u}-{v} is not in the graph") from e def remove_edges_from(self, ebunch): """Remove all edges specified in ebunch. @@ -1033,7 +1028,7 @@ def remove_edges_from(self, ebunch): Examples -------- >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> ebunch=[(1, 2), (2, 3)] + >>> ebunch = [(1, 2), (2, 3)] >>> G.remove_edges_from(ebunch) """ adj = self._adj @@ -1044,8 +1039,124 @@ def remove_edges_from(self, ebunch): if u != v: # self loop needs only one entry removed del adj[v][u] + def update(self, edges=None, nodes=None): + """Update the graph using nodes/edges/graphs as input. + + Like dict.update, this method takes a graph as input, adding the + graph's nodes and edges to this graph. It can also take two inputs: + edges and nodes. Finally it can take either edges or nodes. + To specify only nodes the keyword `nodes` must be used. + + The collections of edges and nodes are treated similarly to + the add_edges_from/add_nodes_from methods. When iterated, they + should yield 2-tuples (u, v) or 3-tuples (u, v, datadict). + + Parameters + ---------- + edges : Graph object, collection of edges, or None + The first parameter can be a graph or some edges. If it has + attributes `nodes` and `edges`, then it is taken to be a + Graph-like object and those attributes are used as collections + of nodes and edges to be added to the graph. + If the first parameter does not have those attributes, it is + treated as a collection of edges and added to the graph. 
+ If the first argument is None, no edges are added. + nodes : collection of nodes, or None + The second parameter is treated as a collection of nodes + to be added to the graph unless it is None. + If `edges is None` and `nodes is None` an exception is raised. + If the first parameter is a Graph, then `nodes` is ignored. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> G.update(nx.complete_graph(range(4, 10))) + >>> from itertools import combinations + >>> edges = ( + ... (u, v, {"power": u * v}) + ... for u, v in combinations(range(10, 20), 2) + ... if u * v < 225 + ... ) + >>> nodes = [1000] # for singleton, use a container + >>> G.update(edges, nodes) + + Notes + ----- + It you want to update the graph using an adjacency structure + it is straightforward to obtain the edges/nodes from adjacency. + The following examples provide common cases, your adjacency may + be slightly different and require tweaks of these examples. + + >>> # dict-of-set/list/tuple + >>> adj = {1: {2, 3}, 2: {1, 3}, 3: {1, 2}} + >>> e = [(u, v) for u, nbrs in adj.items() for v in nbrs] + >>> G.update(edges=e, nodes=adj) + + >>> DG = nx.DiGraph() + >>> # dict-of-dict-of-attribute + >>> adj = {1: {2: 1.3, 3: 0.7}, 2: {1: 1.4}, 3: {1: 0.7}} + >>> e = [ + ... (u, v, {"weight": d}) + ... for u, nbrs in adj.items() + ... for v, d in nbrs.items() + ... ] + >>> DG.update(edges=e, nodes=adj) + + >>> # dict-of-dict-of-dict + >>> adj = {1: {2: {"weight": 1.3}, 3: {"color": 0.7, "weight": 1.2}}} + >>> e = [ + ... (u, v, {"weight": d}) + ... for u, nbrs in adj.items() + ... for v, d in nbrs.items() + ... ] + >>> DG.update(edges=e, nodes=adj) + + >>> # predecessor adjacency (dict-of-set) + >>> pred = {1: {2, 3}, 2: {3}, 3: {3}} + >>> e = [(v, u) for u, nbrs in pred.items() for v in nbrs] + + >>> # MultiGraph dict-of-dict-of-dict-of-attribute + >>> MDG = nx.MultiDiGraph() + >>> adj = { + ... 1: {2: {0: {"weight": 1.3}, 1: {"weight": 1.2}}}, + ... 3: {2: {0: {"weight": 0.7}}}, + ... } + >>> e = [ + ... (u, v, ekey, d) + ... for u, nbrs in adj.items() + ... for v, keydict in nbrs.items() + ... for ekey, d in keydict.items() + ... ] + >>> MDG.update(edges=e) + + See Also + -------- + add_edges_from: add multiple edges to a graph + add_nodes_from: add multiple nodes to a graph + """ + if edges is not None: + if nodes is not None: + self.add_nodes_from(nodes) + self.add_edges_from(edges) + else: + # check if edges is a Graph object + try: + graph_nodes = edges.nodes + graph_edges = edges.edges + except AttributeError: + # edge not Graph-like + self.add_edges_from(edges) + else: # edges is Graph-like + self.add_nodes_from(graph_nodes.data()) + self.add_edges_from(graph_edges.data()) + self.graph.update(edges.graph) + elif nodes is not None: + self.add_nodes_from(nodes) + else: + raise NetworkXError("update needs nodes or edges input") + def has_edge(self, u, v): - """Return True if the edge (u, v) is in the graph. + """Returns True if the edge (u, v) is in the graph. This is the same as `v in G[u]` without KeyError exceptions. @@ -1068,7 +1179,7 @@ def has_edge(self, u, v): >>> e = (0, 1) >>> G.has_edge(*e) # e is a 2-tuple (u, v) True - >>> e = (0, 1, {'weight':7}) + >>> e = (0, 1, {"weight": 7}) >>> G.has_edge(*e[:2]) # e is a 3-tuple (u, v, data_dictionary) True @@ -1086,7 +1197,7 @@ def has_edge(self, u, v): return False def neighbors(self, n): - """Return an iterator over all neighbors of node n. + """Returns an iterator over all neighbors of node n. 
This is identical to `iter(G[n])` @@ -1113,12 +1224,11 @@ def neighbors(self, n): Notes ----- - It is usually more convenient (and faster) to access the - adjacency dictionary as ``G[n]``: + Alternate ways to access the neighbors are ``G.adj[n]`` or ``G[n]``: - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_edge('a', 'b', weight=7) - >>> G['a'] + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edge("a", "b", weight=7) + >>> G["a"] AtlasView({'b': {'weight': 7}}) >>> G = nx.path_graph(4) >>> [n for n in G[0]] @@ -1126,8 +1236,8 @@ def neighbors(self, n): """ try: return iter(self._adj[n]) - except KeyError: - raise NetworkXError("The node %s is not in the graph." % (n,)) + except KeyError as e: + raise NetworkXError(f"The node {n} is not in the graph.") from e @property def edges(self): @@ -1171,27 +1281,26 @@ def edges(self): Examples -------- - >>> G = nx.path_graph(3) # or MultiGraph, etc + >>> G = nx.path_graph(3) # or MultiGraph, etc >>> G.add_edge(2, 3, weight=5) >>> [e for e in G.edges] [(0, 1), (1, 2), (2, 3)] >>> G.edges.data() # default data is {} (empty dict) EdgeDataView([(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]) - >>> G.edges.data('weight', default=1) + >>> G.edges.data("weight", default=1) EdgeDataView([(0, 1, 1), (1, 2, 1), (2, 3, 5)]) >>> G.edges([0, 3]) # only edges incident to these nodes EdgeDataView([(0, 1), (3, 2)]) >>> G.edges(0) # only edges incident to a single node (use G.adj[0]?) EdgeDataView([(0, 1)]) """ - self.__dict__['edges'] = edges = EdgeView(self) - return edges + return EdgeView(self) def get_edge_data(self, u, v, default=None): - """Return the attribute dictionary associated with edge (u, v). + """Returns the attribute dictionary associated with edge (u, v). This is identical to `G[u][v]` except the default is returned - instead of an exception is the edge doesn't exist. + instead of an exception if the edge doesn't exist. Parameters ---------- @@ -1213,10 +1322,10 @@ def get_edge_data(self, u, v, default=None): Warning: Assigning to `G[u][v]` is not permitted. But it is safe to assign attributes `G[u][v]['foo']` - >>> G[0][1]['weight'] = 7 - >>> G[0][1]['weight'] + >>> G[0][1]["weight"] = 7 + >>> G[0][1]["weight"] 7 - >>> G[1][0]['weight'] + >>> G[1][0]["weight"] 7 >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc @@ -1225,7 +1334,7 @@ def get_edge_data(self, u, v, default=None): >>> e = (0, 1) >>> G.get_edge_data(*e) # tuple form {} - >>> G.get_edge_data('a', 'b', default=0) # edge not in graph, return 0 + >>> G.get_edge_data("a", "b", default=0) # edge not in graph, return 0 0 """ try: @@ -1234,7 +1343,7 @@ def get_edge_data(self, u, v, default=None): return default def adjacency(self): - """Return an iterator over (node, adjacency dict) tuples for all nodes. + """Returns an iterator over (node, adjacency dict) tuples for all nodes. For directed graphs, only outgoing neighbors/adjacencies are included. @@ -1291,8 +1400,7 @@ def degree(self): >>> list(G.degree([0, 1, 2])) [(0, 1), (1, 2), (2, 2)] """ - self.__dict__['degree'] = degree = DegreeView(self) - return degree + return DegreeView(self) def clear(self): """Remove all nodes and edges from the graph. @@ -1313,34 +1421,35 @@ def clear(self): self._node.clear() self.graph.clear() + def clear_edges(self): + """Remove all edges from the graph without altering nodes. 
+ + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.clear_edges() + >>> list(G.nodes) + [0, 1, 2, 3] + >>> list(G.edges) + [] + """ + for neighbours_dict in self._adj.values(): + neighbours_dict.clear() + def is_multigraph(self): - """Return True if graph is a multigraph, False otherwise.""" + """Returns True if graph is a multigraph, False otherwise.""" return False def is_directed(self): - """Return True if graph is directed, False otherwise.""" + """Returns True if graph is directed, False otherwise.""" return False - def fresh_copy(self): - """Return a fresh copy graph with the same data structure. - - A fresh copy has no nodes, edges or graph attributes. It is - the same data structure as the current graph. This method is - typically used to create an empty version of the graph. - - Notes - ----- - If you subclass the base class you should overwrite this method - to return your class of graph. - """ - return Graph() - def copy(self, as_view=False): - """Return a copy of the graph. + """Returns a copy of the graph. - The copy method by default returns a shallow copy of the graph - and attributes. That is, if an attribute is a container, that - container is shared by the original an the copy. + The copy method by default returns an independent shallow copy + of the graph and attributes. That is, if an attribute is a + container, that container is shared by the original an the copy. Use Python's `copy.deepcopy` for new containers. If `as_view` is True then a view is returned instead of a copy. @@ -1351,11 +1460,10 @@ def copy(self, as_view=False): may be handled in different ways. There are four types of copies of a graph that people might want. - Deepcopy -- The default behavior is a "deepcopy" where the graph - structure as well as all data attributes and any objects they might - contain are copied. The entire graph object is new so that changes - in the copy do not affect the original object. (see Python's - copy.deepcopy) + Deepcopy -- A "deepcopy" copies the graph structure as well as + all data attributes and any objects they might contain. + The entire graph object is new so that changes in the copy + do not affect the original object. (see Python's copy.deepcopy) Data Reference (Shallow) -- For a shallow copy the graph structure is copied but the edge, node and graph attribute dicts are @@ -1374,14 +1482,14 @@ def copy(self, as_view=False): >>> H = G.copy() >>> H = G.copy(as_view=False) >>> H = nx.Graph(G) - >>> H = G.fresh_copy().__class__(G) + >>> H = G.__class__(G) Fresh Data -- For fresh data, the graph structure is copied while new empty data attribute dicts are created. The resulting graph is independent of the original and it has no edge, node or graph attributes. Fresh copies are not enabled. Instead use: - >>> H = G.fresh_copy() + >>> H = G.__class__() >>> H.add_nodes_from(G) >>> H.add_edges_from(G.edges) @@ -1390,7 +1498,7 @@ def copy(self, as_view=False): structure without requiring any memory for copying the information. See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. 
Parameters ---------- @@ -1414,17 +1522,19 @@ def copy(self, as_view=False): """ if as_view is True: - return nx.graphviews.GraphView(self) - G = self.fresh_copy() + return nx.graphviews.generic_graph_view(self) + G = self.__class__() G.graph.update(self.graph) G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) - G.add_edges_from((u, v, datadict.copy()) - for u, nbrs in self._adj.items() - for v, datadict in nbrs.items()) + G.add_edges_from( + (u, v, datadict.copy()) + for u, nbrs in self._adj.items() + for v, datadict in nbrs.items() + ) return G def to_directed(self, as_view=False): - """Return a directed representation of the graph. + """Returns a directed representation of the graph. Returns ------- @@ -1443,7 +1553,7 @@ def to_directed(self, as_view=False): shallow copy of the data. See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. Warning: If you have subclassed Graph to use dict-like objects in the data structure, those changes do not transfer to the @@ -1465,20 +1575,22 @@ def to_directed(self, as_view=False): >>> list(H.edges) [(0, 1)] """ + graph_class = self.to_directed_class() if as_view is True: - return nx.graphviews.DiGraphView(self) + return nx.graphviews.generic_graph_view(self, graph_class) # deepcopy when not a view - from networkx import DiGraph - G = DiGraph() + G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - G.add_edges_from((u, v, deepcopy(data)) - for u, nbrs in self._adj.items() - for v, data in nbrs.items()) + G.add_edges_from( + (u, v, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, data in nbrs.items() + ) return G def to_undirected(self, as_view=False): - """Return an undirected copy of the graph. + """Returns an undirected copy of the graph. Parameters ---------- @@ -1504,7 +1616,7 @@ def to_undirected(self, as_view=False): shallow copy of the data. See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. Warning: If you have subclassed DiGraph to use dict-like objects in the data structure, those changes do not transfer to the @@ -1512,7 +1624,7 @@ def to_undirected(self, as_view=False): Examples -------- - >>> G = nx.path_graph(2) # or MultiGraph, etc + >>> G = nx.path_graph(2) # or MultiGraph, etc >>> H = G.to_directed() >>> list(H.edges) [(0, 1), (1, 0)] @@ -1520,19 +1632,22 @@ def to_undirected(self, as_view=False): >>> list(G2.edges) [(0, 1)] """ + graph_class = self.to_undirected_class() if as_view is True: - return nx.graphviews.GraphView(self) + return nx.graphviews.generic_graph_view(self, graph_class) # deepcopy when not a view - G = Graph() + G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - G.add_edges_from((u, v, deepcopy(d)) - for u, nbrs in self._adj.items() - for v, d in nbrs.items()) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) return G def subgraph(self, nodes): - """Return a SubGraph view of the subgraph induced on `nodes`. + """Returns a SubGraph view of the subgraph induced on `nodes`. The induced subgraph of the graph contains the nodes in `nodes` and the edges between those nodes. 
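
The view/copy split in the rewritten `copy()` above is easy to demonstrate: a view stays
live against the parent graph, a copy does not. A minimal sketch::

    import networkx as nx

    G = nx.path_graph(3)
    V = G.copy(as_view=True)  # frozen read-only view backed by G
    H = G.copy()              # independent shallow copy

    G.add_edge(2, 3)
    print((2, 3) in V.edges)  # True  -- the view tracks changes to G
    print((2, 3) in H.edges)  # False -- the copy is decoupled
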
@@ -1561,6 +1676,26 @@ def subgraph(self, nodes): For an inplace reduction of a graph to a subgraph you can remove nodes: G.remove_nodes_from([n for n in G if n not in set(nodes)]) + Subgraph views are sometimes NOT what you want. In most cases where + you want to do more than simply look at the induced edges, it makes + more sense to just create the subgraph as its own graph with code like: + + :: + + # Create a subgraph SG based on a (possibly multigraph) G + SG = G.__class__() + SG.add_nodes_from((n, G.nodes[n]) for n in largest_wcc) + if SG.is_multigraph(): + SG.add_edges_from((n, nbr, key, d) + for n, nbrs in G.adj.items() if n in largest_wcc + for nbr, keydict in nbrs.items() if nbr in largest_wcc + for key, d in keydict.items()) + else: + SG.add_edges_from((n, nbr, d) + for n, nbrs in G.adj.items() if n in largest_wcc + for nbr, d in nbrs.items() if nbr in largest_wcc) + SG.graph.update(G.graph) + Examples -------- >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc @@ -1569,11 +1704,11 @@ def subgraph(self, nodes): [(0, 1), (1, 2)] """ induced_nodes = nx.filters.show_nodes(self.nbunch_iter(nodes)) - SubGraph = nx.graphviews.SubGraph # if already a subgraph, don't make a chain - if hasattr(self, '_NODE_OK'): - return SubGraph(self._graph, induced_nodes, self._EDGE_OK) - return SubGraph(self, induced_nodes) + subgraph = nx.graphviews.subgraph_view + if hasattr(self, "_NODE_OK"): + return subgraph(self._graph, induced_nodes, self._EDGE_OK) + return subgraph(self, induced_nodes) def edge_subgraph(self, edges): """Returns the subgraph induced by the specified edges. @@ -1616,7 +1751,7 @@ def edge_subgraph(self, edges): return nx.edge_subgraph(self, edges) def size(self, weight=None): - """Return the number of edges or total of all edge weights. + """Returns the number of edges or total of all edge weights. Parameters ---------- @@ -1643,12 +1778,12 @@ def size(self, weight=None): >>> G.size() 3 - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_edge('a', 'b', weight=2) - >>> G.add_edge('b', 'c', weight=4) + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edge("a", "b", weight=2) + >>> G.add_edge("b", "c", weight=4) >>> G.size() 2 - >>> G.size(weight='weight') + >>> G.size(weight="weight") 6.0 """ s = sum(d for v, d in self.degree(weight=weight)) @@ -1659,7 +1794,7 @@ def size(self, weight=None): return s // 2 if weight is None else s / 2 def number_of_edges(self, u=None, v=None): - """Return the number of edges between two nodes. + """Returns the number of edges between two nodes. Parameters ---------- @@ -1711,7 +1846,7 @@ def number_of_edges(self, u=None, v=None): return 0 def nbunch_iter(self, nbunch=None): - """Return an iterator over nodes contained in nbunch that are + """Returns an iterator over nodes contained in nbunch that are also in the graph. The nodes in nbunch are checked for membership in the graph @@ -1750,11 +1885,12 @@ def nbunch_iter(self, nbunch=None): or None, a :exc:`NetworkXError` is raised. Also, if any object in nbunch is not hashable, a :exc:`NetworkXError` is raised. 
""" - if nbunch is None: # include all nodes via iterator + if nbunch is None: # include all nodes via iterator bunch = iter(self._adj) elif nbunch in self: # if nbunch is a single node bunch = iter([nbunch]) - else: # if nbunch is a sequence of nodes + else: # if nbunch is a sequence of nodes + def bunch_iter(nlist, adj): try: for n in nlist: @@ -1763,14 +1899,15 @@ def bunch_iter(nlist, adj): except TypeError as e: message = e.args[0] # capture error for non-sequence/iterator nbunch. - if 'iter' in message: + if "iter" in message: msg = "nbunch is not a node or a sequence of nodes." - raise NetworkXError(msg) + raise NetworkXError(msg) from e # capture error for unhashable node. - elif 'hashable' in message: - msg = "Node {} in sequence nbunch is not a valid node." - raise NetworkXError(msg.format(n)) + elif "hashable" in message: + msg = f"Node {n} in sequence nbunch is not a valid node." + raise NetworkXError(msg) from e else: raise + bunch = bunch_iter(nbunch, self._adj) return bunch diff --git a/networkx/classes/graphviews.py b/networkx/classes/graphviews.py index 01614a2..8cca239 100644 --- a/networkx/classes/graphviews.py +++ b/networkx/classes/graphviews.py @@ -1,37 +1,15 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov), -# Pieter Swart (swart@lanl.gov), -# Dan Schult(dschult@colgate.edu) """View of Graphs as SubGraph, Reverse, Directed, Undirected. In some algorithms it is convenient to temporarily morph a graph to exclude some nodes or edges. It should be better to do that via a view than to remove and then re-add. - In other algorithms it is convenient to temporarily morph a graph to reverse directed edges, or treat a directed graph as undirected, etc. This module provides those graph views. The resulting views are essentially read-only graphs that -report data from the orignal graph object. We provide three -attributes related to the underlying graph object. - - G._graph : the parent graph used for looking up graph data. - G.root_graph : the root graph of the potential chain of views. - For example, if you have a subgraph of a reversed view of - an edge_subgraph of a graph, this points to original graph. - G.fresh_copy() : a method to return a null copy of the graph - represented by the view. This is useful if you want to - create a graph with the same data structure (directed/multi) - as the current view. This is similar to G.root_graph.__class__() - but reflects the fact that (Un)DirectedView could make the - type of data structure different from the root_graph. +report data from the orignal graph object. We provide an +attribute G._graph which points to the underlying graph object. Note: Since graphviews look like graphs, one can end up with view-of-view-of-view chains. Be careful with chains because @@ -43,201 +21,186 @@ the middle subgraph. In general, determining how to short-cut the chain is tricky and much harder with restricted_views than with induced subgraphs. -Often it is easiest to use `.copy()` to avoid chains. +Often it is easiest to use .copy() to avoid chains. 
""" -from collections import Mapping - -from networkx.classes import Graph, DiGraph, MultiGraph, MultiDiGraph -from networkx.classes.coreviews import ReadOnlyGraph, \ - AtlasView, AdjacencyView, MultiAdjacencyView, \ - FilterAtlas, FilterAdjacency, FilterMultiAdjacency, \ - UnionAdjacency, UnionMultiAdjacency -from networkx.classes.filters import no_filter, show_nodes, show_edges -from networkx.exception import NetworkXError, NetworkXNotImplemented +from networkx.classes.coreviews import ( + UnionAdjacency, + UnionMultiAdjacency, + FilterAtlas, + FilterAdjacency, + FilterMultiAdjacency, +) +from networkx.classes.filters import no_filter +from networkx.exception import NetworkXError from networkx.utils import not_implemented_for +import networkx as nx -__all__ = ['SubGraph', 'SubDiGraph', 'SubMultiGraph', 'SubMultiDiGraph', - 'ReverseView', 'MultiReverseView', - 'DiGraphView', 'MultiDiGraphView', - 'GraphView', 'MultiGraphView', - ] - - -class SubGraph(ReadOnlyGraph, Graph): - def __init__(self, graph, filter_node=no_filter, filter_edge=no_filter): - self._graph = graph - self.root_graph = graph.root_graph - self._NODE_OK = filter_node - self._EDGE_OK = filter_edge - - # Set graph interface - self.graph = graph.graph - self._node = FilterAtlas(graph._node, filter_node) - self._adj = FilterAdjacency(graph._adj, filter_node, filter_edge) - - -class SubDiGraph(ReadOnlyGraph, DiGraph): - def __init__(self, graph, filter_node=no_filter, filter_edge=no_filter): - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self._NODE_OK = filter_node - self._EDGE_OK = filter_edge - - # Set graph interface - self.graph = graph.graph - self._node = FilterAtlas(graph._node, filter_node) - self._adj = FilterAdjacency(graph._adj, filter_node, filter_edge) - self._pred = FilterAdjacency(graph._pred, filter_node, - lambda u, v: filter_edge(v, u)) - self._succ = self._adj - - -class SubMultiGraph(ReadOnlyGraph, MultiGraph): - def __init__(self, graph, filter_node=no_filter, filter_edge=no_filter): - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self._NODE_OK = filter_node - self._EDGE_OK = filter_edge - - # Set graph interface - self.graph = graph.graph - self._node = FilterAtlas(graph._node, filter_node) - self._adj = FilterMultiAdjacency(graph._adj, filter_node, filter_edge) - - -class SubMultiDiGraph(ReadOnlyGraph, MultiDiGraph): - def __init__(self, graph, filter_node=no_filter, filter_edge=no_filter): - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self._NODE_OK = filter_node - self._EDGE_OK = filter_edge - - # Set graph interface - self.graph = graph.graph - self._node = FilterAtlas(graph._node, filter_node) - FMA = FilterMultiAdjacency - self._adj = FMA(graph._adj, filter_node, filter_edge) - self._pred = FMA(graph._pred, filter_node, - lambda u, v, k: filter_edge(v, u, k)) - self._succ = self._adj - - -class ReverseView(ReadOnlyGraph, DiGraph): - def __init__(self, graph): - if not graph.is_directed(): - msg = "not implemented for undirected type" - raise NetworkXNotImplemented(msg) - - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - # Set graph interface - self.graph = graph.graph - self._node = graph._node - self._adj = graph._pred - self._pred = graph._succ - 
self._succ = self._adj - - -class MultiReverseView(ReadOnlyGraph, MultiDiGraph): - def __init__(self, graph): - if not graph.is_directed(): - msg = "not implemented for undirected type" - raise NetworkXNotImplemented(msg) - - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - # Set graph interface - self.graph = graph.graph - self._node = graph._node - self._adj = graph._pred - self._pred = graph._succ - self._succ = self._adj - - -class DiGraphView(ReadOnlyGraph, DiGraph): - def __init__(self, graph): - if graph.is_multigraph(): - msg = 'Wrong View class. Use MultiDiGraphView.' - raise NetworkXError(msg) - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self.graph = graph.graph - self._node = graph._node - if graph.is_directed(): - self._pred = graph._pred - self._succ = graph._succ - else: - self._pred = graph._adj - self._succ = graph._adj - self._adj = self._succ - - -class MultiDiGraphView(ReadOnlyGraph, MultiDiGraph): - def __init__(self, graph): - if not graph.is_multigraph(): - msg = 'Wrong View class. Use DiGraphView.' - raise NetworkXError(msg) - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self.graph = graph.graph - self._node = graph._node - if graph.is_directed(): - self._pred = graph._pred - self._succ = graph._succ - else: - self._pred = graph._adj - self._succ = graph._adj - self._adj = self._succ - - -class GraphView(ReadOnlyGraph, Graph): - UnionAdj = UnionAdjacency - - def __init__(self, graph): - if graph.is_multigraph(): - msg = 'Wrong View class. Use MultiGraphView.' - raise NetworkXError(msg) - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self.graph = graph.graph - self._node = graph._node - if graph.is_directed(): - self._adj = self.UnionAdj(graph._succ, graph._pred) +__all__ = ["generic_graph_view", "subgraph_view", "reverse_view"] + + +def generic_graph_view(G, create_using=None): + if create_using is None: + newG = G.__class__() + else: + newG = nx.empty_graph(0, create_using) + if G.is_multigraph() != newG.is_multigraph(): + raise NetworkXError("Multigraph for G must agree with create_using") + newG = nx.freeze(newG) + + # create view by assigning attributes from G + newG._graph = G + newG.graph = G.graph + + newG._node = G._node + if newG.is_directed(): + if G.is_directed(): + newG._succ = G._succ + newG._pred = G._pred + newG._adj = G._succ else: - self._adj = graph._adj - - -class MultiGraphView(ReadOnlyGraph, MultiGraph): - UnionAdj = UnionMultiAdjacency - - def __init__(self, graph): - if not graph.is_multigraph(): - msg = 'Wrong View class. Use GraphView.' 
- raise NetworkXError(msg) - self._graph = graph - self.root_graph = graph - while hasattr(self.root_graph, '_graph'): - self.root_graph = self.root_graph._graph - self.graph = graph.graph - self._node = graph._node - if graph.is_directed(): - self._adj = self.UnionAdj(graph._succ, graph._pred) + newG._succ = G._adj + newG._pred = G._adj + newG._adj = G._adj + elif G.is_directed(): + if G.is_multigraph(): + newG._adj = UnionMultiAdjacency(G._succ, G._pred) else: - self._adj = graph._adj + newG._adj = UnionAdjacency(G._succ, G._pred) + else: + newG._adj = G._adj + return newG + + +def subgraph_view(G, filter_node=no_filter, filter_edge=no_filter): + """ View of `G` applying a filter on nodes and edges. + + `subgraph_view` provides a read-only view of the input graph that excludes + nodes and edges based on the outcome of two filter functions `filter_node` + and `filter_edge`. + + The `filter_node` function takes one argument --- the node --- and returns + `True` if the node should be included in the subgraph, and `False` if it + should not be included. + + The `filter_edge` function takes two (or three arguments if `G` is a + multi-graph) --- the nodes describing an edge, plus the edge-key if + parallel edges are possible --- and returns `True` if the edge should be + included in the subgraph, and `False` if it should not be included. + + Both node and edge filter functions are called on graph elements as they + are queried, meaning there is no up-front cost to creating the view. + + Parameters + ---------- + G : networkx.Graph + A directed/undirected graph/multigraph + + filter_node : callable, optional + A function taking a node as input, which returns `True` if the node + should appear in the view. + + filter_edge : callable, optional + A function taking as input the two nodes describing an edge (plus the + edge-key if `G` is a multi-graph), which returns `True` if the edge + should appear in the view. + + Returns + ------- + graph : networkx.Graph + A read-only graph view of the input graph. + + Examples + -------- + >>> G = nx.path_graph(6) + + Filter functions operate on the node, and return `True` if the node should + appear in the view: + + >>> def filter_node(n1): + ... return n1 != 5 + ... + >>> view = nx.subgraph_view(G, filter_node=filter_node) + >>> view.nodes() + NodeView((0, 1, 2, 3, 4)) + + We can use a closure pattern to filter graph elements based on additional + data --- for example, filtering on edge data attached to the graph: + + >>> G[3][4]["cross_me"] = False + >>> def filter_edge(n1, n2): + ... return G[n1][n2].get("cross_me", True) + ... 
+ >>> view = nx.subgraph_view(G, filter_edge=filter_edge) + >>> view.edges() + EdgeView([(0, 1), (1, 2), (2, 3), (4, 5)]) + + >>> view = nx.subgraph_view(G, filter_node=filter_node, filter_edge=filter_edge,) + >>> view.nodes() + NodeView((0, 1, 2, 3, 4)) + >>> view.edges() + EdgeView([(0, 1), (1, 2), (2, 3)]) + """ + newG = nx.freeze(G.__class__()) + newG._NODE_OK = filter_node + newG._EDGE_OK = filter_edge + + # create view by assigning attributes from G + newG._graph = G + newG.graph = G.graph + + newG._node = FilterAtlas(G._node, filter_node) + if G.is_multigraph(): + Adj = FilterMultiAdjacency + + def reverse_edge(u, v, k): + return filter_edge(v, u, k) + + else: + Adj = FilterAdjacency + + def reverse_edge(u, v): + return filter_edge(v, u) + + if G.is_directed(): + newG._succ = Adj(G._succ, filter_node, filter_edge) + newG._pred = Adj(G._pred, filter_node, reverse_edge) + newG._adj = newG._succ + else: + newG._adj = Adj(G._adj, filter_node, filter_edge) + return newG + + +@not_implemented_for("undirected") +def reverse_view(G): + """ View of `G` with edge directions reversed + + `reverse_view` returns a read-only view of the input graph where + edge directions are reversed. + + Identical to digraph.reverse(copy=False) + + Parameters + ---------- + G : networkx.DiGraph + + Returns + ------- + graph : networkx.DiGraph + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge(1, 2) + >>> G.add_edge(2, 3) + >>> G.edges() + OutEdgeView([(1, 2), (2, 3)]) + + >>> view = nx.reverse_view(G) + >>> view.edges() + OutEdgeView([(2, 1), (3, 2)]) + """ + newG = generic_graph_view(G) + newG._succ, newG._pred = G._pred, G._succ + newG._adj = newG._succ + return newG diff --git a/networkx/classes/multidigraph.py b/networkx/classes/multidigraph.py index dfcb8c6..89bcb48 100644 --- a/networkx/classes/multidigraph.py +++ b/networkx/classes/multidigraph.py @@ -1,23 +1,17 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg -# Dan Schult -# Pieter Swart """Base class for MultiDiGraph.""" from copy import deepcopy import networkx as nx -from networkx.classes.graph import Graph # for doctests from networkx.classes.digraph import DiGraph from networkx.classes.multigraph import MultiGraph from networkx.classes.coreviews import MultiAdjacencyView -from networkx.classes.reportviews import OutMultiEdgeView, InMultiEdgeView, \ - DiMultiDegreeView, OutMultiDegreeView, InMultiDegreeView +from networkx.classes.reportviews import ( + OutMultiEdgeView, + InMultiEdgeView, + DiMultiDegreeView, + OutMultiDegreeView, + InMultiDegreeView, +) from networkx.exception import NetworkXError @@ -104,7 +98,7 @@ class MultiDiGraph(MultiGraph, DiGraph): edge is created and stored using a key to identify the edge. By default the key is the lowest unused integer. 
- >>> keys = G.add_edges_from([(4,5,dict(route=282)), (4,5,dict(route=37))]) + >>> keys = G.add_edges_from([(4, 5, dict(route=282)), (4, 5, dict(route=37))]) >>> G[4] AdjacencyView({5: {0: {}, 1: {'route': 282}, 2: {'route': 37}}}) @@ -122,23 +116,23 @@ class MultiDiGraph(MultiGraph, DiGraph): Add node attributes using add_node(), add_nodes_from() or G.nodes - >>> G.add_node(1, time='5pm') - >>> G.add_nodes_from([3], time='2pm') + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") >>> G.nodes[1] {'time': '5pm'} - >>> G.nodes[1]['room'] = 714 - >>> del G.nodes[1]['room'] # remove attribute + >>> G.nodes[1]["room"] = 714 + >>> del G.nodes[1]["room"] # remove attribute >>> list(G.nodes(data=True)) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edges. - >>> key = G.add_edge(1, 2, weight=4.7 ) - >>> keys = G.add_edges_from([(3, 4), (4, 5)], color='red') - >>> keys = G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) - >>> G[1][2][0]['weight'] = 4.7 - >>> G.edges[1, 2, 0]['weight'] = 4 + >>> key = G.add_edge(1, 2, weight=4.7) + >>> keys = G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> keys = G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2][0]["weight"] = 4.7 + >>> G.edges[1, 2, 0]["weight"] = 4 Warning: we protect the graph data structure by making `G.edges[1, 2]` a read-only dict-like structure. However, you can assign to attributes @@ -150,13 +144,13 @@ class MultiDiGraph(MultiGraph, DiGraph): Many common graph features allow python syntax to speed reporting. - >>> 1 in G # check if node in graph + >>> 1 in G # check if node in graph True - >>> [n for n in G if n<3] # iterate through nodes + >>> [n for n in G if n < 3] # iterate through nodes [1, 2] >>> len(G) # number of nodes in graph 5 - >>> G[1] # adjacency dict-like view keyed by neighbor to edge attributes + >>> G[1] # adjacency dict-like view keyed by neighbor to edge attributes AdjacencyView({2: {0: {'weight': 4}, 1: {'color': 'blue'}}}) Often the best way to traverse all edges of a graph is via the neighbors. @@ -165,14 +159,14 @@ class MultiDiGraph(MultiGraph, DiGraph): >>> for n, nbrsdict in G.adjacency(): ... for nbr, keydict in nbrsdict.items(): - ... for key, eattr in keydict.items(): - ... if 'weight' in eattr: - ... # Do something useful with the edges - ... pass + ... for key, eattr in keydict.items(): + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass But the edges() method is often more convenient: - >>> for u, v, keys, weight in G.edges(data='weight', keys=True): + >>> for u, v, keys, weight in G.edges(data="weight", keys=True): ... if weight is not None: ... # Do something useful with the edges ... pass @@ -205,14 +199,20 @@ class MultiDiGraph(MultiGraph, DiGraph): extra features can be added. To replace one of the dicts create a new graph class by changing the class(!) variable holding the factory for that dict-like structure. The variable names are - node_dict_factory, adjlist_inner_dict_factory, adjlist_outer_dict_factory, - and edge_attr_dict_factory. + node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory, + adjlist_outer_dict_factory, edge_key_dict_factory, edge_attr_dict_factory + and graph_attr_dict_factory. node_dict_factory : function, (default: dict) Factory function to be used to create the dict containing node attributes, keyed by node id. 
        It should require no arguments and return a dict-like object

+    node_attr_dict_factory: function, (default: dict)
+        Factory function to be used to create the node attribute
+        dict which holds attribute values keyed by attribute name.
+        It should require no arguments and return a dict-like object
+
     adjlist_outer_dict_factory : function, (default: dict)
         Factory function to be used to create the outer-most dict
         in the data structure that holds adjacency info keyed by node.
@@ -230,9 +230,28 @@
     edge_attr_dict_factory : function, (default: dict)
         Factory function to be used to create the edge attribute
-        dict which holds attrbute values keyed by attribute name.
+        dict which holds attribute values keyed by attribute name.
+        It should require no arguments and return a dict-like object.
+
+    graph_attr_dict_factory : function, (default: dict)
+        Factory function to be used to create the graph attribute
+        dict which holds attribute values keyed by attribute name.
         It should require no arguments and return a dict-like object.

+    Typically, if your extension doesn't impact the data structure all
+    methods will be inherited without issue except: `to_directed/to_undirected`.
+    By default these methods create a DiGraph/Graph class and you probably
+    want them to create your extension of a DiGraph/Graph. To facilitate
+    this we define two class variables that you can set in your subclass.
+
+    to_directed_class : callable, (default: DiGraph or MultiDiGraph)
+        Class to create a new graph structure in the `to_directed` method.
+        If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used.
+
+    to_undirected_class : callable, (default: Graph or MultiGraph)
+        Class to create a new graph structure in the `to_undirected` method.
+        If `None`, a NetworkX class (Graph or MultiGraph) is used.
+
     Examples
     --------
@@ -240,6 +259,7 @@
     creating graph subclasses by overwriting the base class `dict` with
     a dictionary-like object.
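The `to_directed_class`/`to_undirected_class` hooks documented above replace the old `fresh_copy` pattern for subclasses. A minimal sketch, assuming networkx 2.5; the subclass names are hypothetical::

    import networkx as nx

    class MyDiGraph(nx.MultiDiGraph):
        def to_undirected_class(self):
            return MyGraph      # to_undirected() now builds our subclass

    class MyGraph(nx.MultiGraph):
        def to_directed_class(self):
            return MyDiGraph    # to_directed() now builds our subclass

    G = MyGraph([(1, 2), (2, 3)])
    H = G.to_directed()
    assert isinstance(H, MyDiGraph)
    assert isinstance(H.to_undirected(), MyGraph)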
""" + # node_dict_factory = dict # already assigned in Graph # adjlist_outer_dict_factory = dict # adjlist_inner_dict_factory = dict @@ -267,9 +287,9 @@ def __init__(self, incoming_graph_data=None, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G = nx.Graph(name='my graph') - >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph(name="my graph") + >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges >>> G = nx.Graph(e) Arbitrary graph attribute pairs (key=value) may be assigned @@ -385,10 +405,10 @@ def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): >>> G = nx.MultiDiGraph() >>> e = (1, 2) - >>> key = G.add_edge(1, 2) # explicit two-node form - >>> G.add_edge(*e) # single edge as tuple of two nodes + >>> key = G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes 1 - >>> G.add_edges_from( [(1, 2)] ) # add edges from iterable container + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container [2] Associate data to edges using keywords: @@ -408,11 +428,11 @@ def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): if u not in self._succ: self._succ[u] = self.adjlist_inner_dict_factory() self._pred[u] = self.adjlist_inner_dict_factory() - self._node[u] = {} + self._node[u] = self.node_attr_dict_factory() if v not in self._succ: self._succ[v] = self.adjlist_inner_dict_factory() self._pred[v] = self.adjlist_inner_dict_factory() - self._node[v] = {} + self._node[v] = self.node_attr_dict_factory() if key is None: key = self.new_edge_key(u, v) if v in self._succ[u]: @@ -457,39 +477,38 @@ def remove_edge(self, u, v, key=None): >>> nx.add_path(G, [0, 1, 2, 3]) >>> G.remove_edge(0, 1) >>> e = (1, 2) - >>> G.remove_edge(*e) # unpacks e from an edge tuple + >>> G.remove_edge(*e) # unpacks e from an edge tuple For multiple edges >>> G = nx.MultiDiGraph() >>> G.add_edges_from([(1, 2), (1, 2), (1, 2)]) # key_list returned [0, 1, 2] - >>> G.remove_edge(1, 2) # remove a single (arbitrary) edge + >>> G.remove_edge(1, 2) # remove a single (arbitrary) edge For edges with keys >>> G = nx.MultiDiGraph() - >>> G.add_edge(1, 2, key='first') + >>> G.add_edge(1, 2, key="first") 'first' - >>> G.add_edge(1, 2, key='second') + >>> G.add_edge(1, 2, key="second") 'second' - >>> G.remove_edge(1, 2, key='second') + >>> G.remove_edge(1, 2, key="second") """ try: d = self._adj[u][v] - except KeyError: - raise NetworkXError( - "The edge %s-%s is not in the graph." % (u, v)) + except KeyError as e: + raise NetworkXError(f"The edge {u}-{v} is not in the graph.") from e # remove the edge with specified data if key is None: d.popitem() else: try: del d[key] - except KeyError: - msg = "The edge %s-%s with key %s is not in the graph." - raise NetworkXError(msg % (u, v, key)) + except KeyError as e: + msg = f"The edge {u}-{v} with key {key} is not in the graph." 
+ raise NetworkXError(msg) from e if len(d) == 0: # remove the key entries if last edge del self._succ[u][v] @@ -547,15 +566,15 @@ def edges(self): >>> key = G.add_edge(2, 3, weight=5) >>> [e for e in G.edges()] [(0, 1), (1, 2), (2, 3)] - >>> list(G.edges(data=True)) # default data is {} (empty dict) + >>> list(G.edges(data=True)) # default data is {} (empty dict) [(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})] - >>> list(G.edges(data='weight', default=1)) + >>> list(G.edges(data="weight", default=1)) [(0, 1, 1), (1, 2, 1), (2, 3, 5)] - >>> list(G.edges(keys=True)) # default keys are integers + >>> list(G.edges(keys=True)) # default keys are integers [(0, 1, 0), (1, 2, 0), (2, 3, 0)] >>> list(G.edges(data=True, keys=True)) [(0, 1, 0, {}), (1, 2, 0, {}), (2, 3, 0, {'weight': 5})] - >>> list(G.edges(data='weight', default=1, keys=True)) + >>> list(G.edges(data="weight", default=1, keys=True)) [(0, 1, 0, 1), (1, 2, 0, 1), (2, 3, 0, 5)] >>> list(G.edges([0, 2])) [(0, 1), (2, 3)] @@ -566,9 +585,7 @@ def edges(self): -------- in_edges, out_edges """ - self.__dict__['edges'] = edges = OutMultiEdgeView(self) - self.__dict__['out_edges'] = edges - return edges + return OutMultiEdgeView(self) # alias out_edges to edges out_edges = edges @@ -604,8 +621,7 @@ def in_edges(self): -------- edges """ - self.__dict__['in_edges'] = in_edges = InMultiEdgeView(self) - return in_edges + return InMultiEdgeView(self) @property def degree(self): @@ -646,14 +662,13 @@ def degree(self): -------- >>> G = nx.MultiDiGraph() >>> nx.add_path(G, [0, 1, 2, 3]) - >>> G.degree(0) # node 0 with degree 1 + >>> G.degree(0) # node 0 with degree 1 1 >>> list(G.degree([0, 1, 2])) [(0, 1), (1, 2), (2, 2)] """ - self.__dict__['degree'] = degree = DiMultiDegreeView(self) - return degree + return DiMultiDegreeView(self) @property def in_degree(self): @@ -694,18 +709,17 @@ def in_degree(self): -------- >>> G = nx.MultiDiGraph() >>> nx.add_path(G, [0, 1, 2, 3]) - >>> G.in_degree(0) # node 0 with degree 0 + >>> G.in_degree(0) # node 0 with degree 0 0 >>> list(G.in_degree([0, 1, 2])) [(0, 0), (1, 1), (2, 1)] """ - self.__dict__['in_degree'] = in_degree = InMultiDegreeView(self) - return in_degree + return InMultiDegreeView(self) @property def out_degree(self): - """Return an iterator for (node, out-degree) or out-degree for single node. + """Returns an iterator for (node, out-degree) or out-degree for single node. out_degree(self, nbunch=None, weight=None) @@ -741,128 +755,24 @@ def out_degree(self): -------- >>> G = nx.MultiDiGraph() >>> nx.add_path(G, [0, 1, 2, 3]) - >>> G.out_degree(0) # node 0 with degree 1 + >>> G.out_degree(0) # node 0 with degree 1 1 >>> list(G.out_degree([0, 1, 2])) [(0, 1), (1, 1), (2, 1)] """ - self.__dict__['out_degree'] = out_degree = OutMultiDegreeView(self) - return out_degree + return OutMultiDegreeView(self) def is_multigraph(self): - """Return True if graph is a multigraph, False otherwise.""" + """Returns True if graph is a multigraph, False otherwise.""" return True def is_directed(self): - """Return True if graph is directed, False otherwise.""" + """Returns True if graph is directed, False otherwise.""" return True - def fresh_copy(self): - """Return a fresh copy graph with the same data structure. - - A fresh copy has no nodes, edges or graph attributes. It is - the same data structure as the current graph. This method is - typically used to create an empty version of the graph. - - Notes - ----- - If you subclass the base class you should overwrite this method - to return your class of graph. 
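The degree properties above now return a fresh view object on each access instead of caching it in the instance `__dict__`; reported values are unchanged. A short sketch of the three views on a MultiDiGraph, assuming networkx 2.5::

    import networkx as nx

    G = nx.MultiDiGraph()
    G.add_edge(0, 1, weight=2)
    G.add_edge(0, 1, weight=5)  # parallel edge, key 1
    G.add_edge(1, 2)

    print(G.out_degree(0))                  # 2 -- both parallel edges count
    print(G.in_degree(1, weight="weight"))  # 7 -- weighted sum 2 + 5
    print(dict(G.degree([0, 1])))           # {0: 2, 1: 3} -- nbunch restricts iteration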
- """ - return MultiDiGraph() - - def copy(self, as_view=False): - """Return a copy of the graph. - - The copy method by default returns a shallow copy of the graph - and attributes. That is, if an attribute is a container, that - container is shared by the original an the copy. - Use Python's `copy.deepcopy` for new containers. - - If `as_view` is True then a view is returned instead of a copy. - - Notes - ----- - All copies reproduce the graph structure, but data attributes - may be handled in different ways. There are four types of copies - of a graph that people might want. - - Deepcopy -- The default behavior is a "deepcopy" where the graph - structure as well as all data attributes and any objects they might - contain are copied. The entire graph object is new so that changes - in the copy do not affect the original object. (see Python's - copy.deepcopy) - - Data Reference (Shallow) -- For a shallow copy the graph structure - is copied but the edge, node and graph attribute dicts are - references to those in the original graph. This saves - time and memory but could cause confusion if you change an attribute - in one graph and it changes the attribute in the other. - NetworkX does not provide this level of shallow copy. - - Independent Shallow -- This copy creates new independent attribute - dicts and then does a shallow copy of the attributes. That is, any - attributes that are containers are shared between the new graph - and the original. This is exactly what `dict.copy()` provides. - You can obtain this style copy using: - - >>> G = nx.path_graph(5) - >>> H = G.copy() - >>> H = G.copy(as_view=False) - >>> H = nx.Graph(G) - >>> H = G.fresh_copy().__class__(G) - - Fresh Data -- For fresh data, the graph structure is copied while - new empty data attribute dicts are created. The resulting graph - is independent of the original and it has no edge, node or graph - attributes. Fresh copies are not enabled. Instead use: - - >>> H = G.fresh_copy() - >>> H.add_nodes_from(G) - >>> H.add_edges_from(G.edges) - - View -- Inspired by dict-views, graph-views act like read-only - versions of the original graph, providing a copy of the original - structure without requiring any memory for copying the information. - - See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. - - Parameters - ---------- - as_view : bool, optional (default=False) - If True, the returned graph-view provides a read-only view - of the original graph without actually copying any data. - - Returns - ------- - G : Graph - A copy of the graph. - - See Also - -------- - to_directed: return a directed copy of the graph. - - Examples - -------- - >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> H = G.copy() - - """ - if as_view is True: - return nx.graphviews.MultiDiGraphView(self) - G = self.fresh_copy() - G.graph.update(self.graph) - G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) - G.add_edges_from((u, v, key, datadict.copy()) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, datadict in keydict.items()) - return G - def to_undirected(self, reciprocal=False, as_view=False): - """Return an undirected representation of the digraph. + """Returns an undirected representation of the digraph. Parameters ---------- @@ -896,7 +806,7 @@ def to_undirected(self, reciprocal=False, as_view=False): returns a shallow copy of the data. 
See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. Warning: If you have subclassed MultiDiGraph to use dict-like objects in the data structure, those changes do not transfer @@ -904,7 +814,7 @@ def to_undirected(self, reciprocal=False, as_view=False): Examples -------- - >>> G = nx.path_graph(2) # or MultiGraph, etc + >>> G = nx.path_graph(2) # or MultiGraph, etc >>> H = G.to_directed() >>> list(H.edges) [(0, 1), (1, 0)] @@ -912,71 +822,32 @@ def to_undirected(self, reciprocal=False, as_view=False): >>> list(G2.edges) [(0, 1)] """ + graph_class = self.to_undirected_class() if as_view is True: - return nx.graphviews.MultiGraphView(self) + return nx.graphviews.generic_graph_view(self, graph_class) # deepcopy when not a view - G = MultiGraph() + G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) if reciprocal is True: - G.add_edges_from((u, v, key, deepcopy(data)) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, data in keydict.items() - if v in self._pred[u] and key in self._pred[u][v]) + G.add_edges_from( + (u, v, key, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, data in keydict.items() + if v in self._pred[u] and key in self._pred[u][v] + ) else: - G.add_edges_from((u, v, key, deepcopy(data)) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, data in keydict.items()) + G.add_edges_from( + (u, v, key, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, data in keydict.items() + ) return G - def subgraph(self, nodes): - """Return a SubGraph view of the subgraph induced on nodes in `nodes`. - - The induced subgraph of the graph contains the nodes in `nodes` - and the edges between those nodes. - - Parameters - ---------- - nodes : list, iterable - A container of nodes which will be iterated through once. - - Returns - ------- - G : SubGraph View - A subgraph view of the graph. The graph structure cannot be - changed but node/edge attributes can and are shared with the - original graph. - - Notes - ----- - The graph, edge and node attributes are shared with the original graph. - Changes to the graph structure is ruled out by the view, but changes - to attributes are reflected in the original graph. - - To create a subgraph with its own copy of the edge/node attributes use: - G.subgraph(nodes).copy() - - For an inplace reduction of a graph to a subgraph you can remove nodes: - G.remove_nodes_from([n for n in G if n not in set(nodes)]) - - Examples - -------- - >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> H = G.subgraph([0, 1, 2]) - >>> list(H.edges) - [(0, 1), (1, 2)] - """ - induced_nodes = nx.filters.show_nodes(self.nbunch_iter(nodes)) - SubGraph = nx.graphviews.SubMultiDiGraph - # if already a subgraph, don't make a chain - if hasattr(self, '_NODE_OK'): - return SubGraph(self._graph, induced_nodes, self._EDGE_OK) - return SubGraph(self, induced_nodes) - def reverse(self, copy=True): - """Return the reverse of the graph. + """Returns the reverse of the graph. The reverse is a graph with the same nodes and edges but with the directions of the edges reversed. @@ -989,10 +860,12 @@ def reverse(self, copy=True): the original graph. 
""" if copy: - H = self.fresh_copy() + H = self.__class__() H.graph.update(deepcopy(self.graph)) H.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - H.add_edges_from((v, u, k, deepcopy(d)) for u, v, k, d - in self.edges(keys=True, data=True)) + H.add_edges_from( + (v, u, k, deepcopy(d)) + for u, v, k, d in self.edges(keys=True, data=True) + ) return H - return nx.graphviews.MultiReverseView(self) + return nx.graphviews.reverse_view(self) diff --git a/networkx/classes/multigraph.py b/networkx/classes/multigraph.py index 40a8310..f6d8093 100644 --- a/networkx/classes/multigraph.py +++ b/networkx/classes/multigraph.py @@ -1,13 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg -# Dan Schult -# Pieter Swart """Base class for MultiGraph.""" from copy import deepcopy @@ -16,7 +6,6 @@ from networkx.classes.coreviews import MultiAdjacencyView from networkx.classes.reportviews import MultiEdgeView, MultiDegreeView from networkx import NetworkXError -from networkx.utils import iterable class MultiGraph(Graph): @@ -103,7 +92,7 @@ class MultiGraph(Graph): edge is created and stored using a key to identify the edge. By default the key is the lowest unused integer. - >>> keys = G.add_edges_from([(4,5,{'route':28}), (4,5,{'route':37})]) + >>> keys = G.add_edges_from([(4, 5, {"route": 28}), (4, 5, {"route": 37})]) >>> G[4] AdjacencyView({3: {0: {}}, 5: {0: {}, 1: {'route': 28}, 2: {'route': 37}}}) @@ -121,23 +110,23 @@ class MultiGraph(Graph): Add node attributes using add_node(), add_nodes_from() or G.nodes - >>> G.add_node(1, time='5pm') - >>> G.add_nodes_from([3], time='2pm') + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") >>> G.nodes[1] {'time': '5pm'} - >>> G.nodes[1]['room'] = 714 - >>> del G.nodes[1]['room'] # remove attribute + >>> G.nodes[1]["room"] = 714 + >>> del G.nodes[1]["room"] # remove attribute >>> list(G.nodes(data=True)) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edges. - >>> key = G.add_edge(1, 2, weight=4.7 ) - >>> keys = G.add_edges_from([(3, 4), (4, 5)], color='red') - >>> keys = G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) - >>> G[1][2][0]['weight'] = 4.7 - >>> G.edges[1, 2, 0]['weight'] = 4 + >>> key = G.add_edge(1, 2, weight=4.7) + >>> keys = G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> keys = G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2][0]["weight"] = 4.7 + >>> G.edges[1, 2, 0]["weight"] = 4 Warning: we protect the graph data structure by making `G.edges[1, 2]` a read-only dict-like structure. However, you can assign to attributes @@ -149,28 +138,28 @@ class MultiGraph(Graph): Many common graph features allow python syntax to speed reporting. - >>> 1 in G # check if node in graph + >>> 1 in G # check if node in graph True - >>> [n for n in G if n<3] # iterate through nodes + >>> [n for n in G if n < 3] # iterate through nodes [1, 2] >>> len(G) # number of nodes in graph 5 - >>> G[1] # adjacency dict-like view keyed by neighbor to edge attributes + >>> G[1] # adjacency dict-like view keyed by neighbor to edge attributes AdjacencyView({2: {0: {'weight': 4}, 1: {'color': 'blue'}}}) Often the best way to traverse all edges of a graph is via the neighbors. - The neighbors are reported as an adjacency-dict `G.adj` or as `G.adjacency()`. 
+    The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()`.

     >>> for n, nbrsdict in G.adjacency():
     ...     for nbr, keydict in nbrsdict.items():
-    ...        for key, eattr in keydict.items():
-    ...            if 'weight' in eattr:
-    ...                # Do something useful with the edges
-    ...                pass
+    ...         for key, eattr in keydict.items():
+    ...             if "weight" in eattr:
+    ...                 # Do something useful with the edges
+    ...                 pass

     But the edges() method is often more convenient:

-    >>> for u, v, keys, weight in G.edges(data='weight', keys=True):
+    >>> for u, v, keys, weight in G.edges(data="weight", keys=True):
     ...     if weight is not None:
     ...         # Do something useful with the edges
     ...         pass
@@ -203,14 +192,20 @@
     extra features can be added. To replace one of the dicts create
     a new graph class by changing the class(!) variable holding the
     factory for that dict-like structure. The variable names are
-    node_dict_factory, adjlist_inner_dict_factory, adjlist_outer_dict_factory,
-    and edge_attr_dict_factory.
+    node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory,
+    adjlist_outer_dict_factory, edge_key_dict_factory, edge_attr_dict_factory
+    and graph_attr_dict_factory.

     node_dict_factory : function, (default: dict)
         Factory function to be used to create the dict containing node
         attributes, keyed by node id.
         It should require no arguments and return a dict-like object

+    node_attr_dict_factory: function, (default: dict)
+        Factory function to be used to create the node attribute
+        dict which holds attribute values keyed by attribute name.
+        It should require no arguments and return a dict-like object
+
     adjlist_outer_dict_factory : function, (default: dict)
         Factory function to be used to create the outer-most dict
         in the data structure that holds adjacency info keyed by node.
@@ -228,9 +223,28 @@
     edge_attr_dict_factory : function, (default: dict)
         Factory function to be used to create the edge attribute
-        dict which holds attrbute values keyed by attribute name.
+        dict which holds attribute values keyed by attribute name.
+        It should require no arguments and return a dict-like object.
+
+    graph_attr_dict_factory : function, (default: dict)
+        Factory function to be used to create the graph attribute
+        dict which holds attribute values keyed by attribute name.
         It should require no arguments and return a dict-like object.

+    Typically, if your extension doesn't impact the data structure all
+    methods will be inherited without issue except: `to_directed/to_undirected`.
+    By default these methods create a DiGraph/Graph class and you probably
+    want them to create your extension of a DiGraph/Graph. To facilitate
+    this we define two class variables that you can set in your subclass.
+
+    to_directed_class : callable, (default: DiGraph or MultiDiGraph)
+        Class to create a new graph structure in the `to_directed` method.
+        If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used.
+
+    to_undirected_class : callable, (default: Graph or MultiGraph)
+        Class to create a new graph structure in the `to_undirected` method.
+        If `None`, a NetworkX class (Graph or MultiGraph) is used.
+
     Examples
     --------
@@ -238,12 +252,29 @@
     creating graph subclasses by overwriting the base class `dict` with
     a dictionary-like object.
""" + # node_dict_factory = dict # already assigned in Graph # adjlist_outer_dict_factory = dict # adjlist_inner_dict_factory = dict edge_key_dict_factory = dict # edge_attr_dict_factory = dict + def to_directed_class(self): + """Returns the class to use for empty directed copies. + + If you subclass the base classes, use this to designate + what directed class to use for `to_directed()` copies. + """ + return nx.MultiDiGraph + + def to_undirected_class(self): + """Returns the class to use for empty undirected copies. + + If you subclass the base classes, use this to designate + what directed class to use for `to_directed()` copies. + """ + return MultiGraph + def __init__(self, incoming_graph_data=None, **attr): """Initialize a graph with edges, name, or graph attributes. @@ -265,9 +296,9 @@ def __init__(self, incoming_graph_data=None, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G = nx.Graph(name='my graph') - >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph(name="my graph") + >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges >>> G = nx.Graph(e) Arbitrary graph attribute pairs (key=value) may be assigned @@ -300,7 +331,7 @@ def adj(self): return MultiAdjacencyView(self._adj) def new_edge_key(self, u, v): - """Return an unused key for edges between nodes `u` and `v`. + """Returns an unused key for edges between nodes `u` and `v`. The nodes `u` and `v` do not need to be already in the graph. @@ -376,16 +407,16 @@ def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): >>> G = nx.MultiGraph() >>> e = (1, 2) - >>> ekey = G.add_edge(1, 2) # explicit two-node form - >>> G.add_edge(*e) # single edge as tuple of two nodes + >>> ekey = G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes 1 - >>> G.add_edges_from( [(1, 2)] ) # add edges from iterable container + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container [2] Associate data to edges using keywords: >>> ekey = G.add_edge(1, 2, weight=3) - >>> ekey = G.add_edge(1, 2, key=0, weight=4) # update data for key=0 + >>> ekey = G.add_edge(1, 2, key=0, weight=4) # update data for key=0 >>> ekey = G.add_edge(1, 3, weight=7, capacity=15, length=342.7) For non-string attribute keys, use subscript notation. 
@@ -398,10 +429,10 @@ def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): # add nodes if u not in self._adj: self._adj[u] = self.adjlist_inner_dict_factory() - self._node[u] = {} + self._node[u] = self.node_attr_dict_factory() if v not in self._adj: self._adj[v] = self.adjlist_inner_dict_factory() - self._node[v] = {} + self._node[v] = self.node_attr_dict_factory() if key is None: key = self.new_edge_key(u, v) if v in self._adj[u]: @@ -460,15 +491,15 @@ def add_edges_from(self, ebunch_to_add, **attr): Examples -------- - >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples >>> e = zip(range(0, 3), range(1, 4)) - >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 + >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 Associate data to edges >>> G.add_edges_from([(1, 2), (2, 3)], weight=3) - >>> G.add_edges_from([(3, 4), (1, 4)], label='WN2898') + >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898") """ keylist = [] for e in ebunch_to_add: @@ -483,16 +514,16 @@ def add_edges_from(self, ebunch_to_add, **attr): dd = {} key = None else: - msg = "Edge tuple {} must be a 2-tuple, 3-tuple or 4-tuple." - raise NetworkXError(msg.format(e)) + msg = f"Edge tuple {e} must be a 2-tuple, 3-tuple or 4-tuple." + raise NetworkXError(msg) ddd = {} ddd.update(attr) try: ddd.update(dd) - except: + except (TypeError, ValueError): if ne != 3: raise - key = dd + key = dd # ne == 3 with 3rd value not dict, must be a key key = self.add_edge(u, v, key) self[u][v][key].update(ddd) keylist.append(key) @@ -525,39 +556,38 @@ def remove_edge(self, u, v, key=None): >>> nx.add_path(G, [0, 1, 2, 3]) >>> G.remove_edge(0, 1) >>> e = (1, 2) - >>> G.remove_edge(*e) # unpacks e from an edge tuple + >>> G.remove_edge(*e) # unpacks e from an edge tuple For multiple edges - >>> G = nx.MultiGraph() # or MultiDiGraph, etc + >>> G = nx.MultiGraph() # or MultiDiGraph, etc >>> G.add_edges_from([(1, 2), (1, 2), (1, 2)]) # key_list returned [0, 1, 2] - >>> G.remove_edge(1, 2) # remove a single (arbitrary) edge + >>> G.remove_edge(1, 2) # remove a single (arbitrary) edge For edges with keys - >>> G = nx.MultiGraph() # or MultiDiGraph, etc - >>> G.add_edge(1, 2, key='first') + >>> G = nx.MultiGraph() # or MultiDiGraph, etc + >>> G.add_edge(1, 2, key="first") 'first' - >>> G.add_edge(1, 2, key='second') + >>> G.add_edge(1, 2, key="second") 'second' - >>> G.remove_edge(1, 2, key='second') + >>> G.remove_edge(1, 2, key="second") """ try: d = self._adj[u][v] - except KeyError: - raise NetworkXError( - "The edge %s-%s is not in the graph." % (u, v)) + except KeyError as e: + raise NetworkXError(f"The edge {u}-{v} is not in the graph.") from e # remove the edge with specified data if key is None: d.popitem() else: try: del d[key] - except KeyError: - msg = "The edge %s-%s with key %s is not in the graph." - raise NetworkXError(msg % (u, v, key)) + except KeyError as e: + msg = f"The edge {u}-{v} with key {key} is not in the graph." 
+ raise NetworkXError(msg) from e if len(d) == 0: # remove the key entries if last edge del self._adj[u][v] @@ -588,7 +618,7 @@ def remove_edges_from(self, ebunch): Examples -------- >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc - >>> ebunch=[(1, 2), (2, 3)] + >>> ebunch = [(1, 2), (2, 3)] >>> G.remove_edges_from(ebunch) Removing multiple copies of edges @@ -598,8 +628,8 @@ def remove_edges_from(self, ebunch): >>> G.remove_edges_from([(1, 2), (1, 2)]) >>> list(G.edges()) [(1, 2)] - >>> G.remove_edges_from([(1, 2), (1, 2)]) # silently ignore extra copy - >>> list(G.edges) # now empty graph + >>> G.remove_edges_from([(1, 2), (1, 2)]) # silently ignore extra copy + >>> list(G.edges) # now empty graph [] """ for e in ebunch: @@ -609,7 +639,7 @@ def remove_edges_from(self, ebunch): pass def has_edge(self, u, v, key=None): - """Return True if the graph has an edge between nodes u and v. + """Returns True if the graph has an edge between nodes u and v. This is the same as `v in G[u] or key in G[u][v]` without KeyError exceptions. @@ -633,19 +663,19 @@ def has_edge(self, u, v, key=None): Can be called either using two nodes u, v, an edge tuple (u, v), or an edge tuple (u, v, key). - >>> G = nx.MultiGraph() # or MultiDiGraph + >>> G = nx.MultiGraph() # or MultiDiGraph >>> nx.add_path(G, [0, 1, 2, 3]) >>> G.has_edge(0, 1) # using two nodes True >>> e = (0, 1) >>> G.has_edge(*e) # e is a 2-tuple (u, v) True - >>> G.add_edge(0, 1, key='a') + >>> G.add_edge(0, 1, key="a") 'a' - >>> G.has_edge(0, 1, key='a') # specify key + >>> G.has_edge(0, 1, key="a") # specify key True - >>> e=(0, 1, 'a') - >>> G.has_edge(*e) # e is a 3-tuple (u, v, 'a') + >>> e = (0, 1, "a") + >>> G.has_edge(*e) # e is a 3-tuple (u, v, 'a') True The following syntax are equivalent: @@ -666,7 +696,7 @@ def has_edge(self, u, v, key=None): @property def edges(self): - """Return an iterator over the edges. + """Returns an iterator over the edges. edges(self, nbunch=None, data=False, keys=False, default=None) @@ -710,31 +740,30 @@ def edges(self): Examples -------- - >>> G = nx.MultiGraph() # or MultiDiGraph + >>> G = nx.MultiGraph() # or MultiDiGraph >>> nx.add_path(G, [0, 1, 2]) >>> key = G.add_edge(2, 3, weight=5) >>> [e for e in G.edges()] [(0, 1), (1, 2), (2, 3)] - >>> G.edges.data() # default data is {} (empty dict) + >>> G.edges.data() # default data is {} (empty dict) MultiEdgeDataView([(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]) - >>> G.edges.data('weight', default=1) + >>> G.edges.data("weight", default=1) MultiEdgeDataView([(0, 1, 1), (1, 2, 1), (2, 3, 5)]) - >>> G.edges(keys=True) # default keys are integers + >>> G.edges(keys=True) # default keys are integers MultiEdgeView([(0, 1, 0), (1, 2, 0), (2, 3, 0)]) >>> G.edges.data(keys=True) MultiEdgeDataView([(0, 1, 0, {}), (1, 2, 0, {}), (2, 3, 0, {'weight': 5})]) - >>> G.edges.data('weight', default=1, keys=True) + >>> G.edges.data("weight", default=1, keys=True) MultiEdgeDataView([(0, 1, 0, 1), (1, 2, 0, 1), (2, 3, 0, 5)]) >>> G.edges([0, 3]) MultiEdgeDataView([(0, 1), (3, 2)]) >>> G.edges(0) MultiEdgeDataView([(0, 1)]) """ - self.__dict__['edges'] = edges = MultiEdgeView(self) - return edges + return MultiEdgeView(self) def get_edge_data(self, u, v, key=None, default=None): - """Return the attribute dictionary associated with edge (u, v). + """Returns the attribute dictionary associated with edge (u, v). This is identical to `G[u][v][key]` except the default is returned instead of an exception is the edge doesn't exist. 
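The `add_edges_from` hunk above disambiguates 3-tuples by type, as the new `# ne == 3 with 3rd value not dict, must be a key` comment records: a dict third element is edge data, anything else is treated as an explicit key. A hedged sketch, assuming networkx 2.5::

    import networkx as nx

    G = nx.MultiGraph()
    keys = G.add_edges_from([
        (1, 2, {"weight": 3}),  # dict third element -> edge data, auto key
        (1, 2, "k7"),           # non-dict third element -> explicit key
    ])
    print(keys)           # [0, 'k7']
    print(G[1][2]["k7"])  # {} -- no data was attached to the keyed edge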
@@ -756,11 +785,11 @@

         Examples
         --------
-        >>> G = nx.MultiGraph() # or MultiDiGraph
-        >>> key = G.add_edge(0, 1, key='a', weight=7)
-        >>> G[0][1]['a']  # key='a'
+        >>> G = nx.MultiGraph()  # or MultiDiGraph
+        >>> key = G.add_edge(0, 1, key="a", weight=7)
+        >>> G[0][1]["a"]  # key='a'
         {'weight': 7}
-        >>> G.edges[0, 1, 'a']  # key='a'
+        >>> G.edges[0, 1, "a"]  # key='a'
         {'weight': 7}

         Warning: we protect the graph data structure by making
@@ -770,21 +799,21 @@
         bracket as shown next. You need to specify all edge info to assign
         to the edge data associated with an edge.

-        >>> G[0][1]['a']['weight'] = 10
-        >>> G.edges[0, 1, 'a']['weight'] = 10
-        >>> G[0][1]['a']['weight']
+        >>> G[0][1]["a"]["weight"] = 10
+        >>> G.edges[0, 1, "a"]["weight"] = 10
+        >>> G[0][1]["a"]["weight"]
         10
-        >>> G.edges[1, 0, 'a']['weight']
+        >>> G.edges[1, 0, "a"]["weight"]
         10

-        >>> G = nx.MultiGraph() # or MultiDiGraph
+        >>> G = nx.MultiGraph()  # or MultiDiGraph
         >>> nx.add_path(G, [0, 1, 2, 3])
         >>> G.get_edge_data(0, 1)
         {0: {}}
         >>> e = (0, 1)
-        >>> G.get_edge_data(*e) # tuple form
+        >>> G.get_edge_data(*e)  # tuple form
         {0: {}}
-        >>> G.get_edge_data('a', 'b', default=0) # edge not in graph, return 0
+        >>> G.get_edge_data("a", "b", default=0)  # edge not in graph, return 0
         0
         """
         try:
@@ -828,45 +857,30 @@
         Examples
         --------
-        >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
+        >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
         >>> nx.add_path(G, [0, 1, 2, 3])
-        >>> G.degree(0) # node 0 with degree 1
+        >>> G.degree(0)  # node 0 with degree 1
         1
         >>> list(G.degree([0, 1]))
         [(0, 1), (1, 2)]

         """
-        self.__dict__['degree'] = degree = MultiDegreeView(self)
-        return degree
+        return MultiDegreeView(self)

     def is_multigraph(self):
-        """Return True if graph is a multigraph, False otherwise."""
+        """Returns True if graph is a multigraph, False otherwise."""
         return True

     def is_directed(self):
-        """Return True if graph is directed, False otherwise."""
+        """Returns True if graph is directed, False otherwise."""
         return False

-    def fresh_copy(self):
-        """Return a fresh copy graph with the same data structure.
-
-        A fresh copy has no nodes, edges or graph attributes. It is
-        the same data structure as the current graph. This method is
-        typically used to create an empty version of the graph.
-
-        Notes
-        -----
-        If you subclass the base class you should overwrite this method
-        to return your class of graph.
-        """
-        return MultiGraph()
-
     def copy(self, as_view=False):
-        """Return a copy of the graph.
+        """Returns a copy of the graph.

-        The copy method by default returns a shallow copy of the graph
-        and attributes. That is, if an attribute is a container, that
-        container is shared by the original an the copy.
+        The copy method by default returns an independent shallow copy
+        of the graph and attributes. That is, if an attribute is a
+        container, that container is shared by the original and the copy.
         Use Python's `copy.deepcopy` for new containers.

         If `as_view` is True then a view is returned instead of a copy.
@@ -877,11 +891,10 @@
         may be handled in different ways. There are four types of copies
         of a graph that people might want.

-        Deepcopy -- The default behavior is a "deepcopy" where the graph
-        structure as well as all data attributes and any objects they might
-        contain are copied.
The entire graph object is new so that changes - in the copy do not affect the original object. (see Python's - copy.deepcopy) + Deepcopy -- A "deepcopy" copies the graph structure as well as + all data attributes and any objects they might contain. + The entire graph object is new so that changes in the copy + do not affect the original object. (see Python's copy.deepcopy) Data Reference (Shallow) -- For a shallow copy the graph structure is copied but the edge, node and graph attribute dicts are @@ -900,14 +913,14 @@ def copy(self, as_view=False): >>> H = G.copy() >>> H = G.copy(as_view=False) >>> H = nx.Graph(G) - >>> H = G.fresh_copy().__class__(G) + >>> H = G.__class__(G) Fresh Data -- For fresh data, the graph structure is copied while new empty data attribute dicts are created. The resulting graph is independent of the original and it has no edge, node or graph attributes. Fresh copies are not enabled. Instead use: - >>> H = G.fresh_copy() + >>> H = G.__class__() >>> H.add_nodes_from(G) >>> H.add_edges_from(G.edges) @@ -916,7 +929,7 @@ def copy(self, as_view=False): structure without requiring any memory for copying the information. See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. Parameters ---------- @@ -940,18 +953,20 @@ def copy(self, as_view=False): """ if as_view is True: - return nx.graphviews.MultiGraphView(self) - G = self.fresh_copy() + return nx.graphviews.generic_graph_view(self) + G = self.__class__() G.graph.update(self.graph) G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) - G.add_edges_from((u, v, key, datadict.copy()) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, datadict in keydict.items()) + G.add_edges_from( + (u, v, key, datadict.copy()) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) return G def to_directed(self, as_view=False): - """Return a directed representation of the graph. + """Returns a directed representation of the graph. Returns ------- @@ -970,7 +985,7 @@ def to_directed(self, as_view=False): shallow copy of the data. See the Python copy module for more information on shallow - and deep copies, https://docs.python.org/2/library/copy.html. + and deep copies, https://docs.python.org/3/library/copy.html. 
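The copy notes above distinguish deep, independent-shallow, fresh and view copies. A short sketch of the shared-container pitfall they warn about, assuming networkx 2.5::

    import copy
    import networkx as nx

    G = nx.MultiGraph()
    G.add_edge(0, 1, key=0, tags=["x"])

    H = G.copy()                          # independent shallow copy
    H.edges[0, 1, 0]["tags"].append("y")  # mutates the shared list
    print(G.edges[0, 1, 0]["tags"])       # ['x', 'y'] -- container shared with H

    D = copy.deepcopy(G)                  # fully independent copy
    D.edges[0, 1, 0]["tags"].append("z")
    print(G.edges[0, 1, 0]["tags"])       # ['x', 'y'] -- unaffected by D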
         Warning: If you have subclassed MultiGraph to use dict-like objects
         in the data structure, those changes do not transfer to the

         Examples
         --------
-        >>> G = nx.Graph()   # or MultiGraph, etc
+        >>> G = nx.Graph()  # or MultiGraph, etc
         >>> G.add_edge(0, 1)
         >>> H = G.to_directed()
         >>> list(H.edges)
@@ -986,27 +1001,29 @@

         If already directed, return a (deep) copy

-        >>> G = nx.DiGraph()   # or MultiDiGraph, etc
+        >>> G = nx.DiGraph()  # or MultiDiGraph, etc
         >>> G.add_edge(0, 1)
         >>> H = G.to_directed()
         >>> list(H.edges)
         [(0, 1)]
         """
+        graph_class = self.to_directed_class()
         if as_view is True:
-            return nx.graphviews.MultiDiGraphView(self)
+            return nx.graphviews.generic_graph_view(self, graph_class)
         # deepcopy when not a view
-        from networkx.classes.multidigraph import MultiDiGraph
-        G = MultiDiGraph()
+        G = graph_class()
         G.graph.update(deepcopy(self.graph))
         G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items())
-        G.add_edges_from((u, v, key, deepcopy(datadict))
-                         for u, nbrs in self.adj.items()
-                         for v, keydict in nbrs.items()
-                         for key, datadict in keydict.items())
+        G.add_edges_from(
+            (u, v, key, deepcopy(datadict))
+            for u, nbrs in self.adj.items()
+            for v, keydict in nbrs.items()
+            for key, datadict in keydict.items()
+        )
         return G

     def to_undirected(self, as_view=False):
-        """Return an undirected copy of the graph.
+        """Returns an undirected copy of the graph.

         Returns
         -------
@@ -1027,7 +1044,7 @@
         which returns a shallow copy of the data.

         See the Python copy module for more information on shallow
-        and deep copies, https://docs.python.org/2/library/copy.html.
+        and deep copies, https://docs.python.org/3/library/copy.html.

         Warning: If you have subclassed MultiGraph to use dict-like
         objects in the data structure, those changes do not transfer
@@ -1035,7 +1052,7 @@

         Examples
         --------
-        >>> G = nx.path_graph(2)   # or MultiGraph, etc
+        >>> G = nx.path_graph(2)  # or MultiGraph, etc
         >>> H = G.to_directed()
         >>> list(H.edges)
         [(0, 1), (1, 0)]
         >>> G2 = H.to_undirected()
         >>> list(G2.edges)
         [(0, 1)]
         """
+        graph_class = self.to_undirected_class()
         if as_view is True:
-            return nx.graphviews.MultiGraphView(self)
+            return nx.graphviews.generic_graph_view(self, graph_class)
         # deepcopy when not a view
-        G = MultiGraph()
+        G = graph_class()
         G.graph.update(deepcopy(self.graph))
         G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items())
-        G.add_edges_from((u, v, key, deepcopy(datadict))
-                         for u, nbrs in self._adj.items()
-                         for v, keydict in nbrs.items()
-                         for key, datadict in keydict.items())
+        G.add_edges_from(
+            (u, v, key, deepcopy(datadict))
+            for u, nbrs in self._adj.items()
+            for v, keydict in nbrs.items()
+            for key, datadict in keydict.items()
+        )
         return G

-    def subgraph(self, nodes):
-        """Return a SubGraph view of the subgraph induced on nodes in `nodes`.
-
-        The induced subgraph of the graph contains the nodes in `nodes`
-        and the edges between those nodes.
-
-        Parameters
-        ----------
-        nodes : list, iterable
-            A container of nodes which will be iterated through once.
-
-        Returns
-        -------
-        G : SubGraph View
-            A subgraph view of the graph. The graph structure cannot be
-            changed but node/edge attributes can and are shared with the
-            original graph.
-
-        Notes
-        -----
-        The graph, edge and node attributes are shared with the original graph.
-        Changes to the graph structure is ruled out by the view, but changes
-        to attributes are reflected in the original graph.
-
-        To create a subgraph with its own copy of the edge/node attributes use:
-        G.subgraph(nodes).copy()
-
-        For an inplace reduction of a graph to a subgraph you can remove nodes:
-        G.remove_nodes_from([n for n in G if n not in set(nodes)])
-
-        Examples
-        --------
-        >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
-        >>> nx.add_path(G, [0, 1, 2, 3])
-        >>> H = G.subgraph([0, 1, 2])
-        >>> list(H.edges)
-        [(0, 1), (1, 2)]
-        """
-        induced_nodes = nx.filters.show_nodes(self.nbunch_iter(nodes))
-        SubGraph = nx.graphviews.SubMultiGraph
-        # if already a subgraph, don't make a chain
-        if hasattr(self, '_NODE_OK'):
-            return SubGraph(self._graph, induced_nodes, self._EDGE_OK)
-        return SubGraph(self, induced_nodes)
-
     def number_of_edges(self, u=None, v=None):
-        """Return the number of edges between two nodes.
+        """Returns the number of edges between two nodes.

         Parameters
         ----------
diff --git a/networkx/classes/ordered.py b/networkx/classes/ordered.py
index 1cc7f01..74bd596 100644
--- a/networkx/classes/ordered.py
+++ b/networkx/classes/ordered.py
@@ -1,5 +1,10 @@
 """
 Consistently ordered variants of the default base classes.
+Note that if you are using Python 3.6+, you shouldn't need these classes
+because the dicts in Python 3.6+ are ordered.
+Note also that there are many differing expectations for the word "ordered"
+and that these classes may not provide the order you expect.
+The intent here is to give a consistent order, not a particular order.

 The Ordered (Di/Multi/MultiDi) Graphs give a consistent order for reporting of
 nodes and edges.  The order of node reporting agrees with node adding, but for
@@ -8,6 +13,19 @@
 In general, you should use the default (i.e., unordered) graph classes.
 However, there are times (e.g., when testing) when you may need the
 order preserved.
+
+Special care is required when using subgraphs of the Ordered classes.
+The order of nodes in the subgraph is not necessarily the same order
+as in the original graph. In general it is probably better to avoid
+using subgraphs and to replace them with code similar to:
+
+.. code-block:: python
+
+    # instead of SG = G.subgraph(ordered_nodes)
+    SG = nx.OrderedGraph()
+    SG.add_nodes_from(ordered_nodes)
+    SG.add_edges_from((u, v) for (u, v) in G.edges() if u in SG if v in SG)
+
 """
 from collections import OrderedDict

@@ -18,79 +36,44 @@

 __all__ = []

-__all__.extend([
-    'OrderedGraph',
-    'OrderedDiGraph',
-    'OrderedMultiGraph',
-    'OrderedMultiDiGraph',
-])
+__all__.extend(
+    ["OrderedGraph", "OrderedDiGraph", "OrderedMultiGraph", "OrderedMultiDiGraph"]
+)


 class OrderedGraph(Graph):
     """Consistently ordered variant of :class:`~networkx.Graph`."""
+
     node_dict_factory = OrderedDict
     adjlist_outer_dict_factory = OrderedDict
     adjlist_inner_dict_factory = OrderedDict
     edge_attr_dict_factory = OrderedDict

-    def fresh_copy(self):
-        """Return a fresh copy graph with the same data structure.
-
-        A fresh copy has no nodes, edges or graph attributes. It is
-        the same data structure as the current graph. This method is
-        typically used to create an empty version of the graph.
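Spelling out the subgraph workaround recommended in the ordered.py docstring above as a runnable sketch (networkx 2.5 assumed; the node list is illustrative)::

    import networkx as nx

    G = nx.OrderedGraph([(0, 1), (1, 2), (2, 3)])
    ordered_nodes = [2, 1, 0]

    # instead of SG = G.subgraph(ordered_nodes)
    SG = nx.OrderedGraph()
    SG.add_nodes_from(ordered_nodes)
    SG.add_edges_from((u, v) for (u, v) in G.edges() if u in SG if v in SG)
    print(list(SG.nodes()))  # [2, 1, 0] -- node-insertion order preserved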
- """ - return OrderedGraph() - class OrderedDiGraph(DiGraph): """Consistently ordered variant of :class:`~networkx.DiGraph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict - def fresh_copy(self): - """Return a fresh copy graph with the same data structure. - - A fresh copy has no nodes, edges or graph attributes. It is - the same data structure as the current graph. This method is - typically used to create an empty version of the graph. - """ - return OrderedDiGraph() - class OrderedMultiGraph(MultiGraph): """Consistently ordered variant of :class:`~networkx.MultiGraph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict - def fresh_copy(self): - """Return a fresh copy graph with the same data structure. - - A fresh copy has no nodes, edges or graph attributes. It is - the same data structure as the current graph. This method is - typically used to create an empty version of the graph. - """ - return OrderedMultiGraph() - class OrderedMultiDiGraph(MultiDiGraph): """Consistently ordered variant of :class:`~networkx.MultiDiGraph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict - - def fresh_copy(self): - """Return a fresh copy graph with the same data structure. - - A fresh copy has no nodes, edges or graph attributes. It is - the same data structure as the current graph. This method is - typically used to create an empty version of the graph. - """ - return OrderedMultiDiGraph() diff --git a/networkx/classes/reportviews.py b/networkx/classes/reportviews.py index ca539e9..3d77d92 100644 --- a/networkx/classes/reportviews.py +++ b/networkx/classes/reportviews.py @@ -1,13 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov), -# Pieter Swart (swart@lanl.gov), -# Dan Schult(dschult@colgate.edu) """ View Classes provide node, edge and degree "views" of a graph. @@ -92,17 +82,32 @@ The argument `nbunch` restricts edges to those incident to nodes in nbunch. """ -from collections import Mapping, Set, Iterable -import networkx as nx - -__all__ = ['NodeView', 'NodeDataView', - 'EdgeView', 'OutEdgeView', 'InEdgeView', - 'EdgeDataView', 'OutEdgeDataView', 'InEdgeDataView', - 'MultiEdgeView', 'OutMultiEdgeView', 'InMultiEdgeView', - 'MultiEdgeDataView', 'OutMultiEdgeDataView', 'InMultiEdgeDataView', - 'DegreeView', 'DiDegreeView', 'InDegreeView', 'OutDegreeView', - 'MultiDegreeView', 'DiMultiDegreeView', - 'InMultiDegreeView', 'OutMultiDegreeView'] +from collections.abc import Mapping, Set + +__all__ = [ + "NodeView", + "NodeDataView", + "EdgeView", + "OutEdgeView", + "InEdgeView", + "EdgeDataView", + "OutEdgeDataView", + "InEdgeDataView", + "MultiEdgeView", + "OutMultiEdgeView", + "InMultiEdgeView", + "MultiEdgeDataView", + "OutMultiEdgeDataView", + "InMultiEdgeDataView", + "DegreeView", + "DiDegreeView", + "InDegreeView", + "OutDegreeView", + "MultiDegreeView", + "DiMultiDegreeView", + "InMultiDegreeView", + "OutMultiDegreeView", +] # NodeViews @@ -124,20 +129,22 @@ class NodeView(Mapping, Set): >>> NV = G.nodes() >>> 2 in NV True - >>> for n in NV: print(n) + >>> for n in NV: + ... 
print(n) 0 1 2 - >>> assert(NV & {1, 2, 3} == {1, 2}) + >>> assert NV & {1, 2, 3} == {1, 2} - >>> G.add_node(2, color='blue') + >>> G.add_node(2, color="blue") >>> NV[2] {'color': 'blue'} - >>> G.add_node(8, color='red') + >>> G.add_node(8, color="red") >>> NDV = G.nodes(data=True) >>> (2, NV[2]) in NDV True - >>> for n, dd in NDV: print((n, dd.get('color', 'aqua'))) + >>> for n, dd in NDV: + ... print((n, dd.get("color", "aqua"))) (0, 'aqua') (1, 'aqua') (2, 'blue') @@ -145,10 +152,11 @@ class NodeView(Mapping, Set): >>> NDV[2] == NV[2] True - >>> NVdata = G.nodes(data='color', default='aqua') + >>> NVdata = G.nodes(data="color", default="aqua") >>> (2, NVdata[2]) in NVdata True - >>> for n, dd in NVdata: print((n, dd)) + >>> for n, dd in NVdata: + ... print((n, dd)) (0, 'aqua') (1, 'aqua') (2, 'blue') @@ -156,13 +164,14 @@ class NodeView(Mapping, Set): >>> NVdata[2] == NV[2] # NVdata gets 'color', NV gets datadict False """ - __slots__ = '_nodes', + + __slots__ = ("_nodes",) def __getstate__(self): - return {'_nodes': self._nodes} + return {"_nodes": self._nodes} def __setstate__(self, state): - self._nodes = state['_nodes'] + self._nodes = state["_nodes"] def __init__(self, graph): self._nodes = graph._node @@ -200,7 +209,7 @@ def __str__(self): return str(list(self)) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, tuple(self)) + return f"{self.__class__.__name__}({tuple(self)})" class NodeDataView(Set): @@ -220,17 +229,16 @@ class NodeDataView(Set): data : bool or string (default=False) default : object (default=None) """ - __slots__ = ('_nodes', '_data', '_default') + + __slots__ = ("_nodes", "_data", "_default") def __getstate__(self): - return {'_nodes': self._nodes, - '_data': self._data, - '_default': self._default} + return {"_nodes": self._nodes, "_data": self._data, "_default": self._default} def __setstate__(self, state): - self._nodes = state['_nodes'] - self._data = state['_data'] - self._default = state['_default'] + self._nodes = state["_nodes"] + self._data = state["_data"] + self._default = state["_default"] def __init__(self, nodedict, data=False, default=None): self._nodes = nodedict @@ -244,7 +252,7 @@ def _from_iterable(cls, it): except TypeError as err: if "unhashable" in str(err): msg = " : Could be b/c data=True or your values are unhashable" - raise TypeError(str(err) + msg) + raise TypeError(str(err) + msg) from err raise def __len__(self): @@ -256,8 +264,10 @@ def __iter__(self): return iter(self._nodes) if data is True: return iter(self._nodes.items()) - return ((n, dd[data] if data in dd else self._default) - for n, dd in self._nodes.items()) + return ( + (n, dd[data] if data in dd else self._default) + for n, dd in self._nodes.items() + ) def __contains__(self, n): try: @@ -284,16 +294,16 @@ def __str__(self): return str(list(self)) def __repr__(self): + name = self.__class__.__name__ if self._data is False: - return '%s(%r)' % (self.__class__.__name__, tuple(self)) + return f"{name}({tuple(self)})" if self._data is True: - return '%s(%r)' % (self.__class__.__name__, dict(self)) - return '%s(%r, data=%r)' % \ - (self.__class__.__name__, dict(self), self._data) + return f"{name}({dict(self)})" + return f"{name}({dict(self)}, data={self._data!r})" # DegreeViews -class DiDegreeView(object): +class DiDegreeView: """A View class for degree of nodes in a NetworkX Graph The functionality is like dict.items() with (node, degree) pairs. 
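The NodeView doctests above exercise its Mapping and Set sides together; one more hedged sketch, assuming networkx 2.5::

    import networkx as nx

    G = nx.path_graph(3)  # nodes 0, 1, 2
    NV = G.nodes

    print(2 in NV)         # True -- set-like membership
    print(NV & {1, 2, 3})  # {1, 2} -- set intersection, as in the doctest above
    G.add_node(1, color="red")
    print(NV[1])           # {'color': 'red'} -- dict-like attribute lookup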
@@ -315,8 +325,8 @@ class DiDegreeView(object): -------- >>> G = nx.path_graph(3) >>> DV = G.degree() - >>> assert(DV[2] == 1) - >>> assert(sum(deg for n, deg in DV) == 4) + >>> assert DV[2] == 1 + >>> assert sum(deg for n, deg in DV) == 4 >>> DVweight = G.degree(weight="span") >>> G.add_edge(1, 2, span=34) @@ -328,15 +338,14 @@ class DiDegreeView(object): 70 >>> DVnbunch = G.degree(nbunch=(1, 2)) - >>> assert(len(list(DVnbunch)) == 2) # iteration over nbunch only + >>> assert len(list(DVnbunch)) == 2 # iteration over nbunch only """ def __init__(self, G, nbunch=None, weight=None): self._graph = G self._succ = G._succ if hasattr(G, "_succ") else G._adj self._pred = G._pred if hasattr(G, "_pred") else G._adj - self._nodes = self._succ if nbunch is None \ - else list(G.nbunch_iter(nbunch)) + self._nodes = self._succ if nbunch is None else list(G.nbunch_iter(nbunch)) self._weight = weight def __call__(self, nbunch=None, weight=None): @@ -359,8 +368,9 @@ def __getitem__(self, n): preds = self._pred[n] if weight is None: return len(succs) + len(preds) - return sum(dd.get(weight, 1) for dd in succs.values()) + \ - sum(dd.get(weight, 1) for dd in preds.values()) + return sum(dd.get(weight, 1) for dd in succs.values()) + sum( + dd.get(weight, 1) for dd in preds.values() + ) def __iter__(self): weight = self._weight @@ -373,8 +383,9 @@ def __iter__(self): for n in self._nodes: succs = self._succ[n] preds = self._pred[n] - deg = sum(dd.get(weight, 1) for dd in succs.values()) \ - + sum(dd.get(weight, 1) for dd in preds.values()) + deg = sum(dd.get(weight, 1) for dd in succs.values()) + sum( + dd.get(weight, 1) for dd in preds.values() + ) yield (n, deg) def __len__(self): @@ -384,7 +395,7 @@ def __str__(self): return str(list(self)) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, dict(self)) + return f"{self.__class__.__name__}({dict(self)})" class DegreeView(DiDegreeView): @@ -414,9 +425,9 @@ class DegreeView(DiDegreeView): -------- >>> G = nx.path_graph(3) >>> DV = G.degree() - >>> assert(DV[2] == 1) - >>> assert(G.degree[2] == 1) - >>> assert(sum(deg for n, deg in DV) == 4) + >>> assert DV[2] == 1 + >>> assert G.degree[2] == 1 + >>> assert sum(deg for n, deg in DV) == 4 >>> DVweight = G.degree(weight="span") >>> G.add_edge(1, 2, span=34) @@ -428,7 +439,7 @@ class DegreeView(DiDegreeView): 70 >>> DVnbunch = G.degree(nbunch=(1, 2)) - >>> assert(len(list(DVnbunch)) == 2) # iteration over nbunch only + >>> assert len(list(DVnbunch)) == 2 # iteration over nbunch only """ def __getitem__(self, n): @@ -436,8 +447,9 @@ def __getitem__(self, n): nbrs = self._succ[n] if weight is None: return len(nbrs) + (n in nbrs) - return sum(dd.get(weight, 1) for dd in nbrs.values()) + \ - (n in nbrs and nbrs[n].get(weight, 1)) + return sum(dd.get(weight, 1) for dd in nbrs.values()) + ( + n in nbrs and nbrs[n].get(weight, 1) + ) def __iter__(self): weight = self._weight @@ -448,8 +460,9 @@ def __iter__(self): else: for n in self._nodes: nbrs = self._succ[n] - deg = sum(dd.get(weight, 1) for dd in nbrs.values()) + \ - (n in nbrs and nbrs[n].get(weight, 1)) + deg = sum(dd.get(weight, 1) for dd in nbrs.values()) + ( + n in nbrs and nbrs[n].get(weight, 1) + ) yield (n, deg) @@ -506,11 +519,13 @@ def __getitem__(self, n): weight = self._weight nbrs = self._succ[n] if weight is None: - return sum(len(keys) for keys in nbrs.values()) + \ - (n in nbrs and len(nbrs[n])) + return sum(len(keys) for keys in nbrs.values()) + ( + n in nbrs and len(nbrs[n]) + ) # edge weighted graph - degree is sum of nbr edge 
weights - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) if n in nbrs: deg += sum(d.get(weight, 1) for d in nbrs[n].values()) return deg @@ -520,14 +535,18 @@ def __iter__(self): if weight is None: for n in self._nodes: nbrs = self._succ[n] - deg = sum(len(keys) for keys in nbrs.values()) + \ - (n in nbrs and len(nbrs[n])) + deg = sum(len(keys) for keys in nbrs.values()) + ( + n in nbrs and len(nbrs[n]) + ) yield (n, deg) else: for n in self._nodes: nbrs = self._succ[n] - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) if n in nbrs: deg += sum(d.get(weight, 1) for d in nbrs[n].values()) yield (n, deg) @@ -541,13 +560,15 @@ def __getitem__(self, n): succs = self._succ[n] preds = self._pred[n] if weight is None: - return sum(len(keys) for keys in succs.values()) + \ - sum(len(keys) for keys in preds.values()) + return sum(len(keys) for keys in succs.values()) + sum( + len(keys) for keys in preds.values() + ) # edge weighted graph - degree is sum of nbr edge weights - deg = sum(d.get(weight, 1) for key_dict in succs.values() - for d in key_dict.values()) + \ - sum(d.get(weight, 1) for key_dict in preds.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) for key_dict in succs.values() for d in key_dict.values() + ) + sum( + d.get(weight, 1) for key_dict in preds.values() for d in key_dict.values() + ) return deg def __iter__(self): @@ -556,17 +577,23 @@ def __iter__(self): for n in self._nodes: succs = self._succ[n] preds = self._pred[n] - deg = sum(len(keys) for keys in succs.values()) + \ - sum(len(keys) for keys in preds.values()) + deg = sum(len(keys) for keys in succs.values()) + sum( + len(keys) for keys in preds.values() + ) yield (n, deg) else: for n in self._nodes: succs = self._succ[n] preds = self._pred[n] - deg = sum(d.get(weight, 1) for key_dict in succs.values() - for d in key_dict.values()) + \ - sum(d.get(weight, 1) for key_dict in preds.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in succs.values() + for d in key_dict.values() + ) + sum( + d.get(weight, 1) + for key_dict in preds.values() + for d in key_dict.values() + ) yield (n, deg) @@ -579,8 +606,9 @@ def __getitem__(self, n): if weight is None: return sum(len(data) for data in nbrs.values()) # edge weighted graph - degree is sum of nbr edge weights - return sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + return sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) def __iter__(self): weight = self._weight @@ -592,8 +620,11 @@ def __iter__(self): else: for n in self._nodes: nbrs = self._pred[n] - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) yield (n, deg) @@ -606,8 +637,9 @@ def __getitem__(self, n): if weight is None: return sum(len(data) for data in nbrs.values()) # edge weighted graph - degree is sum of nbr edge weights - return sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + return sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) def __iter__(self): weight = self._weight @@ -619,34 +651,48 @@ def __iter__(self): else: for n 
in self._nodes: nbrs = self._succ[n] - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) yield (n, deg) # EdgeDataViews -class OutEdgeDataView(object): +class OutEdgeDataView: """EdgeDataView for outward edges of DiGraph; See EdgeDataView""" - __slots__ = ('_viewer', '_nbunch', '_data', '_default', - '_adjdict', '_nodes_nbrs', '_report') + + __slots__ = ( + "_viewer", + "_nbunch", + "_data", + "_default", + "_adjdict", + "_nodes_nbrs", + "_report", + ) def __getstate__(self): - return {'viewer': self._viewer, - 'nbunch': self._nbunch, - 'data': self._data, - 'default': self._default} + return { + "viewer": self._viewer, + "nbunch": self._nbunch, + "data": self._data, + "default": self._default, + } def __setstate__(self, state): self.__init__(**state) def __init__(self, viewer, nbunch=None, data=False, default=None): self._viewer = viewer - self._adjdict = viewer._adjdict + adjdict = self._adjdict = viewer._adjdict if nbunch is None: - self._nodes_nbrs = self._adjdict.items + self._nodes_nbrs = adjdict.items else: - nbunch = list(viewer._graph.nbunch_iter(nbunch)) - self._nodes_nbrs = lambda: [(n, self._adjdict[n]) for n in nbunch] + # dict retains order of nodes but acts like a set + nbunch = dict.fromkeys(viewer._graph.nbunch_iter(nbunch)) + self._nodes_nbrs = lambda: [(n, adjdict[n]) for n in nbunch] self._nbunch = nbunch self._data = data self._default = default @@ -656,19 +702,27 @@ def __init__(self, viewer, nbunch=None, data=False, default=None): elif data is False: self._report = lambda n, nbr, dd: (n, nbr) else: # data is attribute name - self._report = lambda n, nbr, dd: \ - (n, nbr, dd[data]) if data in dd else (n, nbr, default) + self._report = ( + lambda n, nbr, dd: (n, nbr, dd[data]) + if data in dd + else (n, nbr, default) + ) def __len__(self): return sum(len(nbrs) for n, nbrs in self._nodes_nbrs()) def __iter__(self): - return (self._report(n, nbr, dd) for n, nbrs in self._nodes_nbrs() - for nbr, dd in nbrs.items()) + return ( + self._report(n, nbr, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, dd in nbrs.items() + ) def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch: + return False # this edge doesn't start in nbunch try: - u, v = e[:2] ddict = self._adjdict[u][v] except KeyError: return False @@ -678,7 +732,7 @@ def __str__(self): return str(list(self)) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, list(self)) + return f"{self.__class__.__name__}({list(self)})" class EdgeDataView(OutEdgeDataView): @@ -704,11 +758,12 @@ class EdgeDataView(OutEdgeDataView): Examples -------- >>> G = nx.path_graph(3) - >>> G.add_edge(1, 2, foo='bar') - >>> list(G.edges(data='foo', default='biz')) + >>> G.add_edge(1, 2, foo="bar") + >>> list(G.edges(data="foo", default="biz")) [(0, 1, 'biz'), (1, 2, 'bar')] - >>> assert((0, 1, 'biz') in G.edges(data='foo', default='biz')) + >>> assert (0, 1, "biz") in G.edges(data="foo", default="biz") """ + __slots__ = () def __len__(self): @@ -724,28 +779,33 @@ def __iter__(self): del seen def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch and v not in self._nbunch: + return False # this edge doesn't start and it doesn't end in nbunch try: - u, v = e[:2] ddict = self._adjdict[u][v] except KeyError: - try: - ddict = self._adjdict[v][u] - except KeyError: - return False + return False return e == 
self._report(u, v, ddict) class InEdgeDataView(OutEdgeDataView): """An EdgeDataView class for outward edges of DiGraph; See EdgeDataView""" + __slots__ = () def __iter__(self): - return (self._report(nbr, n, dd) for n, nbrs in self._nodes_nbrs() - for nbr, dd in nbrs.items()) + return ( + self._report(nbr, n, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, dd in nbrs.items() + ) def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and v not in self._nbunch: + return False # this edge doesn't end in nbunch try: - u, v = e[:2] ddict = self._adjdict[v][u] except KeyError: return False @@ -754,28 +814,31 @@ def __contains__(self, e): class OutMultiEdgeDataView(OutEdgeDataView): """An EdgeDataView for outward edges of MultiDiGraph; See EdgeDataView""" - __slots__ = ('keys',) + + __slots__ = ("keys",) def __getstate__(self): - return {'viewer': self._viewer, - 'nbunch': self._nbunch, - 'keys': self.keys, - 'data': self._data, - 'default': self._default} + return { + "viewer": self._viewer, + "nbunch": self._nbunch, + "keys": self.keys, + "data": self._data, + "default": self._default, + } def __setstate__(self, state): self.__init__(**state) - def __init__(self, viewer, nbunch=None, - data=False, keys=False, default=None): + def __init__(self, viewer, nbunch=None, data=False, keys=False, default=None): self._viewer = viewer - self._adjdict = viewer._adjdict + adjdict = self._adjdict = viewer._adjdict self.keys = keys if nbunch is None: - self._nodes_nbrs = self._adjdict.items + self._nodes_nbrs = adjdict.items else: - nbunch = list(viewer._graph.nbunch_iter(nbunch)) - self._nodes_nbrs = lambda: [(n, self._adjdict[n]) for n in nbunch] + # dict retains order of nodes but acts like a set + nbunch = dict.fromkeys(viewer._graph.nbunch_iter(nbunch)) + self._nodes_nbrs = lambda: [(n, adjdict[n]) for n in nbunch] self._nbunch = nbunch self._data = data self._default = default @@ -792,21 +855,33 @@ def __init__(self, viewer, nbunch=None, self._report = lambda n, nbr, k, dd: (n, nbr) else: # data is attribute name if keys is True: - self._report = lambda n, nbr, k, dd: (n, nbr, k, dd[data]) \ - if data in dd else (n, nbr, k, default) + self._report = ( + lambda n, nbr, k, dd: (n, nbr, k, dd[data]) + if data in dd + else (n, nbr, k, default) + ) else: - self._report = lambda n, nbr, k, dd: (n, nbr, dd[data]) \ - if data in dd else (n, nbr, default) + self._report = ( + lambda n, nbr, k, dd: (n, nbr, dd[data]) + if data in dd + else (n, nbr, default) + ) def __len__(self): return sum(1 for e in self) def __iter__(self): - return (self._report(n, nbr, k, dd) for n, nbrs in self._nodes_nbrs() - for nbr, kd in nbrs.items() for k, dd in kd.items()) + return ( + self._report(n, nbr, k, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, kd in nbrs.items() + for k, dd in kd.items() + ) def __contains__(self, e): u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch: + return False # this edge doesn't start in nbunch try: kdict = self._adjdict[u][v] except KeyError: @@ -826,6 +901,7 @@ def __contains__(self, e): class MultiEdgeDataView(OutMultiEdgeDataView): """An EdgeDataView class for edges of MultiGraph; See EdgeDataView""" + __slots__ = () def __iter__(self): @@ -840,6 +916,8 @@ def __iter__(self): def __contains__(self, e): u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch and v not in self._nbunch: + return False # this edge doesn't start and doesn't end in nbunch try: kdict = self._adjdict[u][v] except KeyError: @@ -862,14 +940,21 @@ def 
__contains__(self, e): class InMultiEdgeDataView(OutMultiEdgeDataView): """An EdgeDataView for inward edges of MultiDiGraph; See EdgeDataView""" + __slots__ = () def __iter__(self): - return (self._report(nbr, n, k, dd) for n, nbrs in self._nodes_nbrs() - for nbr, kd in nbrs.items() for k, dd in kd.items()) + return ( + self._report(nbr, n, k, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, kd in nbrs.items() + for k, dd in kd.items() + ) def __contains__(self, e): u, v = e[:2] + if self._nbunch is not None and v not in self._nbunch: + return False # this edge doesn't end in nbunch try: kdict = self._adjdict[v][u] except KeyError: @@ -887,13 +972,14 @@ def __contains__(self, e): # EdgeViews have set operations and no data reported class OutEdgeView(Set, Mapping): """A EdgeView class for outward edges of a DiGraph""" - __slots__ = ('_adjdict', '_graph', '_nodes_nbrs') + + __slots__ = ("_adjdict", "_graph", "_nodes_nbrs") def __getstate__(self): - return {'_graph': self._graph} + return {"_graph": self._graph} def __setstate__(self, state): - self._graph = G = state['_graph'] + self._graph = G = state["_graph"] self._adjdict = G._succ if hasattr(G, "succ") else G._adj self._nodes_nbrs = self._adjdict.items @@ -945,7 +1031,7 @@ def __str__(self): return str(list(self)) def __repr__(self): - return "{0.__class__.__name__}({1!r})".format(self, list(self)) + return f"{self.__class__.__name__}({list(self)})" class EdgeView(OutEdgeView): @@ -982,49 +1068,55 @@ class EdgeView(OutEdgeView): >>> EV = G.edges() >>> (2, 3) in EV True - >>> for u, v in EV: print((u, v)) + >>> for u, v in EV: + ... print((u, v)) (0, 1) (1, 2) (2, 3) - >>> assert(EV & {(1, 2), (3, 4)} == {(1, 2)}) + >>> assert EV & {(1, 2), (3, 4)} == {(1, 2)} - >>> EVdata = G.edges(data='color', default='aqua') - >>> G.add_edge(2, 3, color='blue') - >>> assert((2, 3, 'blue') in EVdata) - >>> for u, v, c in EVdata: print("({}, {}) has color: {}".format(u, v, c)) + >>> EVdata = G.edges(data="color", default="aqua") + >>> G.add_edge(2, 3, color="blue") + >>> assert (2, 3, "blue") in EVdata + >>> for u, v, c in EVdata: + ... print(f"({u}, {v}) has color: {c}") (0, 1) has color: aqua (1, 2) has color: aqua (2, 3) has color: blue >>> EVnbunch = G.edges(nbunch=2) - >>> assert((2, 3) in EVnbunch) - >>> assert((0, 1) in EVnbunch) # nbunch is ignored in __contains__ - >>> for u, v in EVnbunch: assert(u == 2 or v == 2) + >>> assert (2, 3) in EVnbunch + >>> assert (0, 1) not in EVnbunch + >>> for u, v in EVnbunch: + ... assert u == 2 or v == 2 - >>> MG = nx.path_graph(4, create_using=nx.MultiGraph()) + >>> MG = nx.path_graph(4, create_using=nx.MultiGraph) >>> EVmulti = MG.edges(keys=True) >>> (2, 3, 0) in EVmulti True - >>> (2, 3) in EVmulti # 2-tuples work even when keys is True + >>> (2, 3) in EVmulti # 2-tuples work even when keys is True True >>> key = MG.add_edge(2, 3) - >>> for u, v, k in EVmulti: print((u, v, k)) + >>> for u, v, k in EVmulti: + ... 
print((u, v, k)) (0, 1, 0) (1, 2, 0) (2, 3, 0) (2, 3, 1) """ + __slots__ = () dataview = EdgeDataView def __len__(self): - return sum(len(nbrs) + (n in nbrs) for n, nbrs in self._nodes_nbrs()) // 2 + num_nbrs = (len(nbrs) + (n in nbrs) for n, nbrs in self._nodes_nbrs()) + return sum(num_nbrs) // 2 def __iter__(self): seen = {} for n, nbrs in self._nodes_nbrs(): - for nbr in nbrs: + for nbr in list(nbrs): if nbr not in seen: yield (n, nbr) seen[n] = 1 @@ -1040,10 +1132,11 @@ def __contains__(self, e): class InEdgeView(OutEdgeView): """A EdgeView class for inward edges of a DiGraph""" + __slots__ = () def __setstate__(self, state): - self._graph = G = state['_graph'] + self._graph = G = state["_graph"] self._adjdict = G._pred if hasattr(G, "pred") else G._adj self._nodes_nbrs = self._adjdict.items @@ -1073,13 +1166,15 @@ def __getitem__(self, e): class OutMultiEdgeView(OutEdgeView): """A EdgeView class for outward edges of a MultiDiGraph""" + __slots__ = () dataview = OutMultiEdgeDataView def __len__(self): - return sum(len(kdict) for n, nbrs in self._nodes_nbrs() - for nbr, kdict in nbrs.items()) + return sum( + len(kdict) for n, nbrs in self._nodes_nbrs() for nbr, kdict in nbrs.items() + ) def __iter__(self): for n, nbrs in self._nodes_nbrs(): @@ -1118,6 +1213,7 @@ def data(self, data=True, keys=False, default=None, nbunch=None): class MultiEdgeView(OutMultiEdgeView): """A EdgeView class for edges of a MultiGraph""" + __slots__ = () dataview = MultiEdgeDataView @@ -1138,10 +1234,11 @@ def __iter__(self): class InMultiEdgeView(OutMultiEdgeView): """A EdgeView class for inward edges of a MultiDiGraph""" + __slots__ = () def __setstate__(self, state): - self._graph = G = state['_graph'] + self._graph = G = state["_graph"] self._adjdict = G._pred if hasattr(G, "pred") else G._adj self._nodes_nbrs = self._adjdict.items diff --git a/networkx/classes/tests/__init__.py b/networkx/classes/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/classes/tests/historical_tests.py b/networkx/classes/tests/historical_tests.py index 25add85..8f53c4c 100644 --- a/networkx/classes/tests/historical_tests.py +++ b/networkx/classes/tests/historical_tests.py @@ -1,40 +1,39 @@ -#!/usr/bin/env python """Original NetworkX graph tests""" -from nose.tools import * +import pytest import networkx as nx from networkx import convert_node_labels_to_integers as cnlti -from networkx.testing import * - - -class HistoricalTests(object): - - def setUp(self): - self.null = nx.null_graph() - self.P1 = cnlti(nx.path_graph(1), first_label=1) - self.P3 = cnlti(nx.path_graph(3), first_label=1) - self.P10 = cnlti(nx.path_graph(10), first_label=1) - self.K1 = cnlti(nx.complete_graph(1), first_label=1) - self.K3 = cnlti(nx.complete_graph(3), first_label=1) - self.K4 = cnlti(nx.complete_graph(4), first_label=1) - self.K5 = cnlti(nx.complete_graph(5), first_label=1) - self.K10 = cnlti(nx.complete_graph(10), first_label=1) - self.G = nx.Graph +from networkx.testing import assert_edges_equal, assert_nodes_equal + + +class HistoricalTests: + @classmethod + def setup_class(cls): + cls.null = nx.null_graph() + cls.P1 = cnlti(nx.path_graph(1), first_label=1) + cls.P3 = cnlti(nx.path_graph(3), first_label=1) + cls.P10 = cnlti(nx.path_graph(10), first_label=1) + cls.K1 = cnlti(nx.complete_graph(1), first_label=1) + cls.K3 = cnlti(nx.complete_graph(3), first_label=1) + cls.K4 = cnlti(nx.complete_graph(4), first_label=1) + cls.K5 = cnlti(nx.complete_graph(5), first_label=1) + cls.K10 = cnlti(nx.complete_graph(10), 
first_label=1) + cls.G = nx.Graph def test_name(self): G = self.G(name="test") - assert_equal(str(G), 'test') - assert_equal(G.name, 'test') + assert str(G) == "test" + assert G.name == "test" H = self.G() - assert_equal(H.name, '') + assert H.name == "" # Nodes def test_add_remove_node(self): G = self.G() - G.add_node('A') - assert_true(G.has_node('A')) - G.remove_node('A') - assert_false(G.has_node('A')) + G.add_node("A") + assert G.has_node("A") + G.remove_node("A") + assert not G.has_node("A") def test_nonhashable_node(self): # Test if a non-hashable object is in the Graph. A python dict will @@ -42,352 +41,392 @@ def test_nonhashable_node(self): # returned (see Graph __contains__). If it cannot be a node then it is # not a node. G = self.G() - assert_false(G.has_node(['A'])) - assert_false(G.has_node({'A': 1})) + assert not G.has_node(["A"]) + assert not G.has_node({"A": 1}) def test_add_nodes_from(self): G = self.G() G.add_nodes_from(list("ABCDEFGHIJKL")) - assert_true(G.has_node("L")) - G.remove_nodes_from(['H', 'I', 'J', 'K', 'L']) + assert G.has_node("L") + G.remove_nodes_from(["H", "I", "J", "K", "L"]) G.add_nodes_from([1, 2, 3, 4]) - assert_equal(sorted(G.nodes(), key=str), - [1, 2, 3, 4, 'A', 'B', 'C', 'D', 'E', 'F', 'G']) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + 4, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + ] # test __iter__ - assert_equal(sorted(G, key=str), - [1, 2, 3, 4, 'A', 'B', 'C', 'D', 'E', 'F', 'G']) + assert sorted(G, key=str) == [1, 2, 3, 4, "A", "B", "C", "D", "E", "F", "G"] def test_contains(self): G = self.G() - G.add_node('A') - assert_true('A' in G) - assert_false([] in G) # never raise a Key or TypeError in this test - assert_false({1: 1} in G) + G.add_node("A") + assert "A" in G + assert not [] in G # never raise a Key or TypeError in this test + assert not {1: 1} in G def test_add_remove(self): # Test add_node and remove_node acting for various nbunch G = self.G() - G.add_node('m') - assert_true(G.has_node('m')) - G.add_node('m') # no complaints - assert_raises(nx.NetworkXError, G.remove_node, 'j') - G.remove_node('m') - assert_equal(list(G), []) + G.add_node("m") + assert G.has_node("m") + G.add_node("m") # no complaints + pytest.raises(nx.NetworkXError, G.remove_node, "j") + G.remove_node("m") + assert list(G) == [] def test_nbunch_is_list(self): G = self.G() G.add_nodes_from(list("ABCD")) G.add_nodes_from(self.P3) # add nbunch of nodes (nbunch=Graph) - assert_equal(sorted(G.nodes(), key=str), - [1, 2, 3, 'A', 'B', 'C', 'D']) + assert sorted(G.nodes(), key=str) == [1, 2, 3, "A", "B", "C", "D"] G.remove_nodes_from(self.P3) # remove nbunch of nodes (nbunch=Graph) - assert_equal(sorted(G.nodes(), key=str), - ['A', 'B', 'C', 'D']) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D"] def test_nbunch_is_set(self): G = self.G() nbunch = set("ABCDEFGHIJKL") G.add_nodes_from(nbunch) - assert_true(G.has_node("L")) + assert G.has_node("L") def test_nbunch_dict(self): # nbunch is a dict with nodes as keys G = self.G() nbunch = set("ABCDEFGHIJKL") G.add_nodes_from(nbunch) - nbunch = {'I': "foo", 'J': 2, 'K': True, 'L': "spam"} + nbunch = {"I": "foo", "J": 2, "K": True, "L": "spam"} G.remove_nodes_from(nbunch) - assert_true(sorted(G.nodes(), key=str), - ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + assert sorted(G.nodes(), key=str), ["A", "B", "C", "D", "E", "F", "G", "H"] def test_nbunch_iterator(self): G = self.G() - G.add_nodes_from(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) 
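# Editorial sketch, not part of the imported patch: the reworked
# EdgeDataView.__contains__ hunks above make nbunch-restricted edge views
# exclude edges with no endpoint in nbunch, matching the updated EdgeView
# doctest ``assert (0, 1) not in EVnbunch``. A minimal illustration, assuming
# the networkx 2.5 API this patch imports:
import networkx as nx

G = nx.path_graph(4)        # edges: (0, 1), (1, 2), (2, 3)
EV = G.edges(nbunch=[2])    # edge view restricted to edges touching node 2
assert (2, 3) in EV         # an endpoint lies in nbunch -> reported
assert (0, 1) not in EV     # no endpoint in nbunch -> no longer reported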
n_iter = self.P3.nodes() G.add_nodes_from(n_iter) - assert_equal(sorted(G.nodes(), key=str), - [1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] n_iter = self.P3.nodes() # rebuild same iterator G.remove_nodes_from(n_iter) # remove nbunch of nodes (nbunch=iterator) - assert_equal(sorted(G.nodes(), key=str), - ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D", "E", "F", "G", "H"] def test_nbunch_graph(self): G = self.G() - G.add_nodes_from(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) nbunch = self.K3 G.add_nodes_from(nbunch) - assert_true(sorted(G.nodes(), key=str), - [1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + assert sorted(G.nodes(), key=str), [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] # Edges def test_add_edge(self): G = self.G() - assert_raises(TypeError, G.add_edge, 'A') + pytest.raises(TypeError, G.add_edge, "A") - G.add_edge('A', 'B') # testing add_edge() - G.add_edge('A', 'B') # should fail silently - assert_true(G.has_edge('A', 'B')) - assert_false(G.has_edge('A', 'C')) - assert_true(G.has_edge(*('A', 'B'))) + G.add_edge("A", "B") # testing add_edge() + G.add_edge("A", "B") # should fail silently + assert G.has_edge("A", "B") + assert not G.has_edge("A", "C") + assert G.has_edge(*("A", "B")) if G.is_directed(): - assert_false(G.has_edge('B', 'A')) + assert not G.has_edge("B", "A") else: # G is undirected, so B->A is an edge - assert_true(G.has_edge('B', 'A')) + assert G.has_edge("B", "A") - G.add_edge('A', 'C') # test directedness - G.add_edge('C', 'A') - G.remove_edge('C', 'A') + G.add_edge("A", "C") # test directedness + G.add_edge("C", "A") + G.remove_edge("C", "A") if G.is_directed(): - assert_true(G.has_edge('A', 'C')) + assert G.has_edge("A", "C") else: - assert_false(G.has_edge('A', 'C')) - assert_false(G.has_edge('C', 'A')) + assert not G.has_edge("A", "C") + assert not G.has_edge("C", "A") def test_self_loop(self): G = self.G() - G.add_edge('A', 'A') # test self loops - assert_true(G.has_edge('A', 'A')) - G.remove_edge('A', 'A') - G.add_edge('X', 'X') - assert_true(G.has_node('X')) - G.remove_node('X') - G.add_edge('A', 'Z') # should add the node silently - assert_true(G.has_node('Z')) + G.add_edge("A", "A") # test self loops + assert G.has_edge("A", "A") + G.remove_edge("A", "A") + G.add_edge("X", "X") + assert G.has_node("X") + G.remove_node("X") + G.add_edge("A", "Z") # should add the node silently + assert G.has_node("Z") def test_add_edges_from(self): G = self.G() - G.add_edges_from([('B', 'C')]) # test add_edges_from() - assert_true(G.has_edge('B', 'C')) + G.add_edges_from([("B", "C")]) # test add_edges_from() + assert G.has_edge("B", "C") if G.is_directed(): - assert_false(G.has_edge('C', 'B')) + assert not G.has_edge("C", "B") else: - assert_true(G.has_edge('C', 'B')) # undirected + assert G.has_edge("C", "B") # undirected - G.add_edges_from([('D', 'F'), ('B', 'D')]) - assert_true(G.has_edge('D', 'F')) - assert_true(G.has_edge('B', 'D')) + G.add_edges_from([("D", "F"), ("B", "D")]) + assert G.has_edge("D", "F") + assert G.has_edge("B", "D") if G.is_directed(): - assert_false(G.has_edge('D', 'B')) + assert not G.has_edge("D", "B") else: - assert_true(G.has_edge('D', 'B')) # undirected + assert G.has_edge("D", "B") # undirected def test_add_edges_from2(self): G = self.G() # after failing silently, should 
add 2nd edge - G.add_edges_from([tuple('IJ'), list('KK'), tuple('JK')]) - assert_true(G.has_edge(*('I', 'J'))) - assert_true(G.has_edge(*('K', 'K'))) - assert_true(G.has_edge(*('J', 'K'))) + G.add_edges_from([tuple("IJ"), list("KK"), tuple("JK")]) + assert G.has_edge(*("I", "J")) + assert G.has_edge(*("K", "K")) + assert G.has_edge(*("J", "K")) if G.is_directed(): - assert_false(G.has_edge(*('K', 'J'))) + assert not G.has_edge(*("K", "J")) else: - assert_true(G.has_edge(*('K', 'J'))) + assert G.has_edge(*("K", "J")) def test_add_edges_from3(self): G = self.G() - G.add_edges_from(zip(list('ACD'), list('CDE'))) - assert_true(G.has_edge('D', 'E')) - assert_false(G.has_edge('E', 'C')) + G.add_edges_from(zip(list("ACD"), list("CDE"))) + assert G.has_edge("D", "E") + assert not G.has_edge("E", "C") def test_remove_edge(self): G = self.G() - G.add_nodes_from([1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + G.add_nodes_from([1, 2, 3, "A", "B", "C", "D", "E", "F", "G", "H"]) - G.add_edges_from(zip(list('MNOP'), list('NOPM'))) - assert_true(G.has_edge('O', 'P')) - assert_true(G.has_edge('P', 'M')) - G.remove_node('P') # tests remove_node()'s handling of edges. - assert_false(G.has_edge('P', 'M')) - assert_raises(TypeError, G.remove_edge, 'M') + G.add_edges_from(zip(list("MNOP"), list("NOPM"))) + assert G.has_edge("O", "P") + assert G.has_edge("P", "M") + G.remove_node("P") # tests remove_node()'s handling of edges. + assert not G.has_edge("P", "M") + pytest.raises(TypeError, G.remove_edge, "M") - G.add_edge('N', 'M') - assert_true(G.has_edge('M', 'N')) - G.remove_edge('M', 'N') - assert_false(G.has_edge('M', 'N')) + G.add_edge("N", "M") + assert G.has_edge("M", "N") + G.remove_edge("M", "N") + assert not G.has_edge("M", "N") # self loop fails silently - G.remove_edges_from([list('HI'), list('DF'), - tuple('KK'), tuple('JK')]) - assert_false(G.has_edge('H', 'I')) - assert_false(G.has_edge('J', 'K')) - G.remove_edges_from([list('IJ'), list('KK'), list('JK')]) - assert_false(G.has_edge('I', 'J')) - G.remove_nodes_from(set('ZEFHIMNO')) - G.add_edge('J', 'K') + G.remove_edges_from([list("HI"), list("DF"), tuple("KK"), tuple("JK")]) + assert not G.has_edge("H", "I") + assert not G.has_edge("J", "K") + G.remove_edges_from([list("IJ"), list("KK"), list("JK")]) + assert not G.has_edge("I", "J") + G.remove_nodes_from(set("ZEFHIMNO")) + G.add_edge("J", "K") def test_edges_nbunch(self): # Test G.edges(nbunch) with various forms of nbunch G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) # node not in nbunch should be quietly ignored - assert_raises(nx.NetworkXError, G.edges, 6) - assert_equals(list(G.edges('Z')), []) # iterable non-node + pytest.raises(nx.NetworkXError, G.edges, 6) + assert list(G.edges("Z")) == [] # iterable non-node # nbunch can be an empty list - assert_equals(list(G.edges([])), []) + assert list(G.edges([])) == [] if G.is_directed(): - elist = [('A', 'B'), ('A', 'C'), ('B', 'D')] + elist = [("A", "B"), ("A", "C"), ("B", "D")] else: - elist = [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')] + elist = [("A", "B"), ("A", "C"), ("B", "C"), ("B", "D")] # nbunch can be a list - assert_edges_equal(list(G.edges(['A', 'B'])), elist) + assert_edges_equal(list(G.edges(["A", "B"])), elist) # nbunch can be a set - assert_edges_equal(G.edges(set(['A', 'B'])), elist) + assert_edges_equal(G.edges({"A", "B"}), elist) # nbunch can be a graph G1 = self.G() - 
G1.add_nodes_from('AB') + G1.add_nodes_from("AB") assert_edges_equal(G.edges(G1), elist) # nbunch can be a dict with nodes as keys - ndict = {'A': "thing1", 'B': "thing2"} + ndict = {"A": "thing1", "B": "thing2"} assert_edges_equal(G.edges(ndict), elist) # nbunch can be a single node - assert_edges_equal(list(G.edges('A')), [('A', 'B'), ('A', 'C')]) - assert_nodes_equal(sorted(G), ['A', 'B', 'C', 'D']) + assert_edges_equal(list(G.edges("A")), [("A", "B"), ("A", "C")]) + assert_nodes_equal(sorted(G), ["A", "B", "C", "D"]) # nbunch can be nothing (whole graph) assert_edges_equal( list(G.edges()), - [('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')] + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")], ) def test_degree(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - assert_equal(G.degree('A'), 2) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.degree("A") == 2 # degree of single node in iterable container must return dict - assert_equal(list(G.degree(['A'])), [('A', 2)]) - assert_equal(sorted(d for n, d in G.degree(['A', 'B'])), [2, 3]) - assert_equal(sorted(d for n, d in G.degree()), [2, 2, 3, 3]) + assert list(G.degree(["A"])) == [("A", 2)] + assert sorted(d for n, d in G.degree(["A", "B"])) == [2, 3] + assert sorted(d for n, d in G.degree()) == [2, 2, 3, 3] def test_degree2(self): H = self.G() H.add_edges_from([(1, 24), (1, 2)]) - assert_equal(sorted(d for n, d in H.degree([1, 24])), [1, 2]) + assert sorted(d for n, d in H.degree([1, 24])) == [1, 2] def test_degree_graph(self): P3 = nx.path_graph(3) P5 = nx.path_graph(5) # silently ignore nodes not in P3 - assert_equal(dict(d for n, d in P3.degree(['A', 'B'])), {}) + assert dict(d for n, d in P3.degree(["A", "B"])) == {} # nbunch can be a graph - assert_equal(sorted(d for n, d in P5.degree(P3)), [1, 2, 2]) + assert sorted(d for n, d in P5.degree(P3)) == [1, 2, 2] # nbunch can be a graph that's way too big - assert_equal(sorted(d for n, d in P3.degree(P5)), [1, 1, 2]) - assert_equal(list(P5.degree([])), []) - assert_equal(dict(P5.degree([])), {}) + assert sorted(d for n, d in P3.degree(P5)) == [1, 1, 2] + assert list(P5.degree([])) == [] + assert dict(P5.degree([])) == {} def test_null(self): null = nx.null_graph() - assert_equal(list(null.degree()), []) - assert_equal(dict(null.degree()), {}) + assert list(null.degree()) == [] + assert dict(null.degree()) == {} def test_order_size(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - assert_equal(G.order(), 4) - assert_equal(G.size(), 5) - assert_equal(G.number_of_edges(), 5) - assert_equal(G.number_of_edges('A', 'B'), 1) - assert_equal(G.number_of_edges('A', 'D'), 0) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.order() == 4 + assert G.size() == 5 + assert G.number_of_edges() == 5 + assert G.number_of_edges("A", "B") == 1 + assert G.number_of_edges("A", "D") == 0 def test_copy(self): G = self.G() - H = G.copy() # copy - assert_equal(H.adj, G.adj) - assert_equal(H.name, G.name) - assert_not_equal(H, G) + H = G.copy() # copy + assert H.adj == G.adj + assert H.name == G.name + assert H != G def test_subgraph(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - SG = G.subgraph(['A', 'B', 'D']) - assert_nodes_equal(list(SG), ['A', 'B', 'D']) - assert_edges_equal(list(SG.edges()), [('A', 'B'), ('B', 'D')]) + 
G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + SG = G.subgraph(["A", "B", "D"]) + assert_nodes_equal(list(SG), ["A", "B", "D"]) + assert_edges_equal(list(SG.edges()), [("A", "B"), ("B", "D")]) def test_to_directed(self): G = self.G() if not G.is_directed(): - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) DG = G.to_directed() - assert_not_equal(DG, G) # directed copy or copy - - assert_true(DG.is_directed()) - assert_equal(DG.name, G.name) - assert_equal(DG.adj, G.adj) - assert_equal(sorted(DG.out_edges(list('AB'))), - [('A', 'B'), ('A', 'C'), ('B', 'A'), - ('B', 'C'), ('B', 'D')]) - DG.remove_edge('A', 'B') - assert_true(DG.has_edge('B', 'A')) # this removes B-A but not A-B - assert_false(DG.has_edge('A', 'B')) + assert DG != G # directed copy or copy + + assert DG.is_directed() + assert DG.name == G.name + assert DG.adj == G.adj + assert sorted(DG.out_edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "A"), + ("B", "C"), + ("B", "D"), + ] + DG.remove_edge("A", "B") + assert DG.has_edge("B", "A") # this removes B-A but not A-B + assert not DG.has_edge("A", "B") def test_to_undirected(self): G = self.G() if G.is_directed(): - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - UG = G.to_undirected() # to_undirected - assert_not_equal(UG, G) - assert_false(UG.is_directed()) - assert_true(G.is_directed()) - assert_equal(UG.name, G.name) - assert_not_equal(UG.adj, G.adj) - assert_equal(sorted(UG.edges(list('AB'))), - [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')]) - assert_equal(sorted(UG.edges(['A', 'B'])), - [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')]) - UG.remove_edge('A', 'B') - assert_false(UG.has_edge('B', 'A')) - assert_false(UG.has_edge('A', 'B')) + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + UG = G.to_undirected() # to_undirected + assert UG != G + assert not UG.is_directed() + assert G.is_directed() + assert UG.name == G.name + assert UG.adj != G.adj + assert sorted(UG.edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + assert sorted(UG.edges(["A", "B"])) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + UG.remove_edge("A", "B") + assert not UG.has_edge("B", "A") + assert not UG.has_edge("A", "B") def test_neighbors(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - G.add_nodes_from('GJK') - assert_equal(sorted(G['A']), ['B', 'C']) - assert_equal(sorted(G.neighbors('A')), ['B', 'C']) - assert_equal(sorted(G.neighbors('A')), ['B', 'C']) - assert_equal(sorted(G.neighbors('G')), []) - assert_raises(nx.NetworkXError, G.neighbors, 'j') + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G["A"]) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("G")) == [] + pytest.raises(nx.NetworkXError, G.neighbors, "j") def test_iterators(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - G.add_nodes_from('GJK') - assert_equal(sorted(G.nodes()), - ['A', 'B', 'C', 'D', 'G', 'J', 'K']) - assert_edges_equal(G.edges(), - [('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) - - assert_equal(sorted([v for k, v in G.degree()]), - [0, 0, 0, 2, 2, 
3, 3]) - assert_equal(sorted(G.degree(), key=str), - [('A', 2), ('B', 3), ('C', 3), ('D', 2), - ('G', 0), ('J', 0), ('K', 0)]) - assert_equal(sorted(G.neighbors('A')), ['B', 'C']) - assert_raises(nx.NetworkXError, G.neighbors, 'X') + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G.nodes()) == ["A", "B", "C", "D", "G", "J", "K"] + assert_edges_equal( + G.edges(), [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + + assert sorted([v for k, v in G.degree()]) == [0, 0, 0, 2, 2, 3, 3] + assert sorted(G.degree(), key=str) == [ + ("A", 2), + ("B", 3), + ("C", 3), + ("D", 2), + ("G", 0), + ("J", 0), + ("K", 0), + ] + assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "X") G.clear() - assert_equal(nx.number_of_nodes(G), 0) - assert_equal(nx.number_of_edges(G), 0) + assert nx.number_of_nodes(G) == 0 + assert nx.number_of_edges(G) == 0 def test_null_subgraph(self): # Subgraph of a null graph is a null graph nullgraph = nx.null_graph() G = nx.null_graph() H = G.subgraph([]) - assert_true(nx.is_isomorphic(H, nullgraph)) + assert nx.is_isomorphic(H, nullgraph) def test_empty_subgraph(self): # Subgraph of an empty graph is an empty graph. test 1 @@ -395,9 +434,9 @@ def test_empty_subgraph(self): E5 = nx.empty_graph(5) E10 = nx.empty_graph(10) H = E10.subgraph([]) - assert_true(nx.is_isomorphic(H, nullgraph)) + assert nx.is_isomorphic(H, nullgraph) H = E10.subgraph([1, 2, 3, 4, 5]) - assert_true(nx.is_isomorphic(H, E5)) + assert nx.is_isomorphic(H, E5) def test_complete_subgraph(self): # Subgraph of a complete graph is a complete graph @@ -405,7 +444,7 @@ def test_complete_subgraph(self): K3 = nx.complete_graph(3) K5 = nx.complete_graph(5) H = K5.subgraph([1, 2, 3]) - assert_true(nx.is_isomorphic(H, K3)) + assert nx.is_isomorphic(H, K3) def test_subgraph_nbunch(self): nullgraph = nx.null_graph() @@ -414,22 +453,22 @@ def test_subgraph_nbunch(self): K5 = nx.complete_graph(5) # Test G.subgraph(nbunch), where nbunch is a single node H = K5.subgraph(1) - assert_true(nx.is_isomorphic(H, K1)) + assert nx.is_isomorphic(H, K1) # Test G.subgraph(nbunch), where nbunch is a set - H = K5.subgraph(set([1])) - assert_true(nx.is_isomorphic(H, K1)) + H = K5.subgraph({1}) + assert nx.is_isomorphic(H, K1) # Test G.subgraph(nbunch), where nbunch is an iterator H = K5.subgraph(iter(K3)) - assert_true(nx.is_isomorphic(H, K3)) + assert nx.is_isomorphic(H, K3) # Test G.subgraph(nbunch), where nbunch is another graph H = K5.subgraph(K3) - assert_true(nx.is_isomorphic(H, K3)) + assert nx.is_isomorphic(H, K3) H = K5.subgraph([9]) - assert_true(nx.is_isomorphic(H, nullgraph)) + assert nx.is_isomorphic(H, nullgraph) def test_node_tuple_issue(self): H = self.G() # Test error handling of tuple as a node - assert_raises(nx.NetworkXError, H.remove_node, (1, 2)) + pytest.raises(nx.NetworkXError, H.remove_node, (1, 2)) H.remove_nodes_from([(1, 2)]) # no error - assert_raises(nx.NetworkXError, H.neighbors, (1, 2)) + pytest.raises(nx.NetworkXError, H.neighbors, (1, 2)) diff --git a/networkx/classes/tests/test_coreviews.py b/networkx/classes/tests/test_coreviews.py index 5de45af..c9b259a 100644 --- a/networkx/classes/tests/test_coreviews.py +++ b/networkx/classes/tests/test_coreviews.py @@ -1,311 +1,356 @@ -from nose.tools import assert_equal, assert_not_equal, assert_is,\ - assert_is_not, assert_true, assert_false, assert_raises -import tempfile +import pytest import pickle import networkx as nx -class 
TestAtlasView(object): +class TestAtlasView: # node->data def setup(self): - self.d = {0: {'color': 'blue', 'weight': 1.2}, 1: {}, 2: {'color': 1}} + self.d = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} self.av = nx.classes.coreviews.AtlasView(self.d) def test_pickle(self): view = self.av pview = pickle.loads(pickle.dumps(view, -1)) - assert_equal(view, pview) - assert_equal(view.__slots__, pview.__slots__) + assert view == pview + assert view.__slots__ == pview.__slots__ + pview = pickle.loads(pickle.dumps(view)) + assert view == pview + assert view.__slots__ == pview.__slots__ def test_len(self): - assert_equal(len(self.av), len(self.d)) + assert len(self.av) == len(self.d) def test_iter(self): - assert_equal(list(self.av), list(self.d)) + assert list(self.av) == list(self.d) def test_getitem(self): - assert_is(self.av[1], self.d[1]) - assert_equal(self.av[2]['color'], 1) - assert_raises(KeyError, self.av.__getitem__, 3) + assert self.av[1] is self.d[1] + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av.__getitem__, 3) def test_copy(self): avcopy = self.av.copy() - assert_equal(avcopy[0], self.av[0]) - assert_equal(avcopy, self.av) - assert_is_not(avcopy[0], self.av[0]) - assert_is_not(avcopy, self.av) + assert avcopy[0] == self.av[0] + assert avcopy == self.av + assert avcopy[0] is not self.av[0] + assert avcopy is not self.av avcopy[5] = {} - assert_not_equal(avcopy, self.av) + assert avcopy != self.av - avcopy[0]['ht'] = 4 - assert_not_equal(avcopy[0], self.av[0]) - self.av[0]['ht'] = 4 - assert_equal(avcopy[0], self.av[0]) - del self.av[0]['ht'] + avcopy[0]["ht"] = 4 + assert avcopy[0] != self.av[0] + self.av[0]["ht"] = 4 + assert avcopy[0] == self.av[0] + del self.av[0]["ht"] - assert_false(hasattr(self.av, '__setitem__')) + assert not hasattr(self.av, "__setitem__") def test_items(self): - assert_equal(sorted(self.av.items()), sorted(self.d.items())) + assert sorted(self.av.items()) == sorted(self.d.items()) def test_str(self): out = str(self.d) - assert_equal(str(self.av), out) + assert str(self.av) == out def test_repr(self): out = "AtlasView(" + str(self.d) + ")" - assert_equal(repr(self.av), out) + assert repr(self.av) == out -class TestAdjacencyView(object): +class TestAdjacencyView: # node->nbr->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.nd = {0: dd, 1: {}, 2: {'color': 1}} - self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {"color": 1}}} self.adjview = nx.classes.coreviews.AdjacencyView(self.adj) def test_pickle(self): view = self.adjview pview = pickle.loads(pickle.dumps(view, -1)) - assert_equal(view, pview) - assert_equal(view.__slots__, pview.__slots__) + assert view == pview + assert view.__slots__ == pview.__slots__ def test_len(self): - assert_equal(len(self.adjview), len(self.adj)) + assert len(self.adjview) == len(self.adj) def test_iter(self): - assert_equal(list(self.adjview), list(self.adj)) + assert list(self.adjview) == list(self.adj) def test_getitem(self): - assert_is_not(self.adjview[1], self.adj[1]) - assert_is(self.adjview[3][0], self.adjview[0][3]) - assert_equal(self.adjview[2][3]['color'], 1) - assert_raises(KeyError, self.adjview.__getitem__, 4) + assert self.adjview[1] is not self.adj[1] + assert self.adjview[3][0] is self.adjview[0][3] + assert self.adjview[2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) def 
test_copy(self): avcopy = self.adjview.copy() - assert_equal(avcopy[0], self.adjview[0]) - assert_is_not(avcopy[0], self.adjview[0]) + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] - avcopy[2][3]['ht'] = 4 - assert_not_equal(avcopy[2], self.adjview[2]) - self.adjview[2][3]['ht'] = 4 - assert_equal(avcopy[2], self.adjview[2]) - del self.adjview[2][3]['ht'] + avcopy[2][3]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3]["ht"] - assert_false(hasattr(self.adjview, '__setitem__')) + assert not hasattr(self.adjview, "__setitem__") def test_items(self): view_items = sorted((n, dict(d)) for n, d in self.adjview.items()) - assert_equal(view_items, sorted(self.adj.items())) + assert view_items == sorted(self.adj.items()) def test_str(self): out = str(dict(self.adj)) - assert_equal(str(self.adjview), out) + assert str(self.adjview) == out def test_repr(self): out = self.adjview.__class__.__name__ + "(" + str(self.adj) + ")" - assert_equal(repr(self.adjview), out) + assert repr(self.adjview) == out class TestMultiAdjacencyView(TestAdjacencyView): # node->nbr->key->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.kd = {0: dd, 1: {}, 2: {'color': 1}} - self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.kd = {0: dd, 1: {}, 2: {"color": 1}} + self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {"color": 1}}} self.adj = {3: self.nd, 0: {3: {3: dd}}, 1: {}, 2: {3: {8: {}}}} self.adjview = nx.classes.coreviews.MultiAdjacencyView(self.adj) def test_getitem(self): - assert_is_not(self.adjview[1], self.adj[1]) - assert_is(self.adjview[3][0][3], self.adjview[0][3][3]) - assert_equal(self.adjview[3][2][3]['color'], 1) - assert_raises(KeyError, self.adjview.__getitem__, 4) + assert self.adjview[1] is not self.adj[1] + assert self.adjview[3][0][3] is self.adjview[0][3][3] + assert self.adjview[3][2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): avcopy = self.adjview.copy() - assert_equal(avcopy[0], self.adjview[0]) - assert_is_not(avcopy[0], self.adjview[0]) + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] - avcopy[2][3][8]['ht'] = 4 - assert_not_equal(avcopy[2], self.adjview[2]) - self.adjview[2][3][8]['ht'] = 4 - assert_equal(avcopy[2], self.adjview[2]) - del self.adjview[2][3][8]['ht'] + avcopy[2][3][8]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3][8]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3][8]["ht"] - assert_false(hasattr(self.adjview, '__setitem__')) + assert not hasattr(self.adjview, "__setitem__") -class TestUnionAtlas(object): +class TestUnionAtlas: # node->data def setup(self): - self.s = {0: {'color': 'blue', 'weight': 1.2}, 1: {}, 2: {'color': 1}} - self.p = {3: {'color': 'blue', 'weight': 1.2}, 4: {}, 2: {'watch': 2}} + self.s = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} + self.p = {3: {"color": "blue", "weight": 1.2}, 4: {}, 2: {"watch": 2}} self.av = nx.classes.coreviews.UnionAtlas(self.s, self.p) def test_pickle(self): view = self.av pview = pickle.loads(pickle.dumps(view, -1)) - assert_equal(view, pview) - assert_equal(view.__slots__, pview.__slots__) + assert view == pview + assert view.__slots__ == pview.__slots__ def test_len(self): - assert_equal(len(self.av), len(self.s) + len(self.p)) + assert len(self.av) == len(self.s) + len(self.p) 
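# Editorial sketch, not part of the imported patch: UnionAtlas (exercised by
# TestUnionAtlas here) is a read-only union of the succ and pred atlases; on
# a key collision the succ entry wins wholesale, and __len__ simply adds the
# two lengths, exactly as the test_len assertion above expects. A minimal
# illustration under those assumptions:
import networkx as nx

s = {0: {"color": "blue"}, 2: {"color": 1}}   # plays the role of succ
p = {3: {"weight": 1.2}, 2: {"watch": 2}}     # plays the role of pred
ua = nx.classes.coreviews.UnionAtlas(s, p)
assert set(ua) == {0, 2, 3}                   # iteration unions the keys
assert len(ua) == len(s) + len(p)             # lengths add, even on overlap
assert ua[2] == {"color": 1}                  # succ entry shadows pred for node 2
assert not hasattr(ua, "__setitem__")         # the view is read-only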
def test_iter(self): - assert_equal(set(self.av), set(self.s) | set(self.p)) + assert set(self.av) == set(self.s) | set(self.p) def test_getitem(self): - assert_is(self.av[0], self.s[0]) - assert_is(self.av[4], self.p[4]) - assert_equal(self.av[2]['color'], 1) - assert_raises(KeyError, self.av[2].__getitem__, 'watch') - assert_raises(KeyError, self.av.__getitem__, 8) + assert self.av[0] is self.s[0] + assert self.av[4] is self.p[4] + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av[2].__getitem__, "watch") + pytest.raises(KeyError, self.av.__getitem__, 8) def test_copy(self): avcopy = self.av.copy() - assert_equal(avcopy[0], self.av[0]) - assert_is_not(avcopy[0], self.av[0]) - assert_is_not(avcopy, self.av) + assert avcopy[0] == self.av[0] + assert avcopy[0] is not self.av[0] + assert avcopy is not self.av avcopy[5] = {} - assert_not_equal(avcopy, self.av) + assert avcopy != self.av - avcopy[0]['ht'] = 4 - assert_not_equal(avcopy[0], self.av[0]) - self.av[0]['ht'] = 4 - assert_equal(avcopy[0], self.av[0]) - del self.av[0]['ht'] + avcopy[0]["ht"] = 4 + assert avcopy[0] != self.av[0] + self.av[0]["ht"] = 4 + assert avcopy[0] == self.av[0] + del self.av[0]["ht"] - assert_false(hasattr(self.av, '__setitem__')) + assert not hasattr(self.av, "__setitem__") def test_items(self): expected = dict(self.p.items()) expected.update(self.s) - assert_equal(sorted(self.av.items()), sorted(expected.items())) + assert sorted(self.av.items()) == sorted(expected.items()) def test_str(self): out = str(dict(self.av)) - assert_equal(str(self.av), out) + assert str(self.av) == out def test_repr(self): - out = "{}({}, {})".format(self.av.__class__.__name__, self.s, self.p) - assert_equal(repr(self.av), out) + out = f"{self.av.__class__.__name__}({self.s}, {self.p})" + assert repr(self.av) == out -class TestUnionAdjacency(object): +class TestUnionAdjacency: # node->nbr->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.nd = {0: dd, 1: {}, 2: {'color': 1}} - self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {'color': 1}}} - self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {"color": 1}}} self.adjview = nx.classes.coreviews.UnionAdjacency(self.s, self.p) def test_pickle(self): view = self.adjview pview = pickle.loads(pickle.dumps(view, -1)) - assert_equal(view, pview) - assert_equal(view.__slots__, pview.__slots__) + assert view == pview + assert view.__slots__ == pview.__slots__ def test_len(self): - assert_equal(len(self.adjview), len(self.s)) + assert len(self.adjview) == len(self.s) def test_iter(self): - assert_equal(sorted(self.adjview), sorted(self.s)) + assert sorted(self.adjview) == sorted(self.s) def test_getitem(self): - assert_is_not(self.adjview[1], self.s[1]) - assert_is(self.adjview[3][0], self.adjview[0][3]) - assert_equal(self.adjview[2][3]['color'], 1) - assert_raises(KeyError, self.adjview.__getitem__, 4) + assert self.adjview[1] is not self.s[1] + assert self.adjview[3][0] is self.adjview[0][3] + assert self.adjview[2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): avcopy = self.adjview.copy() - assert_equal(avcopy[0], self.adjview[0]) - assert_is_not(avcopy[0], self.adjview[0]) + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] - avcopy[2][3]['ht'] = 4 - assert_not_equal(avcopy[2], 
self.adjview[2]) - self.adjview[2][3]['ht'] = 4 - assert_equal(avcopy[2], self.adjview[2]) - del self.adjview[2][3]['ht'] + avcopy[2][3]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3]["ht"] - assert_false(hasattr(self.adjview, '__setitem__')) + assert not hasattr(self.adjview, "__setitem__") def test_str(self): out = str(dict(self.adjview)) - assert_equal(str(self.adjview), out) + assert str(self.adjview) == out def test_repr(self): clsname = self.adjview.__class__.__name__ - out = "{}({}, {})".format(clsname, self.s, self.p) - assert_equal(repr(self.adjview), out) + out = f"{clsname}({self.s}, {self.p})" + assert repr(self.adjview) == out class TestUnionMultiInner(TestUnionAdjacency): # nbr->key->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.kd = {7: {}, 'ekey': {}, 9: {'color': 1}} - self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {'key': {'color': 1}}} - self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {'span': 2}}} + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, "ekey": {}, 9: {"color": 1}} + self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {"key": {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {"span": 2}}} self.adjview = nx.classes.coreviews.UnionMultiInner(self.s, self.p) def test_len(self): - assert_equal(len(self.adjview), len(self.s) + len(self.p)) + assert len(self.adjview) == len(self.s) + len(self.p) def test_getitem(self): - assert_is_not(self.adjview[1], self.s[1]) - assert_is(self.adjview[0][7], self.adjview[0][3]) - assert_equal(self.adjview[2]['key']['color'], 1) - assert_equal(self.adjview[2][1]['span'], 2) - assert_raises(KeyError, self.adjview.__getitem__, 4) - assert_raises(KeyError, self.adjview[1].__getitem__, 'key') + assert self.adjview[1] is not self.s[1] + assert self.adjview[0][7] is self.adjview[0][3] + assert self.adjview[2]["key"]["color"] == 1 + assert self.adjview[2][1]["span"] == 2 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + pytest.raises(KeyError, self.adjview[1].__getitem__, "key") def test_copy(self): avcopy = self.adjview.copy() - assert_equal(avcopy[0], self.adjview[0]) - assert_is_not(avcopy[0], self.adjview[0]) + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] - avcopy[2][1]['width'] = 8 - assert_not_equal(avcopy[2], self.adjview[2]) - self.adjview[2][1]['width'] = 8 - assert_equal(avcopy[2], self.adjview[2]) - del self.adjview[2][1]['width'] + avcopy[2][1]["width"] = 8 + assert avcopy[2] != self.adjview[2] + self.adjview[2][1]["width"] = 8 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][1]["width"] - assert_false(hasattr(self.adjview, '__setitem__')) - assert_true(hasattr(avcopy, '__setitem__')) + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") class TestUnionMultiAdjacency(TestUnionAdjacency): # node->nbr->key->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.kd = {7: {}, 8: {}, 9: {'color': 1}} - self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, 8: {}, 9: {"color": 1}} + self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {"color": 1}}} self.s = {3: self.nd, 0: {3: {7: dd}}, 1: {}, 2: {3: {8: {}}}} self.p = {3: {}, 0: {3: {9: dd}}, 1: {}, 2: {1: {8: {}}}} self.adjview = nx.classes.coreviews.UnionMultiAdjacency(self.s, self.p) def test_getitem(self): - assert_is_not(self.adjview[1], self.s[1]) - assert_is(self.adjview[3][0][9], 
self.adjview[0][3][9]) - assert_equal(self.adjview[3][2][9]['color'], 1) - assert_raises(KeyError, self.adjview.__getitem__, 4) + assert self.adjview[1] is not self.s[1] + assert self.adjview[3][0][9] is self.adjview[0][3][9] + assert self.adjview[3][2][9]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): avcopy = self.adjview.copy() - assert_equal(avcopy[0], self.adjview[0]) - assert_is_not(avcopy[0], self.adjview[0]) + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] - avcopy[2][3][8]['ht'] = 4 - assert_not_equal(avcopy[2], self.adjview[2]) - self.adjview[2][3][8]['ht'] = 4 - assert_equal(avcopy[2], self.adjview[2]) - del self.adjview[2][3][8]['ht'] + avcopy[2][3][8]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3][8]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3][8]["ht"] - assert_false(hasattr(self.adjview, '__setitem__')) - assert_true(hasattr(avcopy, '__setitem__')) + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") + + +class TestFilteredGraphs: + def setup(self): + self.Graphs = [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] + self.SubGraphs = [nx.graphviews.subgraph_view] * 4 + + def test_hide_show_nodes(self): + for Graph, SubGraph in zip(self.Graphs, self.SubGraphs): + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, nx.filters.hide_nodes([0, 1])) + assert SG.nodes == RG.nodes + assert SG.edges == RG.edges + SGC = SG.copy() + RGC = RG.copy() + assert SGC.nodes == RGC.nodes + assert SGC.edges == RGC.edges + + def test_str_repr(self): + for Graph, SubGraph in zip(self.Graphs, self.SubGraphs): + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, nx.filters.hide_nodes([0, 1])) + str(SG.adj) + str(RG.adj) + repr(SG.adj) + repr(RG.adj) + str(SG.adj[2]) + str(RG.adj[2]) + repr(SG.adj[2]) + repr(RG.adj[2]) + + def test_copy(self): + for Graph, SubGraph in zip(self.Graphs, self.SubGraphs): + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, nx.filters.hide_nodes([0, 1])) + assert G.adj.copy() == G.adj + assert G.adj[2].copy() == G.adj[2] + assert SG.adj.copy() == SG.adj + assert SG.adj[2].copy() == SG.adj[2] + assert RG.adj.copy() == RG.adj + assert RG.adj[2].copy() == RG.adj[2] diff --git a/networkx/classes/tests/test_digraph.py b/networkx/classes/tests/test_digraph.py index 0dcd3bf..7670282 100644 --- a/networkx/classes/tests/test_digraph.py +++ b/networkx/classes/tests/test_digraph.py @@ -1,175 +1,181 @@ -#!/usr/bin/env python - -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true -from nose.tools import assert_raises - +import pytest import networkx as nx from networkx.testing import assert_nodes_equal -from test_graph import BaseGraphTester, BaseAttrGraphTester, TestGraph -from test_graph import TestEdgeSubgraph as TestGraphEdgeSubgraph +from .test_graph import BaseGraphTester, BaseAttrGraphTester +from .test_graph import TestGraph as _TestGraph +from .test_graph import TestEdgeSubgraph as _TestGraphEdgeSubgraph class BaseDiGraphTester(BaseGraphTester): def test_has_successor(self): G = self.K3 - assert_equal(G.has_successor(0, 1), True) - assert_equal(G.has_successor(0, -1), False) + assert G.has_successor(0, 1) + assert not G.has_successor(0, -1) def test_successors(self): G = self.K3 - assert_equal(sorted(G.successors(0)), [1, 2]) - assert_raises((KeyError, nx.NetworkXError), G.successors, -1) + 
assert sorted(G.successors(0)) == [1, 2]
+        with pytest.raises(nx.NetworkXError):
+            G.successors(-1)

     def test_has_predecessor(self):
         G = self.K3
-        assert_equal(G.has_predecessor(0, 1), True)
-        assert_equal(G.has_predecessor(0, -1), False)
+        assert G.has_predecessor(0, 1)
+        assert not G.has_predecessor(0, -1)

     def test_predecessors(self):
         G = self.K3
-        assert_equal(sorted(G.predecessors(0)), [1, 2])
-        assert_raises((KeyError, nx.NetworkXError), G.predecessors, -1)
+        assert sorted(G.predecessors(0)) == [1, 2]
+        with pytest.raises(nx.NetworkXError):
+            G.predecessors(-1)

     def test_edges(self):
         G = self.K3
-        assert_equal(sorted(G.edges()), [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.edges(0)), [(0, 1), (0, 2)])
-        assert_equal(sorted(G.edges([0, 1])), [(0, 1), (0, 2), (1, 0), (1, 2)])
-        assert_raises((KeyError, nx.NetworkXError), G.edges, -1)
-
-    def test_edges_data(self):
-        G = self.K3
-        all_edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}), (2, 0, {}), (2, 1, {})]
-        assert_equal(sorted(G.edges(data=True)), all_edges)
-        assert_equal(sorted(G.edges(0, data=True)), all_edges[:2])
-        assert_equal(sorted(G.edges([0, 1], data=True)), all_edges[:4])
-        assert_raises((KeyError, nx.NetworkXError), G.edges, -1, True)
+        assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
+        assert sorted(G.edges([0, 1])) == [(0, 1), (0, 2), (1, 0), (1, 2)]
+        with pytest.raises(nx.NetworkXError):
+            G.edges(-1)

     def test_out_edges(self):
         G = self.K3
-        assert_equal(sorted(G.out_edges()), [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.out_edges(0)), [(0, 1), (0, 2)])
-        assert_raises((KeyError, nx.NetworkXError), G.out_edges, -1)
+        assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
+        with pytest.raises(nx.NetworkXError):
+            G.out_edges(-1)

     def test_out_edges_dir(self):
         G = self.P3
-        assert_equal(sorted(G.out_edges()), [(0, 1), (1, 2)])
-        assert_equal(sorted(G.out_edges(0)), [(0, 1)])
-        assert_equal(sorted(G.out_edges(2)), [])
+        assert sorted(G.out_edges()) == [(0, 1), (1, 2)]
+        assert sorted(G.out_edges(0)) == [(0, 1)]
+        assert sorted(G.out_edges(2)) == []

     def test_out_edges_data(self):
-        G = nx.DiGraph([(0, 1, {'data': 0}), (1, 0, {})])
-        assert_equal(sorted(G.out_edges(data=True)), [(0, 1, {'data': 0}), (1, 0, {})])
-        assert_equal(sorted(G.out_edges(0, data=True)), [(0, 1, {'data': 0})])
-        assert_equal(sorted(G.out_edges(data='data')), [(0, 1, 0), (1, 0, None)])
-        assert_equal(sorted(G.out_edges(0, data='data')), [(0, 1, 0)])
+        G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})])
+        assert sorted(G.out_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})]
+        assert sorted(G.out_edges(0, data=True)) == [(0, 1, {"data": 0})]
+        assert sorted(G.out_edges(data="data")) == [(0, 1, 0), (1, 0, None)]
+        assert sorted(G.out_edges(0, data="data")) == [(0, 1, 0)]

     def test_in_edges_dir(self):
         G = self.P3
-        assert_equal(sorted(G.in_edges()), [(0, 1), (1, 2)])
-        assert_equal(sorted(G.in_edges(0)), [])
-        assert_equal(sorted(G.in_edges(2)), [(1, 2)])
+        assert sorted(G.in_edges()) == [(0, 1), (1, 2)]
+        assert sorted(G.in_edges(0)) == []
+        assert sorted(G.in_edges(2)) == [(1, 2)]

     def test_in_edges_data(self):
-        G = nx.DiGraph([(0, 1, {'data': 0}), (1, 0, {})])
-        assert_equal(sorted(G.in_edges(data=True)), [(0, 1, {'data': 0}), (1, 0, {})])
-        assert_equal(sorted(G.in_edges(1, data=True)), [(0, 1, {'data': 0})])
-        assert_equal(sorted(G.in_edges(data='data')), [(0, 1, 0), (1, 0, None)])
-        assert_equal(sorted(G.in_edges(1, data='data')), [(0, 1, 0)])
+        G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})])
+        assert sorted(G.in_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})]
+        assert sorted(G.in_edges(1, data=True)) == [(0, 1, {"data": 0})]
+        assert sorted(G.in_edges(data="data")) == [(0, 1, 0), (1, 0, None)]
+        assert sorted(G.in_edges(1, data="data")) == [(0, 1, 0)]

     def test_degree(self):
         G = self.K3
-        assert_equal(sorted(G.degree()), [(0, 4), (1, 4), (2, 4)])
-        assert_equal(dict(G.degree()), {0: 4, 1: 4, 2: 4})
-        assert_equal(G.degree(0), 4)
-        assert_equal(list(G.degree(iter([0]))), [
-            (0, 4)])  # run through iterator
+        assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)]
+        assert dict(G.degree()) == {0: 4, 1: 4, 2: 4}
+        assert G.degree(0) == 4
+        assert list(G.degree(iter([0]))) == [(0, 4)]  # run through iterator

     def test_in_degree(self):
         G = self.K3
-        assert_equal(sorted(G.in_degree()), [(0, 2), (1, 2), (2, 2)])
-        assert_equal(dict(G.in_degree()), {0: 2, 1: 2, 2: 2})
-        assert_equal(G.in_degree(0), 2)
-        assert_equal(list(G.in_degree(iter([0]))), [(0, 2)])  # run through iterator
-
-    def test_in_degree_weighted(self):
-        G = self.K3
-        G.add_edge(0, 1, weight=0.3, other=1.2)
-        assert_equal(sorted(G.in_degree(weight='weight')), [(0, 2), (1, 1.3), (2, 2)])
-        assert_equal(dict(G.in_degree(weight='weight')), {0: 2, 1: 1.3, 2: 2})
-        assert_equal(G.in_degree(1, weight='weight'), 1.3)
-        assert_equal(sorted(G.in_degree(weight='other')), [(0, 2), (1, 2.2), (2, 2)])
-        assert_equal(dict(G.in_degree(weight='other')), {0: 2, 1: 2.2, 2: 2})
-        assert_equal(G.in_degree(1, weight='other'), 2.2)
-        assert_equal(list(G.in_degree(iter([1]), weight='other')), [(1, 2.2)])
-
-    def test_out_degree_weighted(self):
-        G = self.K3
-        G.add_edge(0, 1, weight=0.3, other=1.2)
-        assert_equal(sorted(G.out_degree(weight='weight')), [(0, 1.3), (1, 2), (2, 2)])
-        assert_equal(dict(G.out_degree(weight='weight')), {0: 1.3, 1: 2, 2: 2})
-        assert_equal(G.out_degree(0, weight='weight'), 1.3)
-        assert_equal(sorted(G.out_degree(weight='other')), [(0, 2.2), (1, 2), (2, 2)])
-        assert_equal(dict(G.out_degree(weight='other')), {0: 2.2, 1: 2, 2: 2})
-        assert_equal(G.out_degree(0, weight='other'), 2.2)
-        assert_equal(list(G.out_degree(iter([0]), weight='other')), [(0, 2.2)])
+        assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)]
+        assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2}
+        assert G.in_degree(0) == 2
+        assert list(G.in_degree(iter([0]))) == [(0, 2)]  # run through iterator

     def test_out_degree(self):
         G = self.K3
-        assert_equal(sorted(G.out_degree()), [(0, 2), (1, 2), (2, 2)])
-        assert_equal(dict(G.out_degree()), {0: 2, 1: 2, 2: 2})
-        assert_equal(G.out_degree(0), 2)
-        assert_equal(list(G.out_degree(iter([0]))), [(0, 2)])
+        assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)]
+        assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2}
+        assert G.out_degree(0) == 2
+        assert list(G.out_degree(iter([0]))) == [(0, 2)]

     def test_size(self):
         G = self.K3
-        assert_equal(G.size(), 6)
-        assert_equal(G.number_of_edges(), 6)
+        assert G.size() == 6
+        assert G.number_of_edges() == 6

     def test_to_undirected_reciprocal(self):
         G = self.Graph()
         G.add_edge(1, 2)
-        assert_true(G.to_undirected().has_edge(1, 2))
-        assert_false(G.to_undirected(reciprocal=True).has_edge(1, 2))
+        assert G.to_undirected().has_edge(1, 2)
+        assert not G.to_undirected(reciprocal=True).has_edge(1, 2)
         G.add_edge(2, 1)
-        assert_true(G.to_undirected(reciprocal=True).has_edge(1, 2))
+        assert G.to_undirected(reciprocal=True).has_edge(1, 2)

     def test_reverse_copy(self):
         G = nx.DiGraph([(0, 1), (1, 2)])
         R = G.reverse()
-        assert_equal(sorted(R.edges()), [(1, 0), (2, 1)])
+        assert sorted(R.edges()) == [(1, 0), (2, 1)]
         R.remove_edge(1, 0)
-        assert_equal(sorted(R.edges()), [(2, 1)])
-        assert_equal(sorted(G.edges()), [(0, 1), (1, 2)])
+        assert sorted(R.edges()) == [(2, 1)]
+        assert sorted(G.edges()) == [(0, 1), (1, 2)]

     def test_reverse_nocopy(self):
         G = nx.DiGraph([(0, 1), (1, 2)])
         R = G.reverse(copy=False)
-        assert_equal(sorted(R.edges()), [(1, 0), (2, 1)])
-        assert_raises(nx.NetworkXError, R.remove_edge, 1, 0)
+        assert sorted(R.edges()) == [(1, 0), (2, 1)]
+        with pytest.raises(nx.NetworkXError):
+            R.remove_edge(1, 0)

     def test_reverse_hashable(self):
-        class Foo(object):
+        class Foo:
             pass
+
         x = Foo()
         y = Foo()
         G = nx.DiGraph()
         G.add_edge(x, y)
         assert_nodes_equal(G.nodes(), G.reverse().nodes())
-        assert_equal([(y, x)], list(G.reverse().edges()))
+        assert [(y, x)] == list(G.reverse().edges())


 class BaseAttrDiGraphTester(BaseDiGraphTester, BaseAttrGraphTester):
-    pass
+    def test_edges_data(self):
+        G = self.K3
+        all_edges = [
+            (0, 1, {}),
+            (0, 2, {}),
+            (1, 0, {}),
+            (1, 2, {}),
+            (2, 0, {}),
+            (2, 1, {}),
+        ]
+        assert sorted(G.edges(data=True)) == all_edges
+        assert sorted(G.edges(0, data=True)) == all_edges[:2]
+        assert sorted(G.edges([0, 1], data=True)) == all_edges[:4]
+        with pytest.raises(nx.NetworkXError):
+            G.edges(-1, True)
+
+    def test_in_degree_weighted(self):
+        G = self.K3.copy()
+        G.add_edge(0, 1, weight=0.3, other=1.2)
+        assert sorted(G.in_degree(weight="weight")) == [(0, 2), (1, 1.3), (2, 2)]
+        assert dict(G.in_degree(weight="weight")) == {0: 2, 1: 1.3, 2: 2}
+        assert G.in_degree(1, weight="weight") == 1.3
+        assert sorted(G.in_degree(weight="other")) == [(0, 2), (1, 2.2), (2, 2)]
+        assert dict(G.in_degree(weight="other")) == {0: 2, 1: 2.2, 2: 2}
+        assert G.in_degree(1, weight="other") == 2.2
+        assert list(G.in_degree(iter([1]), weight="other")) == [(1, 2.2)]
+
+    def test_out_degree_weighted(self):
+        G = self.K3.copy()
+        G.add_edge(0, 1, weight=0.3, other=1.2)
+        assert sorted(G.out_degree(weight="weight")) == [(0, 1.3), (1, 2), (2, 2)]
+        assert dict(G.out_degree(weight="weight")) == {0: 1.3, 1: 2, 2: 2}
+        assert G.out_degree(0, weight="weight") == 1.3
+        assert sorted(G.out_degree(weight="other")) == [(0, 2.2), (1, 2), (2, 2)]
+        assert dict(G.out_degree(weight="other")) == {0: 2.2, 1: 2, 2: 2}
+        assert G.out_degree(0, weight="other") == 2.2
+        assert list(G.out_degree(iter([0]), weight="other")) == [(0, 2.2)]


-class TestDiGraph(BaseAttrDiGraphTester, TestGraph):
+class TestDiGraph(BaseAttrDiGraphTester, _TestGraph):
     """Tests specific to dict-of-dict-of-dict digraph data structure"""

-    def setUp(self):
+    def setup_method(self):
         self.Graph = nx.DiGraph
         # build dict-of-dict-of-dict K3
         ed1, ed2, ed3, ed4, ed5, ed6 = ({}, {}, {}, {}, {}, {})
@@ -196,61 +202,86 @@ def setUp(self):

     def test_data_input(self):
         G = self.Graph({1: [2], 2: [1]}, name="test")
-        assert_equal(G.name, "test")
-        assert_equal(sorted(G.adj.items()), [(1, {2: {}}), (2, {1: {}})])
-        assert_equal(sorted(G.succ.items()), [(1, {2: {}}), (2, {1: {}})])
-        assert_equal(sorted(G.pred.items()), [(1, {2: {}}), (2, {1: {}})])
+        assert G.name == "test"
+        assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})]
+        assert sorted(G.succ.items()) == [(1, {2: {}}), (2, {1: {}})]
+        assert sorted(G.pred.items()) == [(1, {2: {}}), (2, {1: {}})]

     def test_add_edge(self):
         G = self.Graph()
         G.add_edge(0, 1)
-        assert_equal(G.adj, {0: {1: {}}, 1: {}})
-        assert_equal(G.succ, {0: {1: {}}, 1: {}})
-        assert_equal(G.pred, {0: {}, 1: {0: {}}})
+        assert G.adj == {0: {1: {}}, 1: {}}
+        assert G.succ == {0: {1: {}}, 1: {}}
+        assert G.pred == {0: {}, 1: {0: {}}}
         G = self.Graph()
         G.add_edge(*(0, 1))
-        assert_equal(G.adj, {0: {1: {}}, 1: {}})
-        assert_equal(G.succ, {0: {1: {}}, 1: {}})
-        assert_equal(G.pred, {0: {}, 1: {0: {}}})
+        assert G.adj == {0: {1: {}}, 1: {}}
+        assert G.succ == {0: {1: {}}, 1: {}}
+        assert G.pred == {0: {}, 1: {0: {}}}

     def test_add_edges_from(self):
         G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 2, {'data': 3})], data=2)
-        assert_equal(G.adj, {0: {1: {'data': 2}, 2: {'data': 3}}, 1: {}, 2: {}})
-        assert_equal(G.succ, {0: {1: {'data': 2}, 2: {'data': 3}}, 1: {}, 2: {}})
-        assert_equal(G.pred, {0: {}, 1: {0: {'data': 2}}, 2: {0: {'data': 3}}})
-
-        assert_raises(nx.NetworkXError, G.add_edges_from, [(0,)])  # too few in tuple
-        assert_raises(nx.NetworkXError, G.add_edges_from, [(0, 1, 2, 3)])  # too many in tuple
-        assert_raises(TypeError, G.add_edges_from, [0])  # not a tuple
+        G.add_edges_from([(0, 1), (0, 2, {"data": 3})], data=2)
+        assert G.adj == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}}
+        assert G.succ == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}}
+        assert G.pred == {0: {}, 1: {0: {"data": 2}}, 2: {0: {"data": 3}}}
+
+        with pytest.raises(nx.NetworkXError):
+            G.add_edges_from([(0,)])  # too few in tuple
+        with pytest.raises(nx.NetworkXError):
+            G.add_edges_from([(0, 1, 2, 3)])  # too many in tuple
+        with pytest.raises(TypeError):
+            G.add_edges_from([0])  # not a tuple

     def test_remove_edge(self):
-        G = self.K3
+        G = self.K3.copy()
         G.remove_edge(0, 1)
-        assert_equal(G.succ, {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}})
-        assert_equal(G.pred, {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}})
-        assert_raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
+        assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}}
+        assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}
+        with pytest.raises(nx.NetworkXError):
+            G.remove_edge(-1, 0)

     def test_remove_edges_from(self):
-        G = self.K3
+        G = self.K3.copy()
         G.remove_edges_from([(0, 1)])
-        assert_equal(G.succ, {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}})
-        assert_equal(G.pred, {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}})
+        assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}}
+        assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}
         G.remove_edges_from([(0, 0)])  # silent fail

-
-class TestEdgeSubgraph(TestGraphEdgeSubgraph):
+    def test_clear(self):
+        G = self.K3
+        G.graph["name"] = "K3"
+        G.clear()
+        assert list(G.nodes) == []
+        assert G.succ == {}
+        assert G.pred == {}
+        assert G.graph == {}
+
+    def test_clear_edges(self):
+        G = self.K3
+        G.graph["name"] = "K3"
+        nodes = list(G.nodes)
+        G.clear_edges()
+        assert list(G.nodes) == nodes
+        expected = {0: {}, 1: {}, 2: {}}
+        assert G.succ == expected
+        assert G.pred == expected
+        assert list(G.edges) == []
+        assert G.graph["name"] == "K3"
+
+
+class TestEdgeSubgraph(_TestGraphEdgeSubgraph):
     """Unit tests for the :meth:`DiGraph.edge_subgraph` method."""

-    def setup(self):
+    def setup_method(self):
         # Create a doubly-linked path graph on five nodes.
         G = nx.DiGraph(nx.path_graph(5))
         # Add some node, edge, and graph attributes.
         for i in range(5):
-            G.nodes[i]['name'] = 'node{}'.format(i)
-        G.edges[0, 1]['name'] = 'edge01'
-        G.edges[3, 4]['name'] = 'edge34'
-        G.graph['name'] = 'graph'
+            G.nodes[i]["name"] = f"node{i}"
+        G.edges[0, 1]["name"] = "edge01"
+        G.edges[3, 4]["name"] = "edge34"
+        G.graph["name"] = "graph"
         # Get the subgraph induced by the first and last edges.
         self.G = G
         self.H = G.edge_subgraph([(0, 1), (3, 4)])
@@ -264,7 +295,7 @@ def test_pred_succ(self):
         G = nx.DiGraph()
         G.add_edge(0, 1)
         H = G.edge_subgraph([(0, 1)])
-        assert_equal(list(H.predecessors(0)), [])
-        assert_equal(list(H.successors(0)), [1])
-        assert_equal(list(H.predecessors(1)), [0])
-        assert_equal(list(H.successors(1)), [])
+        assert list(H.predecessors(0)) == []
+        assert list(H.successors(0)) == [1]
+        assert list(H.predecessors(1)) == [0]
+        assert list(H.successors(1)) == []
diff --git a/networkx/classes/tests/test_digraph_historical.py b/networkx/classes/tests/test_digraph_historical.py
index cd965fe..7047bbf 100644
--- a/networkx/classes/tests/test_digraph_historical.py
+++ b/networkx/classes/tests/test_digraph_historical.py
@@ -1,103 +1,109 @@
-#!/usr/bin/env python
 """Original NetworkX graph tests"""
-from nose.tools import *
+import pytest
 import networkx
 import networkx as nx
-from networkx.testing.utils import *

-from historical_tests import HistoricalTests
+from .historical_tests import HistoricalTests


 class TestDiGraphHistorical(HistoricalTests):
-
-    def setUp(self):
-        HistoricalTests.setUp(self)
-        self.G = nx.DiGraph
+    @classmethod
+    def setup_class(cls):
+        HistoricalTests.setup_class()
+        cls.G = nx.DiGraph

     def test_in_degree(self):
         G = self.G()
-        G.add_nodes_from('GJK')
-        G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'),
-                          ('B', 'C'), ('C', 'D')])
-
-        assert_equal(sorted(d for n, d in G.in_degree()), [0, 0, 0, 0, 1, 2, 2])
-        assert_equal(dict(G.in_degree()),
-                     {'A': 0, 'C': 2, 'B': 1, 'D': 2, 'G': 0, 'K': 0, 'J': 0})
+        G.add_nodes_from("GJK")
+        G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
+
+        assert sorted(d for n, d in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2]
+        assert dict(G.in_degree()) == {
+            "A": 0,
+            "C": 2,
+            "B": 1,
+            "D": 2,
+            "G": 0,
+            "K": 0,
+            "J": 0,
+        }

     def test_out_degree(self):
         G = self.G()
-        G.add_nodes_from('GJK')
-        G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'),
-                          ('B', 'C'), ('C', 'D')])
-        assert_equal(sorted([v for k, v in G.in_degree()]),
-                     [0, 0, 0, 0, 1, 2, 2])
-        assert_equal(dict(G.out_degree()),
-                     {'A': 2, 'C': 1, 'B': 2, 'D': 0, 'G': 0, 'K': 0, 'J': 0})
+        G.add_nodes_from("GJK")
+        G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
+        assert sorted([v for k, v in G.in_degree()]) == [0, 0, 0, 0, 1, 2, 2]
+        assert dict(G.out_degree()) == {
+            "A": 2,
+            "C": 1,
+            "B": 2,
+            "D": 0,
+            "G": 0,
+            "K": 0,
+            "J": 0,
+        }

     def test_degree_digraph(self):
         H = nx.DiGraph()
         H.add_edges_from([(1, 24), (1, 2)])
-        assert_equal(sorted(d for n, d in H.in_degree([1, 24])), [0, 1])
-        assert_equal(sorted(d for n, d in H.out_degree([1, 24])), [0, 2])
-        assert_equal(sorted(d for n, d in H.degree([1, 24])), [1, 2])
+        assert sorted(d for n, d in H.in_degree([1, 24])) == [0, 1]
+        assert sorted(d for n, d in H.out_degree([1, 24])) == [0, 2]
+        assert sorted(d for n, d in H.degree([1, 24])) == [1, 2]

     def test_neighbors(self):
         G = self.G()
-        G.add_nodes_from('GJK')
-        G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'),
-                          ('B', 'C'), ('C', 'D')])
+        G.add_nodes_from("GJK")
+        G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])

-        assert_equal(sorted(G.neighbors('C')), ['D'])
-        assert_equal(sorted(G['C']), ['D'])
-        assert_equal(sorted(G.neighbors('A')), ['B', 'C'])
-        assert_raises(nx.NetworkXError, G.neighbors, 'j')
-        assert_raises(nx.NetworkXError, G.neighbors, 'j')
+        assert sorted(G.neighbors("C")) == ["D"]
+        assert sorted(G["C"]) == ["D"]
+        assert sorted(G.neighbors("A")) == ["B", "C"]
+        pytest.raises(nx.NetworkXError, G.neighbors, "j")
+        pytest.raises(nx.NetworkXError, G.neighbors, "j")

     def test_successors(self):
         G = self.G()
-        G.add_nodes_from('GJK')
-        G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'),
-                          ('B', 'C'), ('C', 'D')])
-        assert_equal(sorted(G.successors('A')), ['B', 'C'])
-        assert_equal(sorted(G.successors('A')), ['B', 'C'])
-        assert_equal(sorted(G.successors('G')), [])
-        assert_equal(sorted(G.successors('D')), [])
-        assert_equal(sorted(G.successors('G')), [])
-        assert_raises(nx.NetworkXError, G.successors, 'j')
-        assert_raises(nx.NetworkXError, G.successors, 'j')
+        G.add_nodes_from("GJK")
+        G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
+        assert sorted(G.successors("A")) == ["B", "C"]
+        assert sorted(G.successors("A")) == ["B", "C"]
+        assert sorted(G.successors("G")) == []
+        assert sorted(G.successors("D")) == []
+        assert sorted(G.successors("G")) == []
+        pytest.raises(nx.NetworkXError, G.successors, "j")
+        pytest.raises(nx.NetworkXError, G.successors, "j")

     def test_predecessors(self):
         G = self.G()
-        G.add_nodes_from('GJK')
-        G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'),
-                          ('B', 'C'), ('C', 'D')])
-        assert_equal(sorted(G.predecessors('C')), ['A', 'B'])
-        assert_equal(sorted(G.predecessors('C')), ['A', 'B'])
-        assert_equal(sorted(G.predecessors('G')), [])
-        assert_equal(sorted(G.predecessors('A')), [])
-        assert_equal(sorted(G.predecessors('G')), [])
-        assert_equal(sorted(G.predecessors('A')), [])
-        assert_equal(sorted(G.successors('D')), [])
-
-        assert_raises(nx.NetworkXError, G.predecessors, 'j')
-        assert_raises(nx.NetworkXError, G.predecessors, 'j')
+        G.add_nodes_from("GJK")
+        G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
+        assert sorted(G.predecessors("C")) == ["A", "B"]
+        assert sorted(G.predecessors("C")) == ["A", "B"]
+        assert sorted(G.predecessors("G")) == []
+        assert sorted(G.predecessors("A")) == []
+        assert sorted(G.predecessors("G")) == []
+        assert sorted(G.predecessors("A")) == []
+        assert sorted(G.successors("D")) == []
+
+        pytest.raises(nx.NetworkXError, G.predecessors, "j")
+        pytest.raises(nx.NetworkXError, G.predecessors, "j")

     def test_reverse(self):
         G = nx.complete_graph(10)
         H = G.to_directed()
         HR = H.reverse()
-        assert_true(nx.is_isomorphic(H, HR))
-        assert_equal(sorted(H.edges()), sorted(HR.edges()))
+        assert nx.is_isomorphic(H, HR)
+        assert sorted(H.edges()) == sorted(HR.edges())

     def test_reverse2(self):
         H = nx.DiGraph()
         foo = [H.add_edge(u, u + 1) for u in range(0, 5)]
         HR = H.reverse()
         for u in range(0, 5):
-            assert_true(HR.has_edge(u + 1, u))
+            assert HR.has_edge(u + 1, u)

     def test_reverse3(self):
         H = nx.DiGraph()
         H.add_nodes_from([1, 2, 3, 4])
         HR = H.reverse()
-        assert_equal(sorted(HR.nodes()), [1, 2, 3, 4])
+        assert sorted(HR.nodes()) == [1, 2, 3, 4]
diff --git a/networkx/classes/tests/test_filters.py b/networkx/classes/tests/test_filters.py
index d4a3ee3..b8fe40b 100644
--- a/networkx/classes/tests/test_filters.py
+++ b/networkx/classes/tests/test_filters.py
@@ -1,176 +1,176 @@
-from nose.tools import assert_true, assert_false, assert_raises
+import pytest
 import networkx as nx


-class TestFilterFactory(object):
+class TestFilterFactory:
     def test_no_filter(self):
         nf = nx.filters.no_filter
-        assert_true(nf())
-        assert_true(nf(1))
-        assert_true(nf(2, 1))
+        assert nf()
+        assert nf(1)
+        assert nf(2, 1)

     def test_hide_nodes(self):
         f = nx.classes.filters.hide_nodes([1, 2, 3])
-        assert_false(f(1))
-        assert_false(f(2))
-        assert_false(f(3))
-        assert_true(f(4))
-        assert_true(f(0))
-        assert_true(f('a'))
-        assert_raises(TypeError, f, 1, 2)
-        assert_raises(TypeError, f)
+        assert not f(1)
+        assert not f(2)
+        assert not f(3)
+        assert f(4)
+        assert f(0)
+        assert f("a")
+        pytest.raises(TypeError, f, 1, 2)
+        pytest.raises(TypeError, f)

     def test_show_nodes(self):
         f = nx.classes.filters.show_nodes([1, 2, 3])
-        assert_true(f(1))
-        assert_true(f(2))
-        assert_true(f(3))
-        assert_false(f(4))
-        assert_false(f(0))
-        assert_false(f('a'))
-        assert_raises(TypeError, f, 1, 2)
-        assert_raises(TypeError, f)
+        assert f(1)
+        assert f(2)
+        assert f(3)
+        assert not f(4)
+        assert not f(0)
+        assert not f("a")
+        pytest.raises(TypeError, f, 1, 2)
+        pytest.raises(TypeError, f)

     def test_hide_edges(self):
         factory = nx.classes.filters.hide_edges
         f = factory([(1, 2), (3, 4)])
-        assert_false(f(1, 2))
-        assert_false(f(3, 4))
-        assert_false(f(4, 3))
-        assert_true(f(2, 3))
-        assert_true(f(0, -1))
-        assert_true(f('a', 'b'))
-        assert_raises(TypeError, f, 1, 2, 3)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2, 3)])
+        assert not f(1, 2)
+        assert not f(3, 4)
+        assert not f(4, 3)
+        assert f(2, 3)
+        assert f(0, -1)
+        assert f("a", "b")
+        pytest.raises(TypeError, f, 1, 2, 3)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2, 3)])

     def test_show_edges(self):
         factory = nx.classes.filters.show_edges
         f = factory([(1, 2), (3, 4)])
-        assert_true(f(1, 2))
-        assert_true(f(3, 4))
-        assert_true(f(4, 3))
-        assert_false(f(2, 3))
-        assert_false(f(0, -1))
-        assert_false(f('a', 'b'))
-        assert_raises(TypeError, f, 1, 2, 3)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2, 3)])
+        assert f(1, 2)
+        assert f(3, 4)
+        assert f(4, 3)
+        assert not f(2, 3)
+        assert not f(0, -1)
+        assert not f("a", "b")
+        pytest.raises(TypeError, f, 1, 2, 3)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2, 3)])

     def test_hide_diedges(self):
         factory = nx.classes.filters.hide_diedges
         f = factory([(1, 2), (3, 4)])
-        assert_false(f(1, 2))
-        assert_false(f(3, 4))
-        assert_true(f(4, 3))
-        assert_true(f(2, 3))
-        assert_true(f(0, -1))
-        assert_true(f('a', 'b'))
-        assert_raises(TypeError, f, 1, 2, 3)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2, 3)])
+        assert not f(1, 2)
+        assert not f(3, 4)
+        assert f(4, 3)
+        assert f(2, 3)
+        assert f(0, -1)
+        assert f("a", "b")
+        pytest.raises(TypeError, f, 1, 2, 3)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2, 3)])

     def test_show_diedges(self):
         factory = nx.classes.filters.show_diedges
         f = factory([(1, 2), (3, 4)])
-        assert_true(f(1, 2))
-        assert_true(f(3, 4))
-        assert_false(f(4, 3))
-        assert_false(f(2, 3))
-        assert_false(f(0, -1))
-        assert_false(f('a', 'b'))
-        assert_raises(TypeError, f, 1, 2, 3)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2, 3)])
+        assert f(1, 2)
+        assert f(3, 4)
+        assert not f(4, 3)
+        assert not f(2, 3)
+        assert not f(0, -1)
+        assert not f("a", "b")
+        pytest.raises(TypeError, f, 1, 2, 3)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2, 3)])

     def test_hide_multiedges(self):
         factory = nx.classes.filters.hide_multiedges
         f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)])
-        assert_false(f(1, 2, 0))
-        assert_false(f(1, 2, 1))
-        assert_true(f(1, 2, 2))
-        assert_true(f(3, 4, 0))
-        assert_false(f(3, 4, 1))
-        assert_false(f(4, 3, 1))
-        assert_true(f(4, 3, 0))
-        assert_true(f(2, 3, 0))
-        assert_true(f(0, -1, 0))
-        assert_true(f('a', 'b', 0))
-        assert_raises(TypeError, f, 1, 2, 3, 4)
-        assert_raises(TypeError, f, 1, 2)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2)])
-        assert_raises(ValueError, factory, [(1, 2, 3, 4)])
+        assert not f(1, 2, 0)
+        assert not f(1, 2, 1)
+        assert f(1, 2, 2)
+        assert f(3, 4, 0)
+        assert not f(3, 4, 1)
+        assert not f(4, 3, 1)
+        assert f(4, 3, 0)
+        assert f(2, 3, 0)
+        assert f(0, -1, 0)
+        assert f("a", "b", 0)
+        pytest.raises(TypeError, f, 1, 2, 3, 4)
+        pytest.raises(TypeError, f, 1, 2)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2)])
+        pytest.raises(ValueError, factory, [(1, 2, 3, 4)])

     def test_show_multiedges(self):
         factory = nx.classes.filters.show_multiedges
         f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)])
-        assert_true(f(1, 2, 0))
-        assert_true(f(1, 2, 1))
-        assert_false(f(1, 2, 2))
-        assert_false(f(3, 4, 0))
-        assert_true(f(3, 4, 1))
-        assert_true(f(4, 3, 1))
-        assert_false(f(4, 3, 0))
-        assert_false(f(2, 3, 0))
-        assert_false(f(0, -1, 0))
-        assert_false(f('a', 'b', 0))
-        assert_raises(TypeError, f, 1, 2, 3, 4)
-        assert_raises(TypeError, f, 1, 2)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2)])
-        assert_raises(ValueError, factory, [(1, 2, 3, 4)])
+        assert f(1, 2, 0)
+        assert f(1, 2, 1)
+        assert not f(1, 2, 2)
+        assert not f(3, 4, 0)
+        assert f(3, 4, 1)
+        assert f(4, 3, 1)
+        assert not f(4, 3, 0)
+        assert not f(2, 3, 0)
+        assert not f(0, -1, 0)
+        assert not f("a", "b", 0)
+        pytest.raises(TypeError, f, 1, 2, 3, 4)
+        pytest.raises(TypeError, f, 1, 2)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2)])
+        pytest.raises(ValueError, factory, [(1, 2, 3, 4)])

     def test_hide_multidiedges(self):
         factory = nx.classes.filters.hide_multidiedges
         f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)])
-        assert_false(f(1, 2, 0))
-        assert_false(f(1, 2, 1))
-        assert_true(f(1, 2, 2))
-        assert_true(f(3, 4, 0))
-        assert_false(f(3, 4, 1))
-        assert_true(f(4, 3, 1))
-        assert_true(f(4, 3, 0))
-        assert_true(f(2, 3, 0))
-        assert_true(f(0, -1, 0))
-        assert_true(f('a', 'b', 0))
-        assert_raises(TypeError, f, 1, 2, 3, 4)
-        assert_raises(TypeError, f, 1, 2)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2)])
-        assert_raises(ValueError, factory, [(1, 2, 3, 4)])
+        assert not f(1, 2, 0)
+        assert not f(1, 2, 1)
+        assert f(1, 2, 2)
+        assert f(3, 4, 0)
+        assert not f(3, 4, 1)
+        assert f(4, 3, 1)
+        assert f(4, 3, 0)
+        assert f(2, 3, 0)
+        assert f(0, -1, 0)
+        assert f("a", "b", 0)
+        pytest.raises(TypeError, f, 1, 2, 3, 4)
+        pytest.raises(TypeError, f, 1, 2)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2)])
+        pytest.raises(ValueError, factory, [(1, 2, 3, 4)])

     def test_show_multidiedges(self):
         factory = nx.classes.filters.show_multidiedges
         f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)])
-        assert_true(f(1, 2, 0))
-        assert_true(f(1, 2, 1))
-        assert_false(f(1, 2, 2))
-        assert_false(f(3, 4, 0))
-        assert_true(f(3, 4, 1))
-        assert_false(f(4, 3, 1))
-        assert_false(f(4, 3, 0))
-        assert_false(f(2, 3, 0))
-        assert_false(f(0, -1, 0))
-        assert_false(f('a', 'b', 0))
-        assert_raises(TypeError, f, 1, 2, 3, 4)
-        assert_raises(TypeError, f, 1, 2)
-        assert_raises(TypeError, f, 1)
-        assert_raises(TypeError, f)
-        assert_raises(TypeError, factory, [1, 2, 3])
-        assert_raises(ValueError, factory, [(1, 2)])
-        assert_raises(ValueError, factory, [(1, 2, 3, 4)])
+        assert f(1, 2, 0)
+        assert f(1, 2, 1)
+        assert not f(1, 2, 2)
+        assert not f(3, 4, 0)
+        assert f(3, 4, 1)
+        assert not f(4, 3, 1)
+        assert not f(4, 3, 0)
+        assert not f(2, 3, 0)
+        assert not f(0, -1, 0)
+        assert not f("a", "b", 0)
+        pytest.raises(TypeError, f, 1, 2, 3, 4)
+        pytest.raises(TypeError, f, 1, 2)
+        pytest.raises(TypeError, f, 1)
+        pytest.raises(TypeError, f)
+        pytest.raises(TypeError, factory, [1, 2, 3])
+        pytest.raises(ValueError, factory, [(1, 2)])
+        pytest.raises(ValueError, factory, [(1, 2, 3, 4)])
diff --git a/networkx/classes/tests/test_function.py b/networkx/classes/tests/test_function.py
index e8c4bc9..9f89951 100644
--- a/networkx/classes/tests/test_function.py
+++ b/networkx/classes/tests/test_function.py
@@ -1,13 +1,12 @@
-#!/usr/bin/env python
 import random
-from nose.tools import *
+import pytest
 import networkx as nx
-from networkx.testing.utils import *
+from networkx.testing.utils import assert_edges_equal, assert_nodes_equal


-class TestFunction(object):
-    def setUp(self):
-        self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name='Test')
+class TestFunction:
+    def setup_method(self):
+        self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test")
         self.Gdegree = {0: 3, 1: 2, 2: 2, 3: 1, 4: 0}
         self.Gnodes = list(range(5))
         self.Gedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)]
@@ -23,51 +22,73 @@ def test_nodes(self):

     def test_edges(self):
         assert_edges_equal(self.G.edges(), list(nx.edges(self.G)))
-        assert_equal(sorted(self.DG.edges()), sorted(nx.edges(self.DG)))
-        assert_edges_equal(self.G.edges(nbunch=[0, 1, 3]),
-                           list(nx.edges(self.G, nbunch=[0, 1, 3])))
-        assert_equal(sorted(self.DG.edges(nbunch=[0, 1, 3])),
-                     sorted(nx.edges(self.DG, nbunch=[0, 1, 3])))
+        assert sorted(self.DG.edges()) == sorted(nx.edges(self.DG))
+        assert_edges_equal(
+            self.G.edges(nbunch=[0, 1, 3]), list(nx.edges(self.G, nbunch=[0, 1, 3]))
+        )
+        assert sorted(self.DG.edges(nbunch=[0, 1, 3])) == sorted(
+            nx.edges(self.DG, nbunch=[0, 1, 3])
+        )

     def test_degree(self):
         assert_edges_equal(self.G.degree(), list(nx.degree(self.G)))
-        assert_equal(sorted(self.DG.degree()), sorted(nx.degree(self.DG)))
-        assert_edges_equal(self.G.degree(nbunch=[0, 1]),
-                           list(nx.degree(self.G, nbunch=[0, 1])))
-        assert_equal(sorted(self.DG.degree(nbunch=[0, 1])),
-                     sorted(nx.degree(self.DG, nbunch=[0, 1])))
-        assert_edges_equal(self.G.degree(weight='weight'),
-                           list(nx.degree(self.G, weight='weight')))
-        assert_equal(sorted(self.DG.degree(weight='weight')),
-                     sorted(nx.degree(self.DG, weight='weight')))
+        assert sorted(self.DG.degree()) == sorted(nx.degree(self.DG))
+        assert_edges_equal(
+            self.G.degree(nbunch=[0, 1]), list(nx.degree(self.G, nbunch=[0, 1]))
+        )
+        assert sorted(self.DG.degree(nbunch=[0, 1])) == sorted(
+            nx.degree(self.DG, nbunch=[0, 1])
+        )
+        assert_edges_equal(
+            self.G.degree(weight="weight"), list(nx.degree(self.G, weight="weight"))
+        )
+        assert sorted(self.DG.degree(weight="weight")) == sorted(
+            nx.degree(self.DG, weight="weight")
+        )

     def test_neighbors(self):
-        assert_equal(list(self.G.neighbors(1)), list(nx.neighbors(self.G, 1)))
-        assert_equal(list(self.DG.neighbors(1)), list(nx.neighbors(self.DG, 1)))
+        assert list(self.G.neighbors(1)) == list(nx.neighbors(self.G, 1))
+        assert list(self.DG.neighbors(1)) == list(nx.neighbors(self.DG, 1))

     def test_number_of_nodes(self):
-        assert_equal(self.G.number_of_nodes(), nx.number_of_nodes(self.G))
-        assert_equal(self.DG.number_of_nodes(), nx.number_of_nodes(self.DG))
+        assert self.G.number_of_nodes() == nx.number_of_nodes(self.G)
+        assert self.DG.number_of_nodes() == nx.number_of_nodes(self.DG)

     def test_number_of_edges(self):
-        assert_equal(self.G.number_of_edges(), nx.number_of_edges(self.G))
-        assert_equal(self.DG.number_of_edges(), nx.number_of_edges(self.DG))
+        assert self.G.number_of_edges() == nx.number_of_edges(self.G)
+        assert self.DG.number_of_edges() == nx.number_of_edges(self.DG)

     def test_is_directed(self):
-        assert_equal(self.G.is_directed(), nx.is_directed(self.G))
-        assert_equal(self.DG.is_directed(), nx.is_directed(self.DG))
+        assert self.G.is_directed() == nx.is_directed(self.G)
+        assert self.DG.is_directed() == nx.is_directed(self.DG)

     def test_add_star(self):
         G = self.G.copy()
         nlist = [12, 13, 14, 15]
         nx.add_star(G, nlist)
         assert_edges_equal(G.edges(nlist), [(12, 13), (12, 14), (12, 15)])
+
         G = self.G.copy()
         nx.add_star(G, nlist, weight=2.0)
-        assert_edges_equal(G.edges(nlist, data=True),
-                           [(12, 13, {'weight': 2.}),
-                            (12, 14, {'weight': 2.}),
-                            (12, 15, {'weight': 2.})])
+        assert_edges_equal(
+            G.edges(nlist, data=True),
+            [
+                (12, 13, {"weight": 2.0}),
+                (12, 14, {"weight": 2.0}),
+                (12, 15, {"weight": 2.0}),
+            ],
+        )
+
+        G = self.G.copy()
+        nlist = [12]
+        nx.add_star(G, nlist)
+        assert_nodes_equal(G, list(self.G) + nlist)
+
+        G = self.G.copy()
+        nlist = []
+        nx.add_star(G, nlist)
+        assert_nodes_equal(G.nodes, self.Gnodes)
+        assert_edges_equal(G.edges, self.G.edges)

     def test_add_path(self):
         G = self.G.copy()
@@ -76,10 +97,14 @@ def test_add_path(self):
         assert_edges_equal(G.edges(nlist), [(12, 13), (13, 14), (14, 15)])
         G = self.G.copy()
         nx.add_path(G, nlist, weight=2.0)
-        assert_edges_equal(G.edges(nlist, data=True),
-                           [(12, 13, {'weight': 2.}),
-                            (13, 14, {'weight': 2.}),
-                            (14, 15, {'weight': 2.})])
+        assert_edges_equal(
+            G.edges(nlist, data=True),
+            [
+                (12, 13, {"weight": 2.0}),
+                (13, 14, {"weight": 2.0}),
+                (14, 15, {"weight": 2.0}),
+            ],
+        )

         G = self.G.copy()
         nlist = [None]
@@ -120,199 +145,238 @@ def test_add_path(self):
     def test_add_cycle(self):
         G = self.G.copy()
         nlist = [12, 13, 14, 15]
-        oklists = [[(12, 13), (12, 15), (13, 14), (14, 15)],
-                   [(12, 13), (13, 14), (14, 15), (15, 12)]]
+        oklists = [
+            [(12, 13), (12, 15), (13, 14), (14, 15)],
+            [(12, 13), (13, 14), (14, 15), (15, 12)],
+        ]
         nx.add_cycle(G, nlist)
-        assert_true(sorted(G.edges(nlist)) in oklists)
+        assert sorted(G.edges(nlist)) in oklists
         G = self.G.copy()
-        oklists = [[(12, 13, {'weight': 1.}),
-                    (12, 15, {'weight': 1.}),
-                    (13, 14, {'weight': 1.}),
-                    (14, 15, {'weight': 1.})],
-                   [(12, 13, {'weight': 1.}),
-                    (13, 14, {'weight': 1.}),
-                    (14, 15, {'weight': 1.}),
-                    (15, 12, {'weight': 1.})]]
+        oklists = [
+            [
+                (12, 13, {"weight": 1.0}),
+                (12, 15, {"weight": 1.0}),
+                (13, 14, {"weight": 1.0}),
+                (14, 15, {"weight": 1.0}),
+            ],
+            [
+                (12, 13, {"weight": 1.0}),
+                (13, 14, {"weight": 1.0}),
+                (14, 15, {"weight": 1.0}),
+                (15, 12, {"weight": 1.0}),
+            ],
+        ]
         nx.add_cycle(G, nlist, weight=1.0)
-        assert_true(sorted(G.edges(nlist, data=True)) in oklists)
+        assert sorted(G.edges(nlist, data=True)) in oklists
+
+        G = self.G.copy()
+        nlist = [12]
+        nx.add_cycle(G, nlist)
+        assert_nodes_equal(G, list(self.G) + nlist)
+
+        G = self.G.copy()
+        nlist = []
+        nx.add_cycle(G, nlist)
+        assert_nodes_equal(G.nodes, self.Gnodes)
+        assert_edges_equal(G.edges, self.G.edges)

     def test_subgraph(self):
-        assert_equal(self.G.subgraph([0, 1, 2, 4]).adj,
-                     nx.subgraph(self.G, [0, 1, 2, 4]).adj)
-        assert_equal(self.DG.subgraph([0, 1, 2, 4]).adj,
-                     nx.subgraph(self.DG, [0, 1, 2, 4]).adj)
-        assert_equal(self.G.subgraph([0, 1, 2, 4]).adj,
-                     nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj)
-        assert_equal(self.DG.subgraph([0, 1, 2, 4]).adj,
-                     nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj)
+        assert (
+            self.G.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.G, [0, 1, 2, 4]).adj
+        )
+        assert (
+            self.DG.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.DG, [0, 1, 2, 4]).adj
+        )
+        assert (
+            self.G.subgraph([0, 1, 2, 4]).adj
+            == nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj
+        )
+        assert (
+            self.DG.subgraph([0, 1, 2, 4]).adj
+            == nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj
+        )
         # subgraph-subgraph chain is allowed in function interface
         H = nx.induced_subgraph(self.G.subgraph([0, 1, 2, 4]), [0, 1, 4])
-        assert_is_not(H._graph, self.G)
-        assert_equal(H.adj, self.G.subgraph([0, 1, 4]).adj)
+        assert H._graph is not self.G
+        assert H.adj == self.G.subgraph([0, 1, 4]).adj

     def test_edge_subgraph(self):
-        assert_equal(self.G.edge_subgraph([(1, 2), (0, 3)]).adj,
-                     nx.edge_subgraph(self.G, [(1, 2), (0, 3)]).adj)
-        assert_equal(self.DG.edge_subgraph([(1, 2), (0, 3)]).adj,
-                     nx.edge_subgraph(self.DG, [(1, 2), (0, 3)]).adj)
+        assert (
+            self.G.edge_subgraph([(1, 2), (0, 3)]).adj
+            == nx.edge_subgraph(self.G, [(1, 2), (0, 3)]).adj
+        )
+        assert (
+            self.DG.edge_subgraph([(1, 2), (0, 3)]).adj
+            == nx.edge_subgraph(self.DG, [(1, 2), (0, 3)]).adj
+        )

     def test_restricted_view(self):
         H = nx.restricted_view(self.G, [0, 2, 5], [(1, 2), (3, 4)])
-        assert_equal(set(H.nodes), {1, 3, 4})
-        assert_equal(set(H.edges), {(1, 1)})
+        assert set(H.nodes) == {1, 3, 4}
+        assert set(H.edges) == {(1, 1)}

     def test_create_empty_copy(self):
         G = nx.create_empty_copy(self.G, with_data=False)
         assert_nodes_equal(G, list(self.G))
-        assert_equal(G.graph, {})
-        assert_equal(G._node, {}.fromkeys(self.G.nodes(), {}))
-        assert_equal(G._adj, {}.fromkeys(self.G.nodes(), {}))
+        assert G.graph == {}
+        assert G._node == {}.fromkeys(self.G.nodes(), {})
+        assert G._adj == {}.fromkeys(self.G.nodes(), {})
         G = nx.create_empty_copy(self.G)
         assert_nodes_equal(G, list(self.G))
-        assert_equal(G.graph, self.G.graph)
-        assert_equal(G._node, self.G._node)
-        assert_equal(G._adj, {}.fromkeys(self.G.nodes(), {}))
+        assert G.graph == self.G.graph
+        assert G._node == self.G._node
+        assert G._adj == {}.fromkeys(self.G.nodes(), {})

     def test_degree_histogram(self):
-        assert_equal(nx.degree_histogram(self.G), [1, 1, 1, 1, 1])
+        assert nx.degree_histogram(self.G) == [1, 1, 1, 1, 1]

     def test_density(self):
-        assert_equal(nx.density(self.G), 0.5)
-        assert_equal(nx.density(self.DG), 0.3)
+        assert nx.density(self.G) == 0.5
+        assert nx.density(self.DG) == 0.3
         G = nx.Graph()
         G.add_node(1)
-        assert_equal(nx.density(G), 0.0)
+        assert nx.density(G) == 0.0

     def test_density_selfloop(self):
         G = nx.Graph()
         G.add_edge(1, 1)
-        assert_equal(nx.density(G), 0.0)
+        assert nx.density(G) == 0.0
         G.add_edge(1, 2)
-        assert_equal(nx.density(G), 2.0)
+        assert nx.density(G) == 2.0

     def test_freeze(self):
         G = nx.freeze(self.G)
-        assert_equal(G.frozen, True)
-        assert_raises(nx.NetworkXError, G.add_node, 1)
-        assert_raises(nx.NetworkXError, G.add_nodes_from, [1])
-        assert_raises(nx.NetworkXError, G.remove_node, 1)
-        assert_raises(nx.NetworkXError, G.remove_nodes_from, [1])
-        assert_raises(nx.NetworkXError, G.add_edge, 1, 2)
-        assert_raises(nx.NetworkXError, G.add_edges_from, [(1, 2)])
-        assert_raises(nx.NetworkXError, G.remove_edge, 1, 2)
-        assert_raises(nx.NetworkXError, G.remove_edges_from, [(1, 2)])
-        assert_raises(nx.NetworkXError, G.clear)
+        assert G.frozen
+        pytest.raises(nx.NetworkXError, G.add_node, 1)
+        pytest.raises(nx.NetworkXError, G.add_nodes_from, [1])
+        pytest.raises(nx.NetworkXError, G.remove_node, 1)
+        pytest.raises(nx.NetworkXError, G.remove_nodes_from, [1])
+        pytest.raises(nx.NetworkXError, G.add_edge, 1, 2)
+        pytest.raises(nx.NetworkXError, G.add_edges_from, [(1, 2)])
+        pytest.raises(nx.NetworkXError, G.remove_edge, 1, 2)
+        pytest.raises(nx.NetworkXError, G.remove_edges_from, [(1, 2)])
+        pytest.raises(nx.NetworkXError, G.clear)

     def test_is_frozen(self):
-        assert_equal(nx.is_frozen(self.G), False)
+        assert not nx.is_frozen(self.G)
        G = nx.freeze(self.G)
-        assert_equal(G.frozen, nx.is_frozen(self.G))
-        assert_equal(G.frozen, True)
+        assert G.frozen == nx.is_frozen(self.G)
+        assert G.frozen

     def test_info(self):
         G = nx.path_graph(5)
         G.name = "path_graph(5)"
         info = nx.info(G)
-        expected_graph_info = '\n'.join(['Name: path_graph(5)',
-                                         'Type: Graph',
-                                         'Number of nodes: 5',
-                                         'Number of edges: 4',
-                                         'Average degree: 1.6000'])
-        assert_equal(info, expected_graph_info)
+        expected_graph_info = "\n".join(
+            [
+                "Name: path_graph(5)",
+                "Type: Graph",
+                "Number of nodes: 5",
+                "Number of edges: 4",
+                "Average degree: 1.6000",
+            ]
+        )
+        assert info == expected_graph_info

         info = nx.info(G, n=1)
-        expected_node_info = '\n'.join(
-            ['Node 1 has the following properties:',
-             'Degree: 2',
-             'Neighbors: 0 2'])
-        assert_equal(info, expected_node_info)
+        assert type(info) == str
+        expected_node_info = "\n".join(
+            ["Node 1 has the following properties:", "Degree: 2", "Neighbors: 0 2"]
+        )
+        assert info == expected_node_info
+
+        # must raise an error for a non-existent node
+        pytest.raises(nx.NetworkXError, nx.info, G, 1248)

     def test_info_digraph(self):
-        G = nx.DiGraph(name='path_graph(5)')
+        G = nx.DiGraph(name="path_graph(5)")
         nx.add_path(G, [0, 1, 2, 3, 4])
         info = nx.info(G)
-        expected_graph_info = '\n'.join(['Name: path_graph(5)',
-                                         'Type: DiGraph',
-                                         'Number of nodes: 5',
-                                         'Number of edges: 4',
-                                         'Average in degree: 0.8000',
-                                         'Average out degree: 0.8000'])
-        assert_equal(info, expected_graph_info)
+        expected_graph_info = "\n".join(
+            [
+                "Name: path_graph(5)",
+                "Type: DiGraph",
+                "Number of nodes: 5",
+                "Number of edges: 4",
+                "Average in degree: 0.8000",
+                "Average out degree: 0.8000",
+            ]
+        )
+        assert info == expected_graph_info

         info = nx.info(G, n=1)
-        expected_node_info = '\n'.join(
-            ['Node 1 has the following properties:',
-             'Degree: 2',
-             'Neighbors: 2'])
-        assert_equal(info, expected_node_info)
+        expected_node_info = "\n".join(
+            ["Node 1 has the following properties:", "Degree: 2", "Neighbors: 2"]
+        )
+        assert info == expected_node_info

-        assert_raises(nx.NetworkXError, nx.info, G, n=-1)
+        pytest.raises(nx.NetworkXError, nx.info, G, n=-1)

     def test_neighbors_complete_graph(self):
         graph = nx.complete_graph(100)
         pop = random.sample(list(graph), 1)
         nbors = list(nx.neighbors(graph, pop[0]))
         # should be all the other vertices in the graph
-        assert_equal(len(nbors), len(graph) - 1)
+        assert len(nbors) == len(graph) - 1

         graph = nx.path_graph(100)
         node = random.sample(list(graph), 1)[0]
         nbors = list(nx.neighbors(graph, node))
         # should be all the other vertices in the graph
         if node != 0 and node != 99:
-            assert_equal(len(nbors), 2)
+            assert len(nbors) == 2
         else:
-            assert_equal(len(nbors), 1)
+            assert len(nbors) == 1

         # create a star graph with 99 outer nodes
         graph = nx.star_graph(99)
         nbors = list(nx.neighbors(graph, 0))
-        assert_equal(len(nbors), 99)
+        assert len(nbors) == 99

     def test_non_neighbors(self):
         graph = nx.complete_graph(100)
         pop = random.sample(list(graph), 1)
         nbors = list(nx.non_neighbors(graph, pop[0]))
         # should be all the other vertices in the graph
-        assert_equal(len(nbors), 0)
+        assert len(nbors) == 0

         graph = nx.path_graph(100)
         node = random.sample(list(graph), 1)[0]
         nbors = list(nx.non_neighbors(graph, node))
         # should be all the other vertices in the graph
         if node != 0 and node != 99:
-            assert_equal(len(nbors), 97)
+            assert len(nbors) == 97
         else:
-            assert_equal(len(nbors), 98)
+            assert len(nbors) == 98

         # create a star graph with 99 outer nodes
         graph = nx.star_graph(99)
         nbors = list(nx.non_neighbors(graph, 0))
-        assert_equal(len(nbors), 0)
+        assert len(nbors) == 0

         # disconnected graph
         graph = nx.Graph()
         graph.add_nodes_from(range(10))
         nbors = list(nx.non_neighbors(graph, 0))
-        assert_equal(len(nbors), 9)
+        assert len(nbors) == 9

     def test_non_edges(self):
         # All possible edges exist
         graph = nx.complete_graph(5)
         nedges = list(nx.non_edges(graph))
-        assert_equal(len(nedges), 0)
+        assert len(nedges) == 0

         graph = nx.path_graph(4)
         expected = [(0, 2), (0, 3), (1, 3)]
         nedges = list(nx.non_edges(graph))
         for (u, v) in expected:
-            assert_true((u, v) in nedges or (v, u) in nedges)
+            assert (u, v) in nedges or (v, u) in nedges

         graph = nx.star_graph(4)
         expected = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
         nedges = list(nx.non_edges(graph))
         for (u, v) in expected:
-            assert_true((u, v) in nedges or (v, u) in nedges)
+            assert (u, v) in nedges or (v, u) in nedges

         # Directed graphs
         graph = nx.DiGraph()
@@ -320,73 +384,89 @@ def test_non_edges(self):
         expected = [(0, 1), (1, 0), (1, 2)]
         nedges = list(nx.non_edges(graph))
         for e in expected:
-            assert_true(e in nedges)
+            assert e in nedges

     def test_is_weighted(self):
         G = nx.Graph()
-        assert_false(nx.is_weighted(G))
+        assert not nx.is_weighted(G)

         G = nx.path_graph(4)
-        assert_false(nx.is_weighted(G))
-        assert_false(nx.is_weighted(G, (2, 3)))
+        assert not nx.is_weighted(G)
+        assert not nx.is_weighted(G, (2, 3))

         G.add_node(4)
         G.add_edge(3, 4, weight=4)
-        assert_false(nx.is_weighted(G))
-        assert_true(nx.is_weighted(G, (3, 4)))
+        assert not nx.is_weighted(G)
+        assert nx.is_weighted(G, (3, 4))

         G = nx.DiGraph()
-        G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
-                                   ('1', '0', -5), ('0', '2', 2),
-                                   ('1', '2', 4), ('2', '3', 1)])
-        assert_true(nx.is_weighted(G))
-        assert_true(nx.is_weighted(G, ('1', '0')))
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -5),
+                ("0", "2", 2),
+                ("1", "2", 4),
+                ("2", "3", 1),
+            ]
+        )
+        assert nx.is_weighted(G)
+        assert nx.is_weighted(G, ("1", "0"))

         G = G.to_undirected()
-        assert_true(nx.is_weighted(G))
-        assert_true(nx.is_weighted(G, ('1', '0')))
+        assert nx.is_weighted(G)
+        assert nx.is_weighted(G, ("1", "0"))

-        assert_raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
+        pytest.raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))

     def test_is_negatively_weighted(self):
         G = nx.Graph()
-        assert_false(nx.is_negatively_weighted(G))
+        assert not nx.is_negatively_weighted(G)

         G.add_node(1)
         G.add_nodes_from([2, 3, 4, 5])
-        assert_false(nx.is_negatively_weighted(G))
+        assert not nx.is_negatively_weighted(G)

         G.add_edge(1, 2, weight=4)
-        assert_false(nx.is_negatively_weighted(G, (1, 2)))
+        assert not nx.is_negatively_weighted(G, (1, 2))

         G.add_edges_from([(1, 3), (2, 4), (2, 6)])
-        G[1][3]['color'] = 'blue'
-        assert_false(nx.is_negatively_weighted(G))
-        assert_false(nx.is_negatively_weighted(G, (1, 3)))
+        G[1][3]["color"] = "blue"
+        assert not nx.is_negatively_weighted(G)
+        assert not nx.is_negatively_weighted(G, (1, 3))

-        G[2][4]['weight'] = -2
-        assert_true(nx.is_negatively_weighted(G, (2, 4)))
-        assert_true(nx.is_negatively_weighted(G))
+        G[2][4]["weight"] = -2
+        assert nx.is_negatively_weighted(G, (2, 4))
+        assert nx.is_negatively_weighted(G)

         G = nx.DiGraph()
-        G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
-                                   ('1', '0', -2), ('0', '2', 2),
-                                   ('1', '2', -3), ('2', '3', 1)])
-        assert_true(nx.is_negatively_weighted(G))
-        assert_false(nx.is_negatively_weighted(G, ('0', '3')))
-        assert_true(nx.is_negatively_weighted(G, ('1', '0')))
-
-        assert_raises(nx.NetworkXError, nx.is_negatively_weighted, G, (1, 4))
-
-
-class TestCommonNeighbors():
-    def setUp(self):
-        self.func = nx.common_neighbors
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -2),
+                ("0", "2", 2),
+                ("1", "2", -3),
+                ("2", "3", 1),
+            ]
+        )
+        assert nx.is_negatively_weighted(G)
+        assert not nx.is_negatively_weighted(G, ("0", "3"))
+        assert nx.is_negatively_weighted(G, ("1", "0"))
+
+        pytest.raises(nx.NetworkXError, nx.is_negatively_weighted, G, (1, 4))
+
+
+class TestCommonNeighbors:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.common_neighbors)

         def test_func(G, u, v, expected):
-            result = sorted(self.func(G, u, v))
-            assert_equal(result, expected)
-        self.test = test_func
+            result = sorted(cls.func(G, u, v))
+            assert result == expected
+
+        cls.test = staticmethod(test_func)

     def test_K5(self):
         G = nx.complete_graph(5)
@@ -400,17 +480,17 @@ def test_S4(self):
         G = nx.star_graph(4)
         self.test(G, 1, 2, [0])

-    @raises(nx.NetworkXNotImplemented)
     def test_digraph(self):
-        G = nx.DiGraph()
-        G.add_edges_from([(0, 1), (1, 2)])
-        self.func(G, 0, 2)
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            G.add_edges_from([(0, 1), (1, 2)])
+            self.func(G, 0, 2)

     def test_nonexistent_nodes(self):
         G = nx.complete_graph(5)
-        assert_raises(nx.NetworkXError, nx.common_neighbors, G, 5, 4)
-        assert_raises(nx.NetworkXError, nx.common_neighbors, G, 4, 5)
-        assert_raises(nx.NetworkXError, nx.common_neighbors, G, 5, 6)
+        pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 4)
+        pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 4, 5)
+        pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 6)

     def test_custom1(self):
         """Case of no common neighbors."""
@@ -430,30 +510,30 @@ def test_set_node_attributes():
         # Test single value
         G = nx.path_graph(3, create_using=G)
         vals = 100
-        attr = 'hello'
+        attr = "hello"
         nx.set_node_attributes(G, vals, attr)
-        assert_equal(G.nodes[0][attr], vals)
-        assert_equal(G.nodes[1][attr], vals)
-        assert_equal(G.nodes[2][attr], vals)
+        assert G.nodes[0][attr] == vals
+        assert G.nodes[1][attr] == vals
+        assert G.nodes[2][attr] == vals

         # Test dictionary
         G = nx.path_graph(3, create_using=G)
         vals = dict(zip(sorted(G.nodes()), range(len(G))))
-        attr = 'hi'
+        attr = "hi"
         nx.set_node_attributes(G, vals, attr)
-        assert_equal(G.nodes[0][attr], 0)
-        assert_equal(G.nodes[1][attr], 1)
-        assert_equal(G.nodes[2][attr], 2)
+        assert G.nodes[0][attr] == 0
+        assert G.nodes[1][attr] == 1
+        assert G.nodes[2][attr] == 2

         # Test dictionary of dictionaries
         G = nx.path_graph(3, create_using=G)
-        d = {'hi': 0, 'hello': 200}
+        d = {"hi": 0, "hello": 200}
         vals = dict.fromkeys(G.nodes(), d)
         vals.pop(0)
         nx.set_node_attributes(G, vals)
-        assert_equal(G.nodes[0], {})
-        assert_equal(G.nodes[1]["hi"], 0)
-        assert_equal(G.nodes[2]["hello"], 200)
+        assert G.nodes[0] == {}
+        assert G.nodes[1]["hi"] == 0
+        assert G.nodes[2]["hello"] == 200


 def test_set_edge_attributes():
@@ -461,30 +541,30 @@ def test_set_edge_attributes():
     for G in graphs:
         # Test single value
         G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
         vals = 3
         nx.set_edge_attributes(G, vals, attr)
-        assert_equal(G[0][1][attr], vals)
-        assert_equal(G[1][2][attr], vals)
+        assert G[0][1][attr] == vals
+        assert G[1][2][attr] == vals

         # Test multiple values
         G = nx.path_graph(3, create_using=G)
-        attr = 'hi'
+        attr = "hi"
         edges = [(0, 1), (1, 2)]
         vals = dict(zip(edges, range(len(edges))))
         nx.set_edge_attributes(G, vals, attr)
-        assert_equal(G[0][1][attr], 0)
-        assert_equal(G[1][2][attr], 1)
+        assert G[0][1][attr] == 0
+        assert G[1][2][attr] == 1

         # Test dictionary of dictionaries
         G = nx.path_graph(3, create_using=G)
-        d = {'hi': 0, 'hello': 200}
+        d = {"hi": 0, "hello": 200}
         edges = [(0, 1)]
         vals = dict.fromkeys(edges, d)
         nx.set_edge_attributes(G, vals)
-        assert_equal(G[0][1]['hi'], 0)
-        assert_equal(G[0][1]['hello'], 200)
-        assert_equal(G[1][2], {})
+        assert G[0][1]["hi"] == 0
+        assert G[0][1]["hello"] == 200
+        assert G[1][2] == {}


 def test_set_edge_attributes_multi():
@@ -492,79 +572,79 @@ def test_set_edge_attributes_multi():
     for G in graphs:
         # Test single value
         G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
         vals = 3
         nx.set_edge_attributes(G, vals, attr)
-        assert_equal(G[0][1][0][attr], vals)
-        assert_equal(G[1][2][0][attr], vals)
+        assert G[0][1][0][attr] == vals
+        assert G[1][2][0][attr] == vals

         # Test multiple values
         G = nx.path_graph(3, create_using=G)
-        attr = 'hi'
+        attr = "hi"
         edges = [(0, 1, 0), (1, 2, 0)]
         vals = dict(zip(edges, range(len(edges))))
         nx.set_edge_attributes(G, vals, attr)
-        assert_equal(G[0][1][0][attr], 0)
-        assert_equal(G[1][2][0][attr], 1)
+        assert G[0][1][0][attr] == 0
+        assert G[1][2][0][attr] == 1

         # Test dictionary of dictionaries
         G = nx.path_graph(3, create_using=G)
-        d = {'hi': 0, 'hello': 200}
+        d = {"hi": 0, "hello": 200}
         edges = [(0, 1, 0)]
         vals = dict.fromkeys(edges, d)
         nx.set_edge_attributes(G, vals)
-        assert_equal(G[0][1][0]['hi'], 0)
-        assert_equal(G[0][1][0]['hello'], 200)
-        assert_equal(G[1][2][0], {})
+        assert G[0][1][0]["hi"] == 0
+        assert G[0][1][0]["hello"] == 200
+        assert G[1][2][0] == {}


 def test_get_node_attributes():
     graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
     for G in graphs:
         G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
         vals = 100
         nx.set_node_attributes(G, vals, attr)
         attrs = nx.get_node_attributes(G, attr)
-        assert_equal(attrs[0], vals)
-        assert_equal(attrs[1], vals)
-        assert_equal(attrs[2], vals)
+        assert attrs[0] == vals
+        assert attrs[1] == vals
+        assert attrs[2] == vals


 def test_get_edge_attributes():
     graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
     for G in graphs:
         G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
         vals = 100
         nx.set_edge_attributes(G, vals, attr)
         attrs = nx.get_edge_attributes(G, attr)
-        assert_equal(len(attrs), 2)
+        assert len(attrs) == 2
         if G.is_multigraph():
             keys = [(0, 1, 0), (1, 2, 0)]
             for u, v, k in keys:
                 try:
-                    assert_equal(attrs[(u, v, k)], 100)
+                    assert attrs[(u, v, k)] == 100
                 except KeyError:
-                    assert_equal(attrs[(v, u, k)], 100)
+                    assert attrs[(v, u, k)] == 100
         else:
             keys = [(0, 1), (1, 2)]
             for u, v in keys:
                 try:
-                    assert_equal(attrs[(u, v)], 100)
+                    assert attrs[(u, v)] == 100
                 except KeyError:
-                    assert_equal(attrs[(v, u)], 100)
+                    assert attrs[(v, u)] == 100


 def test_is_empty():
     graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
     for G in graphs:
-        assert_true(nx.is_empty(G))
+        assert nx.is_empty(G)
         G.add_nodes_from(range(5))
-        assert_true(nx.is_empty(G))
+        assert nx.is_empty(G)
         G.add_edges_from([(1, 2), (3, 4)])
-        assert_false(nx.is_empty(G))
+        assert not nx.is_empty(G)


 def test_selfloops():
@@ -575,10 +655,64 @@ def test_selfloops():
         assert_nodes_equal(nx.nodes_with_selfloops(G), [0])
         assert_edges_equal(nx.selfloop_edges(G), [(0, 0)])
         assert_edges_equal(nx.selfloop_edges(G, data=True), [(0, 0, {})])
-        assert_equal(nx.number_of_selfloops(G), 1)
+        assert nx.number_of_selfloops(G) == 1
         # test selfloop attr
         G.add_edge(1, 1, weight=2)
-        assert_edges_equal(nx.selfloop_edges(G, data=True),
-                           [(0, 0, {}), (1, 1, {'weight': 2})])
-        assert_edges_equal(nx.selfloop_edges(G, data='weight'),
-                           [(0, 0, None), (1, 1, 2)])
+        assert_edges_equal(
+            nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})]
+        )
+        assert_edges_equal(
+            nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]
+        )
+        # test removing selfloops behavior vis-a-vis altering a dict while iterating
+        G.add_edge(0, 0)
+        G.remove_edges_from(nx.selfloop_edges(G))
+        if G.is_multigraph():
+            G.add_edge(0, 0)
+            pytest.raises(
+                RuntimeError, G.remove_edges_from, nx.selfloop_edges(G, keys=True)
+            )
+            G.add_edge(0, 0)
+            pytest.raises(
+                TypeError, G.remove_edges_from, nx.selfloop_edges(G, data=True)
+            )
+            G.add_edge(0, 0)
+            pytest.raises(
+                RuntimeError,
+                G.remove_edges_from,
+                nx.selfloop_edges(G, data=True, keys=True),
+            )
+        else:
+            G.add_edge(0, 0)
+            G.remove_edges_from(nx.selfloop_edges(G, keys=True))
+            G.add_edge(0, 0)
+            G.remove_edges_from(nx.selfloop_edges(G, data=True))
+            G.add_edge(0, 0)
+            G.remove_edges_from(nx.selfloop_edges(G, keys=True, data=True))
+
+
+def test_pathweight():
+    valid_path = [1, 2, 3]
+    invalid_path = [1, 3, 2]
+    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
+    edges = [
+        (1, 2, dict(cost=5, dist=6)),
+        (2, 3, dict(cost=3, dist=4)),
+        (1, 2, dict(cost=1, dist=2)),
+    ]
+    for graph in graphs:
+        graph.add_edges_from(edges)
+        assert nx.path_weight(graph, valid_path, "cost") == 4
+        assert nx.path_weight(graph, valid_path, "dist") == 6
+        pytest.raises(nx.NetworkXNoPath, nx.path_weight, graph, invalid_path, "cost")
+
+
+def test_ispath():
+    valid_path = [1, 2, 3, 4]
+    invalid_path = [1, 2, 4, 3]
+    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
+    edges = [(1, 2), (2, 3), (1, 2), (3, 4)]
+    for graph in graphs:
+        graph.add_edges_from(edges)
+        assert nx.is_path(graph, valid_path)
+        assert not nx.is_path(graph, invalid_path)
diff --git a/networkx/classes/tests/test_graph.py b/networkx/classes/tests/test_graph.py
index 50fcd8e..03902b0 100644
--- a/networkx/classes/tests/test_graph.py
+++ b/networkx/classes/tests/test_graph.py
@@ -1,102 +1,101 @@
-from nose.tools import assert_equal
-from nose.tools import assert_is
-from nose.tools import assert_not_equal
-from nose.tools import assert_raises
-from nose.tools import raises
+import pickle
+import gc

 import networkx as nx
-from networkx.testing.utils import *
+from networkx.testing.utils import (
+    assert_graphs_equal,
+    assert_edges_equal,
+    assert_nodes_equal,
+)
+import pytest


-def test_deprecated():
-    # for backwards compatibility with 1.x, will be removed for 3.x
-    G = nx.complete_graph(3)
-    assert_equal(G.node, {0: {}, 1: {}, 2: {}})
-
-    G = nx.DiGraph()
-    G.add_path([3, 4])
-    assert_equal(G.adj, {3: {4: {}}, 4: {}})
-
-    G = nx.DiGraph()
-    G.add_cycle([3, 4, 5])
-    assert_equal(G.adj, {3: {4: {}}, 4: {5: {}}, 5: {3: {}}})
-
-    G = nx.DiGraph()
-    G.add_star([3, 4, 5])
-    assert_equal(G.adj, {3: {4: {}, 5: {}}, 4: {}, 5: {}})
-
-    G = nx.DiGraph([(0, 0), (0, 1), (1, 2)])
-    assert_equal(G.number_of_selfloops(), 1)
-    assert_equal(list(G.nodes_with_selfloops()), [0])
-    assert_equal(list(G.selfloop_edges()), [(0, 0)])
-
-
-class BaseGraphTester(object):
+class BaseGraphTester:
     """ Tests for data-structure independent graph class features."""

     def test_contains(self):
         G = self.K3
-        assert(1 in G)
-        assert(4 not in G)
-        assert('b' not in G)
-        assert([] not in G)  # no exception for nonhashable
-        assert({1: 1} not in G)  # no exception for nonhashable
+        assert 1 in G
+        assert 4 not in G
+        assert "b" not in G
+        assert [] not in G  # no exception for nonhashable
+        assert {1: 1} not in G  # no exception for nonhashable

     def test_order(self):
         G = self.K3
-        assert_equal(len(G), 3)
-        assert_equal(G.order(), 3)
-        assert_equal(G.number_of_nodes(), 3)
+        assert len(G) == 3
+        assert G.order() == 3
+        assert G.number_of_nodes() == 3

     def test_nodes(self):
         G = self.K3
-        assert_equal(sorted(G.nodes()), self.k3nodes)
-        assert_equal(sorted(G.nodes(data=True)), [(0, {}), (1, {}), (2, {})])
+        assert sorted(G.nodes()) == self.k3nodes
+        assert sorted(G.nodes(data=True)) == [(0, {}), (1, {}), (2, {})]

     def test_has_node(self):
         G = self.K3
-        assert(G.has_node(1))
-        assert(not G.has_node(4))
-        assert(not G.has_node([]))  # no exception for nonhashable
-        assert(not G.has_node({1: 1}))  # no exception for nonhashable
+        assert G.has_node(1)
+        assert not G.has_node(4)
+        assert not G.has_node([])  # no exception for nonhashable
+        assert not G.has_node({1: 1})  # no exception for nonhashable

     def test_has_edge(self):
         G = self.K3
-        assert_equal(G.has_edge(0, 1), True)
-        assert_equal(G.has_edge(0, -1), False)
+        assert G.has_edge(0, 1)
+        assert not G.has_edge(0, -1)

     def test_neighbors(self):
         G = self.K3
-        assert_equal(sorted(G.neighbors(0)), [1, 2])
-        assert_raises((KeyError, nx.NetworkXError), G.neighbors, -1)
+        assert sorted(G.neighbors(0)) == [1, 2]
+        with pytest.raises(nx.NetworkXError):
+            G.neighbors(-1)
+
+    def test_memory_leak(self):
+        G = self.Graph()
+
+        def count_objects_of_type(_type):
+            return sum(1 for obj in gc.get_objects() if isinstance(obj, _type))
+
+        gc.collect()
+        before = count_objects_of_type(self.Graph)
+        G.copy()
+        gc.collect()
+        after = count_objects_of_type(self.Graph)
+        assert before == after
+
+        # test a subgraph of the base class
+        class MyGraph(self.Graph):
+            pass
+
+        gc.collect()
+        G = MyGraph()
+        before = count_objects_of_type(MyGraph)
+        G.copy()
+        gc.collect()
+        after = count_objects_of_type(MyGraph)
+        assert before == after

     def test_edges(self):
         G = self.K3
         assert_edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)])
         assert_edges_equal(G.edges(0), [(0, 1), (0, 2)])
         assert_edges_equal(G.edges([0, 1]), [(0, 1), (0, 2), (1, 2)])
-        assert_raises((KeyError, nx.NetworkXError), G.edges, -1)
-
-    def test_weighted_degree(self):
-        G = self.Graph()
-        G.add_edge(1, 2, weight=2)
-        G.add_edge(2, 3, weight=3)
-        assert_equal(sorted(d for n, d in G.degree(weight='weight')), [2, 3, 5])
-        assert_equal(dict(G.degree(weight='weight')), {1: 2, 2: 5, 3: 3})
-        assert_equal(G.degree(1, weight='weight'), 2)
-        assert_equal(G.degree([1], weight='weight'), [(1, 2)])
+        with pytest.raises(nx.NetworkXError):
+            G.edges(-1)

     def test_degree(self):
         G = self.K3
-        assert_equal(sorted(G.degree()), [(0, 2), (1, 2), (2, 2)])
-        assert_equal(dict(G.degree()), {0: 2, 1: 2, 2: 2})
-        assert_equal(G.degree(0), 2)
-        assert_raises(nx.NetworkXError, G.degree, -1)  # node not in graph
+        assert sorted(G.degree()) == [(0, 2), (1, 2), (2, 2)]
+        assert dict(G.degree()) == {0: 2, 1: 2, 2: 2}
+        assert G.degree(0) == 2
+        with pytest.raises(nx.NetworkXError):
+            G.degree(-1)  # node not in graph

     def test_size(self):
         G = self.K3
-        assert_equal(G.size(), 3)
-        assert_equal(G.number_of_edges(), 3)
+        assert G.size() == 3
+        assert G.number_of_edges() == 3

     def test_nbunch_iter(self):
         G = self.K3
@@ -110,13 +109,14 @@ def test_nbunch_iter(self):
         # node not in graph doesn't get caught upon creation of iterator
         bunch = G.nbunch_iter(-1)
         # but gets caught when iterator used
-        assert_raises(nx.NetworkXError, list, bunch)
+        with pytest.raises(nx.NetworkXError):
+            list(bunch)
         # unhashable doesn't get caught upon creation of iterator
         bunch = G.nbunch_iter([0, 1, 2, {}])
         # but gets caught when iterator hits the unhashable
-        assert_raises(nx.NetworkXError, list, bunch)
+        with pytest.raises(nx.NetworkXError):
+            list(bunch)

-    @raises(nx.NetworkXError)
     def test_nbunch_iter_node_format_raise(self):
         # Tests that a node that would have failed string formatting
         # doesn't cause an error when attempting to raise a
@@ -124,24 +124,25 @@ def test_nbunch_iter_node_format_raise(self):
        # For more information, see pull request #1813.
         G = self.Graph()
-        nbunch = [('x', set())]
-        list(G.nbunch_iter(nbunch))
+        nbunch = [("x", set())]
+        with pytest.raises(nx.NetworkXError):
+            list(G.nbunch_iter(nbunch))

     def test_selfloop_degree(self):
         G = self.Graph()
         G.add_edge(1, 1)
-        assert_equal(sorted(G.degree()), [(1, 2)])
-        assert_equal(dict(G.degree()), {1: 2})
-        assert_equal(G.degree(1), 2)
-        assert_equal(sorted(G.degree([1])), [(1, 2)])
-        assert_equal(G.degree(1, weight='weight'), 2)
+        assert sorted(G.degree()) == [(1, 2)]
+        assert dict(G.degree()) == {1: 2}
+        assert G.degree(1) == 2
+        assert sorted(G.degree([1])) == [(1, 2)]
+        assert G.degree(1, weight="weight") == 2

     def test_selfloops(self):
         G = self.K3.copy()
         G.add_edge(0, 0)
         assert_nodes_equal(nx.nodes_with_selfloops(G), [0])
         assert_edges_equal(nx.selfloop_edges(G), [(0, 0)])
-        assert_equal(nx.number_of_selfloops(G), 1)
+        assert nx.number_of_selfloops(G) == 1
         G.remove_edge(0, 0)
         G.add_edge(0, 0)
         G.remove_edges_from([(0, 0)])
@@ -159,30 +160,39 @@ def test_weighted_degree(self):
         G = self.Graph()
         G.add_edge(1, 2, weight=2, other=3)
         G.add_edge(2, 3, weight=3, other=4)
-        assert_nodes_equal((d for n, d in G.degree(weight='weight')), [2, 5, 3])
-        assert_equal(dict(G.degree(weight='weight')), {1: 2, 2: 5, 3: 3})
-        assert_equal(G.degree(1, weight='weight'), 2)
-        assert_nodes_equal((G.degree([1], weight='weight')), [(1, 2)])
+        assert sorted(d for n, d in G.degree(weight="weight")) == [2, 3, 5]
+        assert dict(G.degree(weight="weight")) == {1: 2, 2: 5, 3: 3}
+        assert G.degree(1, weight="weight") == 2
+        assert_nodes_equal((G.degree([1], weight="weight")), [(1, 2)])

-        assert_nodes_equal((d for n, d in G.degree(weight='other')), [3, 7, 4])
-        assert_equal(dict(G.degree(weight='other')), {1: 3, 2: 7, 3: 4})
-        assert_equal(G.degree(1, weight='other'), 3)
-        assert_edges_equal((G.degree([1], weight='other')), [(1, 3)])
+        assert_nodes_equal((d for n, d in G.degree(weight="other")), [3, 7, 4])
+        assert dict(G.degree(weight="other")) == {1: 3, 2: 7, 3: 4}
+        assert G.degree(1, weight="other") == 3
+        assert_edges_equal((G.degree([1], weight="other")), [(1, 3)])

     def add_attributes(self, G):
-        G.graph['foo'] = []
-        G.nodes[0]['foo'] = []
+        G.graph["foo"] = []
+        G.nodes[0]["foo"] = []
         G.remove_edge(1, 2)
         ll = []
         G.add_edge(1, 2, foo=ll)
         G.add_edge(2, 1, foo=ll)

     def test_name(self):
-        G = self.Graph(name='')
-        assert_equal(G.name, "")
-        G = self.Graph(name='test')
-        assert_equal(G.__str__(), "test")
-        assert_equal(G.name, "test")
+        G = self.Graph(name="")
+        assert G.name == ""
+        G = self.Graph(name="test")
+        assert G.__str__() == "test"
+        assert G.name == "test"
+
+    def test_graph_chain(self):
+        G = self.Graph([(0, 1), (1, 2)])
+        DG = G.to_directed(as_view=True)
+        SDG = DG.subgraph([0, 1])
+        RSDG = SDG.reverse(copy=False)
+        assert G is DG._graph
+        assert DG is SDG._graph
+        assert SDG is RSDG._graph

     def test_copy(self):
         G = self.Graph()
@@ -206,29 +216,21 @@ def test_class_copy(self):
         self.different_attrdict(H, G)
         self.shallow_copy_attrdict(H, G)

-    def test_root_graph(self):
-        G = self.Graph([(0, 1), (1, 2)])
-        assert_is(G, G.root_graph)
-        DG = G.to_directed(as_view=True)
-        SDG = DG.subgraph([0, 1])
-        RSDG = SDG.reverse(copy=False)
-        assert_is(G, RSDG.root_graph)
-
     def test_fresh_copy(self):
         G = self.Graph()
         G.add_node(0)
         G.add_edge(1, 2)
         self.add_attributes(G)
         # copy graph structure but use fresh datadict
-        H = G.fresh_copy()
+        H = G.__class__()
         H.add_nodes_from(G)
         H.add_edges_from(G.edges())
-        assert_equal(len(G.nodes[0]), 1)
+        assert len(G.nodes[0]) == 1
         ddict = G.adj[1][2][0] if G.is_multigraph() else G.adj[1][2]
-        assert_equal(len(ddict), 1)
-        assert_equal(len(H.nodes[0]), 0)
+        assert len(ddict) == 1
+        assert len(H.nodes[0]) == 0
         ddict = H.adj[1][2][0] if H.is_multigraph() else H.adj[1][2]
-        assert_equal(len(ddict), 0)
+        assert len(ddict) == 0

     def is_deepcopy(self, H, G):
         self.graphs_equal(H, G)
@@ -241,19 +243,19 @@ def deep_copy_attrdict(self, H, G):
         self.deepcopy_edge_attr(H, G)

     def deepcopy_graph_attr(self, H, G):
-        assert_equal(G.graph['foo'], H.graph['foo'])
-        G.graph['foo'].append(1)
-        assert_not_equal(G.graph['foo'], H.graph['foo'])
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] != H.graph["foo"]

     def deepcopy_node_attr(self, H, G):
-        assert_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
-        G.nodes[0]['foo'].append(1)
-        assert_not_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] != H.nodes[0]["foo"]

     def deepcopy_edge_attr(self, H, G):
-        assert_equal(G[1][2]['foo'], H[1][2]['foo'])
-        G[1][2]['foo'].append(1)
-        assert_not_equal(G[1][2]['foo'], H[1][2]['foo'])
+        assert G[1][2]["foo"] == H[1][2]["foo"]
+        G[1][2]["foo"].append(1)
+        assert G[1][2]["foo"] != H[1][2]["foo"]

     def is_shallow_copy(self, H, G):
         self.graphs_equal(H, G)
@@ -265,54 +267,54 @@ def shallow_copy_attrdict(self, H, G):
         self.shallow_copy_edge_attr(H, G)

     def shallow_copy_graph_attr(self, H, G):
-        assert_equal(G.graph['foo'], H.graph['foo'])
-        G.graph['foo'].append(1)
-        assert_equal(G.graph['foo'], H.graph['foo'])
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] == H.graph["foo"]

     def shallow_copy_node_attr(self, H, G):
-        assert_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
-        G.nodes[0]['foo'].append(1)
-        assert_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]

     def shallow_copy_edge_attr(self, H, G):
-        assert_equal(G[1][2]['foo'], H[1][2]['foo'])
-        G[1][2]['foo'].append(1)
-        assert_equal(G[1][2]['foo'], H[1][2]['foo'])
+        assert G[1][2]["foo"] == H[1][2]["foo"]
+        G[1][2]["foo"].append(1)
+        assert G[1][2]["foo"] == H[1][2]["foo"]

     def same_attrdict(self, H, G):
-        old_foo = H[1][2]['foo']
-        H.adj[1][2]['foo'] = 'baz'
-        assert_equal(G.edges, H.edges)
-        H.adj[1][2]['foo'] = old_foo
-        assert_equal(G.edges, H.edges)
-
-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
-        assert_equal(G.nodes, H.nodes)
-        H.nodes[0]['foo'] = old_foo
-        assert_equal(G.nodes, H.nodes)
+        old_foo = H[1][2]["foo"]
+        H.adj[1][2]["foo"] = "baz"
+        assert G.edges == H.edges
+        H.adj[1][2]["foo"] = old_foo
+        assert G.edges == H.edges
+
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
+        assert G.nodes == H.nodes
+        H.nodes[0]["foo"] = old_foo
+        assert G.nodes == H.nodes

     def different_attrdict(self, H, G):
-        old_foo = H[1][2]['foo']
-        H.adj[1][2]['foo'] = 'baz'
-        assert_not_equal(G._adj, H._adj)
-        H.adj[1][2]['foo'] = old_foo
-        assert_equal(G._adj, H._adj)
-
-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
-        assert_not_equal(G._node, H._node)
-        H.nodes[0]['foo'] = old_foo
-        assert_equal(G._node, H._node)
+        old_foo = H[1][2]["foo"]
+        H.adj[1][2]["foo"] = "baz"
+        assert G._adj != H._adj
+        H.adj[1][2]["foo"] = old_foo
+        assert G._adj == H._adj
+
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
+        assert G._node != H._node
+        H.nodes[0]["foo"] = old_foo
+        assert G._node == H._node

     def graphs_equal(self, H, G):
-        assert_equal(G._adj, H._adj)
-        assert_equal(G._node, H._node)
-        assert_equal(G.graph, H.graph)
-        assert_equal(G.name, H.name)
+        assert G._adj == H._adj
+        assert G._node == H._node
+        assert G.graph == H.graph
+        assert G.name == H.name
         if not G.is_directed() and not H.is_directed():
-            assert_is(H._adj[1][2], H._adj[2][1])
-            assert_is(G._adj[1][2], G._adj[2][1])
+            assert H._adj[1][2] is H._adj[2][1]
+            assert G._adj[1][2] is G._adj[2][1]
         else:  # at least one is directed
             if not G.is_directed():
                 G._pred = G._adj
@@ -320,93 +322,106 @@ def graphs_equal(self, H, G):
             if not H.is_directed():
                 H._pred = H._adj
                 H._succ = H._adj
-            assert_equal(G._pred, H._pred)
-            assert_equal(G._succ, H._succ)
-            assert_is(H._succ[1][2], H._pred[2][1])
-            assert_is(G._succ[1][2], G._pred[2][1])
+            assert G._pred == H._pred
+            assert G._succ == H._succ
+            assert H._succ[1][2] is H._pred[2][1]
+            assert G._succ[1][2] is G._pred[2][1]

     def test_graph_attr(self):
-        G = self.K3
-        G.graph['foo'] = 'bar'
-        assert_equal(G.graph['foo'], 'bar')
-        del G.graph['foo']
-        assert_equal(G.graph, {})
-        H = self.Graph(foo='bar')
-        assert_equal(H.graph['foo'], 'bar')
+        G = self.K3.copy()
+        G.graph["foo"] = "bar"
+        assert G.graph["foo"] == "bar"
+        del G.graph["foo"]
+        assert G.graph == {}
+        H = self.Graph(foo="bar")
+        assert H.graph["foo"] == "bar"

     def test_node_attr(self):
-        G = self.K3
-        G.add_node(1, foo='bar')
+        G = self.K3.copy()
+        G.add_node(1, foo="bar")
         assert_nodes_equal(G.nodes(), [0, 1, 2])
-        assert_nodes_equal(G.nodes(data=True),
-                           [(0, {}), (1, {'foo': 'bar'}), (2, {})])
-        G.nodes[1]['foo'] = 'baz'
-        assert_nodes_equal(G.nodes(data=True),
-                           [(0, {}), (1, {'foo': 'baz'}), (2, {})])
-        assert_nodes_equal(G.nodes(data='foo'),
-                           [(0, None), (1, 'baz'), (2, None)])
-        assert_nodes_equal(G.nodes(data='foo', default='bar'),
-                           [(0, 'bar'), (1, 'baz'), (2, 'bar')])
+        assert_nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "bar"}), (2, {})])
+        G.nodes[1]["foo"] = "baz"
+        assert_nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "baz"}), (2, {})])
+        assert_nodes_equal(G.nodes(data="foo"), [(0, None), (1, "baz"), (2, None)])
+        assert_nodes_equal(
+            G.nodes(data="foo", default="bar"), [(0, "bar"), (1, "baz"), (2, "bar")]
+        )

     def test_node_attr2(self):
-        G = self.K3
-        a = {'foo': 'bar'}
+        G = self.K3.copy()
+        a = {"foo": "bar"}
         G.add_node(3, **a)
         assert_nodes_equal(G.nodes(), [0, 1, 2, 3])
-        assert_nodes_equal(G.nodes(data=True),
-                           [(0, {}), (1, {}), (2, {}), (3, {'foo': 'bar'})])
+        assert_nodes_equal(
+            G.nodes(data=True), [(0, {}), (1, {}), (2, {}), (3, {"foo": "bar"})]
+        )

     def test_edge_lookup(self):
         G = self.Graph()
-        G.add_edge(1, 2, foo='bar')
-        assert_edges_equal(G.edges[1, 2], {'foo': 'bar'})
+        G.add_edge(1, 2, foo="bar")
+        assert_edges_equal(G.edges[1, 2], {"foo": "bar"})

     def test_edge_attr(self):
         G = self.Graph()
-        G.add_edge(1, 2, foo='bar')
-        assert_edges_equal(G.edges(data=True), [(1, 2, {'foo': 'bar'})])
-        assert_edges_equal(G.edges(data='foo'), [(1, 2, 'bar')])
+        G.add_edge(1, 2, foo="bar")
+        assert_edges_equal(G.edges(data=True), [(1, 2, {"foo": "bar"})])
+        assert_edges_equal(G.edges(data="foo"), [(1, 2, "bar")])

     def test_edge_attr2(self):
         G = self.Graph()
-        G.add_edges_from([(1, 2), (3, 4)], foo='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'foo': 'foo'}), (3, 4, {'foo': 'foo'})])
-        assert_edges_equal(G.edges(data='foo'),
-                           [(1, 2, 'foo'), (3, 4, 'foo')])
+        G.add_edges_from([(1, 2), (3, 4)], foo="foo")
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"foo": "foo"}), (3, 4, {"foo": "foo"})]
+        )
+
assert_edges_equal(G.edges(data="foo"), [(1, 2, "foo"), (3, 4, "foo")]) def test_edge_attr3(self): G = self.Graph() - G.add_edges_from([(1, 2, {'weight': 32}), (3, 4, {'weight': 64})], foo='foo') - assert_edges_equal(G.edges(data=True), - [(1, 2, {'foo': 'foo', 'weight': 32}), - (3, 4, {'foo': 'foo', 'weight': 64})]) + G.add_edges_from([(1, 2, {"weight": 32}), (3, 4, {"weight": 64})], foo="foo") + assert_edges_equal( + G.edges(data=True), + [ + (1, 2, {"foo": "foo", "weight": 32}), + (3, 4, {"foo": "foo", "weight": 64}), + ], + ) G.remove_edges_from([(1, 2), (3, 4)]) - G.add_edge(1, 2, data=7, spam='bar', bar='foo') - assert_edges_equal(G.edges(data=True), - [(1, 2, {'data': 7, 'spam': 'bar', 'bar': 'foo'})]) + G.add_edge(1, 2, data=7, spam="bar", bar="foo") + assert_edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) def test_edge_attr4(self): G = self.Graph() - G.add_edge(1, 2, data=7, spam='bar', bar='foo') - assert_edges_equal(G.edges(data=True), - [(1, 2, {'data': 7, 'spam': 'bar', 'bar': 'foo'})]) - G[1][2]['data'] = 10 # OK to set data like this - assert_edges_equal(G.edges(data=True), - [(1, 2, {'data': 10, 'spam': 'bar', 'bar': 'foo'})]) - - G.adj[1][2]['data'] = 20 - assert_edges_equal(G.edges(data=True), - [(1, 2, {'data': 20, 'spam': 'bar', 'bar': 'foo'})]) - G.edges[1, 2]['data'] = 21 # another spelling, "edge" - assert_edges_equal(G.edges(data=True), - [(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo'})]) - G.adj[1][2]['listdata'] = [20, 200] - G.adj[1][2]['weight'] = 20 - assert_edges_equal(G.edges(data=True), - [(1, 2, {'data': 21, 'spam': 'bar', - 'bar': 'foo', 'listdata': [20, 200], 'weight':20})]) + G.add_edge(1, 2, data=7, spam="bar", bar="foo") + assert_edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) + G[1][2]["data"] = 10 # OK to set data like this + assert_edges_equal( + G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})] + ) + + G.adj[1][2]["data"] = 20 + assert_edges_equal( + G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})] + ) + G.edges[1, 2]["data"] = 21 # another spelling, "edge" + assert_edges_equal( + G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})] + ) + G.adj[1][2]["listdata"] = [20, 200] + G.adj[1][2]["weight"] = 20 + dd = { + "data": 21, + "spam": "bar", + "bar": "foo", + "listdata": [20, 200], + "weight": 20, + } + assert_edges_equal(G.edges(data=True), [(1, 2, dd)]) def test_to_undirected(self): G = self.K3 @@ -435,31 +450,31 @@ def test_subgraph(self): self.shallow_copy_attrdict(H, G) H = G.subgraph(0) - assert_equal(H.adj, {0: {}}) + assert H.adj == {0: {}} H = G.subgraph([]) - assert_equal(H.adj, {}) - assert_not_equal(G.adj, {}) + assert H.adj == {} + assert G.adj != {} def test_selfloops_attr(self): G = self.K3.copy() G.add_edge(0, 0) G.add_edge(1, 1, weight=2) - assert_edges_equal(nx.selfloop_edges(G, data=True), - [(0, 0, {}), (1, 1, {'weight': 2})]) - assert_edges_equal(nx.selfloop_edges(G, data='weight'), - [(0, 0, None), (1, 1, 2)]) + assert_edges_equal( + nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] + ) + assert_edges_equal( + nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)] + ) class TestGraph(BaseAttrGraphTester): """Tests specific to dict-of-dict-of-dict graph data structure""" - def setUp(self): + def setup_method(self): self.Graph = nx.Graph # build dict-of-dict-of-dict K3 ed1, ed2, ed3 = ({}, {}, {}) - self.k3adj = {0: {1: ed1, 2: ed2}, - 1: {0: ed1, 2: 
ed3}, - 2: {0: ed2, 1: ed3}} + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} self.k3edges = [(0, 1), (0, 2), (1, 2)] self.k3nodes = [0, 1, 2] self.K3 = self.Graph() @@ -469,128 +484,160 @@ def setUp(self): self.K3._node[1] = {} self.K3._node[2] = {} + def test_pickle(self): + G = self.K3 + pg = pickle.loads(pickle.dumps(G, -1)) + self.graphs_equal(pg, G) + pg = pickle.loads(pickle.dumps(G)) + self.graphs_equal(pg, G) + def test_data_input(self): G = self.Graph({1: [2], 2: [1]}, name="test") - assert_equal(G.name, "test") - assert_equal(sorted(G.adj.items()), [(1, {2: {}}), (2, {1: {}})]) + assert G.name == "test" + assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] G = self.Graph({1: [2], 2: [1]}, name="test") - assert_equal(G.name, "test") - assert_equal(sorted(G.adj.items()), [(1, {2: {}}), (2, {1: {}})]) + assert G.name == "test" + assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] def test_adjacency(self): G = self.K3 - assert_equal(dict(G.adjacency()), - {0: {1: {}, 2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}}) + assert dict(G.adjacency()) == { + 0: {1: {}, 2: {}}, + 1: {0: {}, 2: {}}, + 2: {0: {}, 1: {}}, + } def test_getitem(self): G = self.K3 - assert_equal(G[0], {1: {}, 2: {}}) - assert_raises(KeyError, G.__getitem__, 'j') - assert_raises((TypeError, nx.NetworkXError), G.__getitem__, ['A']) + assert G[0] == {1: {}, 2: {}} + with pytest.raises(KeyError): + G.__getitem__("j") + with pytest.raises(TypeError): + G.__getitem__(["A"]) def test_add_node(self): G = self.Graph() G.add_node(0) - assert_equal(G.adj, {0: {}}) + assert G.adj == {0: {}} # test add attributes - G.add_node(1, c='red') - G.add_node(2, c='blue') - G.add_node(3, c='red') - assert_equal(G.nodes[1]['c'], 'red') - assert_equal(G.nodes[2]['c'], 'blue') - assert_equal(G.nodes[3]['c'], 'red') + G.add_node(1, c="red") + G.add_node(2, c="blue") + G.add_node(3, c="red") + assert G.nodes[1]["c"] == "red" + assert G.nodes[2]["c"] == "blue" + assert G.nodes[3]["c"] == "red" # test updating attributes - G.add_node(1, c='blue') - G.add_node(2, c='red') - G.add_node(3, c='blue') - assert_equal(G.nodes[1]['c'], 'blue') - assert_equal(G.nodes[2]['c'], 'red') - assert_equal(G.nodes[3]['c'], 'blue') + G.add_node(1, c="blue") + G.add_node(2, c="red") + G.add_node(3, c="blue") + assert G.nodes[1]["c"] == "blue" + assert G.nodes[2]["c"] == "red" + assert G.nodes[3]["c"] == "blue" def test_add_nodes_from(self): G = self.Graph() G.add_nodes_from([0, 1, 2]) - assert_equal(G.adj, {0: {}, 1: {}, 2: {}}) + assert G.adj == {0: {}, 1: {}, 2: {}} # test add attributes - G.add_nodes_from([0, 1, 2], c='red') - assert_equal(G.nodes[0]['c'], 'red') - assert_equal(G.nodes[2]['c'], 'red') + G.add_nodes_from([0, 1, 2], c="red") + assert G.nodes[0]["c"] == "red" + assert G.nodes[2]["c"] == "red" # test that attribute dicts are not the same - assert(G.nodes[0] is not G.nodes[1]) + assert G.nodes[0] is not G.nodes[1] # test updating attributes - G.add_nodes_from([0, 1, 2], c='blue') - assert_equal(G.nodes[0]['c'], 'blue') - assert_equal(G.nodes[2]['c'], 'blue') - assert(G.nodes[0] is not G.nodes[1]) + G.add_nodes_from([0, 1, 2], c="blue") + assert G.nodes[0]["c"] == "blue" + assert G.nodes[2]["c"] == "blue" + assert G.nodes[0] is not G.nodes[1] # test tuple input H = self.Graph() H.add_nodes_from(G.nodes(data=True)) - assert_equal(H.nodes[0]['c'], 'blue') - assert_equal(H.nodes[2]['c'], 'blue') - assert(H.nodes[0] is not H.nodes[1]) + assert H.nodes[0]["c"] == "blue" + assert H.nodes[2]["c"] == "blue" + 
assert H.nodes[0] is not H.nodes[1] # specific overrides general - H.add_nodes_from([0, (1, {'c': 'green'}), (3, {'c': 'cyan'})], c='red') - assert_equal(H.nodes[0]['c'], 'red') - assert_equal(H.nodes[1]['c'], 'green') - assert_equal(H.nodes[2]['c'], 'blue') - assert_equal(H.nodes[3]['c'], 'cyan') + H.add_nodes_from([0, (1, {"c": "green"}), (3, {"c": "cyan"})], c="red") + assert H.nodes[0]["c"] == "red" + assert H.nodes[1]["c"] == "green" + assert H.nodes[2]["c"] == "blue" + assert H.nodes[3]["c"] == "cyan" def test_remove_node(self): - G = self.K3 + G = self.K3.copy() G.remove_node(0) - assert_equal(G.adj, {1: {2: {}}, 2: {1: {}}}) - assert_raises((KeyError, nx.NetworkXError), G.remove_node, -1) + assert G.adj == {1: {2: {}}, 2: {1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_node(-1) # generator here to implement list,set,string... + def test_remove_nodes_from(self): - G = self.K3 + G = self.K3.copy() G.remove_nodes_from([0, 1]) - assert_equal(G.adj, {2: {}}) + assert G.adj == {2: {}} G.remove_nodes_from([-1]) # silent fail def test_add_edge(self): G = self.Graph() G.add_edge(0, 1) - assert_equal(G.adj, {0: {1: {}}, 1: {0: {}}}) + assert G.adj == {0: {1: {}}, 1: {0: {}}} G = self.Graph() G.add_edge(*(0, 1)) - assert_equal(G.adj, {0: {1: {}}, 1: {0: {}}}) + assert G.adj == {0: {1: {}}, 1: {0: {}}} def test_add_edges_from(self): G = self.Graph() - G.add_edges_from([(0, 1), (0, 2, {'weight': 3})]) - assert_equal(G.adj, {0: {1: {}, 2: {'weight': 3}}, 1: {0: {}}, - 2: {0: {'weight': 3}}}) + G.add_edges_from([(0, 1), (0, 2, {"weight": 3})]) + assert G.adj == { + 0: {1: {}, 2: {"weight": 3}}, + 1: {0: {}}, + 2: {0: {"weight": 3}}, + } G = self.Graph() - G.add_edges_from([(0, 1), (0, 2, {'weight': 3}), (1, 2, {'data': 4})], data=2) - assert_equal(G.adj, { - 0: {1: {'data': 2}, 2: {'weight': 3, 'data': 2}}, - 1: {0: {'data': 2}, 2: {'data': 4}}, - 2: {0: {'weight': 3, 'data': 2}, 1: {'data': 4}} - }) - - assert_raises(nx.NetworkXError, - G.add_edges_from, [(0,)]) # too few in tuple - assert_raises(nx.NetworkXError, - G.add_edges_from, [(0, 1, 2, 3)]) # too many in tuple - assert_raises(TypeError, G.add_edges_from, [0]) # not a tuple + G.add_edges_from([(0, 1), (0, 2, {"weight": 3}), (1, 2, {"data": 4})], data=2) + assert G.adj == { + 0: {1: {"data": 2}, 2: {"weight": 3, "data": 2}}, + 1: {0: {"data": 2}, 2: {"data": 4}}, + 2: {0: {"weight": 3, "data": 2}, 1: {"data": 4}}, + } + + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0,)]) # too few in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple + with pytest.raises(TypeError): + G.add_edges_from([0]) # not a tuple def test_remove_edge(self): - G = self.K3 + G = self.K3.copy() G.remove_edge(0, 1) - assert_equal(G.adj, {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}) - assert_raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0) + assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) def test_remove_edges_from(self): - G = self.K3 + G = self.K3.copy() G.remove_edges_from([(0, 1)]) - assert_equal(G.adj, {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}) + assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} G.remove_edges_from([(0, 0)]) # silent fail def test_clear(self): - G = self.K3 + G = self.K3.copy() + G.graph["name"] = "K3" G.clear() - assert_equal(G.adj, {}) + assert list(G.nodes) == [] + assert G.adj == {} + assert G.graph == {} + + def test_clear_edges(self): + G = self.K3.copy() + G.graph["name"] 
= "K3" + nodes = list(G.nodes) + G.clear_edges() + assert list(G.nodes) == nodes + assert G.adj == {0: {}, 1: {}, 2: {}} + assert list(G.edges) == [] + assert G.graph["name"] == "K3" def test_edges_data(self): G = self.K3 @@ -598,41 +645,113 @@ def test_edges_data(self): assert_edges_equal(G.edges(data=True), all_edges) assert_edges_equal(G.edges(0, data=True), [(0, 1, {}), (0, 2, {})]) assert_edges_equal(G.edges([0, 1], data=True), all_edges) - assert_raises((KeyError, nx.NetworkXError), G.edges, -1, True) + with pytest.raises(nx.NetworkXError): + G.edges(-1, True) def test_get_edge_data(self): - G = self.K3 - assert_equal(G.get_edge_data(0, 1), {}) - assert_equal(G[0][1], {}) - assert_equal(G.get_edge_data(10, 20), None) - assert_equal(G.get_edge_data(-1, 0), None) - assert_equal(G.get_edge_data(-1, 0, default=1), 1) + G = self.K3.copy() + assert G.get_edge_data(0, 1) == {} + assert G[0][1] == {} + assert G.get_edge_data(10, 20) is None + assert G.get_edge_data(-1, 0) is None + assert G.get_edge_data(-1, 0, default=1) == 1 + + def test_update(self): + # specify both edgees and nodes + G = self.K3.copy() + G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})]) + nlist = [ + (0, {}), + (1, {}), + (2, {}), + (3, {}), + (4, {"size": 2}), + (5, {}), + (6, {}), + (7, {}), + ] + assert sorted(G.nodes.data()) == nlist + if G.is_directed(): + elist = [ + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + (4, 5, {}), + (6, 7, {"weight": 2}), + ] + else: + elist = [ + (0, 1, {}), + (0, 2, {}), + (1, 2, {}), + (4, 5, {}), + (6, 7, {"weight": 2}), + ] + assert sorted(G.edges.data()) == elist + assert G.graph == {} + + # no keywords -- order is edges, nodes + G = self.K3.copy() + G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})]) + assert sorted(G.nodes.data()) == nlist + assert sorted(G.edges.data()) == elist + assert G.graph == {} + + # update using only a graph + G = self.Graph() + G.graph["foo"] = "bar" + G.add_node(2, data=4) + G.add_edge(0, 1, weight=0.5) + GG = G.copy() + H = self.Graph() + GG.update(H) + assert_graphs_equal(G, GG) + H.update(G) + assert_graphs_equal(H, G) + + # update nodes only + H = self.Graph() + H.update(nodes=[3, 4]) + assert H.nodes ^ {3, 4} == set() + assert H.size() == 0 + + # update edges only + H = self.Graph() + H.update(edges=[(3, 4)]) + assert sorted(H.edges.data()) == [(3, 4, {})] + assert H.size() == 1 + + # No inputs -> exception + with pytest.raises(nx.NetworkXError): + nx.Graph().update() -class TestEdgeSubgraph(object): +class TestEdgeSubgraph: """Unit tests for the :meth:`Graph.edge_subgraph` method.""" - def setup(self): + def setup_method(self): # Create a path graph on five nodes. G = nx.path_graph(5) # Add some node, edge, and graph attributes. for i in range(5): - G.nodes[i]['name'] = 'node{}'.format(i) - G.edges[0, 1]['name'] = 'edge01' - G.edges[3, 4]['name'] = 'edge34' - G.graph['name'] = 'graph' + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" # Get the subgraph induced by the first and last edges. 
         self.G = G
         self.H = G.edge_subgraph([(0, 1), (3, 4)])

     def test_correct_nodes(self):
         """Tests that the subgraph has the correct nodes."""
-        assert_equal([0, 1, 3, 4], sorted(self.H.nodes()))
+        assert [0, 1, 3, 4] == sorted(self.H.nodes())

     def test_correct_edges(self):
         """Tests that the subgraph has the correct edges."""
-        assert_equal([(0, 1, 'edge01'), (3, 4, 'edge34')],
-                     sorted(self.H.edges(data='name')))
+        assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name"))

     def test_add_node(self):
         """Tests that adding a node to the original graph does not
@@ -640,7 +759,7 @@ def test_add_node(self):
         """
         self.G.add_node(5)
-        assert_equal([0, 1, 3, 4], sorted(self.H.nodes()))
+        assert [0, 1, 3, 4] == sorted(self.H.nodes())

     def test_remove_node(self):
         """Tests that removing a node in the original graph does
@@ -648,7 +767,7 @@ def test_remove_node(self):
         """
         self.G.remove_node(0)
-        assert_equal([1, 3, 4], sorted(self.H.nodes()))
+        assert [1, 3, 4] == sorted(self.H.nodes())

     def test_node_attr_dict(self):
         """Tests that the node attribute dictionary of the two graphs is
@@ -656,12 +775,12 @@ def test_node_attr_dict(self):
         """
         for v in self.H:
-            assert_equal(self.G.nodes[v], self.H.nodes[v])
+            assert self.G.nodes[v] == self.H.nodes[v]
         # Making a change to G should make a change in H and vice versa.
-        self.G.nodes[0]['name'] = 'foo'
-        assert_equal(self.G.nodes[0], self.H.nodes[0])
-        self.H.nodes[1]['name'] = 'bar'
-        assert_equal(self.G.nodes[1], self.H.nodes[1])
+        self.G.nodes[0]["name"] = "foo"
+        assert self.G.nodes[0] == self.H.nodes[0]
+        self.H.nodes[1]["name"] = "bar"
+        assert self.G.nodes[1] == self.H.nodes[1]

     def test_edge_attr_dict(self):
         """Tests that the edge attribute dictionary of the two graphs is
@@ -669,18 +788,16 @@ def test_edge_attr_dict(self):
         """
         for u, v in self.H.edges():
-            assert_equal(self.G.edges[u, v], self.H.edges[u, v])
+            assert self.G.edges[u, v] == self.H.edges[u, v]
         # Making a change to G should make a change in H and vice versa.
-        self.G.edges[0, 1]['name'] = 'foo'
-        assert_equal(self.G.edges[0, 1]['name'],
-                     self.H.edges[0, 1]['name'])
-        self.H.edges[3, 4]['name'] = 'bar'
-        assert_equal(self.G.edges[3, 4]['name'],
-                     self.H.edges[3, 4]['name'])
+        self.G.edges[0, 1]["name"] = "foo"
+        assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"]
+        self.H.edges[3, 4]["name"] = "bar"
+        assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"]

     def test_graph_attr_dict(self):
         """Tests that the graph attribute dictionary of the two graphs
         is the same object.
""" - assert_is(self.G.graph, self.H.graph) + assert self.G.graph is self.H.graph diff --git a/networkx/classes/tests/test_graph_historical.py b/networkx/classes/tests/test_graph_historical.py index fb6492c..7af081c 100644 --- a/networkx/classes/tests/test_graph_historical.py +++ b/networkx/classes/tests/test_graph_historical.py @@ -1,14 +1,12 @@ -#!/usr/bin/env python """Original NetworkX graph tests""" -from nose.tools import * import networkx import networkx as nx -from historical_tests import HistoricalTests +from .historical_tests import HistoricalTests class TestGraphHistorical(HistoricalTests): - - def setUp(self): - HistoricalTests.setUp(self) - self.G = nx.Graph + @classmethod + def setup_class(cls): + HistoricalTests.setup_class() + cls.G = nx.Graph diff --git a/networkx/classes/tests/test_graphviews.py b/networkx/classes/tests/test_graphviews.py index b31e27e..2fd8849 100644 --- a/networkx/classes/tests/test_graphviews.py +++ b/networkx/classes/tests/test_graphviews.py @@ -1,6 +1,4 @@ -from nose.tools import assert_in, assert_not_in, assert_equal -from nose.tools import assert_is, assert_is_not -from nose.tools import assert_raises, assert_true, assert_false +import pytest import networkx as nx from networkx.testing import assert_edges_equal, assert_nodes_equal @@ -8,35 +6,54 @@ # Note: SubGraph views are not tested here. They have their own testing file -class TestReverseView(object): +class TestReverseView: def setup(self): self.G = nx.path_graph(9, create_using=nx.DiGraph()) self.rv = nx.reverse_view(self.G) def test_pickle(self): import pickle + rv = self.rv prv = pickle.loads(pickle.dumps(rv, -1)) - assert_equal(rv._node, prv._node) - assert_equal(rv._adj, prv._adj) - assert_equal(rv.graph, prv.graph) + assert rv._node == prv._node + assert rv._adj == prv._adj + assert rv.graph == prv.graph def test_contains(self): - assert_in((2, 3), self.G.edges) - assert_not_in((3, 2), self.G.edges) - assert_not_in((2, 3), self.rv.edges) - assert_in((3, 2), self.rv.edges) + assert (2, 3) in self.G.edges + assert (3, 2) not in self.G.edges + assert (2, 3) not in self.rv.edges + assert (3, 2) in self.rv.edges def test_iter(self): expected = sorted(tuple(reversed(e)) for e in self.G.edges) - assert_equal(sorted(self.rv.edges), expected) + assert sorted(self.rv.edges) == expected def test_exceptions(self): nxg = nx.graphviews - assert_raises(nx.NetworkXNotImplemented, nxg.ReverseView, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, nxg.reverse_view, nx.Graph()) + + def test_subclass(self): + class MyGraph(nx.DiGraph): + def my_method(self): + return "me" + + def to_directed_class(self): + return MyGraph() + M = MyGraph() + M.add_edge(1, 2) + RM = nx.reverse_view(M) + print("RM class", RM.__class__) + RMC = RM.copy() + print("RMC class", RMC.__class__) + print(RMC.edges) + assert RMC.has_edge(2, 1) + assert RMC.my_method() == "me" -class TestMultiReverseView(object): + +class TestMultiReverseView: def setup(self): self.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) self.G.add_edge(4, 5) @@ -44,31 +61,32 @@ def setup(self): def test_pickle(self): import pickle + rv = self.rv prv = pickle.loads(pickle.dumps(rv, -1)) - assert_equal(rv._node, prv._node) - assert_equal(rv._adj, prv._adj) - assert_equal(rv.graph, prv.graph) + assert rv._node == prv._node + assert rv._adj == prv._adj + assert rv.graph == prv.graph def test_contains(self): - assert_in((2, 3, 0), self.G.edges) - assert_not_in((3, 2, 0), self.G.edges) - assert_not_in((2, 3, 0), self.rv.edges) - assert_in((3, 2, 0), 
self.rv.edges) - assert_in((5, 4, 1), self.rv.edges) - assert_not_in((4, 5, 1), self.rv.edges) + assert (2, 3, 0) in self.G.edges + assert (3, 2, 0) not in self.G.edges + assert (2, 3, 0) not in self.rv.edges + assert (3, 2, 0) in self.rv.edges + assert (5, 4, 1) in self.rv.edges + assert (4, 5, 1) not in self.rv.edges def test_iter(self): expected = sorted((v, u, k) for u, v, k in self.G.edges) - assert_equal(sorted(self.rv.edges), expected) + assert sorted(self.rv.edges) == expected def test_exceptions(self): nxg = nx.graphviews MG = nx.MultiGraph(self.G) - assert_raises(nx.NetworkXNotImplemented, nxg.MultiReverseView, MG) + pytest.raises(nx.NetworkXNotImplemented, nxg.reverse_view, MG) -class TestToDirected(object): +class TestToDirected: def setup(self): self.G = nx.path_graph(9) self.dv = nx.to_directed(self.G) @@ -76,8 +94,8 @@ def setup(self): self.Mdv = nx.to_directed(self.MG) def test_directed(self): - assert_false(self.G.is_directed()) - assert_true(self.dv.is_directed()) + assert not self.G.is_directed() + assert self.dv.is_directed() def test_already_directed(self): dd = nx.to_directed(self.dv) @@ -87,31 +105,27 @@ def test_already_directed(self): def test_pickle(self): import pickle + dv = self.dv pdv = pickle.loads(pickle.dumps(dv, -1)) - assert_equal(dv._node, pdv._node) - assert_equal(dv._succ, pdv._succ) - assert_equal(dv._pred, pdv._pred) - assert_equal(dv.graph, pdv.graph) + assert dv._node == pdv._node + assert dv._succ == pdv._succ + assert dv._pred == pdv._pred + assert dv.graph == pdv.graph def test_contains(self): - assert_in((2, 3), self.G.edges) - assert_in((3, 2), self.G.edges) - assert_in((2, 3), self.dv.edges) - assert_in((3, 2), self.dv.edges) + assert (2, 3) in self.G.edges + assert (3, 2) in self.G.edges + assert (2, 3) in self.dv.edges + assert (3, 2) in self.dv.edges def test_iter(self): revd = [tuple(reversed(e)) for e in self.G.edges] expected = sorted(list(self.G.edges) + revd) - assert_equal(sorted(self.dv.edges), expected) - - def test_exceptions(self): - nxg = nx.graphviews - assert_raises(nx.NetworkXError, nxg.DiGraphView, self.MG) - assert_raises(nx.NetworkXError, nxg.MultiDiGraphView, self.G) + assert sorted(self.dv.edges) == expected -class TestToUndirected(object): +class TestToUndirected: def setup(self): self.DG = nx.path_graph(9, create_using=nx.DiGraph()) self.uv = nx.to_undirected(self.DG) @@ -119,8 +133,8 @@ def setup(self): self.Muv = nx.to_undirected(self.MDG) def test_directed(self): - assert_true(self.DG.is_directed()) - assert_false(self.uv.is_directed()) + assert self.DG.is_directed() + assert not self.uv.is_directed() def test_already_directed(self): uu = nx.to_undirected(self.uv) @@ -130,49 +144,56 @@ def test_already_directed(self): def test_pickle(self): import pickle + uv = self.uv puv = pickle.loads(pickle.dumps(uv, -1)) - assert_equal(uv._node, puv._node) - assert_equal(uv._adj, puv._adj) - assert_equal(uv.graph, puv.graph) - assert_true(hasattr(uv, '_graph')) + assert uv._node == puv._node + assert uv._adj == puv._adj + assert uv.graph == puv.graph + assert hasattr(uv, "_graph") def test_contains(self): - assert_in((2, 3), self.DG.edges) - assert_not_in((3, 2), self.DG.edges) - assert_in((2, 3), self.uv.edges) - assert_in((3, 2), self.uv.edges) + assert (2, 3) in self.DG.edges + assert (3, 2) not in self.DG.edges + assert (2, 3) in self.uv.edges + assert (3, 2) in self.uv.edges def test_iter(self): expected = sorted(self.DG.edges) - assert_equal(sorted(self.uv.edges), expected) - - def test_exceptions(self): - nxg = 
nx.graphviews - assert_raises(nx.NetworkXError, nxg.GraphView, self.MDG) - assert_raises(nx.NetworkXError, nxg.MultiGraphView, self.DG) - - -class TestChainsOfViews(object): - def setUp(self): - self.G = nx.path_graph(9) - self.DG = nx.path_graph(9, create_using=nx.DiGraph()) - self.MG = nx.path_graph(9, create_using=nx.MultiGraph()) - self.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph()) - self.Gv = nx.to_undirected(self.DG) - self.DGv = nx.to_directed(self.G) - self.MGv = nx.to_undirected(self.MDG) - self.MDGv = nx.to_directed(self.MG) - self.Rv = self.DG.reverse() - self.MRv = self.MDG.reverse() - self.graphs = [self.G, self.DG, self.MG, self.MDG, - self.Gv, self.DGv, self.MGv, self.MDGv, - self.Rv, self.MRv] - for G in self.graphs: + assert sorted(self.uv.edges) == expected + + +class TestChainsOfViews: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.DG = nx.path_graph(9, create_using=nx.DiGraph()) + cls.MG = nx.path_graph(9, create_using=nx.MultiGraph()) + cls.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.Gv = nx.to_undirected(cls.DG) + cls.DGv = nx.to_directed(cls.G) + cls.MGv = nx.to_undirected(cls.MDG) + cls.MDGv = nx.to_directed(cls.MG) + cls.Rv = cls.DG.reverse() + cls.MRv = cls.MDG.reverse() + cls.graphs = [ + cls.G, + cls.DG, + cls.MG, + cls.MDG, + cls.Gv, + cls.DGv, + cls.MGv, + cls.MDGv, + cls.Rv, + cls.MRv, + ] + for G in cls.graphs: G.edges, G.nodes, G.degree def test_pickle(self): import pickle + for G in self.graphs: H = pickle.loads(pickle.dumps(G, -1)) assert_edges_equal(H.edges, G.edges) @@ -185,11 +206,11 @@ def test_subgraph_of_subgraph(self): SMDGv = nx.subgraph(self.MDG, range(3, 7)) for G in self.graphs + [SGv, SDGv, SMGv, SMDGv]: SG = nx.induced_subgraph(G, [4, 5, 6]) - assert_equal(list(SG), [4, 5, 6]) + assert list(SG) == [4, 5, 6] SSG = SG.subgraph([6, 7]) - assert_equal(list(SSG), [6]) + assert list(SSG) == [6] # subgraph-subgraph chain is short-cut in base class method - assert_is(SSG._graph, G) + assert SSG._graph is G def test_restricted_induced_subgraph_chains(self): """ Test subgraph chains that both restrict and show nodes/edges. 
@@ -204,8 +225,9 @@ def test_restricted_induced_subgraph_chains(self):
         nodes = [4, 5, 6, 7, 8]
         SG = nx.induced_subgraph(RG, nodes)
         SSG = RG.subgraph(nodes)
-        assert_is(SSG.root_graph, SSG._graph)
-        assert_is_not(SG.root_graph, SG._graph)
+        assert RG._graph is self.G
+        assert SSG._graph is self.G
+        assert SG._graph is RG
         assert_edges_equal(SG.edges, SSG.edges)
         # should be same as morphing the graph
         CG = self.G.copy()
@@ -217,74 +239,103 @@ def test_restricted_induced_subgraph_chains(self):
         # switch order: subgraph first, then restricted view
         SSSG = self.G.subgraph(nodes)
         RSG = nx.restricted_view(SSSG, hide_nodes, hide_edges)
-        assert_is_not(RSG.root_graph, RSG._graph)
+        assert RSG._graph is not self.G
         assert_edges_equal(RSG.edges, CG.edges)

+    def test_subgraph_copy(self):
+        for origG in self.graphs:
+            G = nx.OrderedGraph(origG)
+            SG = G.subgraph([4, 5, 6])
+            H = SG.copy()
+            assert type(G) == type(H)
+
     def test_subgraph_todirected(self):
         SG = nx.induced_subgraph(self.G, [4, 5, 6])
         SSG = SG.to_directed()
-        assert_equal(sorted(SSG), [4, 5, 6])
-        assert_equal(sorted(SSG.edges), [(4, 5), (5, 4), (5, 6), (6, 5)])
+        assert sorted(SSG) == [4, 5, 6]
+        assert sorted(SSG.edges) == [(4, 5), (5, 4), (5, 6), (6, 5)]

     def test_subgraph_toundirected(self):
         SG = nx.induced_subgraph(self.G, [4, 5, 6])
         SSG = SG.to_undirected()
-        assert_equal(list(SSG), [4, 5, 6])
-        assert_equal(sorted(SSG.edges), [(4, 5), (5, 6)])
+        assert list(SSG) == [4, 5, 6]
+        assert sorted(SSG.edges) == [(4, 5), (5, 6)]

     def test_reverse_subgraph_toundirected(self):
         G = self.DG.reverse(copy=False)
         SG = G.subgraph([4, 5, 6])
         SSG = SG.to_undirected()
-        assert_equal(list(SSG), [4, 5, 6])
-        assert_equal(sorted(SSG.edges), [(4, 5), (5, 6)])
+        assert list(SSG) == [4, 5, 6]
+        assert sorted(SSG.edges) == [(4, 5), (5, 6)]

     def test_reverse_reverse_copy(self):
         G = self.DG.reverse(copy=False)
         H = G.reverse(copy=True)
-        assert_equal(H.nodes, self.DG.nodes)
-        assert_equal(H.edges, self.DG.edges)
+        assert H.nodes == self.DG.nodes
+        assert H.edges == self.DG.edges
         G = self.MDG.reverse(copy=False)
         H = G.reverse(copy=True)
-        assert_equal(H.nodes, self.MDG.nodes)
-        assert_equal(H.edges, self.MDG.edges)
+        assert H.nodes == self.MDG.nodes
+        assert H.edges == self.MDG.edges

     def test_subgraph_edgesubgraph_toundirected(self):
         G = self.G.copy()
         SG = G.subgraph([4, 5, 6])
         SSG = SG.edge_subgraph([(4, 5), (5, 4)])
         USSG = SSG.to_undirected()
-        assert_equal(list(USSG), [4, 5])
-        assert_equal(sorted(USSG.edges), [(4, 5)])
+        assert list(USSG) == [4, 5]
+        assert sorted(USSG.edges) == [(4, 5)]

     def test_copy_subgraph(self):
         G = self.G.copy()
         SG = G.subgraph([4, 5, 6])
         CSG = SG.copy(as_view=True)
         DCSG = SG.copy(as_view=False)
-        assert_equal(CSG.__class__.__name__, 'GraphView')
-        assert_equal(DCSG.__class__.__name__, 'Graph')
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

     def test_copy_disubgraph(self):
         G = self.DG.copy()
         SG = G.subgraph([4, 5, 6])
         CSG = SG.copy(as_view=True)
         DCSG = SG.copy(as_view=False)
-        assert_equal(CSG.__class__.__name__, 'DiGraphView')
-        assert_equal(DCSG.__class__.__name__, 'DiGraph')
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

     def test_copy_multidisubgraph(self):
         G = self.MDG.copy()
         SG = G.subgraph([4, 5, 6])
         CSG = SG.copy(as_view=True)
         DCSG = SG.copy(as_view=False)
-        assert_equal(CSG.__class__.__name__, 'MultiDiGraphView')
-        assert_equal(DCSG.__class__.__name__, 'MultiDiGraph')
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

     def test_copy_multisubgraph(self):
-        G = self.MGv.copy()
+        G = self.MG.copy()
         SG = G.subgraph([4, 5, 6])
         CSG = SG.copy(as_view=True)
         DCSG = SG.copy(as_view=False)
-        assert_equal(CSG.__class__.__name__, 'MultiGraphView')
-        assert_equal(DCSG.__class__.__name__, 'MultiGraph')
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view
+
+    def test_copy_of_view(self):
+        G = nx.OrderedMultiGraph(self.MGv)
+        assert G.__class__.__name__ == "OrderedMultiGraph"
+        G = G.copy(as_view=True)
+        assert G.__class__.__name__ == "OrderedMultiGraph"
+
+    def test_subclass(self):
+        class MyGraph(nx.DiGraph):
+            def my_method(self):
+                return "me"
+
+            def to_directed_class(self):
+                return MyGraph()
+
+        for origG in self.graphs:
+            G = MyGraph(origG)
+            SG = G.subgraph([4, 5, 6])
+            H = SG.copy()
+            assert SG.my_method() == "me"
+            assert H.my_method() == "me"
+            assert 3 not in H or 3 in SG
diff --git a/networkx/classes/tests/test_multidigraph.py b/networkx/classes/tests/test_multidigraph.py
index 29f1dc8..2bf184c 100644
--- a/networkx/classes/tests/test_multidigraph.py
+++ b/networkx/classes/tests/test_multidigraph.py
@@ -1,129 +1,150 @@
-#!/usr/bin/env python
-from nose.tools import *
+import pytest
 from networkx.testing import assert_edges_equal
 import networkx as nx
-from test_multigraph import BaseMultiGraphTester, TestMultiGraph
-from test_multigraph import TestEdgeSubgraph as TestMultiGraphEdgeSubgraph
+from .test_multigraph import BaseMultiGraphTester
+from .test_multigraph import TestMultiGraph as _TestMultiGraph
+from .test_multigraph import TestEdgeSubgraph as _TestMultiGraphEdgeSubgraph


 class BaseMultiDiGraphTester(BaseMultiGraphTester):
     def test_edges(self):
         G = self.K3
         edges = [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
-        assert_equal(sorted(G.edges()), edges)
-        assert_equal(sorted(G.edges(0)), [(0, 1), (0, 2)])
-        assert_raises((KeyError, nx.NetworkXError), G.edges, -1)
+        assert sorted(G.edges()) == edges
+        assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
+        pytest.raises((KeyError, nx.NetworkXError), G.edges, -1)

     def test_edges_data(self):
         G = self.K3
-        edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}),
-                 (1, 2, {}), (2, 0, {}), (2, 1, {})]
-        assert_equal(sorted(G.edges(data=True)), edges)
-        assert_equal(sorted(G.edges(0, data=True)), [(0, 1, {}), (0, 2, {})])
-        assert_raises((KeyError, nx.NetworkXError), G.neighbors, -1)
+        edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}), (2, 0, {}), (2, 1, {})]
+        assert sorted(G.edges(data=True)) == edges
+        assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})]
+        pytest.raises((KeyError, nx.NetworkXError), G.neighbors, -1)

     def test_edges_multi(self):
         G = self.K3
-        assert_equal(sorted(G.edges()),
-                     [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.edges(0)), [(0, 1), (0, 2)])
+        assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
         G.add_edge(0, 1)
-        assert_equal(sorted(G.edges()),
-                     [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]

     def test_out_edges(self):
         G = self.K3
-        assert_equal(sorted(G.out_edges()),
-                     [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.out_edges(0)), [(0, 1), (0, 2)])
-        assert_raises((KeyError, nx.NetworkXError), G.out_edges, -1)
-        assert_equal(sorted(G.out_edges(0, keys=True)), [(0, 1, 0), (0, 2, 0)])
+        assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
+        pytest.raises((KeyError, nx.NetworkXError), G.out_edges, -1)
+        assert sorted(G.out_edges(0, keys=True)) == [(0, 1, 0), (0, 2, 0)]

     def test_out_edges_multi(self):
         G = self.K3
-        assert_equal(sorted(G.out_edges()),
-                     [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.out_edges(0)), [(0, 1), (0, 2)])
+        assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
         G.add_edge(0, 1, 2)
-        assert_equal(sorted(G.out_edges()),
-                     [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.out_edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]

     def test_out_edges_data(self):
         G = self.K3
-        assert_equal(sorted(G.edges(0, data=True)), [(0, 1, {}), (0, 2, {})])
+        assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})]
         G.remove_edge(0, 1)
         G.add_edge(0, 1, data=1)
-        assert_equal(sorted(G.edges(0, data=True)),
-                     [(0, 1, {'data': 1}), (0, 2, {})])
-        assert_equal(sorted(G.edges(0, data='data')),
-                     [(0, 1, 1), (0, 2, None)])
-        assert_equal(sorted(G.edges(0, data='data', default=-1)),
-                     [(0, 1, 1), (0, 2, -1)])
+        assert sorted(G.edges(0, data=True)) == [(0, 1, {"data": 1}), (0, 2, {})]
+        assert sorted(G.edges(0, data="data")) == [(0, 1, 1), (0, 2, None)]
+        assert sorted(G.edges(0, data="data", default=-1)) == [(0, 1, 1), (0, 2, -1)]

     def test_in_edges(self):
         G = self.K3
-        assert_equal(sorted(G.in_edges()),
-                     [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.in_edges(0)), [(1, 0), (2, 0)])
-        assert_raises((KeyError, nx.NetworkXError), G.in_edges, -1)
+        assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)]
+        pytest.raises((KeyError, nx.NetworkXError), G.in_edges, -1)
         G.add_edge(0, 1, 2)
-        assert_equal(sorted(G.in_edges()),
-                     [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.in_edges(0, keys=True)), [(1, 0, 0), (2, 0, 0)])
+        assert sorted(G.in_edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]
+        assert sorted(G.in_edges(0, keys=True)) == [(1, 0, 0), (2, 0, 0)]

     def test_in_edges_no_keys(self):
         G = self.K3
-        assert_equal(sorted(G.in_edges()),
-                     [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-        assert_equal(sorted(G.in_edges(0)), [(1, 0), (2, 0)])
+        assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+        assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)]
         G.add_edge(0, 1, 2)
-        assert_equal(sorted(G.in_edges()),
-                     [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-
-        assert_equal(sorted(G.in_edges(data=True, keys=False)),
-                     [(0, 1, {}), (0, 1, {}), (0, 2, {}), (1, 0, {}),
-                      (1, 2, {}), (2, 0, {}), (2, 1, {})])
+        assert sorted(G.in_edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]
+
+        assert sorted(G.in_edges(data=True, keys=False)) == [
+            (0, 1, {}),
+            (0, 1, {}),
+            (0, 2, {}),
+            (1, 0, {}),
+            (1, 2, {}),
+            (2, 0, {}),
+            (2, 1, {}),
+        ]

     def test_in_edges_data(self):
         G = self.K3
-        assert_equal(sorted(G.in_edges(0, data=True)),
-                     [(1, 0, {}), (2, 0, {})])
+        assert sorted(G.in_edges(0, data=True)) == [(1, 0, {}), (2, 0, {})]
         G.remove_edge(1, 0)
         G.add_edge(1, 0, data=1)
-        assert_equal(sorted(G.in_edges(0, data=True)),
-                     [(1, 0, {'data': 1}), (2, 0, {})])
-        assert_equal(sorted(G.in_edges(0, data='data')),
-                     [(1, 0, 1), (2, 0, None)])
-        assert_equal(sorted(G.in_edges(0, data='data', default=-1)),
-                     [(1, 0, 1), (2, 0, -1)])
+        assert sorted(G.in_edges(0, data=True)) == [(1, 0, {"data": 1}), (2, 0, {})]
+        assert sorted(G.in_edges(0, data="data")) == [(1, 0, 1), (2, 0, None)]
+        assert sorted(G.in_edges(0, data="data", default=-1)) == [(1, 0, 1), (2, 0, -1)]

     def is_shallow(self, H, G):
         # graph
-        assert_equal(G.graph['foo'], H.graph['foo'])
-        G.graph['foo'].append(1)
-        assert_equal(G.graph['foo'], H.graph['foo'])
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] == H.graph["foo"]
         # node
-        assert_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
-        G.nodes[0]['foo'].append(1)
-        assert_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
         # edge
-        assert_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
-        G[1][2][0]['foo'].append(1)
-        assert_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]

     def is_deep(self, H, G):
         # graph
-        assert_equal(G.graph['foo'], H.graph['foo'])
-        G.graph['foo'].append(1)
-        assert_not_equal(G.graph['foo'], H.graph['foo'])
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] != H.graph["foo"]
         # node
-        assert_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
-        G.nodes[0]['foo'].append(1)
-        assert_not_equal(G.nodes[0]['foo'], H.nodes[0]['foo'])
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] != H.nodes[0]["foo"]
         # edge
-        assert_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
-        G[1][2][0]['foo'].append(1)
-        assert_not_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] != H[1][2][0]["foo"]

     def test_to_undirected(self):
         # MultiDiGraph -> MultiGraph changes number of edges so it is
@@ -143,85 +164,83 @@ def test_to_undirected(self):

     def test_has_successor(self):
         G = self.K3
-        assert_equal(G.has_successor(0, 1), True)
-        assert_equal(G.has_successor(0, -1), False)
+        assert G.has_successor(0, 1)
+        assert not G.has_successor(0, -1)

     def test_successors(self):
         G = self.K3
-        assert_equal(sorted(G.successors(0)), [1, 2])
-        assert_raises((KeyError, nx.NetworkXError), G.successors, -1)
+        assert sorted(G.successors(0)) == [1, 2]
+        pytest.raises((KeyError, nx.NetworkXError), G.successors, -1)

     def test_has_predecessor(self):
         G = self.K3
-        assert_equal(G.has_predecessor(0, 1), True)
-        assert_equal(G.has_predecessor(0, -1), False)
+        assert G.has_predecessor(0, 1)
+        assert not G.has_predecessor(0, -1)

     def test_predecessors(self):
         G = self.K3
-        assert_equal(sorted(G.predecessors(0)), [1, 2])
-        assert_raises((KeyError, nx.NetworkXError), G.predecessors, -1)
+        assert sorted(G.predecessors(0)) == [1, 2]
+        pytest.raises((KeyError, nx.NetworkXError), G.predecessors, -1)

     def test_degree(self):
         G = self.K3
-        assert_equal(sorted(G.degree()), [(0, 4), (1, 4), (2, 4)])
-        assert_equal(dict(G.degree()), {0: 4, 1: 4, 2: 4})
-        assert_equal(G.degree(0), 4)
-        assert_equal(list(G.degree(iter([0]))), [(0, 4)])
+        assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)]
+        assert dict(G.degree()) == {0: 4, 1: 4, 2: 4}
+        assert G.degree(0) == 4
+        assert list(G.degree(iter([0]))) == [(0, 4)]
         G.add_edge(0, 1, weight=0.3, other=1.2)
-        assert_equal(sorted(G.degree(weight='weight')),
-                     [(0, 4.3), (1, 4.3), (2, 4)])
-        assert_equal(sorted(G.degree(weight='other')),
-                     [(0, 5.2), (1, 5.2), (2, 4)])
+        assert sorted(G.degree(weight="weight")) == [(0, 4.3), (1, 4.3), (2, 4)]
+        assert sorted(G.degree(weight="other")) == [(0, 5.2), (1, 5.2), (2, 4)]

     def test_in_degree(self):
         G = self.K3
-        assert_equal(sorted(G.in_degree()), [(0, 2), (1, 2), (2, 2)])
-        assert_equal(dict(G.in_degree()), {0: 2, 1: 2, 2: 2})
-        assert_equal(G.in_degree(0), 2)
-        assert_equal(list(G.in_degree(iter([0]))), [(0, 2)])
-        assert_equal(G.in_degree(0, weight='weight'), 2)
+        assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)]
+        assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2}
+        assert G.in_degree(0) == 2
+        assert list(G.in_degree(iter([0]))) == [(0, 2)]
+        assert G.in_degree(0, weight="weight") == 2

     def test_out_degree(self):
         G = self.K3
-        assert_equal(sorted(G.out_degree()), [(0, 2), (1, 2), (2, 2)])
-        assert_equal(dict(G.out_degree()), {0: 2, 1: 2, 2: 2})
-        assert_equal(G.out_degree(0), 2)
-        assert_equal(list(G.out_degree(iter([0]))), [(0, 2)])
-        assert_equal(G.out_degree(0, weight='weight'), 2)
+        assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)]
+        assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2}
+        assert G.out_degree(0) == 2
+        assert list(G.out_degree(iter([0]))) == [(0, 2)]
+        assert G.out_degree(0, weight="weight") == 2

     def test_size(self):
         G = self.K3
-        assert_equal(G.size(), 6)
-        assert_equal(G.number_of_edges(), 6)
+        assert G.size() == 6
+        assert G.number_of_edges() == 6
         G.add_edge(0, 1, weight=0.3, other=1.2)
-        assert_equal(round(G.size(weight='weight'), 2), 6.3)
-        assert_equal(round(G.size(weight='other'), 2), 7.2)
+        assert round(G.size(weight="weight"), 2) == 6.3
+        assert round(G.size(weight="other"), 2) == 7.2

     def test_to_undirected_reciprocal(self):
         G = self.Graph()
         G.add_edge(1, 2)
-        assert_true(G.to_undirected().has_edge(1, 2))
-        assert_false(G.to_undirected(reciprocal=True).has_edge(1, 2))
+        assert G.to_undirected().has_edge(1, 2)
+        assert not G.to_undirected(reciprocal=True).has_edge(1, 2)
         G.add_edge(2, 1)
-        assert_true(G.to_undirected(reciprocal=True).has_edge(1, 2))
+        assert G.to_undirected(reciprocal=True).has_edge(1, 2)

     def test_reverse_copy(self):
         G = nx.MultiDiGraph([(0, 1), (0, 1)])
         R = G.reverse()
-        assert_equal(sorted(R.edges()), [(1, 0), (1, 0)])
+        assert sorted(R.edges()) == [(1, 0), (1, 0)]
         R.remove_edge(1, 0)
-        assert_equal(sorted(R.edges()), [(1, 0)])
-        assert_equal(sorted(G.edges()), [(0, 1), (0, 1)])
+        assert sorted(R.edges()) == [(1, 0)]
+        assert sorted(G.edges()) == [(0, 1), (0, 1)]

     def test_reverse_nocopy(self):
         G = nx.MultiDiGraph([(0, 1), (0, 1)])
         R = G.reverse(copy=False)
-        assert_equal(sorted(R.edges()), [(1, 0), (1, 0)])
-        assert_raises(nx.NetworkXError, R.remove_edge, 1, 0)
+        assert sorted(R.edges()) == [(1, 0), (1, 0)]
+        pytest.raises(nx.NetworkXError, R.remove_edge, 1, 0)


-class TestMultiDiGraph(BaseMultiDiGraphTester, TestMultiGraph):
-    def setUp(self):
+class TestMultiDiGraph(BaseMultiDiGraphTester, _TestMultiGraph):
+    def setup_method(self):
         self.Graph = nx.MultiDiGraph
         # build K3
         self.k3edges = [(0, 1), (0, 2), (1, 2)]
@@ -245,100 +264,121 @@ def setUp(self):

     def test_add_edge(self):
         G = self.Graph()
         G.add_edge(0, 1)
-        assert_equal(G._adj, {0: {1: {0: {}}}, 1: {}})
-        assert_equal(G._succ, {0: {1: {0: {}}}, 1: {}})
-        assert_equal(G._pred, {0: {}, 1: {0: {0: {}}}})
+        assert G._adj == {0: {1: {0: {}}}, 1: {}}
+        assert G._succ == {0: {1: {0: {}}}, 1: {}}
+        assert G._pred == {0: {}, 1: {0: {0: {}}}}
         G = self.Graph()
         G.add_edge(*(0, 1))
-        assert_equal(G._adj, {0: {1: {0: {}}}, 1: {}})
-        assert_equal(G._succ, {0: {1: {0: {}}}, 1: {}})
-        assert_equal(G._pred, {0: {}, 1: {0: {0: {}}}})
+        assert G._adj == {0: {1: {0: {}}}, 1: {}}
+        assert G._succ == {0: {1: {0: {}}}, 1: {}}
+        assert G._pred == {0: {}, 1: {0: {0: {}}}}

     def test_add_edges_from(self):
         G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})])
-        assert_equal(G._adj, {0: {1: {0: {}, 1: {'weight': 3}}}, 1: {}})
-        assert_equal(G._succ, {0: {1: {0: {}, 1: {'weight': 3}}}, 1: {}})
-        assert_equal(G._pred, {0: {}, 1: {0: {0: {}, 1: {'weight': 3}}}})
-
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})], weight=2)
-        assert_equal(G._succ, {0: {1: {0: {},
-                                       1: {'weight': 3},
-                                       2: {'weight': 2},
-                                       3: {'weight': 3}}},
-                               1: {}})
-        assert_equal(G._pred, {0: {}, 1: {0: {0: {}, 1: {'weight': 3},
-                                              2: {'weight': 2},
-                                              3: {'weight': 3}}}})
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})])
+        assert G._adj == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}}
+        assert G._succ == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}}
+        assert G._pred == {0: {}, 1: {0: {0: {}, 1: {"weight": 3}}}}
+
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2)
+        assert G._succ == {
+            0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+            1: {},
+        }
+        assert G._pred == {
+            0: {},
+            1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+        }
         G = self.Graph()
-        edges = [(0, 1, {'weight': 3}), (0, 1, (('weight', 2),)),
-                 (0, 1, 5), (0, 1, 's')]
+        edges = [
+            (0, 1, {"weight": 3}),
+            (0, 1, (("weight", 2),)),
+            (0, 1, 5),
+            (0, 1, "s"),
+        ]
         G.add_edges_from(edges)
-        keydict = {0: {'weight': 3}, 1: {'weight': 2}, 5: {}, 's': {}}
-        assert_equal(G._succ, {0: {1: keydict}, 1: {}})
-        assert_equal(G._pred, {1: {0: keydict}, 0: {}})
+        keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}}
+        assert G._succ == {0: {1: keydict}, 1: {}}
+        assert G._pred == {1: {0: keydict}, 0: {}}

         # too few in tuple
-        assert_raises(nx.NetworkXError, G.add_edges_from, [(0,)])
+        pytest.raises(nx.NetworkXError, G.add_edges_from, [(0,)])
         # too many in tuple
-        assert_raises(nx.NetworkXError, G.add_edges_from, [(0, 1, 2, 3, 4)])
+        pytest.raises(nx.NetworkXError, G.add_edges_from, [(0, 1, 2, 3, 4)])
         # not a tuple
-        assert_raises(TypeError, G.add_edges_from, [0])
+        pytest.raises(TypeError, G.add_edges_from, [0])

     def test_remove_edge(self):
         G = self.K3
         G.remove_edge(0, 1)
-        assert_equal(G._succ, {0: {2: {0: {}}},
-                               1: {0: {0: {}}, 2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
-        assert_equal(G._pred, {0: {1: {0: {}}, 2: {0: {}}},
-                               1: {2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
-        assert_raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
-        assert_raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2,
-                      key=1)
+        assert G._succ == {
+            0: {2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
+        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2, key=1)

     def test_remove_multiedge(self):
         G = self.K3
-        G.add_edge(0, 1, key='parallel edge')
-        G.remove_edge(0, 1, key='parallel edge')
-        assert_equal(G._adj, {0: {1: {0: {}}, 2: {0: {}}},
-                              1: {0: {0: {}}, 2: {0: {}}},
-                              2: {0: {0: {}}, 1: {0: {}}}})
-
-        assert_equal(G._succ, {0: {1: {0: {}}, 2: {0: {}}},
-                               1: {0: {0: {}}, 2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
-
-        assert_equal(G._pred, {0: {1: {0: {}}, 2: {0: {}}},
-                               1: {0: {0: {}}, 2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
+        G.add_edge(0, 1, key="parallel edge")
+        G.remove_edge(0, 1, key="parallel edge")
+        assert G._adj == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+
+        assert G._succ == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
         G.remove_edge(0, 1)
-        assert_equal(G._succ, {0: {2: {0: {}}},
-                               1: {0: {0: {}}, 2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
-        assert_equal(G._pred, {0: {1: {0: {}}, 2: {0: {}}},
-                               1: {2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
-        assert_raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
+        assert G._succ == {
+            0: {2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)

     def test_remove_edges_from(self):
         G = self.K3
         G.remove_edges_from([(0, 1)])
-        assert_equal(G._succ, {0: {2: {0: {}}},
-                               1: {0: {0: {}}, 2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
-        assert_equal(G._pred, {0: {1: {0: {}}, 2: {0: {}}},
-                               1: {2: {0: {}}},
-                               2: {0: {0: {}}, 1: {0: {}}}})
+        assert G._succ == {
+            0: {2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
         G.remove_edges_from([(0, 0)])  # silent fail


-class TestEdgeSubgraph(TestMultiGraphEdgeSubgraph):
+class TestEdgeSubgraph(_TestMultiGraphEdgeSubgraph):
     """Unit tests for the :meth:`MultiDiGraph.edge_subgraph` method."""

-    def setup(self):
+    def setup_method(self):
         # Create a quadruply-linked path graph on five nodes.
         G = nx.MultiDiGraph()
         nx.add_path(G, range(5))
@@ -347,12 +387,12 @@ def setup(self):
         nx.add_path(G, reversed(range(5)))
         # Add some node, edge, and graph attributes.
         for i in range(5):
-            G.nodes[i]['name'] = 'node{}'.format(i)
-        G.adj[0][1][0]['name'] = 'edge010'
-        G.adj[0][1][1]['name'] = 'edge011'
-        G.adj[3][4][0]['name'] = 'edge340'
-        G.adj[3][4][1]['name'] = 'edge341'
-        G.graph['name'] = 'graph'
+            G.nodes[i]["name"] = f"node{i}"
+        G.adj[0][1][0]["name"] = "edge010"
+        G.adj[0][1][1]["name"] = "edge011"
+        G.adj[3][4][0]["name"] = "edge340"
+        G.adj[3][4][1]["name"] = "edge341"
+        G.graph["name"] = "graph"
         # Get the subgraph induced by one of the first edges and one of
         # the last edges.
         self.G = G
diff --git a/networkx/classes/tests/test_multigraph.py b/networkx/classes/tests/test_multigraph.py
index e103d2c..5fee189 100644
--- a/networkx/classes/tests/test_multigraph.py
+++ b/networkx/classes/tests/test_multigraph.py
@@ -1,56 +1,54 @@
-#!/usr/bin/env python
-from nose.tools import assert_equal
-from nose.tools import assert_is
-from nose.tools import assert_not_equal
-from nose.tools import assert_raises
+import pytest

 import networkx as nx
-from networkx.testing.utils import *
+from networkx.testing.utils import assert_edges_equal

-from test_graph import BaseAttrGraphTester, TestGraph
+from .test_graph import BaseAttrGraphTester
+from .test_graph import TestGraph as _TestGraph


 class BaseMultiGraphTester(BaseAttrGraphTester):
     def test_has_edge(self):
         G = self.K3
-        assert_equal(G.has_edge(0, 1), True)
-        assert_equal(G.has_edge(0, -1), False)
-        assert_equal(G.has_edge(0, 1, 0), True)
-        assert_equal(G.has_edge(0, 1, 1), False)
+        assert G.has_edge(0, 1)
+        assert not G.has_edge(0, -1)
+        assert G.has_edge(0, 1, 0)
+        assert not G.has_edge(0, 1, 1)

     def test_get_edge_data(self):
         G = self.K3
-        assert_equal(G.get_edge_data(0, 1), {0: {}})
-        assert_equal(G[0][1], {0: {}})
-        assert_equal(G[0][1][0], {})
-        assert_equal(G.get_edge_data(10, 20), None)
-        assert_equal(G.get_edge_data(0, 1, 0), {})
+        assert G.get_edge_data(0, 1) == {0: {}}
+        assert G[0][1] == {0: {}}
+        assert G[0][1][0] == {}
+        assert G.get_edge_data(10, 20) is None
+        assert G.get_edge_data(0, 1, 0) == {}

     def test_adjacency(self):
         G = self.K3
-        assert_equal(dict(G.adjacency()),
-                     {0: {1: {0: {}}, 2: {0: {}}},
-                      1: {0: {0: {}}, 2: {0: {}}},
-                      2: {0: {0: {}}, 1: {0: {}}}})
+        assert dict(G.adjacency()) == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }

     def deepcopy_edge_attr(self, H, G):
-        assert_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
-        G[1][2][0]['foo'].append(1)
-        assert_not_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] != H[1][2][0]["foo"]

     def shallow_copy_edge_attr(self, H, G):
-        assert_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
-        G[1][2][0]['foo'].append(1)
-        assert_equal(G[1][2][0]['foo'], H[1][2][0]['foo'])
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]

     def graphs_equal(self, H, G):
-        assert_equal(G._adj, H._adj)
-        assert_equal(G._node, H._node)
-        assert_equal(G.graph, H.graph)
-        assert_equal(G.name, H.name)
+        assert G._adj == H._adj
+        assert G._node == H._node
+        assert G.graph == H.graph
+        assert G.name == H.name
         if not G.is_directed() and not H.is_directed():
-            assert_is(H._adj[1][2][0], H._adj[2][1][0])
-            assert_is(G._adj[1][2][0], G._adj[2][1][0])
+            assert H._adj[1][2][0] is H._adj[2][1][0]
+            assert G._adj[1][2][0] is G._adj[2][1][0]
         else:  # at least one is directed
             if not G.is_directed():
                 G._pred = G._adj
@@ -58,38 +56,38 @@ def graphs_equal(self, H, G):
             if not H.is_directed():
                 H._pred = H._adj
                 H._succ = H._adj
-            assert_equal(G._pred, H._pred)
-            assert_equal(G._succ, H._succ)
-            assert_is(H._succ[1][2][0], H._pred[2][1][0])
-            assert_is(G._succ[1][2][0], G._pred[2][1][0])
+            assert G._pred == H._pred
+            assert G._succ == H._succ
+            assert H._succ[1][2][0] is H._pred[2][1][0]
+            assert G._succ[1][2][0] is G._pred[2][1][0]

     def same_attrdict(self, H, G):
         # same attrdict in the edgedata
-        old_foo = H[1][2][0]['foo']
-        H.adj[1][2][0]['foo'] = 'baz'
-        assert_equal(G._adj, H._adj)
-        H.adj[1][2][0]['foo'] = old_foo
-        assert_equal(G._adj, H._adj)
-
-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
-        assert_equal(G._node, H._node)
-        H.nodes[0]['foo'] = old_foo
-        assert_equal(G._node, H._node)
+        old_foo = H[1][2][0]["foo"]
+        H.adj[1][2][0]["foo"] = "baz"
+        assert G._adj == H._adj
+        H.adj[1][2][0]["foo"] = old_foo
+        assert G._adj == H._adj
+
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
+        assert G._node == H._node
+        H.nodes[0]["foo"] = old_foo
+        assert G._node == H._node

     def different_attrdict(self, H, G):
         # used by graph_equal_but_different
-        old_foo = H[1][2][0]['foo']
-        H.adj[1][2][0]['foo'] = 'baz'
-        assert_not_equal(G._adj, H._adj)
-        H.adj[1][2][0]['foo'] = old_foo
-        assert_equal(G._adj, H._adj)
-
-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
-        assert_not_equal(G._node, H._node)
-        H.nodes[0]['foo'] = old_foo
-        assert_equal(G._node, H._node)
+        old_foo = H[1][2][0]["foo"]
+        H.adj[1][2][0]["foo"] = "baz"
+        assert G._adj != H._adj
+        H.adj[1][2][0]["foo"] = old_foo
+        assert G._adj == H._adj
+
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
+        assert G._node != H._node
+        H.nodes[0]["foo"] = old_foo
+        assert G._node == H._node

     def test_to_undirected(self):
         G = self.K3
@@ -111,49 +109,64 @@ def test_number_of_edges_selfloops(self):
         G = self.K3
         G.add_edge(0, 0)
         G.add_edge(0, 0)
-        G.add_edge(0, 0, key='parallel edge')
-        G.remove_edge(0, 0, key='parallel edge')
-        assert_equal(G.number_of_edges(0, 0), 2)
+        G.add_edge(0, 0, key="parallel edge")
+        G.remove_edge(0, 0, key="parallel edge")
+        assert G.number_of_edges(0, 0) == 2
         G.remove_edge(0, 0)
-        assert_equal(G.number_of_edges(0, 0), 1)
+        assert G.number_of_edges(0, 0) == 1

     def test_edge_lookup(self):
         G = self.Graph()
-        G.add_edge(1, 2, foo='bar')
-        G.add_edge(1, 2, 'key', foo='biz')
-        assert_edges_equal(G.edges[1, 2, 0], {'foo': 'bar'})
-        assert_edges_equal(G.edges[1, 2, 'key'], {'foo': 'biz'})
+        G.add_edge(1, 2, foo="bar")
+        G.add_edge(1, 2, "key", foo="biz")
+        assert_edges_equal(G.edges[1, 2, 0], {"foo": "bar"})
+        assert_edges_equal(G.edges[1, 2, "key"], {"foo": "biz"})

     def test_edge_attr4(self):
         G = self.Graph()
-        G.add_edge(1, 2, key=0, data=7, spam='bar', bar='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 7, 'spam': 'bar', 'bar': 'foo'})])
-        G[1][2][0]['data'] = 10  # OK to set data like this
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 10, 'spam': 'bar', 'bar': 'foo'})])
-
-        G.adj[1][2][0]['data'] = 20
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 20, 'spam': 'bar', 'bar': 'foo'})])
-        G.edges[1, 2, 0]['data'] = 21  # another spelling, "edge"
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo'})])
-        G.adj[1][2][0]['listdata'] = [20, 200]
-        G.adj[1][2][0]['weight'] = 20
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo',
-                                    'listdata': [20, 200], 'weight':20})])
-
-
-class TestMultiGraph(BaseMultiGraphTester, TestGraph):
-    def setUp(self):
+        G.add_edge(1, 2, key=0, data=7, spam="bar", bar="foo")
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
+        )
+        G[1][2][0]["data"] = 10  # OK to set data like this
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})]
+        )
+
+        G.adj[1][2][0]["data"] = 20
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})]
+        )
+        G.edges[1, 2, 0]["data"] = 21  # another spelling, "edge"
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})]
+        )
+        G.adj[1][2][0]["listdata"] = [20, 200]
+        G.adj[1][2][0]["weight"] = 20
+        assert_edges_equal(
+            G.edges(data=True),
+            [
+                (
+                    1,
+                    2,
+                    {
+                        "data": 21,
+                        "spam": "bar",
+                        "bar": "foo",
+                        "listdata": [20, 200],
+                        "weight": 20,
+                    },
+                )
+            ],
+        )


+class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
+    def setup_method(self):
         self.Graph = nx.MultiGraph
         # build K3
         ed1, ed2, ed3 = ({0: {}}, {0: {}}, {0: {}})
-        self.k3adj = {0: {1: ed1, 2: ed2},
-                      1: {0: ed1, 2: ed3},
-                      2: {0: ed2, 1: ed3}}
+        self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
         self.k3edges = [(0, 1), (0, 2), (1, 2)]
         self.k3nodes = [0, 1, 2]
         self.K3 = self.Graph()
@@ -165,125 +178,138 @@ def setUp(self):

     def test_data_input(self):
         G = self.Graph({1: [2], 2: [1]}, name="test")
-        assert_equal(G.name, "test")
+        assert G.name == "test"
         expected = [(1, {2: {0: {}}}), (2, {1: {0: {}}})]
-        assert_equal(sorted(G.adj.items()), expected)
+        assert sorted(G.adj.items()) == expected

     def test_getitem(self):
         G = self.K3
-        assert_equal(G[0], {1: {0: {}}, 2: {0: {}}})
-        assert_raises(KeyError, G.__getitem__, 'j')
-        assert_raises((TypeError, nx.NetworkXError), G.__getitem__, ['A'])
+        assert G[0] == {1: {0: {}}, 2: {0: {}}}
+        with pytest.raises(KeyError):
+            G.__getitem__("j")
+        with pytest.raises(TypeError):
+            G.__getitem__(["A"])

     def test_remove_node(self):
         G = self.K3
         G.remove_node(0)
-        assert_equal(G.adj, {1: {2: {0: {}}}, 2: {1: {0: {}}}})
-        assert_raises((KeyError, nx.NetworkXError), G.remove_node, -1)
+        assert G.adj == {1: {2: {0: {}}}, 2: {1: {0: {}}}}
+        with pytest.raises(nx.NetworkXError):
+            G.remove_node(-1)

     def test_add_edge(self):
         G = self.Graph()
         G.add_edge(0, 1)
-        assert_equal(G.adj, {0: {1: {0: {}}}, 1: {0: {0: {}}}})
+        assert G.adj == {0: {1: {0: {}}}, 1: {0: {0: {}}}}
         G = self.Graph()
         G.add_edge(*(0, 1))
-        assert_equal(G.adj, {0: {1: {0: {}}}, 1: {0: {0: {}}}})
+        assert G.adj == {0: {1: {0: {}}}, 1: {0: {0: {}}}}

     def test_add_edge_conflicting_key(self):
         G = self.Graph()
         G.add_edge(0, 1, key=1)
         G.add_edge(0, 1)
-        assert_equal(G.number_of_edges(), 2)
+        assert G.number_of_edges() == 2
         G = self.Graph()
         G.add_edges_from([(0, 1, 1, {})])
         G.add_edges_from([(0, 1)])
-        assert_equal(G.number_of_edges(), 2)
+        assert G.number_of_edges() == 2

     def test_add_edges_from(self):
         G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})])
-        assert_equal(G.adj, {0: {1: {0: {}, 1: {'weight': 3}}},
-                             1: {0: {0: {}, 1: {'weight': 3}}}})
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})], weight=2)
-        assert_equal(G.adj, {0: {1: {0: {}, 1: {'weight': 3},
-                                     2: {'weight': 2}, 3: {'weight': 3}}},
-                             1: {0: {0: {}, 1: {'weight': 3},
-                                     2: {'weight': 2}, 3: {'weight': 3}}}})
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})])
+        assert G.adj == {
+            0: {1: {0: {}, 1: {"weight": 3}}},
+            1: {0: {0: {}, 1: {"weight": 3}}},
+        }
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2)
+        assert G.adj == {
+            0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+            1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+        }
         G = self.Graph()
-        edges = [(0, 1, {'weight': 3}), (0, 1, (('weight', 2),)),
-                 (0, 1, 5), (0, 1, 's')]
+        edges = [
+            (0, 1, {"weight": 3}),
+            (0, 1, (("weight", 2),)),
+            (0, 1, 5),
+            (0, 1, "s"),
+        ]
         G.add_edges_from(edges)
-        keydict = {0: {'weight': 3}, 1: {'weight': 2}, 5: {}, 's': {}}
+        keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {},
"s": {}} + assert G._adj == {0: {1: keydict}, 1: {0: keydict}} # too few in tuple - assert_raises(nx.NetworkXError, G.add_edges_from, [(0,)]) + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0,)]) # too many in tuple - assert_raises(nx.NetworkXError, G.add_edges_from, [(0, 1, 2, 3, 4)]) + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3, 4)]) # not a tuple - assert_raises(TypeError, G.add_edges_from, [0]) + with pytest.raises(TypeError): + G.add_edges_from([0]) def test_remove_edge(self): G = self.K3 G.remove_edge(0, 1) - assert_equal(G.adj, {0: {2: {0: {}}}, - 1: {2: {0: {}}}, - 2: {0: {0: {}}, - 1: {0: {}}}}) + assert G.adj == {0: {2: {0: {}}}, 1: {2: {0: {}}}, 2: {0: {0: {}}, 1: {0: {}}}} - assert_raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0) - assert_raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2, - key=1) + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + with pytest.raises(nx.NetworkXError): + G.remove_edge(0, 2, key=1) def test_remove_edges_from(self): G = self.K3.copy() G.remove_edges_from([(0, 1)]) kd = {0: {}} - assert_equal(G.adj, {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}}) + assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}} G.remove_edges_from([(0, 0)]) # silent fail self.K3.add_edge(0, 1) G = self.K3.copy() G.remove_edges_from(list(G.edges(data=True, keys=True))) - assert_equal(G.adj, {0: {}, 1: {}, 2: {}}) + assert G.adj == {0: {}, 1: {}, 2: {}} G = self.K3.copy() G.remove_edges_from(list(G.edges(data=False, keys=True))) - assert_equal(G.adj, {0: {}, 1: {}, 2: {}}) + assert G.adj == {0: {}, 1: {}, 2: {}} G = self.K3.copy() G.remove_edges_from(list(G.edges(data=False, keys=False))) - assert_equal(G.adj, {0: {}, 1: {}, 2: {}}) + assert G.adj == {0: {}, 1: {}, 2: {}} G = self.K3.copy() G.remove_edges_from([(0, 1, 0), (0, 2, 0, {}), (1, 2)]) - assert_equal(G.adj, {0: {1: {1: {}}}, 1: {0: {1: {}}}, 2: {}}) + assert G.adj == {0: {1: {1: {}}}, 1: {0: {1: {}}}, 2: {}} def test_remove_multiedge(self): G = self.K3 - G.add_edge(0, 1, key='parallel edge') - G.remove_edge(0, 1, key='parallel edge') - assert_equal(G.adj, {0: {1: {0: {}}, 2: {0: {}}}, - 1: {0: {0: {}}, 2: {0: {}}}, - 2: {0: {0: {}}, 1: {0: {}}}}) + G.add_edge(0, 1, key="parallel edge") + G.remove_edge(0, 1, key="parallel edge") + assert G.adj == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } G.remove_edge(0, 1) kd = {0: {}} - assert_equal(G.adj, {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}}) - assert_raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0) + assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) -class TestEdgeSubgraph(object): +class TestEdgeSubgraph: """Unit tests for the :meth:`MultiGraph.edge_subgraph` method.""" - def setup(self): + def setup_method(self): # Create a doubly-linked path graph on five nodes. G = nx.MultiGraph() nx.add_path(G, range(5)) nx.add_path(G, range(5)) # Add some node, edge, and graph attributes. 
for i in range(5): - G.nodes[i]['name'] = 'node{}'.format(i) - G.adj[0][1][0]['name'] = 'edge010' - G.adj[0][1][1]['name'] = 'edge011' - G.adj[3][4][0]['name'] = 'edge340' - G.adj[3][4][1]['name'] = 'edge341' - G.graph['name'] = 'graph' + G.nodes[i]["name"] = f"node{i}" + G.adj[0][1][0]["name"] = "edge010" + G.adj[0][1][1]["name"] = "edge011" + G.adj[3][4][0]["name"] = "edge340" + G.adj[3][4][1]["name"] = "edge341" + G.graph["name"] = "graph" # Get the subgraph induced by one of the first edges and one of # the last edges. self.G = G @@ -291,12 +317,13 @@ def setup(self): def test_correct_nodes(self): """Tests that the subgraph has the correct nodes.""" - assert_equal([0, 1, 3, 4], sorted(self.H.nodes())) + assert [0, 1, 3, 4] == sorted(self.H.nodes()) def test_correct_edges(self): """Tests that the subgraph has the correct edges.""" - assert_equal([(0, 1, 0, 'edge010'), (3, 4, 1, 'edge341')], - sorted(self.H.edges(keys=True, data='name'))) + assert [(0, 1, 0, "edge010"), (3, 4, 1, "edge341")] == sorted( + self.H.edges(keys=True, data="name") + ) def test_add_node(self): """Tests that adding a node to the original graph does not @@ -304,7 +331,7 @@ def test_add_node(self): """ self.G.add_node(5) - assert_equal([0, 1, 3, 4], sorted(self.H.nodes())) + assert [0, 1, 3, 4] == sorted(self.H.nodes()) def test_remove_node(self): """Tests that removing a node in the original graph does @@ -312,7 +339,7 @@ def test_remove_node(self): """ self.G.remove_node(0) - assert_equal([1, 3, 4], sorted(self.H.nodes())) + assert [1, 3, 4] == sorted(self.H.nodes()) def test_node_attr_dict(self): """Tests that the node attribute dictionary of the two graphs is @@ -320,12 +347,12 @@ def test_node_attr_dict(self): """ for v in self.H: - assert_equal(self.G.nodes[v], self.H.nodes[v]) + assert self.G.nodes[v] == self.H.nodes[v] # Making a change to G should make a change in H and vice versa. - self.G.nodes[0]['name'] = 'foo' - assert_equal(self.G.nodes[0], self.H.nodes[0]) - self.H.nodes[1]['name'] = 'bar' - assert_equal(self.G.nodes[1], self.H.nodes[1]) + self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] def test_edge_attr_dict(self): """Tests that the edge attribute dictionary of the two graphs is @@ -333,18 +360,16 @@ def test_edge_attr_dict(self): """ for u, v, k in self.H.edges(keys=True): - assert_equal(self.G._adj[u][v][k], self.H._adj[u][v][k]) + assert self.G._adj[u][v][k] == self.H._adj[u][v][k] # Making a change to G should make a change in H and vice versa. - self.G._adj[0][1][0]['name'] = 'foo' - assert_equal(self.G._adj[0][1][0]['name'], - self.H._adj[0][1][0]['name']) - self.H._adj[3][4][1]['name'] = 'bar' - assert_equal(self.G._adj[3][4][1]['name'], - self.H._adj[3][4][1]['name']) + self.G._adj[0][1][0]["name"] = "foo" + assert self.G._adj[0][1][0]["name"] == self.H._adj[0][1][0]["name"] + self.H._adj[3][4][1]["name"] = "bar" + assert self.G._adj[3][4][1]["name"] == self.H._adj[3][4][1]["name"] def test_graph_attr_dict(self): """Tests that the graph attribute dictionary of the two graphs is the same object. 
""" - assert_is(self.G.graph, self.H.graph) + assert self.G.graph is self.H.graph diff --git a/networkx/classes/tests/test_ordered.py b/networkx/classes/tests/test_ordered.py index 1d3f917..f29ecb4 100644 --- a/networkx/classes/tests/test_ordered.py +++ b/networkx/classes/tests/test_ordered.py @@ -1,8 +1,7 @@ -from nose.tools import assert_equals import networkx as nx -class SmokeTestOrdered(object): +class TestOrdered: # Just test instantiation. def test_graph(self): G = nx.OrderedGraph() @@ -17,24 +16,25 @@ def test_multidigraph(self): G = nx.OrderedMultiDiGraph() -class TestOrderedFeatures(object): - def setUp(self): - self.G = nx.OrderedDiGraph() - self.G.add_nodes_from([1, 2, 3]) - self.G.add_edges_from([(2, 3), (1, 3)]) +class TestOrderedFeatures: + @classmethod + def setup_class(cls): + cls.G = nx.OrderedDiGraph() + cls.G.add_nodes_from([1, 2, 3]) + cls.G.add_edges_from([(2, 3), (1, 3)]) def test_subgraph_order(self): G = self.G G_sub = G.subgraph([1, 2, 3]) - assert_equals(list(G.nodes), list(G_sub.nodes)) - assert_equals(list(G.edges), list(G_sub.edges)) - assert_equals(list(G.pred[3]), list(G_sub.pred[3])) - assert_equals([2, 1], list(G_sub.pred[3])) - assert_equals([], list(G_sub.succ[3])) + assert list(G.nodes) == list(G_sub.nodes) + assert list(G.edges) == list(G_sub.edges) + assert list(G.pred[3]) == list(G_sub.pred[3]) + assert [2, 1] == list(G_sub.pred[3]) + assert [] == list(G_sub.succ[3]) G_sub = nx.induced_subgraph(G, [1, 2, 3]) - assert_equals(list(G.nodes), list(G_sub.nodes)) - assert_equals(list(G.edges), list(G_sub.edges)) - assert_equals(list(G.pred[3]), list(G_sub.pred[3])) - assert_equals([2, 1], list(G_sub.pred[3])) - assert_equals([], list(G_sub.succ[3])) + assert list(G.nodes) == list(G_sub.nodes) + assert list(G.edges) == list(G_sub.edges) + assert list(G.pred[3]) == list(G_sub.pred[3]) + assert [2, 1] == list(G_sub.pred[3]) + assert [] == list(G_sub.succ[3]) diff --git a/networkx/classes/tests/test_reportviews.py b/networkx/classes/tests/test_reportviews.py index ce07bfd..704e43a 100644 --- a/networkx/classes/tests/test_reportviews.py +++ b/networkx/classes/tests/test_reportviews.py @@ -1,298 +1,328 @@ -from nose.tools import assert_equal, assert_not_equal, \ - assert_true, assert_false, assert_raises, \ - assert_is, assert_is_not +import pytest import networkx as nx +from networkx.classes.reportviews import NodeDataView # Nodes -class TestNodeView(object): - def setup(self): - self.G = nx.path_graph(9) - self.nv = self.G.nodes # NodeView(G) +class TestNodeView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.nv = cls.G.nodes # NodeView(G) def test_pickle(self): import pickle + nv = self.nv pnv = pickle.loads(pickle.dumps(nv, -1)) - assert_equal(nv, pnv) - assert_equal(nv.__slots__, pnv.__slots__) + assert nv == pnv + assert nv.__slots__ == pnv.__slots__ def test_str(self): - assert_equal(str(self.nv), "[0, 1, 2, 3, 4, 5, 6, 7, 8]") + assert str(self.nv) == "[0, 1, 2, 3, 4, 5, 6, 7, 8]" def test_repr(self): - assert_equal(repr(self.nv), "NodeView((0, 1, 2, 3, 4, 5, 6, 7, 8))") + assert repr(self.nv) == "NodeView((0, 1, 2, 3, 4, 5, 6, 7, 8))" def test_contains(self): - nv = self.nv - assert_true(7 in nv) - assert_false(9 in nv) - self.G.remove_node(7) - self.G.add_node(9) - assert_false(7 in nv) - assert_true(9 in nv) + G = self.G.copy() + nv = G.nodes + assert 7 in nv + assert 9 not in nv + G.remove_node(7) + G.add_node(9) + assert 7 not in nv + assert 9 in nv def test_getitem(self): - nv = self.nv - self.G.nodes[3]['foo'] = 'bar' - 
assert_equal(nv[7], {}) - assert_equal(nv[3], {'foo': 'bar'}) + G = self.G.copy() + nv = G.nodes + G.nodes[3]["foo"] = "bar" + assert nv[7] == {} + assert nv[3] == {"foo": "bar"} def test_iter(self): nv = self.nv for i, n in enumerate(nv): - assert_equal(i, n) + assert i == n inv = iter(nv) - assert_equal(next(inv), 0) - assert_not_equal(iter(nv), nv) - assert_equal(iter(inv), inv) + assert next(inv) == 0 + assert iter(nv) != nv + assert iter(inv) == inv inv2 = iter(nv) next(inv2) - assert_equal(list(inv), list(inv2)) + assert list(inv) == list(inv2) # odd case where NodeView calls NodeDataView with data=False nnv = nv(data=False) for i, n in enumerate(nnv): - assert_equal(i, n) + assert i == n def test_call(self): nodes = self.nv - assert_is(nodes, nodes()) - assert_is_not(nodes, nodes(data=True)) - assert_is_not(nodes, nodes(data='weight')) + assert nodes is nodes() + assert nodes is not nodes(data=True) + assert nodes is not nodes(data="weight") -class TestNodeDataView(object): - def setup(self): - self.G = nx.path_graph(9) - self.nv = self.G.nodes.data() # NodeDataView(G) - self.ndv = self.G.nodes.data(True) - self.nwv = self.G.nodes.data('foo') +class TestNodeDataView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.nv = NodeDataView(cls.G) + cls.ndv = cls.G.nodes.data(True) + cls.nwv = cls.G.nodes.data("foo") def test_viewtype(self): nv = self.G.nodes ndvfalse = nv.data(False) - assert_is(nv, ndvfalse) - assert_is_not(nv, self.ndv) + assert nv is ndvfalse + assert nv is not self.ndv def test_pickle(self): import pickle + nv = self.nv pnv = pickle.loads(pickle.dumps(nv, -1)) - assert_equal(nv, pnv) - assert_equal(nv.__slots__, pnv.__slots__) + assert nv == pnv + assert nv.__slots__ == pnv.__slots__ def test_str(self): msg = str([(n, {}) for n in range(9)]) - assert_equal(str(self.ndv), msg) + assert str(self.ndv) == msg def test_repr(self): - msg = "NodeDataView({0: {}, 1: {}, 2: {}, 3: {}, " + \ - "4: {}, 5: {}, 6: {}, 7: {}, 8: {}})" - assert_equal(repr(self.ndv), msg) + expected = "NodeDataView((0, 1, 2, 3, 4, 5, 6, 7, 8))" + assert repr(self.nv) == expected + expected = ( + "NodeDataView({0: {}, 1: {}, 2: {}, 3: {}, " + + "4: {}, 5: {}, 6: {}, 7: {}, 8: {}})" + ) + assert repr(self.ndv) == expected + expected = ( + "NodeDataView({0: None, 1: None, 2: None, 3: None, 4: None, " + + "5: None, 6: None, 7: None, 8: None}, data='foo')" + ) + assert repr(self.nwv) == expected def test_contains(self): - self.G.nodes[3]['foo'] = 'bar' - assert_true((7, {}) in self.nv) - assert_true((3, {'foo': 'bar'}) in self.nv) - assert_true((3, 'bar') in self.nwv) - assert_true((7, None) in self.nwv) + G = self.G.copy() + nv = G.nodes.data() + nwv = G.nodes.data("foo") + G.nodes[3]["foo"] = "bar" + assert (7, {}) in nv + assert (3, {"foo": "bar"}) in nv + assert (3, "bar") in nwv + assert (7, None) in nwv # default - nwv_def = self.G.nodes(data='foo', default='biz') - assert_true((7, 'biz') in nwv_def) - assert_true((3, 'bar') in nwv_def) + nwv_def = G.nodes(data="foo", default="biz") + assert (7, "biz") in nwv_def + assert (3, "bar") in nwv_def def test_getitem(self): - self.G.nodes[3]['foo'] = 'bar' - assert_equal(self.nv[3], {'foo': 'bar'}) + G = self.G.copy() + nv = G.nodes + G.nodes[3]["foo"] = "bar" + assert nv[3] == {"foo": "bar"} # default - nwv_def = self.G.nodes(data='foo', default='biz') - assert_true(nwv_def[7], 'biz') - assert_equal(nwv_def[3], 'bar') + nwv_def = G.nodes(data="foo", default="biz") + assert nwv_def[7] == "biz" + assert nwv_def[3] == "bar" def
test_iter(self): - nv = self.nv + G = self.G.copy() + nv = G.nodes.data() + ndv = G.nodes.data(True) + nwv = G.nodes.data("foo") for i, (n, d) in enumerate(nv): - assert_equal(i, n) - assert_equal(d, {}) + assert i == n + assert d == {} inv = iter(nv) - assert_equal(next(inv), (0, {})) - self.G.nodes[3]['foo'] = 'bar' + assert next(inv) == (0, {}) + G.nodes[3]["foo"] = "bar" # default for n, d in nv: if n == 3: - assert_equal(d, {'foo': 'bar'}) + assert d == {"foo": "bar"} else: - assert_equal(d, {}) + assert d == {} # data=True - for n, d in self.ndv: + for n, d in ndv: if n == 3: - assert_equal(d, {'foo': 'bar'}) + assert d == {"foo": "bar"} else: - assert_equal(d, {}) + assert d == {} # data='foo' - for n, d in self.nwv: + for n, d in nwv: if n == 3: - assert_equal(d, 'bar') + assert d == "bar" else: - assert_equal(d, None) + assert d is None # data='foo', default=1 - for n, d in self.G.nodes.data('foo', default=1): + for n, d in G.nodes.data("foo", default=1): if n == 3: - assert_equal(d, 'bar') + assert d == "bar" else: - assert_equal(d, 1) + assert d == 1 def test_nodedataview_unhashable(): G = nx.path_graph(9) - G.nodes[3]['foo'] = 'bar' + G.nodes[3]["foo"] = "bar" nvs = [G.nodes.data()] nvs.append(G.nodes.data(True)) H = G.copy() - H.nodes[4]['foo'] = {1, 2, 3} + H.nodes[4]["foo"] = {1, 2, 3} nvs.append(H.nodes.data(True)) # raise unhashable for nv in nvs: - assert_raises(TypeError, set, nv) - assert_raises(TypeError, eval, 'nv | nv', locals()) + pytest.raises(TypeError, set, nv) + pytest.raises(TypeError, eval, "nv | nv", locals()) # no raise... hashable Gn = G.nodes.data(False) set(Gn) Gn | Gn - Gn = G.nodes.data('foo') + Gn = G.nodes.data("foo") set(Gn) Gn | Gn -class TestNodeViewSetOps(object): - def setUp(self): - self.G = nx.path_graph(9) - self.G.nodes[3]['foo'] = 'bar' - self.nv = self.G.nodes +class TestNodeViewSetOps: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes def n_its(self, nodes): return {node for node in nodes} def test_len(self): - nv = self.nv - assert_equal(len(nv), 9) - self.G.remove_node(7) - assert_equal(len(nv), 8) - self.G.add_node(9) - assert_equal(len(nv), 9) + G = self.G.copy() + nv = G.nodes + assert len(nv) == 9 + G.remove_node(7) + assert len(nv) == 8 + G.add_node(9) + assert len(nv) == 9 def test_and(self): # print("G & H nodes:", gnv & hnv) nv = self.nv some_nodes = self.n_its(range(5, 12)) - assert_equal(nv & some_nodes, self.n_its(range(5, 9))) - assert_equal(some_nodes & nv, self.n_its(range(5, 9))) + assert nv & some_nodes == self.n_its(range(5, 9)) + assert some_nodes & nv == self.n_its(range(5, 9)) def test_or(self): # print("G | H nodes:", gnv | hnv) nv = self.nv some_nodes = self.n_its(range(5, 12)) - assert_equal(nv | some_nodes, self.n_its(range(12))) - assert_equal(some_nodes | nv, self.n_its(range(12))) + assert nv | some_nodes == self.n_its(range(12)) + assert some_nodes | nv == self.n_its(range(12)) def test_xor(self): # print("G ^ H nodes:", gnv ^ hnv) nv = self.nv some_nodes = self.n_its(range(5, 12)) nodes = {0, 1, 2, 3, 4, 9, 10, 11} - assert_equal(nv ^ some_nodes, self.n_its(nodes)) - assert_equal(some_nodes ^ nv, self.n_its(nodes)) + assert nv ^ some_nodes == self.n_its(nodes) + assert some_nodes ^ nv == self.n_its(nodes) def test_sub(self): # print("G - H nodes:", gnv - hnv) nv = self.nv some_nodes = self.n_its(range(5, 12)) - assert_equal(nv - some_nodes, self.n_its(range(5))) - assert_equal(some_nodes - nv, self.n_its(range(9, 12))) + assert nv - 
some_nodes == self.n_its(range(5)) + assert some_nodes - nv == self.n_its(range(9, 12)) class TestNodeDataViewSetOps(TestNodeViewSetOps): - def setUp(self): - self.G = nx.path_graph(9) - self.G.nodes[3]['foo'] = 'bar' - self.nv = self.G.nodes.data('foo') + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes.data("foo") def n_its(self, nodes): - return {(node, 'bar' if node == 3 else None) for node in nodes} + return {(node, "bar" if node == 3 else None) for node in nodes} class TestNodeDataViewDefaultSetOps(TestNodeDataViewSetOps): - def setUp(self): - self.G = nx.path_graph(9) - self.G.nodes[3]['foo'] = 'bar' - self.nv = self.G.nodes.data('foo', default=1) + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes.data("foo", default=1) def n_its(self, nodes): - return {(node, 'bar' if node == 3 else 1) for node in nodes} + return {(node, "bar" if node == 3 else 1) for node in nodes} # Edges Data View -class TestEdgeDataView(object): - def setUp(self): - self.G = nx.path_graph(9) - self.eview = nx.reportviews.EdgeView +class TestEdgeDataView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.eview = nx.reportviews.EdgeView def test_pickle(self): import pickle + ev = self.eview(self.G)(data=True) pev = pickle.loads(pickle.dumps(ev, -1)) - assert_equal(list(ev), list(pev)) - assert_equal(ev.__slots__, pev.__slots__) + assert list(ev) == list(pev) + assert ev.__slots__ == pev.__slots__ def modify_edge(self, G, e, **kwds): - self.G._adj[e[0]][e[1]].update(kwds) + G._adj[e[0]][e[1]].update(kwds) def test_str(self): ev = self.eview(self.G)(data=True) rep = str([(n, n + 1, {}) for n in range(8)]) - assert_equal(str(ev), rep) + assert str(ev) == rep def test_repr(self): ev = self.eview(self.G)(data=True) - rep = "EdgeDataView([(0, 1, {}), (1, 2, {}), " + \ - "(2, 3, {}), (3, 4, {}), " + \ - "(4, 5, {}), (5, 6, {}), " + \ - "(6, 7, {}), (7, 8, {})])" - assert_equal(repr(ev), rep) + rep = ( + "EdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep def test_iterdata(self): - G = self.G + G = self.G.copy() evr = self.eview(G) ev = evr(data=True) - ev_def = evr(data='foo', default=1) + ev_def = evr(data="foo", default=1) for u, v, d in ev: pass - assert_equal(d, {}) + assert d == {} for u, v, wt in ev_def: pass - assert_equal(wt, 1) + assert wt == 1 - self.modify_edge(G, (2, 3), foo='bar') + self.modify_edge(G, (2, 3), foo="bar") for e in ev: - assert_equal(len(e), 3) + assert len(e) == 3 if set(e[:2]) == {2, 3}: - assert_equal(e[2], {'foo': 'bar'}) + assert e[2] == {"foo": "bar"} checked = True else: - assert_equal(e[2], {}) - assert_true(checked) + assert e[2] == {} + assert checked for e in ev_def: - assert_equal(len(e), 3) + assert len(e) == 3 if set(e[:2]) == {2, 3}: - assert_equal(e[2], 'bar') + assert e[2] == "bar" checked_wt = True else: - assert_equal(e[2], 1) - assert_true(checked_wt) + assert e[2] == 1 + assert checked_wt def test_iter(self): evr = self.eview(self.G) @@ -300,220 +330,328 @@ def test_iter(self): for u, v in ev: pass iev = iter(ev) - assert_equal(next(iev), (0, 1)) - assert_not_equal(iter(ev), ev) - assert_equal(iter(iev), iev) + assert next(iev) == (0, 1) + assert iter(ev) != ev + assert iter(iev) == iev def test_contains(self): evr = self.eview(self.G) ev = evr() if self.G.is_directed(): - assert_true((1, 2) in ev and 
(2, 1) not in ev) + assert (1, 2) in ev and (2, 1) not in ev else: - assert_true((1, 2) in ev and (2, 1) in ev) - assert_false((1, 4) in ev) - assert_false((1, 90) in ev) - assert_false((90, 1) in ev) + assert (1, 2) in ev and (2, 1) in ev + assert not (1, 4) in ev + assert not (1, 90) in ev + assert not (90, 1) in ev + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + if self.G.is_directed(): + assert (0, 1) in ev + assert not (1, 2) in ev + assert (2, 3) in ev + else: + assert (0, 1) in ev + assert (1, 2) in ev + assert (2, 3) in ev + assert not (3, 4) in ev + assert not (4, 5) in ev + assert not (5, 6) in ev + assert not (7, 8) in ev + assert not (8, 9) in ev def test_len(self): evr = self.eview(self.G) - ev = evr(data='foo') - assert_equal(len(ev), 8) - assert_equal(len(evr(1)), 2) - assert_equal(len(evr([1, 2, 3])), 4) + ev = evr(data="foo") + assert len(ev) == 8 + assert len(evr(1)) == 2 + assert len(evr([1, 2, 3])) == 4 - assert_equal(len(self.G.edges(1)), 2) - assert_equal(len(self.G.edges()), 8) - assert_equal(len(self.G.edges), 8) + assert len(self.G.edges(1)) == 2 + assert len(self.G.edges()) == 8 + assert len(self.G.edges) == 8 H = self.G.copy() H.add_edge(1, 1) - assert_equal(len(H.edges(1)), 3) - assert_equal(len(H.edges()), 9) - assert_equal(len(H.edges), 9) + assert len(H.edges(1)) == 3 + assert len(H.edges()) == 9 + assert len(H.edges) == 9 class TestOutEdgeDataView(TestEdgeDataView): - def setUp(self): - self.G = nx.path_graph(9, create_using=nx.DiGraph()) - self.eview = nx.reportviews.OutEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.DiGraph()) + cls.eview = nx.reportviews.OutEdgeView def test_repr(self): ev = self.eview(self.G)(data=True) - rep = "OutEdgeDataView([(0, 1, {}), (1, 2, {}), " + \ - "(2, 3, {}), (3, 4, {}), " + \ - "(4, 5, {}), (5, 6, {}), " + \ - "(6, 7, {}), (7, 8, {})])" - assert_equal(repr(ev), rep) + rep = ( + "OutEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep def test_len(self): evr = self.eview(self.G) - ev = evr(data='foo') - assert_equal(len(ev), 8) - assert_equal(len(evr(1)), 1) - assert_equal(len(evr([1, 2, 3])), 3) + ev = evr(data="foo") + assert len(ev) == 8 + assert len(evr(1)) == 1 + assert len(evr([1, 2, 3])) == 3 - assert_equal(len(self.G.edges(1)), 1) - assert_equal(len(self.G.edges()), 8) - assert_equal(len(self.G.edges), 8) + assert len(self.G.edges(1)) == 1 + assert len(self.G.edges()) == 8 + assert len(self.G.edges) == 8 H = self.G.copy() H.add_edge(1, 1) - assert_equal(len(H.edges(1)), 2) - assert_equal(len(H.edges()), 9) - assert_equal(len(H.edges), 9) + assert len(H.edges(1)) == 2 + assert len(H.edges()) == 9 + assert len(H.edges) == 9 + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert not (1, 2) in ev + assert (2, 3) in ev + assert not (3, 4) in ev + assert not (4, 5) in ev + assert not (5, 6) in ev + assert not (7, 8) in ev + assert not (8, 9) in ev class TestInEdgeDataView(TestOutEdgeDataView): - def setUp(self): - self.G = nx.path_graph(9, create_using=nx.DiGraph()) - self.eview = nx.reportviews.InEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.DiGraph()) + cls.eview = nx.reportviews.InEdgeView def test_repr(self): ev = self.eview(self.G)(data=True) - rep = "InEdgeDataView([(0, 1, {}), (1, 2, {}), " + \ - "(2, 3, 
{}), (3, 4, {}), " + \ - "(4, 5, {}), (5, 6, {}), " + \ - "(6, 7, {}), (7, 8, {})])" - assert_equal(repr(ev), rep) + rep = ( + "InEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert not (0, 1) in ev + assert (1, 2) in ev + assert not (2, 3) in ev + assert not (3, 4) in ev + assert not (4, 5) in ev + assert not (5, 6) in ev + assert not (7, 8) in ev + assert not (8, 9) in ev class TestMultiEdgeDataView(TestEdgeDataView): - def setUp(self): - self.G = nx.path_graph(9, create_using=nx.MultiGraph()) - self.eview = nx.reportviews.MultiEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiGraph()) + cls.eview = nx.reportviews.MultiEdgeView def modify_edge(self, G, e, **kwds): - self.G._adj[e[0]][e[1]][0].update(kwds) + G._adj[e[0]][e[1]][0].update(kwds) def test_repr(self): ev = self.eview(self.G)(data=True) - rep = "MultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + \ - "(2, 3, {}), (3, 4, {}), " + \ - "(4, 5, {}), (5, 6, {}), " + \ - "(6, 7, {}), (7, 8, {})])" - assert_equal(repr(ev), rep) + rep = ( + "MultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) in ev + assert (2, 3) in ev + assert not (3, 4) in ev + assert not (4, 5) in ev + assert not (5, 6) in ev + assert not (7, 8) in ev + assert not (8, 9) in ev class TestOutMultiEdgeDataView(TestOutEdgeDataView): - def setUp(self): - self.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) - self.eview = nx.reportviews.OutMultiEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.eview = nx.reportviews.OutMultiEdgeView def modify_edge(self, G, e, **kwds): - self.G._adj[e[0]][e[1]][0].update(kwds) + G._adj[e[0]][e[1]][0].update(kwds) def test_repr(self): ev = self.eview(self.G)(data=True) - rep = "OutMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + \ - "(2, 3, {}), (3, 4, {}), " + \ - "(4, 5, {}), (5, 6, {}), " + \ - "(6, 7, {}), (7, 8, {})])" - assert_equal(repr(ev), rep) + rep = ( + "OutMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert not (1, 2) in ev + assert (2, 3) in ev + assert not (3, 4) in ev + assert not (4, 5) in ev + assert not (5, 6) in ev + assert not (7, 8) in ev + assert not (8, 9) in ev class TestInMultiEdgeDataView(TestOutMultiEdgeDataView): - def setUp(self): - self.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) - self.eview = nx.reportviews.InMultiEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.eview = nx.reportviews.InMultiEdgeView def test_repr(self): ev = self.eview(self.G)(data=True) - rep = "InMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + \ - "(2, 3, {}), (3, 4, {}), " + \ - "(4, 5, {}), (5, 6, {}), " + \ - "(6, 7, {}), (7, 8, {})])" - assert_equal(repr(ev), rep) + rep = ( + "InMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), 
(5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert not (0, 1) in ev + assert (1, 2) in ev + assert not (2, 3) in ev + assert not (3, 4) in ev + assert not (4, 5) in ev + assert not (5, 6) in ev + assert not (7, 8) in ev + assert not (8, 9) in ev # Edge Views -class TestEdgeView(object): - def setup(self): - self.G = nx.path_graph(9) - self.eview = nx.reportviews.EdgeView +class TestEdgeView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.eview = nx.reportviews.EdgeView def test_pickle(self): import pickle + ev = self.eview(self.G) pev = pickle.loads(pickle.dumps(ev, -1)) - assert_equal(ev, pev) - assert_equal(ev.__slots__, pev.__slots__) + assert ev == pev + assert ev.__slots__ == pev.__slots__ def modify_edge(self, G, e, **kwds): - self.G._adj[e[0]][e[1]].update(kwds) + G._adj[e[0]][e[1]].update(kwds) def test_str(self): ev = self.eview(self.G) rep = str([(n, n + 1) for n in range(8)]) - assert_equal(str(ev), rep) + assert str(ev) == rep def test_repr(self): ev = self.eview(self.G) - rep = "EdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + \ - "(4, 5), (5, 6), (6, 7), (7, 8)])" - assert_equal(repr(ev), rep) + rep = ( + "EdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep def test_call(self): ev = self.eview(self.G) - assert_equal(id(ev), id(ev())) - assert_equal(id(ev), id(ev(data=False))) - assert_not_equal(id(ev), id(ev(data=True))) - assert_not_equal(id(ev), id(ev(nbunch=1))) + assert id(ev) == id(ev()) + assert id(ev) == id(ev(data=False)) + assert id(ev) != id(ev(data=True)) + assert id(ev) != id(ev(nbunch=1)) def test_data(self): ev = self.eview(self.G) - assert_not_equal(id(ev), id(ev.data())) - assert_equal(id(ev), id(ev.data(data=False))) - assert_not_equal(id(ev), id(ev.data(data=True))) - assert_not_equal(id(ev), id(ev.data(nbunch=1))) + assert id(ev) != id(ev.data()) + assert id(ev) == id(ev.data(data=False)) + assert id(ev) != id(ev.data(data=True)) + assert id(ev) != id(ev.data(nbunch=1)) def test_iter(self): ev = self.eview(self.G) for u, v in ev: pass iev = iter(ev) - assert_equal(next(iev), (0, 1)) - assert_not_equal(iter(ev), ev) - assert_equal(iter(iev), iev) + assert next(iev) == (0, 1) + assert iter(ev) != ev + assert iter(iev) == iev def test_contains(self): ev = self.eview(self.G) edv = ev() if self.G.is_directed(): - assert_true((1, 2) in ev and (2, 1) not in ev) - assert_true((1, 2) in edv and (2, 1) not in edv) + assert (1, 2) in ev and (2, 1) not in ev + assert (1, 2) in edv and (2, 1) not in edv else: - assert_true((1, 2) in ev and (2, 1) in ev) - assert_true((1, 2) in edv and (2, 1) in edv) - assert_false((1, 4) in ev) - assert_false((1, 4) in edv) + assert (1, 2) in ev and (2, 1) in ev + assert (1, 2) in edv and (2, 1) in edv + assert not (1, 4) in ev + assert not (1, 4) in edv # edge not in graph - assert_false((1, 90) in ev) - assert_false((90, 1) in ev) - assert_false((1, 90) in edv) - assert_false((90, 1) in edv) + assert not (1, 90) in ev + assert not (90, 1) in ev + assert not (1, 90) in edv + assert not (90, 1) in edv + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) in evn + assert (2, 3) in evn + assert not (3, 4) in evn + assert not (4, 5) in evn + assert not (5, 6) in evn + assert not (7, 8) in evn + assert not (8, 9) in evn def test_len(self): ev = 
self.eview(self.G) num_ed = 9 if self.G.is_multigraph() else 8 - assert_equal(len(ev), num_ed) + assert len(ev) == num_ed H = self.G.copy() H.add_edge(1, 1) - assert_equal(len(H.edges(1)), 3 + H.is_multigraph() - H.is_directed()) - assert_equal(len(H.edges()), num_ed + 1) - assert_equal(len(H.edges), num_ed + 1) + assert len(H.edges(1)) == 3 + H.is_multigraph() - H.is_directed() + assert len(H.edges()) == num_ed + 1 + assert len(H.edges) == num_ed + 1 def test_and(self): # print("G & H edges:", gnv & hnv) ev = self.eview(self.G) some_edges = {(0, 1), (1, 0), (0, 2)} if self.G.is_directed(): - assert_true(some_edges & ev, {(0, 1)}) - assert_true(ev & some_edges, {(0, 1)}) + assert some_edges & ev == {(0, 1)} + assert ev & some_edges == {(0, 1)} else: - assert_equal(ev & some_edges, {(0, 1), (1, 0)}) - assert_equal(some_edges & ev, {(0, 1), (1, 0)}) + assert ev & some_edges == {(0, 1), (1, 0)} + assert some_edges & ev == {(0, 1), (1, 0)} return def test_or(self): @@ -524,8 +662,8 @@ def test_or(self): result1.update(some_edges) result2 = {(n + 1, n) for n in range(8)} result2.update(some_edges) - assert_true((ev | some_edges) in (result1, result2)) - assert_true((some_edges | ev) in (result1, result2)) + assert (ev | some_edges) in (result1, result2) + assert (some_edges | ev) in (result1, result2) def test_xor(self): # print("G ^ H edges:", gnv ^ hnv) @@ -534,11 +672,11 @@ def test_xor(self): if self.G.is_directed(): result = {(n, n + 1) for n in range(1, 8)} result.update({(1, 0), (0, 2)}) - assert_equal(ev ^ some_edges, result) + assert ev ^ some_edges == result else: result = {(n, n + 1) for n in range(1, 8)} result.update({(0, 2)}) - assert_equal(ev ^ some_edges, result) + assert ev ^ some_edges == result return def test_sub(self): @@ -547,81 +685,114 @@ def test_sub(self): some_edges = {(0, 1), (1, 0), (0, 2)} result = {(n, n + 1) for n in range(8)} result.remove((0, 1)) - assert_true(ev - some_edges, result) + assert ev - some_edges == result class TestOutEdgeView(TestEdgeView): - def setup(self): - self.G = nx.path_graph(9, nx.DiGraph()) - self.eview = nx.reportviews.OutEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.DiGraph()) + cls.eview = nx.reportviews.OutEdgeView def test_repr(self): ev = self.eview(self.G) - rep = "OutEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + \ - "(4, 5), (5, 6), (6, 7), (7, 8)])" - assert_equal(repr(ev), rep) + rep = ( + "OutEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert not (1, 2) in evn + assert (2, 3) in evn + assert not (3, 4) in evn + assert not (4, 5) in evn + assert not (5, 6) in evn + assert not (7, 8) in evn + assert not (8, 9) in evn class TestInEdgeView(TestEdgeView): - def setup(self): - self.G = nx.path_graph(9, nx.DiGraph()) - self.eview = nx.reportviews.InEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.DiGraph()) + cls.eview = nx.reportviews.InEdgeView def test_repr(self): ev = self.eview(self.G) - rep = "InEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + \ - "(4, 5), (5, 6), (6, 7), (7, 8)])" - assert_equal(repr(ev), rep) + rep = ( + "InEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert not (0, 1) in evn + assert (1, 2) in evn + assert not (2,
3) in evn + assert not (3, 4) in evn + assert not (4, 5) in evn + assert not (5, 6) in evn + assert not (7, 8) in evn + assert not (8, 9) in evn class TestMultiEdgeView(TestEdgeView): - def setup(self): - self.G = nx.path_graph(9, nx.MultiGraph()) - self.G.add_edge(1, 2, key=3, foo='bar') - self.eview = nx.reportviews.MultiEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.MultiEdgeView def modify_edge(self, G, e, **kwds): if len(e) == 2: e = e + (0,) - self.G._adj[e[0]][e[1]][e[2]].update(kwds) + G._adj[e[0]][e[1]][e[2]].update(kwds) def test_str(self): ev = self.eview(self.G) replist = [(n, n + 1, 0) for n in range(8)] replist.insert(2, (1, 2, 3)) rep = str(replist) - assert_equal(str(ev), rep) + assert str(ev) == rep def test_repr(self): ev = self.eview(self.G) - rep = "MultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), " + \ - "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" - assert_equal(repr(ev), rep) + rep = ( + "MultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), " + + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep def test_call(self): ev = self.eview(self.G) - assert_equal(id(ev), id(ev(keys=True))) - assert_equal(id(ev), id(ev(data=False, keys=True))) - assert_not_equal(id(ev), id(ev(keys=False))) - assert_not_equal(id(ev), id(ev(data=True))) - assert_not_equal(id(ev), id(ev(nbunch=1))) + assert id(ev) == id(ev(keys=True)) + assert id(ev) == id(ev(data=False, keys=True)) + assert id(ev) != id(ev(keys=False)) + assert id(ev) != id(ev(data=True)) + assert id(ev) != id(ev(nbunch=1)) def test_data(self): ev = self.eview(self.G) - assert_not_equal(id(ev), id(ev.data())) - assert_equal(id(ev), id(ev.data(data=False, keys=True))) - assert_not_equal(id(ev), id(ev.data(keys=False))) - assert_not_equal(id(ev), id(ev.data(data=True))) - assert_not_equal(id(ev), id(ev.data(nbunch=1))) + assert id(ev) != id(ev.data()) + assert id(ev) == id(ev.data(data=False, keys=True)) + assert id(ev) != id(ev.data(keys=False)) + assert id(ev) != id(ev.data(data=True)) + assert id(ev) != id(ev.data(nbunch=1)) def test_iter(self): ev = self.eview(self.G) for u, v, k in ev: pass iev = iter(ev) - assert_equal(next(iev), (0, 1, 0)) - assert_not_equal(iter(ev), ev) - assert_equal(iter(iev), iev) + assert next(iev) == (0, 1, 0) + assert iter(ev) != ev + assert iter(iev) == iev def test_iterkeys(self): G = self.G @@ -629,61 +800,61 @@ def test_iterkeys(self): ev = evr(keys=True) for u, v, k in ev: pass - assert_equal(k, 0) + assert k == 0 ev = evr(keys=True, data="foo", default=1) for u, v, k, wt in ev: pass - assert_equal(wt, 1) + assert wt == 1 - self.modify_edge(G, (2, 3, 0), foo='bar') + self.modify_edge(G, (2, 3, 0), foo="bar") ev = evr(keys=True, data=True) for e in ev: - assert_equal(len(e), 4) - print('edge:', e) + assert len(e) == 4 + print("edge:", e) if set(e[:2]) == {2, 3}: print(self.G._adj[2][3]) - assert_equal(e[2], 0) - assert_equal(e[3], {'foo': 'bar'}) + assert e[2] == 0 + assert e[3] == {"foo": "bar"} checked = True elif set(e[:3]) == {1, 2, 3}: - assert_equal(e[2], 3) - assert_equal(e[3], {'foo': 'bar'}) + assert e[2] == 3 + assert e[3] == {"foo": "bar"} checked_multi = True else: - assert_equal(e[2], 0) - assert_equal(e[3], {}) - assert_true(checked) - assert_true(checked_multi) - ev = evr(keys=True, data='foo', default=1) + assert e[2] == 0 + assert e[3] == {} + assert checked + assert checked_multi + ev = 
evr(keys=True, data="foo", default=1) for e in ev: if set(e[:2]) == {1, 2} and e[2] == 3: - assert_equal(e[3], 'bar') + assert e[3] == "bar" if set(e[:2]) == {1, 2} and e[2] == 0: - assert_equal(e[3], 1) + assert e[3] == 1 if set(e[:2]) == {2, 3}: - assert_equal(e[2], 0) - assert_equal(e[3], 'bar') - assert_equal(len(e), 4) + assert e[2] == 0 + assert e[3] == "bar" + assert len(e) == 4 checked_wt = True - assert_true(checked_wt) + assert checked_wt ev = evr(keys=True) for e in ev: - assert_equal(len(e), 3) + assert len(e) == 3 elist = sorted([(i, i + 1, 0) for i in range(8)] + [(1, 2, 3)]) - assert_equal(sorted(list(ev)), elist) + assert sorted(list(ev)) == elist # test order of arguments:graph, nbunch, data, keys, default - ev = evr((1, 2), 'foo', True, 1) + ev = evr((1, 2), "foo", True, 1) for e in ev: if set(e[:2]) == {1, 2}: - assert_true(e[2] in {0, 3}) + assert e[2] in {0, 3} if e[2] == 3: - assert_equal(e[3], 'bar') + assert e[3] == "bar" else: # e[2] == 0 - assert_equal(e[3], 1) + assert e[3] == 1 if G.is_directed(): - assert_equal(len(list(ev)), 3) + assert len(list(ev)) == 3 else: - assert_equal(len(list(ev)), 4) + assert len(list(ev)) == 4 def test_or(self): # print("G | H edges:", gnv | hnv) @@ -692,8 +863,8 @@ def test_or(self): result = {(n, n + 1, 0) for n in range(8)} result.update(some_edges) result.update({(1, 2, 3)}) - assert_equal(ev | some_edges, result) - assert_equal(some_edges | ev, result) + assert ev | some_edges == result + assert some_edges | ev == result def test_sub(self): # print("G - H edges:", gnv - hnv) @@ -702,8 +873,8 @@ def test_sub(self): result = {(n, n + 1, 0) for n in range(8)} result.remove((0, 1, 0)) result.update({(1, 2, 3)}) - assert_true(ev - some_edges, result) - assert_true(some_edges - ev, result) + assert ev - some_edges == result + assert some_edges - ev == result def test_xor(self): # print("G ^ H edges:", gnv ^ hnv) @@ -712,145 +883,189 @@ def test_xor(self): if self.G.is_directed(): result = {(n, n + 1, 0) for n in range(1, 8)} result.update({(1, 0, 0), (0, 2, 0), (1, 2, 3)}) - assert_equal(ev ^ some_edges, result) - assert_equal(some_edges ^ ev, result) + assert ev ^ some_edges == result + assert some_edges ^ ev == result else: result = {(n, n + 1, 0) for n in range(1, 8)} result.update({(0, 2, 0), (1, 2, 3)}) - assert_equal(ev ^ some_edges, result) - assert_equal(some_edges ^ ev, result) + assert ev ^ some_edges == result + assert some_edges ^ ev == result def test_and(self): # print("G & H edges:", gnv & hnv) ev = self.eview(self.G) some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)} if self.G.is_directed(): - assert_equal(ev & some_edges, {(0, 1, 0)}) - assert_equal(some_edges & ev, {(0, 1, 0)}) + assert ev & some_edges == {(0, 1, 0)} + assert some_edges & ev == {(0, 1, 0)} else: - assert_equal(ev & some_edges, {(0, 1, 0), (1, 0, 0)}) - assert_equal(some_edges & ev, {(0, 1, 0), (1, 0, 0)}) + assert ev & some_edges == {(0, 1, 0), (1, 0, 0)} + assert some_edges & ev == {(0, 1, 0), (1, 0, 0)} + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) in evn + assert (2, 3) in evn + assert not (3, 4) in evn + assert not (4, 5) in evn + assert not (5, 6) in evn + assert not (7, 8) in evn + assert not (8, 9) in evn class TestOutMultiEdgeView(TestMultiEdgeView): - def setup(self): - self.G = nx.path_graph(9, nx.MultiDiGraph()) - self.G.add_edge(1, 2, key=3, foo='bar') - self.eview = nx.reportviews.OutMultiEdgeView + @classmethod + def setup_class(cls): + cls.G =
nx.path_graph(9, nx.MultiDiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.OutMultiEdgeView def modify_edge(self, G, e, **kwds): if len(e) == 2: e = e + (0,) - self.G._adj[e[0]][e[1]][e[2]].update(kwds) + G._adj[e[0]][e[1]][e[2]].update(kwds) def test_repr(self): ev = self.eview(self.G) - rep = "OutMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0),"\ - + " (3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" - assert_equal(repr(ev), rep) + rep = ( + "OutMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0)," + + " (3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert not (1, 2) in evn + assert (2, 3) in evn + assert not (3, 4) in evn + assert not (4, 5) in evn + assert not (5, 6) in evn + assert not (7, 8) in evn + assert not (8, 9) in evn class TestInMultiEdgeView(TestMultiEdgeView): - def setup(self): - self.G = nx.path_graph(9, nx.MultiDiGraph()) - self.G.add_edge(1, 2, key=3, foo='bar') - self.eview = nx.reportviews.InMultiEdgeView + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiDiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.InMultiEdgeView def modify_edge(self, G, e, **kwds): if len(e) == 2: e = e + (0,) - self.G._adj[e[0]][e[1]][e[2]].update(kwds) + G._adj[e[0]][e[1]][e[2]].update(kwds) def test_repr(self): ev = self.eview(self.G) - rep = "InMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), "\ - + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" - assert_equal(repr(ev), rep) + rep = ( + "InMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), " + + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert not (0, 1) in evn + assert (1, 2) in evn + assert not (2, 3) in evn + assert not (3, 4) in evn + assert not (4, 5) in evn + assert not (5, 6) in evn + assert not (7, 8) in evn + assert not (8, 9) in evn # Degrees -class TestDegreeView(object): +class TestDegreeView: GRAPH = nx.Graph dview = nx.reportviews.DegreeView - def setup(self): - self.G = nx.path_graph(6, self.GRAPH()) - self.G.add_edge(1, 3, foo=2) - self.G.add_edge(1, 3, foo=3) + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(6, cls.GRAPH()) + cls.G.add_edge(1, 3, foo=2) + cls.G.add_edge(1, 3, foo=3) def test_pickle(self): import pickle + deg = self.G.degree pdeg = pickle.loads(pickle.dumps(deg, -1)) - assert_equal(dict(deg), dict(pdeg)) + assert dict(deg) == dict(pdeg) def test_str(self): dv = self.dview(self.G) rep = str([(0, 1), (1, 3), (2, 2), (3, 3), (4, 2), (5, 1)]) - assert_equal(str(dv), rep) + assert str(dv) == rep dv = self.G.degree() - assert_equal(str(dv), rep) + assert str(dv) == rep def test_repr(self): dv = self.dview(self.G) rep = "DegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep def test_iter(self): dv = self.dview(self.G) for n, d in dv: pass idv = iter(dv) - assert_not_equal(iter(dv), dv) - assert_equal(iter(idv), idv) - assert_equal(next(idv), (0, dv[0])) - assert_equal(next(idv), (1, dv[1])) + assert iter(dv) != dv + assert iter(idv) == idv + assert next(idv) == (0, dv[0]) + assert next(idv) == (1, dv[1]) # weighted - dv = self.dview(self.G, weight='foo') + dv = self.dview(self.G, weight="foo") for n, d in dv: pass idv = 
iter(dv) - assert_not_equal(iter(dv), dv) - assert_equal(iter(idv), idv) - assert_equal(next(idv), (0, dv[0])) - assert_equal(next(idv), (1, dv[1])) + assert iter(dv) != dv + assert iter(idv) == idv + assert next(idv) == (0, dv[0]) + assert next(idv) == (1, dv[1]) def test_nbunch(self): dv = self.dview(self.G) dvn = dv(0) - assert_equal(dvn, 1) + assert dvn == 1 dvn = dv([2, 3]) - assert_equal(sorted(dvn), [(2, 2), (3, 3)]) + assert sorted(dvn) == [(2, 2), (3, 3)] def test_getitem(self): dv = self.dview(self.G) - assert_equal(dv[0], 1) - assert_equal(dv[1], 3) - assert_equal(dv[2], 2) - assert_equal(dv[3], 3) - dv = self.dview(self.G, weight='foo') - assert_equal(dv[0], 1) - assert_equal(dv[1], 5) - assert_equal(dv[2], 2) - assert_equal(dv[3], 5) + assert dv[0] == 1 + assert dv[1] == 3 + assert dv[2] == 2 + assert dv[3] == 3 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 5 + assert dv[2] == 2 + assert dv[3] == 5 def test_weight(self): dv = self.dview(self.G) - dvw = dv(0, weight='foo') - assert_equal(dvw, 1) - dvw = dv(1, weight='foo') - assert_equal(dvw, 5) - dvw = dv([2, 3], weight='foo') - assert_equal(sorted(dvw), [(2, 2), (3, 5)]) - dvd = dict(dv(weight='foo')) - assert_equal(dvd[0], 1) - assert_equal(dvd[1], 5) - assert_equal(dvd[2], 2) - assert_equal(dvd[3], 5) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 5 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 2), (3, 5)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 5 + assert dvd[2] == 2 + assert dvd[3] == 5 def test_len(self): dv = self.dview(self.G) - assert_equal(len(dv), 6) + assert len(dv) == 6 class TestDiDegreeView(TestDegreeView): @@ -860,7 +1075,7 @@ class TestDiDegreeView(TestDegreeView): def test_repr(self): dv = self.G.degree() rep = "DiDegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep class TestOutDegreeView(TestDegreeView): @@ -870,47 +1085,47 @@ class TestOutDegreeView(TestDegreeView): def test_str(self): dv = self.dview(self.G) rep = str([(0, 1), (1, 2), (2, 1), (3, 1), (4, 1), (5, 0)]) - assert_equal(str(dv), rep) + assert str(dv) == rep dv = self.G.out_degree() - assert_equal(str(dv), rep) + assert str(dv) == rep def test_repr(self): dv = self.G.out_degree() rep = "OutDegreeView({0: 1, 1: 2, 2: 1, 3: 1, 4: 1, 5: 0})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep def test_nbunch(self): dv = self.dview(self.G) dvn = dv(0) - assert_equal(dvn, 1) + assert dvn == 1 dvn = dv([2, 3]) - assert_equal(sorted(dvn), [(2, 1), (3, 1)]) + assert sorted(dvn) == [(2, 1), (3, 1)] def test_getitem(self): dv = self.dview(self.G) - assert_equal(dv[0], 1) - assert_equal(dv[1], 2) - assert_equal(dv[2], 1) - assert_equal(dv[3], 1) - dv = self.dview(self.G, weight='foo') - assert_equal(dv[0], 1) - assert_equal(dv[1], 4) - assert_equal(dv[2], 1) - assert_equal(dv[3], 1) + assert dv[0] == 1 + assert dv[1] == 2 + assert dv[2] == 1 + assert dv[3] == 1 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 4 + assert dv[2] == 1 + assert dv[3] == 1 def test_weight(self): dv = self.dview(self.G) - dvw = dv(0, weight='foo') - assert_equal(dvw, 1) - dvw = dv(1, weight='foo') - assert_equal(dvw, 4) - dvw = dv([2, 3], weight='foo') - assert_equal(sorted(dvw), [(2, 1), (3, 1)]) - dvd = dict(dv(weight='foo')) - assert_equal(dvd[0], 1) - assert_equal(dvd[1], 4) - assert_equal(dvd[2], 1) - assert_equal(dvd[3], 1) + dvw = dv(0, weight="foo") + assert dvw == 1 + 
dvw = dv(1, weight="foo") + assert dvw == 4 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 1)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 4 + assert dvd[2] == 1 + assert dvd[3] == 1 class TestInDegreeView(TestDegreeView): @@ -920,47 +1135,47 @@ class TestInDegreeView(TestDegreeView): def test_str(self): dv = self.dview(self.G) rep = str([(0, 0), (1, 1), (2, 1), (3, 2), (4, 1), (5, 1)]) - assert_equal(str(dv), rep) + assert str(dv) == rep dv = self.G.in_degree() - assert_equal(str(dv), rep) + assert str(dv) == rep def test_repr(self): dv = self.G.in_degree() rep = "InDegreeView({0: 0, 1: 1, 2: 1, 3: 2, 4: 1, 5: 1})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep def test_nbunch(self): dv = self.dview(self.G) dvn = dv(0) - assert_equal(dvn, 0) + assert dvn == 0 dvn = dv([2, 3]) - assert_equal(sorted(dvn), [(2, 1), (3, 2)]) + assert sorted(dvn) == [(2, 1), (3, 2)] def test_getitem(self): dv = self.dview(self.G) - assert_equal(dv[0], 0) - assert_equal(dv[1], 1) - assert_equal(dv[2], 1) - assert_equal(dv[3], 2) - dv = self.dview(self.G, weight='foo') - assert_equal(dv[0], 0) - assert_equal(dv[1], 1) - assert_equal(dv[2], 1) - assert_equal(dv[3], 4) + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 2 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 4 def test_weight(self): dv = self.dview(self.G) - dvw = dv(0, weight='foo') - assert_equal(dvw, 0) - dvw = dv(1, weight='foo') - assert_equal(dvw, 1) - dvw = dv([2, 3], weight='foo') - assert_equal(sorted(dvw), [(2, 1), (3, 4)]) - dvd = dict(dv(weight='foo')) - assert_equal(dvd[0], 0) - assert_equal(dvd[1], 1) - assert_equal(dvd[2], 1) - assert_equal(dvd[3], 4) + dvw = dv(0, weight="foo") + assert dvw == 0 + dvw = dv(1, weight="foo") + assert dvw == 1 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 4)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 0 + assert dvd[1] == 1 + assert dvd[2] == 1 + assert dvd[3] == 4 class TestMultiDegreeView(TestDegreeView): @@ -970,47 +1185,47 @@ class TestMultiDegreeView(TestDegreeView): def test_str(self): dv = self.dview(self.G) rep = str([(0, 1), (1, 4), (2, 2), (3, 4), (4, 2), (5, 1)]) - assert_equal(str(dv), rep) + assert str(dv) == rep dv = self.G.degree() - assert_equal(str(dv), rep) + assert str(dv) == rep def test_repr(self): dv = self.G.degree() rep = "MultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep def test_nbunch(self): dv = self.dview(self.G) dvn = dv(0) - assert_equal(dvn, 1) + assert dvn == 1 dvn = dv([2, 3]) - assert_equal(sorted(dvn), [(2, 2), (3, 4)]) + assert sorted(dvn) == [(2, 2), (3, 4)] def test_getitem(self): dv = self.dview(self.G) - assert_equal(dv[0], 1) - assert_equal(dv[1], 4) - assert_equal(dv[2], 2) - assert_equal(dv[3], 4) - dv = self.dview(self.G, weight='foo') - assert_equal(dv[0], 1) - assert_equal(dv[1], 7) - assert_equal(dv[2], 2) - assert_equal(dv[3], 7) + assert dv[0] == 1 + assert dv[1] == 4 + assert dv[2] == 2 + assert dv[3] == 4 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 7 + assert dv[2] == 2 + assert dv[3] == 7 def test_weight(self): dv = self.dview(self.G) - dvw = dv(0, weight='foo') - assert_equal(dvw, 1) - dvw = dv(1, weight='foo') - assert_equal(dvw, 7) - dvw = dv([2, 3], weight='foo') - assert_equal(sorted(dvw), [(2, 2), (3, 7)]) - dvd = dict(dv(weight='foo')) - assert_equal(dvd[0], 1) - 
assert_equal(dvd[1], 7) - assert_equal(dvd[2], 2) - assert_equal(dvd[3], 7) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 7 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 2), (3, 7)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 7 + assert dvd[2] == 2 + assert dvd[3] == 7 class TestDiMultiDegreeView(TestMultiDegreeView): @@ -1020,7 +1235,7 @@ class TestDiMultiDegreeView(TestMultiDegreeView): def test_repr(self): dv = self.G.degree() rep = "DiMultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep class TestOutMultiDegreeView(TestDegreeView): @@ -1030,47 +1245,47 @@ class TestOutMultiDegreeView(TestDegreeView): def test_str(self): dv = self.dview(self.G) rep = str([(0, 1), (1, 3), (2, 1), (3, 1), (4, 1), (5, 0)]) - assert_equal(str(dv), rep) + assert str(dv) == rep dv = self.G.out_degree() - assert_equal(str(dv), rep) + assert str(dv) == rep def test_repr(self): dv = self.G.out_degree() rep = "OutMultiDegreeView({0: 1, 1: 3, 2: 1, 3: 1, 4: 1, 5: 0})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep def test_nbunch(self): dv = self.dview(self.G) dvn = dv(0) - assert_equal(dvn, 1) + assert dvn == 1 dvn = dv([2, 3]) - assert_equal(sorted(dvn), [(2, 1), (3, 1)]) + assert sorted(dvn) == [(2, 1), (3, 1)] def test_getitem(self): dv = self.dview(self.G) - assert_equal(dv[0], 1) - assert_equal(dv[1], 3) - assert_equal(dv[2], 1) - assert_equal(dv[3], 1) - dv = self.dview(self.G, weight='foo') - assert_equal(dv[0], 1) - assert_equal(dv[1], 6) - assert_equal(dv[2], 1) - assert_equal(dv[3], 1) + assert dv[0] == 1 + assert dv[1] == 3 + assert dv[2] == 1 + assert dv[3] == 1 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 6 + assert dv[2] == 1 + assert dv[3] == 1 def test_weight(self): dv = self.dview(self.G) - dvw = dv(0, weight='foo') - assert_equal(dvw, 1) - dvw = dv(1, weight='foo') - assert_equal(dvw, 6) - dvw = dv([2, 3], weight='foo') - assert_equal(sorted(dvw), [(2, 1), (3, 1)]) - dvd = dict(dv(weight='foo')) - assert_equal(dvd[0], 1) - assert_equal(dvd[1], 6) - assert_equal(dvd[2], 1) - assert_equal(dvd[3], 1) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 6 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 1)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 6 + assert dvd[2] == 1 + assert dvd[3] == 1 class TestInMultiDegreeView(TestDegreeView): @@ -1080,44 +1295,44 @@ class TestInMultiDegreeView(TestDegreeView): def test_str(self): dv = self.dview(self.G) rep = str([(0, 0), (1, 1), (2, 1), (3, 3), (4, 1), (5, 1)]) - assert_equal(str(dv), rep) + assert str(dv) == rep dv = self.G.in_degree() - assert_equal(str(dv), rep) + assert str(dv) == rep def test_repr(self): dv = self.G.in_degree() rep = "InMultiDegreeView({0: 0, 1: 1, 2: 1, 3: 3, 4: 1, 5: 1})" - assert_equal(repr(dv), rep) + assert repr(dv) == rep def test_nbunch(self): dv = self.dview(self.G) dvn = dv(0) - assert_equal(dvn, 0) + assert dvn == 0 dvn = dv([2, 3]) - assert_equal(sorted(dvn), [(2, 1), (3, 3)]) + assert sorted(dvn) == [(2, 1), (3, 3)] def test_getitem(self): dv = self.dview(self.G) - assert_equal(dv[0], 0) - assert_equal(dv[1], 1) - assert_equal(dv[2], 1) - assert_equal(dv[3], 3) - dv = self.dview(self.G, weight='foo') - assert_equal(dv[0], 0) - assert_equal(dv[1], 1) - assert_equal(dv[2], 1) - assert_equal(dv[3], 6) + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 
1 + assert dv[3] == 3 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 6 def test_weight(self): dv = self.dview(self.G) - dvw = dv(0, weight='foo') - assert_equal(dvw, 0) - dvw = dv(1, weight='foo') - assert_equal(dvw, 1) - dvw = dv([2, 3], weight='foo') - assert_equal(sorted(dvw), [(2, 1), (3, 6)]) - dvd = dict(dv(weight='foo')) - assert_equal(dvd[0], 0) - assert_equal(dvd[1], 1) - assert_equal(dvd[2], 1) - assert_equal(dvd[3], 6) + dvw = dv(0, weight="foo") + assert dvw == 0 + dvw = dv(1, weight="foo") + assert dvw == 1 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 6)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 0 + assert dvd[1] == 1 + assert dvd[2] == 1 + assert dvd[3] == 6 diff --git a/networkx/classes/tests/test_special.py b/networkx/classes/tests/test_special.py index 67f71ae..cb0142e 100644 --- a/networkx/classes/tests/test_special.py +++ b/networkx/classes/tests/test_special.py @@ -1,43 +1,86 @@ -#!/usr/bin/env python -from nose.tools import * from collections import OrderedDict import networkx as nx -from test_graph import TestGraph -from test_digraph import TestDiGraph -from test_multigraph import TestMultiGraph -from test_multidigraph import TestMultiDiGraph - - -class SpecialGraphTester(TestGraph): - def setUp(self): - TestGraph.setUp(self) +from .test_graph import TestGraph as _TestGraph +from .test_graph import BaseGraphTester +from .test_digraph import TestDiGraph as _TestDiGraph +from .test_digraph import BaseDiGraphTester +from .test_multigraph import TestMultiGraph as _TestMultiGraph +from .test_multidigraph import TestMultiDiGraph as _TestMultiDiGraph + + +def test_factories(): + class mydict1(dict): + pass + + class mydict2(dict): + pass + + class mydict3(dict): + pass + + class mydict4(dict): + pass + + class mydict5(dict): + pass + + for Graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph): + # print("testing class: ", Graph.__name__) + class MyGraph(Graph): + node_dict_factory = mydict1 + adjlist_outer_dict_factory = mydict2 + adjlist_inner_dict_factory = mydict3 + edge_key_dict_factory = mydict4 + edge_attr_dict_factory = mydict5 + + G = MyGraph() + assert isinstance(G._node, mydict1) + assert isinstance(G._adj, mydict2) + G.add_node(1) + assert isinstance(G._adj[1], mydict3) + if G.is_directed(): + assert isinstance(G._pred, mydict2) + assert isinstance(G._succ, mydict2) + assert isinstance(G._pred[1], mydict3) + G.add_edge(1, 2) + if G.is_multigraph(): + assert isinstance(G._adj[1][2], mydict4) + assert isinstance(G._adj[1][2][0], mydict5) + else: + assert isinstance(G._adj[1][2], mydict5) + + +class TestSpecialGraph(_TestGraph): + def setup_method(self): + _TestGraph.setup_method(self) self.Graph = nx.Graph -class OrderedGraphTester(TestGraph): - def setUp(self): - TestGraph.setUp(self) +class TestOrderedGraph(_TestGraph): + def setup_method(self): + _TestGraph.setup_method(self) class MyGraph(nx.Graph): node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict + self.Graph = MyGraph -class ThinGraphTester(TestGraph): - def setUp(self): - all_edge_dict = {'weight': 1} +class TestThinGraph(BaseGraphTester): + def setup_method(self): + all_edge_dict = {"weight": 1} class MyGraph(nx.Graph): - def edge_attr_dict_factory(self): return all_edge_dict + def edge_attr_dict_factory(self): + return all_edge_dict + self.Graph = MyGraph # build dict-of-dict-of-dict K3 ed1, 
ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict) - self.k3adj = {0: {1: ed1, 2: ed2}, - 1: {0: ed1, 2: ed3}, - 2: {0: ed2, 1: ed3}} + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} self.k3edges = [(0, 1), (0, 2), (1, 2)] self.k3nodes = [0, 1, 2] self.K3 = self.Graph() @@ -48,55 +91,68 @@ def edge_attr_dict_factory(self): return all_edge_dict self.K3._node[2] = {} -class SpecialDiGraphTester(TestDiGraph): - def setUp(self): - TestDiGraph.setUp(self) +class TestSpecialDiGraph(_TestDiGraph): + def setup_method(self): + _TestDiGraph.setup_method(self) self.Graph = nx.DiGraph -class OrderedDiGraphTester(TestDiGraph): - def setUp(self): - TestGraph.setUp(self) +class TestOrderedDiGraph(_TestDiGraph): + def setup_method(self): + _TestDiGraph.setup_method(self) class MyGraph(nx.DiGraph): node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict + self.Graph = MyGraph -class ThinDiGraphTester(TestDiGraph): - def setUp(self): - all_edge_dict = {'weight': 1} +class TestThinDiGraph(BaseDiGraphTester): + def setup_method(self): + all_edge_dict = {"weight": 1} class MyGraph(nx.DiGraph): - def edge_attr_dict_factory(self): return all_edge_dict + def edge_attr_dict_factory(self): + return all_edge_dict + self.Graph = MyGraph # build dict-of-dict-of-dict K3 ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict) - self.k3adj = {0: {1: ed1, 2: ed2}, - 1: {0: ed1, 2: ed3}, - 2: {0: ed2, 1: ed3}} + ed4, ed5, ed6 = (all_edge_dict, all_edge_dict, all_edge_dict) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}} self.k3edges = [(0, 1), (0, 2), (1, 2)] self.k3nodes = [0, 1, 2] self.K3 = self.Graph() - self.K3.adj = self.k3adj + self.K3._adj = self.K3._succ = self.k3adj + self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}} self.K3._node = {} self.K3._node[0] = {} self.K3._node[1] = {} self.K3._node[2] = {} + ed1, ed2 = (all_edge_dict, all_edge_dict) + self.P3 = self.Graph() + self.P3._adj = {0: {1: ed1}, 1: {2: ed2}, 2: {}} + self.P3._succ = self.P3._adj + self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}} + self.P3._node = {} + self.P3._node[0] = {} + self.P3._node[1] = {} + self.P3._node[2] = {} + -class SpecialMultiGraphTester(TestMultiGraph): - def setUp(self): - TestMultiGraph.setUp(self) +class TestSpecialMultiGraph(_TestMultiGraph): + def setup_method(self): + _TestMultiGraph.setup_method(self) self.Graph = nx.MultiGraph -class OrderedMultiGraphTester(TestMultiGraph): - def setUp(self): - TestMultiGraph.setUp(self) +class TestOrderedMultiGraph(_TestMultiGraph): + def setup_method(self): + _TestMultiGraph.setup_method(self) class MyGraph(nx.MultiGraph): node_dict_factory = OrderedDict @@ -104,18 +160,19 @@ class MyGraph(nx.MultiGraph): adjlist_inner_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict + self.Graph = MyGraph -class SpecialMultiDiGraphTester(TestMultiDiGraph): - def setUp(self): - TestMultiDiGraph.setUp(self) +class TestSpecialMultiDiGraph(_TestMultiDiGraph): + def setup_method(self): + _TestMultiDiGraph.setup_method(self) self.Graph = nx.MultiDiGraph -class OrderedMultiDiGraphTester(TestMultiDiGraph): - def setUp(self): - TestMultiDiGraph.setUp(self) +class TestOrderedMultiDiGraph(_TestMultiDiGraph): + def setup_method(self): + _TestMultiDiGraph.setup_method(self) class MyGraph(nx.MultiDiGraph): node_dict_factory = OrderedDict @@ -123,4 +180,5 @@ class 
MyGraph(nx.MultiDiGraph): adjlist_inner_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict + self.Graph = MyGraph diff --git a/networkx/classes/tests/test_subgraphviews.py b/networkx/classes/tests/test_subgraphviews.py index d63f59b..bcfeea5 100644 --- a/networkx/classes/tests/test_subgraphviews.py +++ b/networkx/classes/tests/test_subgraphviews.py @@ -1,96 +1,100 @@ -from nose.tools import assert_equal, assert_not_equal, \ - assert_is, assert_true, assert_raises +import pytest import networkx as nx -class TestSubGraphView(object): - gview = nx.graphviews.SubGraph +class TestSubGraphView: + gview = staticmethod(nx.graphviews.subgraph_view) graph = nx.Graph hide_edges_filter = staticmethod(nx.filters.hide_edges) show_edges_filter = staticmethod(nx.filters.show_edges) - def setUp(self): - self.G = nx.path_graph(9, create_using=self.graph()) - self.hide_edges_w_hide_nodes = {(3, 4), (4, 5), (5, 6)} + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=cls.graph()) + cls.hide_edges_w_hide_nodes = {(3, 4), (4, 5), (5, 6)} def test_hidden_nodes(self): hide_nodes = [4, 5, 111] nodes_gone = nx.filters.hide_nodes(hide_nodes) - G = self.gview(self.G, filter_node=nodes_gone) - assert_equal(self.G.nodes - G.nodes, {4, 5}) - assert_equal(self.G.edges - G.edges, self.hide_edges_w_hide_nodes) + gview = self.gview + print(gview) + G = gview(self.G, filter_node=nodes_gone) + assert self.G.nodes - G.nodes == {4, 5} + assert self.G.edges - G.edges == self.hide_edges_w_hide_nodes if G.is_directed(): - assert_equal(list(G[3]), []) - assert_equal(list(G[2]), [3]) + assert list(G[3]) == [] + assert list(G[2]) == [3] else: - assert_equal(list(G[3]), [2]) - assert_equal(set(G[2]), {1, 3}) - assert_raises(KeyError, G.__getitem__, 4) - assert_raises(KeyError, G.__getitem__, 112) - assert_raises(KeyError, G.__getitem__, 111) - assert_equal(G.degree(3), 3 if G.is_multigraph() else 1) - assert_equal(G.size(), 7 if G.is_multigraph() else 5) + assert list(G[3]) == [2] + assert set(G[2]) == {1, 3} + pytest.raises(KeyError, G.__getitem__, 4) + pytest.raises(KeyError, G.__getitem__, 112) + pytest.raises(KeyError, G.__getitem__, 111) + assert G.degree(3) == (3 if G.is_multigraph() else 1) + assert G.size() == (7 if G.is_multigraph() else 5) def test_hidden_edges(self): hide_edges = [(2, 3), (8, 7), (222, 223)] edges_gone = self.hide_edges_filter(hide_edges) - G = self.gview(self.G, filter_edge=edges_gone) - assert_equal(self.G.nodes, G.nodes) + gview = self.gview + G = gview(self.G, filter_edge=edges_gone) + assert self.G.nodes == G.nodes if G.is_directed(): - assert_equal(self.G.edges - G.edges, {(2, 3)}) - assert_equal(list(G[2]), []) - assert_equal(list(G.pred[3]), []) - assert_equal(list(G.pred[2]), [1]) - assert_equal(G.size(), 7) + assert self.G.edges - G.edges == {(2, 3)} + assert list(G[2]) == [] + assert list(G.pred[3]) == [] + assert list(G.pred[2]) == [1] + assert G.size() == 7 else: - assert_equal(self.G.edges - G.edges, {(2, 3), (7, 8)}) - assert_equal(list(G[2]), [1]) - assert_equal(G.size(), 6) - assert_equal(list(G[3]), [4]) - assert_raises(KeyError, G.__getitem__, 221) - assert_raises(KeyError, G.__getitem__, 222) - assert_equal(G.degree(3), 1) + assert self.G.edges - G.edges == {(2, 3), (7, 8)} + assert list(G[2]) == [1] + assert G.size() == 6 + assert list(G[3]) == [4] + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + assert G.degree(3) == 1 def test_shown_node(self): induced_subgraph = 
nx.filters.show_nodes([2, 3, 111]) - G = self.gview(self.G, filter_node=induced_subgraph) - assert_equal(set(G.nodes), {2, 3}) + gview = self.gview + G = gview(self.G, filter_node=induced_subgraph) + assert set(G.nodes) == {2, 3} if G.is_directed(): - assert_equal(list(G[3]), []) + assert list(G[3]) == [] else: - assert_equal(list(G[3]), [2]) - assert_equal(list(G[2]), [3]) - assert_raises(KeyError, G.__getitem__, 4) - assert_raises(KeyError, G.__getitem__, 112) - assert_raises(KeyError, G.__getitem__, 111) - assert_equal(G.degree(3), 3 if G.is_multigraph() else 1) - assert_equal(G.size(), 3 if G.is_multigraph() else 1) + assert list(G[3]) == [2] + assert list(G[2]) == [3] + pytest.raises(KeyError, G.__getitem__, 4) + pytest.raises(KeyError, G.__getitem__, 112) + pytest.raises(KeyError, G.__getitem__, 111) + assert G.degree(3) == (3 if G.is_multigraph() else 1) + assert G.size() == (3 if G.is_multigraph() else 1) def test_shown_edges(self): show_edges = [(2, 3), (8, 7), (222, 223)] edge_subgraph = self.show_edges_filter(show_edges) G = self.gview(self.G, filter_edge=edge_subgraph) - assert_equal(self.G.nodes, G.nodes) + assert self.G.nodes == G.nodes if G.is_directed(): - assert_equal(G.edges, {(2, 3)}) - assert_equal(list(G[3]), []) - assert_equal(list(G[2]), [3]) - assert_equal(list(G.pred[3]), [2]) - assert_equal(list(G.pred[2]), []) - assert_equal(G.size(), 1) + assert G.edges == {(2, 3)} + assert list(G[3]) == [] + assert list(G[2]) == [3] + assert list(G.pred[3]) == [2] + assert list(G.pred[2]) == [] + assert G.size() == 1 else: - assert_equal(G.edges, {(2, 3), (7, 8)}) - assert_equal(list(G[3]), [2]) - assert_equal(list(G[2]), [3]) - assert_equal(G.size(), 2) - assert_raises(KeyError, G.__getitem__, 221) - assert_raises(KeyError, G.__getitem__, 222) - assert_equal(G.degree(3), 1) + assert G.edges == {(2, 3), (7, 8)} + assert list(G[3]) == [2] + assert list(G[2]) == [3] + assert G.size() == 2 + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + assert G.degree(3) == 1 class TestSubDiGraphView(TestSubGraphView): - gview = nx.graphviews.SubDiGraph + gview = staticmethod(nx.graphviews.subgraph_view) graph = nx.DiGraph hide_edges_filter = staticmethod(nx.filters.hide_diedges) show_edges_filter = staticmethod(nx.filters.show_diedges) @@ -103,8 +107,8 @@ def test_inoutedges(self): nodes_gone = nx.filters.hide_nodes(hide_nodes) G = self.gview(self.G, nodes_gone, edges_gone) - assert_equal(self.G.in_edges - G.in_edges, self.excluded) - assert_equal(self.G.out_edges - G.out_edges, self.excluded) + assert self.G.in_edges - G.in_edges == self.excluded + assert self.G.out_edges - G.out_edges == self.excluded def test_pred(self): edges_gone = self.hide_edges_filter(self.hide_edges) @@ -112,8 +116,8 @@ def test_pred(self): nodes_gone = nx.filters.hide_nodes(hide_nodes) G = self.gview(self.G, nodes_gone, edges_gone) - assert_equal(list(G.pred[2]), [1]) - assert_equal(list(G.pred[6]), []) + assert list(G.pred[2]) == [1] + assert list(G.pred[6]) == [] def test_inout_degree(self): edges_gone = self.hide_edges_filter(self.hide_edges) @@ -121,70 +125,71 @@ def test_inout_degree(self): nodes_gone = nx.filters.hide_nodes(hide_nodes) G = self.gview(self.G, nodes_gone, edges_gone) - assert_equal(G.degree(2), 1) - assert_equal(G.out_degree(2), 0) - assert_equal(G.in_degree(2), 1) - assert_equal(G.size(), 4) + assert G.degree(2) == 1 + assert G.out_degree(2) == 0 + assert G.in_degree(2) == 1 + assert G.size() == 4 # multigraph class 
TestMultiGraphView(TestSubGraphView): - gview = nx.graphviews.SubMultiGraph + gview = staticmethod(nx.graphviews.subgraph_view) graph = nx.MultiGraph hide_edges_filter = staticmethod(nx.filters.hide_multiedges) show_edges_filter = staticmethod(nx.filters.show_multiedges) - def setUp(self): - self.G = nx.path_graph(9, create_using=self.graph()) + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=cls.graph()) multiedges = {(2, 3, 4), (2, 3, 5)} - self.G.add_edges_from(multiedges) - self.hide_edges_w_hide_nodes = {(3, 4, 0), (4, 5, 0), (5, 6, 0)} + cls.G.add_edges_from(multiedges) + cls.hide_edges_w_hide_nodes = {(3, 4, 0), (4, 5, 0), (5, 6, 0)} def test_hidden_edges(self): hide_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)] edges_gone = self.hide_edges_filter(hide_edges) G = self.gview(self.G, filter_edge=edges_gone) - assert_equal(self.G.nodes, G.nodes) + assert self.G.nodes == G.nodes if G.is_directed(): - assert_equal(self.G.edges - G.edges, {(2, 3, 4)}) - assert_equal(list(G[3]), [4]) - assert_equal(list(G[2]), [3]) - assert_equal(list(G.pred[3]), [2]) # only one 2 but two edges - assert_equal(list(G.pred[2]), [1]) - assert_equal(G.size(), 9) + assert self.G.edges - G.edges == {(2, 3, 4)} + assert list(G[3]) == [4] + assert list(G[2]) == [3] + assert list(G.pred[3]) == [2] # only one 2 but two edges + assert list(G.pred[2]) == [1] + assert G.size() == 9 else: - assert_equal(self.G.edges - G.edges, {(2, 3, 4), (7, 8, 0)}) - assert_equal(list(G[3]), [2, 4]) - assert_equal(list(G[2]), [1, 3]) - assert_equal(G.size(), 8) - assert_equal(G.degree(3), 3) - assert_raises(KeyError, G.__getitem__, 221) - assert_raises(KeyError, G.__getitem__, 222) + assert self.G.edges - G.edges == {(2, 3, 4), (7, 8, 0)} + assert list(G[3]) == [2, 4] + assert list(G[2]) == [1, 3] + assert G.size() == 8 + assert G.degree(3) == 3 + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) def test_shown_edges(self): show_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)] edge_subgraph = self.show_edges_filter(show_edges) G = self.gview(self.G, filter_edge=edge_subgraph) - assert_equal(self.G.nodes, G.nodes) + assert self.G.nodes == G.nodes if G.is_directed(): - assert_equal(G.edges, {(2, 3, 4)}) - assert_equal(list(G[3]), []) - assert_equal(list(G.pred[3]), [2]) - assert_equal(list(G.pred[2]), []) - assert_equal(G.size(), 1) + assert G.edges == {(2, 3, 4)} + assert list(G[3]) == [] + assert list(G.pred[3]) == [2] + assert list(G.pred[2]) == [] + assert G.size() == 1 else: - assert_equal(G.edges, {(2, 3, 4), (7, 8, 0)}) - assert_equal(G.size(), 2) - assert_equal(list(G[3]), [2]) - assert_equal(G.degree(3), 1) - assert_equal(list(G[2]), [3]) - assert_raises(KeyError, G.__getitem__, 221) - assert_raises(KeyError, G.__getitem__, 222) + assert G.edges == {(2, 3, 4), (7, 8, 0)} + assert G.size() == 2 + assert list(G[3]) == [2] + assert G.degree(3) == 1 + assert list(G[2]) == [3] + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) # multidigraph class TestMultiDiGraphView(TestMultiGraphView, TestSubDiGraphView): - gview = nx.graphviews.SubMultiDiGraph + gview = staticmethod(nx.graphviews.subgraph_view) graph = nx.MultiDiGraph hide_edges_filter = staticmethod(nx.filters.hide_multidiedges) show_edges_filter = staticmethod(nx.filters.show_multidiedges) @@ -197,18 +202,19 @@ def test_inout_degree(self): nodes_gone = nx.filters.hide_nodes(hide_nodes) G = self.gview(self.G, nodes_gone, edges_gone) - 
assert_equal(G.degree(2), 3) - assert_equal(G.out_degree(2), 2) - assert_equal(G.in_degree(2), 1) - assert_equal(G.size(), 6) + assert G.degree(2) == 3 + assert G.out_degree(2) == 2 + assert G.in_degree(2) == 1 + assert G.size() == 6 # induced_subgraph -class TestInducedSubGraph(object): - def setUp(self): - self.K3 = G = nx.complete_graph(3) - G.graph['foo'] = [] - G.nodes[0]['foo'] = [] +class TestInducedSubGraph: + @classmethod + def setup_class(cls): + cls.K3 = G = nx.complete_graph(3) + G.graph["foo"] = [] + G.nodes[0]["foo"] = [] G.remove_edge(1, 2) ll = [] G.add_edge(1, 2, foo=ll) @@ -217,39 +223,39 @@ def setUp(self): def test_full_graph(self): G = self.K3 H = nx.induced_subgraph(G, [0, 1, 2, 5]) - assert_equal(H.name, G.name) + assert H.name == G.name self.graphs_equal(H, G) self.same_attrdict(H, G) def test_partial_subgraph(self): G = self.K3 H = nx.induced_subgraph(G, 0) - assert_equal(dict(H.adj), {0: {}}) - assert_not_equal(dict(G.adj), {0: {}}) + assert dict(H.adj) == {0: {}} + assert dict(G.adj) != {0: {}} H = nx.induced_subgraph(G, [0, 1]) - assert_equal(dict(H.adj), {0: {1: {}}, 1: {0: {}}}) + assert dict(H.adj) == {0: {1: {}}, 1: {0: {}}} def same_attrdict(self, H, G): - old_foo = H[1][2]['foo'] - H.edges[1, 2]['foo'] = 'baz' - assert_equal(G.edges, H.edges) - H.edges[1, 2]['foo'] = old_foo - assert_equal(G.edges, H.edges) - old_foo = H.nodes[0]['foo'] - H.nodes[0]['foo'] = 'baz' - assert_equal(G.nodes, H.nodes) - H.nodes[0]['foo'] = old_foo - assert_equal(G.nodes, H.nodes) + old_foo = H[1][2]["foo"] + H.edges[1, 2]["foo"] = "baz" + assert G.edges == H.edges + H.edges[1, 2]["foo"] = old_foo + assert G.edges == H.edges + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G.nodes == H.nodes + H.nodes[0]["foo"] = old_foo + assert G.nodes == H.nodes def graphs_equal(self, H, G): - assert_equal(G._adj, H._adj) - assert_equal(G._node, H._node) - assert_equal(G.graph, H.graph) - assert_equal(G.name, H.name) + assert G._adj == H._adj + assert G._node == H._node + assert G.graph == H.graph + assert G.name == H.name if not G.is_directed() and not H.is_directed(): - assert_true(H._adj[1][2] is H._adj[2][1]) - assert_true(G._adj[1][2] is G._adj[2][1]) + assert H._adj[1][2] is H._adj[2][1] + assert G._adj[1][2] is G._adj[2][1] else: # at least one is directed if not G.is_directed(): G._pred = G._adj @@ -257,34 +263,34 @@ def graphs_equal(self, H, G): if not H.is_directed(): H._pred = H._adj H._succ = H._adj - assert_equal(G._pred, H._pred) - assert_equal(G._succ, H._succ) - assert_true(H._succ[1][2] is H._pred[2][1]) - assert_true(G._succ[1][2] is G._pred[2][1]) + assert G._pred == H._pred + assert G._succ == H._succ + assert H._succ[1][2] is H._pred[2][1] + assert G._succ[1][2] is G._pred[2][1] # edge_subgraph -class TestEdgeSubGraph(object): - def setup(self): +class TestEdgeSubGraph: + @classmethod + def setup_class(cls): # Create a path graph on five nodes. - self.G = G = nx.path_graph(5) + cls.G = G = nx.path_graph(5) # Add some node, edge, and graph attributes. for i in range(5): - G.nodes[i]['name'] = 'node{}'.format(i) - G.edges[0, 1]['name'] = 'edge01' - G.edges[3, 4]['name'] = 'edge34' - G.graph['name'] = 'graph' + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" # Get the subgraph induced by the first and last edges. 
- self.H = nx.edge_subgraph(G, [(0, 1), (3, 4)]) + cls.H = nx.edge_subgraph(G, [(0, 1), (3, 4)]) def test_correct_nodes(self): """Tests that the subgraph has the correct nodes.""" - assert_equal([0, 1, 3, 4], sorted(self.H.nodes)) + assert [0, 1, 3, 4] == sorted(self.H.nodes) def test_correct_edges(self): """Tests that the subgraph has the correct edges.""" - assert_equal([(0, 1, 'edge01'), (3, 4, 'edge34')], - sorted(self.H.edges(data='name'))) + assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name")) def test_add_node(self): """Tests that adding a node to the original graph does not @@ -292,7 +298,7 @@ def test_add_node(self): """ self.G.add_node(5) - assert_equal([0, 1, 3, 4], sorted(self.H.nodes)) + assert [0, 1, 3, 4] == sorted(self.H.nodes) self.G.remove_node(5) def test_remove_node(self): @@ -301,7 +307,7 @@ def test_remove_node(self): """ self.G.remove_node(0) - assert_equal([1, 3, 4], sorted(self.H.nodes)) + assert [1, 3, 4] == sorted(self.H.nodes) self.G.add_edge(0, 1) def test_node_attr_dict(self): @@ -310,12 +316,12 @@ def test_node_attr_dict(self): """ for v in self.H: - assert_equal(self.G.nodes[v], self.H.nodes[v]) + assert self.G.nodes[v] == self.H.nodes[v] # Making a change to G should make a change in H and vice versa. - self.G.nodes[0]['name'] = 'foo' - assert_equal(self.G.nodes[0], self.H.nodes[0]) - self.H.nodes[1]['name'] = 'bar' - assert_equal(self.G.nodes[1], self.H.nodes[1]) + self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] def test_edge_attr_dict(self): """Tests that the edge attribute dictionary of the two graphs is @@ -323,25 +329,23 @@ def test_edge_attr_dict(self): """ for u, v in self.H.edges(): - assert_equal(self.G.edges[u, v], self.H.edges[u, v]) + assert self.G.edges[u, v] == self.H.edges[u, v] # Making a change to G should make a change in H and vice versa. - self.G.edges[0, 1]['name'] = 'foo' - assert_equal(self.G.edges[0, 1]['name'], - self.H.edges[0, 1]['name']) - self.H.edges[3, 4]['name'] = 'bar' - assert_equal(self.G.edges[3, 4]['name'], - self.H.edges[3, 4]['name']) + self.G.edges[0, 1]["name"] = "foo" + assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"] + self.H.edges[3, 4]["name"] = "bar" + assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"] def test_graph_attr_dict(self): """Tests that the graph attribute dictionary of the two graphs is the same object. 
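The edge_subgraph tests above depend on the view semantics used throughout this file: the subgraph is a live, read-only window onto the parent graph, not a copy. A minimal sketch of that behavior, assuming networkx 2.5:

import networkx as nx

G = nx.path_graph(5)
H = nx.edge_subgraph(G, [(0, 1), (3, 4)])
assert sorted(H.nodes) == [0, 1, 3, 4]
G.add_node(5)      # H tracks G but keeps only the induced node set
assert 5 not in H
G.remove_node(5)
# H is read-only: H.add_node(6) or H.remove_edge(0, 1) would raise
# nx.NetworkXError, as test_readonly below verifies.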
""" - assert_is(self.G.graph, self.H.graph) + assert self.G.graph is self.H.graph def test_readonly(self): """Tests that the subgraph cannot change the graph structure""" - assert_raises(nx.NetworkXError, self.H.add_node, 5) - assert_raises(nx.NetworkXError, self.H.remove_node, 0) - assert_raises(nx.NetworkXError, self.H.add_edge, 5, 6) - assert_raises(nx.NetworkXError, self.H.remove_edge, 0, 1) + pytest.raises(nx.NetworkXError, self.H.add_node, 5) + pytest.raises(nx.NetworkXError, self.H.remove_node, 0) + pytest.raises(nx.NetworkXError, self.H.add_edge, 5, 6) + pytest.raises(nx.NetworkXError, self.H.remove_edge, 0, 1) diff --git a/networkx/conftest.py b/networkx/conftest.py new file mode 100644 index 0000000..37aa1a9 --- /dev/null +++ b/networkx/conftest.py @@ -0,0 +1,200 @@ +import pytest +import networkx +import sys +import warnings + + +def pytest_addoption(parser): + parser.addoption( + "--runslow", action="store_true", default=False, help="run slow tests" + ) + + +def pytest_configure(config): + config.addinivalue_line("markers", "slow: mark test as slow to run") + + +def pytest_collection_modifyitems(config, items): + if config.getoption("--runslow"): + # --runslow given in cli: do not skip slow tests + return + skip_slow = pytest.mark.skip(reason="need --runslow option to run") + for item in items: + if "slow" in item.keywords: + item.add_marker(skip_slow) + + +# TODO: The warnings below need to be dealt with, but for now we silence them. +@pytest.fixture(autouse=True) +def set_warnings(): + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="literal_stringizer is deprecated*", + ) + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="literal_destringizer is deprecated*", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="is_string_like is deprecated*" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="make_str is deprecated*" + ) + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="context manager reversed is deprecated*", + ) + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message="This will return a generator in 3.0*", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="betweenness_centrality_source*", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="edge_betweeness*", + ) + warnings.filterwarnings( + "ignore", + category=PendingDeprecationWarning, + message="the matrix subclass is not the recommended way*", + ) + + +@pytest.fixture(autouse=True) +def add_nx(doctest_namespace): + doctest_namespace["nx"] = networkx + + +# What dependencies are installed? 
+ +try: + import numpy + + has_numpy = True +except ImportError: + has_numpy = False + +try: + import scipy + + has_scipy = True +except ImportError: + has_scipy = False + +try: + import matplotlib + + has_matplotlib = True +except ImportError: + has_matplotlib = False + +try: + import pandas + + has_pandas = True +except ImportError: + has_pandas = False + +try: + import pygraphviz + + has_pygraphviz = True +except ImportError: + has_pygraphviz = False + +try: + import yaml + + has_yaml = True +except ImportError: + has_yaml = False + +try: + import pydot + + has_pydot = True +except ImportError: + has_pydot = False + +try: + import ogr + + has_ogr = True +except ImportError: + has_ogr = False + + +# List of files that pytest should ignore + +collect_ignore = [] + +needs_numpy = [ + "algorithms/centrality/current_flow_closeness.py", + "algorithms/node_classification/__init__.py", + "algorithms/non_randomness.py", + "algorithms/shortest_paths/dense.py", + "linalg/bethehessianmatrix.py", + "linalg/laplacianmatrix.py", + "utils/misc.py", +] +needs_scipy = [ + "algorithms/assortativity/correlation.py", + "algorithms/assortativity/mixing.py", + "algorithms/assortativity/pairs.py", + "algorithms/bipartite/matrix.py", + "algorithms/bipartite/spectral.py", + "algorithms/centrality/current_flow_betweenness.py", + "algorithms/centrality/current_flow_betweenness_subset.py", + "algorithms/centrality/eigenvector.py", + "algorithms/centrality/katz.py", + "algorithms/centrality/second_order.py", + "algorithms/centrality/subgraph_alg.py", + "algorithms/communicability_alg.py", + "algorithms/link_analysis/hits_alg.py", + "algorithms/link_analysis/pagerank_alg.py", + "algorithms/node_classification/hmn.py", + "algorithms/node_classification/lgc.py", + "algorithms/similarity.py", + "convert_matrix.py", + "drawing/layout.py", + "generators/spectral_graph_forge.py", + "linalg/algebraicconnectivity.py", + "linalg/attrmatrix.py", + "linalg/graphmatrix.py", + "linalg/modularitymatrix.py", + "linalg/spectrum.py", + "utils/rcm.py", +] +needs_matplotlib = ["drawing/nx_pylab.py"] +needs_pandas = ["convert_matrix.py"] +needs_yaml = ["readwrite/nx_yaml.py"] +needs_pygraphviz = ["drawing/nx_agraph.py"] +needs_pydot = ["drawing/nx_pydot.py"] +needs_ogr = ["readwrite/nx_shp.py"] + +if not has_numpy: + collect_ignore += needs_numpy +if not has_scipy: + collect_ignore += needs_scipy +if not has_matplotlib: + collect_ignore += needs_matplotlib +if not has_pandas: + collect_ignore += needs_pandas +if not has_yaml: + collect_ignore += needs_yaml +if not has_pygraphviz: + collect_ignore += needs_pygraphviz +if not has_pydot: + collect_ignore += needs_pydot +if not has_ogr: + collect_ignore += needs_ogr + +# FIXME: This is to avoid errors on AppVeyor +if sys.platform.startswith("win"): + collect_ignore += ["readwrite/graph6.py", "readwrite/sparse6.py"] diff --git a/networkx/convert.py b/networkx/convert.py index 7a69289..c9cef12 100644 --- a/networkx/convert.py +++ b/networkx/convert.py @@ -8,45 +8,26 @@ -------- Create a graph with a single edge from a dictionary of dictionaries ->>> d={0: {1: 1}} # dict-of-dicts single edge (0,1) ->>> G=nx.Graph(d) +>>> d = {0: {1: 1}} # dict-of-dicts single edge (0,1) +>>> G = nx.Graph(d) See Also -------- nx_agraph, nx_pydot """ -# Copyright (C) 2006-2013 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
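The --runslow option and the "slow" marker registered in conftest.py above combine into the standard pytest opt-in pattern: slow tests are collected but skipped unless the flag is passed. A sketch of how a test would opt in (test_expensive_case is a hypothetical name):

import pytest

@pytest.mark.slow
def test_expensive_case():
    # Skipped by default; runs only when pytest is invoked with --runslow,
    # per the pytest_collection_modifyitems hook above.
    assert True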
import warnings import networkx as nx -__author__ = """\n""".join(['Aric Hagberg ', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult(dschult@colgate.edu)']) -__all__ = ['to_networkx_graph', - 'from_dict_of_dicts', 'to_dict_of_dicts', - 'from_dict_of_lists', 'to_dict_of_lists', - 'from_edgelist', 'to_edgelist'] +from collections.abc import Collection, Generator, Iterator - -def _prep_create_using(create_using): - """Return a graph object ready to be populated. - - If create_using is None return the default (just networkx.Graph()) - If create_using.clear() works, assume it returns a graph object. - Otherwise raise an exception because create_using is not a networkx graph. - - """ - if create_using is None: - return nx.Graph() - try: - create_using.clear() - except: - raise TypeError("Input graph is not a networkx graph type") - return create_using +__all__ = [ + "to_networkx_graph", + "from_dict_of_dicts", + "to_dict_of_dicts", + "from_dict_of_lists", + "to_dict_of_lists", + "from_edgelist", + "to_edgelist", +] def to_networkx_graph(data, create_using=None, multigraph_input=False): @@ -55,7 +36,7 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False): The preferred way to call this is automatically from the class constructor - >>> d = {0: {1: {'weight':1}}} # dict-of-dicts single edge (0,1) + >>> d = {0: {1: {"weight": 1}}} # dict-of-dicts single edge (0,1) >>> G = nx.Graph(d) instead of the equivalent @@ -66,120 +47,134 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False): ---------- data : object to be converted - Current known types are: + Current known types are: any NetworkX graph dict-of-dicts dict-of-lists - list of edges + container (e.g. set, list, tuple) of edges + iterator (e.g. itertools.chain) that produces edges + generator of edges Pandas DataFrame (row per edge) numpy matrix numpy ndarray scipy sparse matrix pygraphviz agraph - create_using : NetworkX graph - Use specified graph for result. Otherwise a new graph is created. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. multigraph_input : bool (default False) - If True and data is a dict_of_dicts, - try to create a multigraph assuming dict_of_dict_of_lists. - If data and create_using are both multigraphs then create - a multigraph from a multigraph. + If True and data is a dict_of_dicts, + try to create a multigraph assuming dict_of_dict_of_lists. + If data and create_using are both multigraphs then create + a multigraph from a multigraph. 
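Since to_networkx_graph() is what the Graph constructor delegates to, every input type it lists can also be passed straight to nx.Graph. A short sketch covering the dict and edge-container routes, assuming networkx 2.5 (where iterators of edges are newly accepted):

import itertools
import networkx as nx

G1 = nx.Graph({0: {1: {"weight": 1}}})              # dict-of-dicts
G2 = nx.Graph({0: [1, 2]})                          # dict-of-lists fallback
G3 = nx.Graph({(0, 1), (1, 2)})                     # container of edges
G4 = nx.Graph(itertools.chain([(0, 1)], [(1, 2)]))  # iterator of edges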
""" # NX graph if hasattr(data, "adj"): try: - result = from_dict_of_dicts(data.adj, - create_using=create_using, - multigraph_input=data.is_multigraph()) - if hasattr(data, 'graph'): # data.graph should be dict-like + result = from_dict_of_dicts( + data.adj, + create_using=create_using, + multigraph_input=data.is_multigraph(), + ) + if hasattr(data, "graph"): # data.graph should be dict-like result.graph.update(data.graph) - if hasattr(data, 'nodes'): # data.nodes should be dict-like - result._node.update((n, dd.copy()) for n, dd in data.nodes.items()) + if hasattr(data, "nodes"): # data.nodes should be dict-like + # result.add_node_from(data.nodes.items()) possible but + # for custom node_attr_dict_factory which may be hashable + # will be unexpected behavior + for n, dd in data.nodes.items(): + result._node[n].update(dd) return result - except: - raise nx.NetworkXError("Input is not a correct NetworkX graph.") + except Exception as e: + raise nx.NetworkXError("Input is not a correct NetworkX graph.") from e # pygraphviz agraph if hasattr(data, "is_strict"): try: return nx.nx_agraph.from_agraph(data, create_using=create_using) - except: - raise nx.NetworkXError("Input is not a correct pygraphviz graph.") + except Exception as e: + raise nx.NetworkXError("Input is not a correct pygraphviz graph.") from e # dict of dicts/lists if isinstance(data, dict): try: - return from_dict_of_dicts(data, create_using=create_using, - multigraph_input=multigraph_input) + return from_dict_of_dicts( + data, create_using=create_using, multigraph_input=multigraph_input + ) except: try: return from_dict_of_lists(data, create_using=create_using) - except: - raise TypeError("Input is not known type.") - - # list or generator of edges - - if (isinstance(data, (list, tuple)) or - any(hasattr(data, attr) for attr in ['_adjdict', 'next', '__next__'])): - try: - return from_edgelist(data, create_using=create_using) - except: - raise nx.NetworkXError("Input is not a valid edge list") + except Exception as e: + raise TypeError("Input is not known type.") from e # Pandas DataFrame try: import pandas as pd + if isinstance(data, pd.DataFrame): if data.shape[0] == data.shape[1]: try: return nx.from_pandas_adjacency(data, create_using=create_using) - except: + except Exception as e: msg = "Input is not a correct Pandas DataFrame adjacency matrix." - raise nx.NetworkXError(msg) + raise nx.NetworkXError(msg) from e else: try: - return nx.from_pandas_edgelist(data, edge_attr=True, create_using=create_using) - except: + return nx.from_pandas_edgelist( + data, edge_attr=True, create_using=create_using + ) + except Exception as e: msg = "Input is not a correct Pandas DataFrame edge-list." - raise nx.NetworkXError(msg) + raise nx.NetworkXError(msg) from e except ImportError: - msg = 'pandas not found, skipping conversion test.' + msg = "pandas not found, skipping conversion test." warnings.warn(msg, ImportWarning) # numpy matrix or ndarray try: import numpy + if isinstance(data, (numpy.matrix, numpy.ndarray)): try: return nx.from_numpy_matrix(data, create_using=create_using) - except: + except Exception as e: raise nx.NetworkXError( - "Input is not a correct numpy matrix or array.") + "Input is not a correct numpy matrix or array." 
+ ) from e except ImportError: - warnings.warn('numpy not found, skipping conversion test.', - ImportWarning) + warnings.warn("numpy not found, skipping conversion test.", ImportWarning) # scipy sparse matrix - any format try: import scipy + if hasattr(data, "format"): try: return nx.from_scipy_sparse_matrix(data, create_using=create_using) - except: + except Exception as e: raise nx.NetworkXError( - "Input is not a correct scipy sparse matrix type.") + "Input is not a correct scipy sparse matrix type." + ) from e except ImportError: - warnings.warn('scipy not found, skipping conversion test.', - ImportWarning) + warnings.warn("scipy not found, skipping conversion test.", ImportWarning) + + # Note: most general check - should remain last in order of execution + # Includes containers (e.g. list, set, dict, etc.), generators, and + # iterators (e.g. itertools.chain) of edges + + if isinstance(data, (Collection, Generator, Iterator)): + try: + return from_edgelist(data, create_using=create_using) + except Exception as e: + raise nx.NetworkXError("Input is not a valid edge list") from e - raise nx.NetworkXError( - "Input is not a known data type for conversion.") + raise nx.NetworkXError("Input is not a known data type for conversion.") def to_dict_of_lists(G, nodelist=None): - """Return adjacency representation of graph as a dictionary of lists. + """Returns adjacency representation of graph as a dictionary of lists. Parameters ---------- @@ -204,27 +199,27 @@ def to_dict_of_lists(G, nodelist=None): def from_dict_of_lists(d, create_using=None): - """Return a graph from a dictionary of lists. + """Returns a graph from a dictionary of lists. Parameters ---------- d : dictionary of lists A dictionary of lists adjacency representation. - create_using : NetworkX graph - Use specified graph for result. Otherwise a new graph is created. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Examples -------- - >>> dol = {0: [1]} # single edge (0,1) + >>> dol = {0: [1]} # single edge (0,1) >>> G = nx.from_dict_of_lists(dol) or - >>> G = nx.Graph(dol) # use Graph constructor + >>> G = nx.Graph(dol) # use Graph constructor """ - G = _prep_create_using(create_using) + G = nx.empty_graph(0, create_using) G.add_nodes_from(d) if G.is_multigraph() and not G.is_directed(): # a dict_of_lists can't show multiedges. BUT for undirected graphs, @@ -237,13 +232,14 @@ def from_dict_of_lists(d, create_using=None): G.add_edge(node, nbr) seen[node] = 1 # don't allow reverse edge to show up else: - G.add_edges_from(((node, nbr) for node, nbrlist in d.items() - for nbr in nbrlist)) + G.add_edges_from( + ((node, nbr) for node, nbrlist in d.items() for nbr in nbrlist) + ) return G def to_dict_of_dicts(G, nodelist=None, edge_data=None): - """Return adjacency representation of graph as a dictionary of dictionaries. + """Returns adjacency representation of graph as a dictionary of dictionaries. Parameters ---------- @@ -283,15 +279,15 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): def from_dict_of_dicts(d, create_using=None, multigraph_input=False): - """Return a graph from a dictionary of dictionaries. + """Returns a graph from a dictionary of dictionaries. Parameters ---------- d : dictionary of dictionaries A dictionary of dictionaries adjacency representation. - create_using : NetworkX graph - Use specified graph for result. Otherwise a new graph is created. 
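The edge-list branch is deliberately the last check in to_networkx_graph(), as the comment above notes: dicts, DataFrames, and numpy arrays are themselves Collections, so the more specific branches must run first. A sketch of the distinction, using only the standard library:

from collections.abc import Collection, Generator, Iterator

assert isinstance([(0, 1)], Collection)              # list of edges
assert isinstance(iter([(0, 1)]), Iterator)          # plain iterator
assert isinstance((e for e in [(0, 1)]), Generator)  # generator
assert isinstance({0: {1: {}}}, Collection)          # dicts match too, which
# is why the dict-of-dicts branch has to run before this general check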
+ create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. multigraph_input : bool (default False) When True, the values of the inner dict are assumed @@ -300,46 +296,52 @@ def from_dict_of_dicts(d, create_using=None, multigraph_input=False): Examples -------- - >>> dod = {0: {1: {'weight': 1}}} # single edge (0,1) + >>> dod = {0: {1: {"weight": 1}}} # single edge (0,1) >>> G = nx.from_dict_of_dicts(dod) or - >>> G = nx.Graph(dod) # use Graph constructor + >>> G = nx.Graph(dod) # use Graph constructor """ - G = _prep_create_using(create_using) + G = nx.empty_graph(0, create_using) G.add_nodes_from(d) # is dict a MultiGraph or MultiDiGraph? if multigraph_input: # make a copy of the list of edge data (but not the edge data) if G.is_directed(): if G.is_multigraph(): - G.add_edges_from((u, v, key, data) - for u, nbrs in d.items() - for v, datadict in nbrs.items() - for key, data in datadict.items()) + G.add_edges_from( + (u, v, key, data) + for u, nbrs in d.items() + for v, datadict in nbrs.items() + for key, data in datadict.items() + ) else: - G.add_edges_from((u, v, data) - for u, nbrs in d.items() - for v, datadict in nbrs.items() - for key, data in datadict.items()) + G.add_edges_from( + (u, v, data) + for u, nbrs in d.items() + for v, datadict in nbrs.items() + for key, data in datadict.items() + ) else: # Undirected if G.is_multigraph(): - seen = set() # don't add both directions of undirected graph + seen = set() # don't add both directions of undirected graph for u, nbrs in d.items(): for v, datadict in nbrs.items(): if (u, v) not in seen: - G.add_edges_from((u, v, key, data) - for key, data in datadict.items()) + G.add_edges_from( + (u, v, key, data) for key, data in datadict.items() + ) seen.add((v, u)) else: - seen = set() # don't add both directions of undirected graph + seen = set() # don't add both directions of undirected graph for u, nbrs in d.items(): for v, datadict in nbrs.items(): if (u, v) not in seen: - G.add_edges_from((u, v, data) - for key, data in datadict.items()) + G.add_edges_from( + (u, v, data) for key, data in datadict.items() + ) seen.add((v, u)) else: # not a multigraph to multigraph transfer @@ -355,14 +357,14 @@ def from_dict_of_dicts(d, create_using=None, multigraph_input=False): G[u][v][0].update(data) seen.add((v, u)) else: - G.add_edges_from(((u, v, data) - for u, nbrs in d.items() - for v, data in nbrs.items())) + G.add_edges_from( + ((u, v, data) for u, nbrs in d.items() for v, data in nbrs.items()) + ) return G def to_edgelist(G, nodelist=None): - """Return a list of edges in the graph. + """Returns a list of edges in the graph. Parameters ---------- @@ -379,26 +381,26 @@ def to_edgelist(G, nodelist=None): def from_edgelist(edgelist, create_using=None): - """Return a graph from a list of edges. + """Returns a graph from a list of edges. Parameters ---------- edgelist : list or iterator Edge tuples - create_using : NetworkX graph - Use specified graph for result. Otherwise a new graph is created. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
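from_dict_of_lists, from_dict_of_dicts, and from_edgelist now all start from nx.empty_graph(0, create_using), which accepts either a graph class or a graph instance; an instance is cleared and then reused. A minimal sketch of both forms:

import networkx as nx

G = nx.from_edgelist([(0, 1), (1, 2)], create_using=nx.DiGraph)  # a class
H = nx.MultiGraph([(7, 8)])
H = nx.from_edgelist([(0, 1)], create_using=H)  # an instance: cleared first
assert list(H.edges) == [(0, 1)]                # the (7, 8) edge is gone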
Examples -------- - >>> edgelist = [(0, 1)] # single edge (0,1) + >>> edgelist = [(0, 1)] # single edge (0,1) >>> G = nx.from_edgelist(edgelist) or - >>> G = nx.Graph(edgelist) # use Graph constructor + >>> G = nx.Graph(edgelist) # use Graph constructor """ - G = _prep_create_using(create_using) + G = nx.empty_graph(0, create_using) G.add_edges_from(edgelist) return G diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py index 05d9811..240d3df 100644 --- a/networkx/convert_matrix.py +++ b/networkx/convert_matrix.py @@ -1,10 +1,5 @@ -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -"""Functions to convert NetworkX graphs to and from numpy/scipy matrices. +"""Functions to convert NetworkX graphs to and from common data containers +like numpy arrays, scipy sparse matrices, and pandas DataFrames. The preferred way of converting data to a NetworkX graph is through the graph constructor. The constructor calls the to_networkx_graph() function @@ -12,15 +7,15 @@ Examples -------- -Create a 10 node random graph from a numpy matrix +Create a 10 node random graph from a numpy array >>> import numpy as np ->>> a = np.reshape(np.random.random_integers(0, 1, size=100), (10, 10)) +>>> a = np.random.randint(0, 2, size=(10, 10)) >>> D = nx.DiGraph(a) or equivalently ->>> D = nx.to_networkx_graph(a, create_using=nx.DiGraph()) +>>> D = nx.to_networkx_graph(a, create_using=nx.DiGraph) See Also -------- @@ -29,20 +24,33 @@ import itertools import networkx as nx -from networkx.convert import _prep_create_using from networkx.utils import not_implemented_for -__all__ = ['from_numpy_matrix', 'to_numpy_matrix', - 'from_pandas_adjacency', 'to_pandas_adjacency', - 'from_pandas_edgelist', 'to_pandas_edgelist', - 'to_numpy_recarray', - 'from_scipy_sparse_matrix', 'to_scipy_sparse_matrix', - 'from_numpy_array', 'to_numpy_array'] - - -def to_pandas_adjacency(G, nodelist=None, dtype=None, order=None, - multigraph_weight=sum, weight='weight', nonedge=0.0): - """Return the graph adjacency matrix as a Pandas DataFrame. +__all__ = [ + "from_numpy_matrix", + "to_numpy_matrix", + "from_pandas_adjacency", + "to_pandas_adjacency", + "from_pandas_edgelist", + "to_pandas_edgelist", + "to_numpy_recarray", + "from_scipy_sparse_matrix", + "to_scipy_sparse_matrix", + "from_numpy_array", + "to_numpy_array", +] + + +def to_pandas_adjacency( + G, + nodelist=None, + dtype=None, + order=None, + multigraph_weight=sum, + weight="weight", + nonedge=0.0, +): + """Returns the graph adjacency matrix as a Pandas DataFrame. Parameters ---------- @@ -75,6 +83,8 @@ def to_pandas_adjacency(G, nodelist=None, dtype=None, order=None, Notes ----- + For directed graphs, entry i,j corresponds to an edge from i to j. + The DataFrame entries are assigned to the weight edge attribute. When an edge does not have a weight attribute, the value of the entry is set to the number 1. 
For multiple (parallel) edges, the values of the entries @@ -91,6 +101,7 @@ def to_pandas_adjacency(G, nodelist=None, dtype=None, order=None, resulting Pandas DataFrame can be modified as follows: >>> import pandas as pd + >>> pd.options.display.max_columns = 20 >>> import numpy as np >>> G = nx.Graph([(1, 1)]) >>> df = nx.to_pandas_adjacency(G, dtype=int) @@ -121,16 +132,23 @@ def to_pandas_adjacency(G, nodelist=None, dtype=None, order=None, """ import pandas as pd - M = to_numpy_matrix(G, nodelist=nodelist, dtype=dtype, order=order, - multigraph_weight=multigraph_weight, weight=weight, - nonedge=nonedge) + + M = to_numpy_array( + G, + nodelist=nodelist, + dtype=dtype, + order=order, + multigraph_weight=multigraph_weight, + weight=weight, + nonedge=nonedge, + ) if nodelist is None: nodelist = list(G) return pd.DataFrame(data=M, index=nodelist, columns=nodelist) def from_pandas_adjacency(df, create_using=None): - r"""Return a graph from Pandas DataFrame. + r"""Returns a graph from Pandas DataFrame. The Pandas DataFrame is interpreted as an adjacency matrix for the graph. @@ -139,15 +157,18 @@ def from_pandas_adjacency(df, create_using=None): df : Pandas DataFrame An adjacency matrix representation of a graph - create_using : NetworkX graph - Use specified graph for result. The default is Graph() + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- - If the numpy matrix has a single data type for each matrix entry it - will be converted to an appropriate Python data type. + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of df corresponds to an edge from i to j. - If the numpy matrix has a user-specified compound data type the names + If `df` has a single data type for each entry it will be converted to an + appropriate Python data type. + + If `df` has a user-specified compound data type the names of the data fields will be used as attribute keys in the resulting NetworkX graph. @@ -160,13 +181,14 @@ def from_pandas_adjacency(df, create_using=None): Simple integer weights on edges: >>> import pandas as pd + >>> pd.options.display.max_columns = 20 >>> df = pd.DataFrame([[1, 1], [2, 1]]) >>> df 0 1 0 1 1 1 2 1 >>> G = nx.from_pandas_adjacency(df) - >>> G.name = 'Graph from pandas adjacency matrix' + >>> G.name = "Graph from pandas adjacency matrix" >>> print(nx.info(G)) Name: Graph from pandas adjacency matrix Type: Graph @@ -176,21 +198,24 @@ def from_pandas_adjacency(df, create_using=None): """ - A = df.values - G = from_numpy_matrix(A, create_using=create_using) try: df = df[df.index] - except: - raise nx.NetworkXError("Columns must match Indices.", "%s not in columns" % - list(set(df.index).difference(set(df.columns)))) + except Exception as e: + missing = list(set(df.index).difference(set(df.columns))) + msg = f"{missing} not in columns" + raise nx.NetworkXError("Columns must match Indices.", msg) from e + + A = df.values + G = from_numpy_array(A, create_using=create_using) nx.relabel.relabel_nodes(G, dict(enumerate(df.columns)), copy=False) return G -def to_pandas_edgelist(G, source='source', target='target', nodelist=None, - dtype=None, order=None): - """Return the graph edge list as a Pandas DataFrame. +def to_pandas_edgelist( + G, source="source", target="target", nodelist=None, dtype=None, order=None +): + """Returns the graph edge list as a Pandas DataFrame. 
Parameters ---------- @@ -198,11 +223,11 @@ def to_pandas_edgelist(G, source='source', target='target', nodelist=None, The NetworkX graph used to construct the Pandas DataFrame. source : str or int, optional - A valid column name (string or iteger) for the source nodes (for the + A valid column name (string or integer) for the source nodes (for the directed case). target : str or int, optional - A valid column name (string or iteger) for the target nodes (for the + A valid column name (string or integer) for the target nodes (for the directed case). nodelist : list, optional @@ -215,35 +240,54 @@ def to_pandas_edgelist(G, source='source', target='target', nodelist=None, Examples -------- - >>> G = nx.Graph([('A', 'B', {'cost': 1, 'weight': 7}), - ... ('C', 'E', {'cost': 9, 'weight': 10})]) - >>> df = nx.to_pandas_edgelist(G, nodelist=['A', 'C']) - >>> df - cost source target weight - 0 1 A B 7 - 1 9 C E 10 + >>> G = nx.Graph( + ... [ + ... ("A", "B", {"cost": 1, "weight": 7}), + ... ("C", "E", {"cost": 9, "weight": 10}), + ... ] + ... ) + >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"]) + >>> df[["source", "target", "cost", "weight"]] + source target cost weight + 0 A B 1 7 + 1 C E 9 10 """ import pandas as pd + if nodelist is None: edgelist = G.edges(data=True) else: edgelist = G.edges(nodelist, data=True) source_nodes = [s for s, t, d in edgelist] target_nodes = [t for s, t, d in edgelist] + all_keys = set().union(*(d.keys() for s, t, d in edgelist)) - edge_attr = {k: [d.get(k, float("nan")) for s, t, d in edgelist] for k in all_keys} + if source in all_keys: + raise nx.NetworkXError(f"Source name '{source}' is an edge attr name") + if target in all_keys: + raise nx.NetworkXError(f"Target name '{target}' is an edge attr name") + + nan = float("nan") + edge_attr = {k: [d.get(k, nan) for s, t, d in edgelist] for k in all_keys} + edgelistdict = {source: source_nodes, target: target_nodes} edgelistdict.update(edge_attr) return pd.DataFrame(edgelistdict) -def from_pandas_edgelist(df, source='source', target='target', edge_attr=None, - create_using=None): - """Return a graph from Pandas DataFrame containing an edge list. +def from_pandas_edgelist( + df, + source="source", + target="target", + edge_attr=None, + create_using=None, + edge_key=None, +): + """Returns a graph from Pandas DataFrame containing an edge list. The Pandas DataFrame should contain at least two columns of node names and - zero or more columns of node attributes. Each row will be processed as one + zero or more columns of edge attributes. Each row will be processed as one edge instance. Note: This function iterates over DataFrame.values, which is not @@ -258,20 +302,26 @@ def from_pandas_edgelist(df, source='source', target='target', edge_attr=None, An edge list representation of a graph source : str or int - A valid column name (string or iteger) for the source nodes (for the + A valid column name (string or integer) for the source nodes (for the directed case). target : str or int - A valid column name (string or iteger) for the target nodes (for the + A valid column name (string or integer) for the target nodes (for the directed case). - edge_attr : str or int, iterable, True - A valid column name (str or integer) or list of column names that will - be used to retrieve items from the row and add them to the graph as edge - attributes. If `True`, all of the remaining columns will be added. 
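to_pandas_edgelist (above) now refuses edge attributes whose names collide with the source or target column names instead of silently producing an ambiguous DataFrame. A sketch of the failure mode (requires pandas installed):

import networkx as nx

G = nx.Graph()
G.add_edge(0, 1, source="collides")
try:
    nx.to_pandas_edgelist(G)
except nx.NetworkXError as err:
    print(err)  # Source name 'source' is an edge attr name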
+ edge_attr : str or int, iterable, True, or None + A valid column name (str or int) or iterable of column names that are + used to retrieve items and add them to the graph as edge attributes. + If `True`, all of the remaining columns will be added. + If `None`, no edge attributes are added to the graph. - create_using : NetworkX graph - Use specified graph for result. The default is Graph() + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + edge_key : str or None, optional (default=None) + A valid column name for the edge keys (for a MultiGraph). The values in + this column are used for the edge keys when adding edges if create_using + is a multigraph. See Also -------- @@ -282,77 +332,123 @@ def from_pandas_edgelist(df, source='source', target='target', edge_attr=None, Simple integer weights on edges: >>> import pandas as pd + >>> pd.options.display.max_columns = 20 >>> import numpy as np - >>> r = np.random.RandomState(seed=5) - >>> ints = r.random_integers(1, 10, size=(3,2)) - >>> a = ['A', 'B', 'C'] - >>> b = ['D', 'A', 'E'] - >>> df = pd.DataFrame(ints, columns=['weight', 'cost']) + >>> rng = np.random.RandomState(seed=5) + >>> ints = rng.randint(1, 11, size=(3, 2)) + >>> a = ["A", "B", "C"] + >>> b = ["D", "A", "E"] + >>> df = pd.DataFrame(ints, columns=["weight", "cost"]) >>> df[0] = a - >>> df['b'] = b - >>> df + >>> df["b"] = b + >>> df[["weight", "cost", 0, "b"]] weight cost 0 b 0 4 7 A D 1 7 1 B A 2 10 9 C E - >>> G = nx.from_pandas_edgelist(df, 0, 'b', ['weight', 'cost']) - >>> G['E']['C']['weight'] + >>> G = nx.from_pandas_edgelist(df, 0, "b", ["weight", "cost"]) + >>> G["E"]["C"]["weight"] 10 - >>> G['E']['C']['cost'] + >>> G["E"]["C"]["cost"] 9 - >>> edges = pd.DataFrame({'source': [0, 1, 2], - ... 'target': [2, 2, 3], - ... 'weight': [3, 4, 5], - ... 'color': ['red', 'blue', 'blue']}) + >>> edges = pd.DataFrame( + ... { + ... "source": [0, 1, 2], + ... "target": [2, 2, 3], + ... "weight": [3, 4, 5], + ... "color": ["red", "blue", "blue"], + ... } + ... ) >>> G = nx.from_pandas_edgelist(edges, edge_attr=True) - >>> G[0][2]['color'] + >>> G[0][2]["color"] 'red' + Build multigraph with custom keys: + + >>> edges = pd.DataFrame( + ... { + ... "source": [0, 1, 2, 0], + ... "target": [2, 2, 3, 2], + ... "my_edge_key": ["A", "B", "C", "D"], + ... "weight": [3, 4, 5, 6], + ... "color": ["red", "blue", "blue", "blue"], + ... } + ... ) + >>> G = nx.from_pandas_edgelist( + ... edges, + ... edge_key="my_edge_key", + ... edge_attr=["weight", "color"], + ... create_using=nx.MultiGraph(), + ... ) + >>> G[0][2] + AtlasView({'A': {'weight': 3, 'color': 'red'}, 'D': {'weight': 6, 'color': 'blue'}}) + + """ + g = nx.empty_graph(0, create_using) + + if edge_attr is None: + g.add_edges_from(zip(df[source], df[target])) + return g - g = _prep_create_using(create_using) - - # Index of source and target - src_i = df.columns.get_loc(source) - tar_i = df.columns.get_loc(target) - if edge_attr: - # If all additional columns requested, build up a list of tuples - # [(name, index),...] 
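The rewritten from_pandas_edgelist body walks the source, target, and attribute columns in lockstep rather than indexing into df.values row by row. A sketch of the zip pattern it uses ("src"/"dst" are placeholder column names):

import pandas as pd

df = pd.DataFrame({"src": [0, 1], "dst": [1, 2], "weight": [3.0, 4.0]})
cols = ["weight"]
attribute_data = zip(*[df[c] for c in cols])  # one attribute tuple per row
for s, t, attrs in zip(df["src"], df["dst"], attribute_data):
    print(s, t, dict(zip(cols, attrs)))       # 0 1 {'weight': 3.0} ...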
- if edge_attr is True: - # Create a list of all columns indices, ignore nodes - edge_i = [] - for i, col in enumerate(df.columns): - if col is not source and col is not target: - edge_i.append((col, i)) - # If a list or tuple of name is requested - elif isinstance(edge_attr, (list, tuple)): - edge_i = [(i, df.columns.get_loc(i)) for i in edge_attr] - # If a string or int is passed - else: - edge_i = [(edge_attr, df.columns.get_loc(edge_attr)), ] - - # Iteration on values returns the rows as Numpy arrays - for row in df.values: - s, t = row[src_i], row[tar_i] - if g.is_multigraph(): - g.add_edge(s, t) - key = max(g[s][t]) # default keys just count, so max is most recent - g[s][t][key].update((i, row[j]) for i, j in edge_i) + reserved_columns = [source, target] + + # Additional columns requested + attr_col_headings = [] + attribute_data = [] + if edge_attr is True: + attr_col_headings = [c for c in df.columns if c not in reserved_columns] + elif isinstance(edge_attr, (list, tuple)): + attr_col_headings = edge_attr + else: + attr_col_headings = [edge_attr] + if len(attr_col_headings) == 0: + raise nx.NetworkXError( + f"Invalid edge_attr argument: No columns found with name: {attr_col_headings}" + ) + + try: + attribute_data = zip(*[df[col] for col in attr_col_headings]) + except (KeyError, TypeError) as e: + msg = f"Invalid edge_attr argument: {edge_attr}" + raise nx.NetworkXError(msg) from e + + if g.is_multigraph(): + # => append the edge keys from the df to the bundled data + if edge_key is not None: + try: + multigraph_edge_keys = df[edge_key] + attribute_data = zip(attribute_data, multigraph_edge_keys) + except (KeyError, TypeError) as e: + msg = f"Invalid edge_key argument: {edge_key}" + raise nx.NetworkXError(msg) from e + + for s, t, attrs in zip(df[source], df[target], attribute_data): + if edge_key is not None: + attrs, multigraph_edge_key = attrs + key = g.add_edge(s, t, key=multigraph_edge_key) else: - g.add_edge(s, t) - g[s][t].update((i, row[j]) for i, j in edge_i) + key = g.add_edge(s, t) - # If no column names are given, then just return the edges. + g[s][t][key].update(zip(attr_col_headings, attrs)) else: - for row in df.values: - g.add_edge(row[src_i], row[tar_i]) + for s, t, attrs in zip(df[source], df[target], attribute_data): + g.add_edge(s, t) + g[s][t].update(zip(attr_col_headings, attrs)) return g -def to_numpy_matrix(G, nodelist=None, dtype=None, order=None, - multigraph_weight=sum, weight='weight', nonedge=0.0): - """Return the graph adjacency matrix as a NumPy matrix. +def to_numpy_matrix( + G, + nodelist=None, + dtype=None, + order=None, + multigraph_weight=sum, + weight="weight", + nonedge=0.0, +): + """Returns the graph adjacency matrix as a NumPy matrix. Parameters ---------- @@ -400,6 +496,8 @@ def to_numpy_matrix(G, nodelist=None, dtype=None, order=None, Notes ----- + For directed graphs, entry i,j corresponds to an edge from i to j. + The matrix entries are assigned to the weight edge attribute. When an edge does not have a weight attribute, the value of the entry is set to the number 1. 
For multiple (parallel) edges, the values of the entries @@ -419,10 +517,10 @@ def to_numpy_matrix(G, nodelist=None, dtype=None, order=None, >>> G = nx.Graph([(1, 1)]) >>> A = nx.to_numpy_matrix(G) >>> A - matrix([[ 1.]]) - >>> A.A[np.diag_indices_from(A)] *= 2 + matrix([[1.]]) + >>> A[np.diag_indices_from(A)] *= 2 >>> A - matrix([[ 2.]]) + matrix([[2.]]) Examples -------- @@ -436,22 +534,28 @@ def to_numpy_matrix(G, nodelist=None, dtype=None, order=None, >>> G.add_edge(2, 2) 1 >>> nx.to_numpy_matrix(G, nodelist=[0, 1, 2]) - matrix([[ 0., 2., 0.], - [ 1., 0., 0.], - [ 0., 0., 4.]]) + matrix([[0., 2., 0.], + [1., 0., 0.], + [0., 0., 4.]]) """ import numpy as np - A = to_numpy_array(G, nodelist=nodelist, dtype=dtype, order=order, - multigraph_weight=multigraph_weight, weight=weight, - nonedge=nonedge) + A = to_numpy_array( + G, + nodelist=nodelist, + dtype=dtype, + order=order, + multigraph_weight=multigraph_weight, + weight=weight, + nonedge=nonedge, + ) M = np.asmatrix(A, dtype=dtype) return M def from_numpy_matrix(A, parallel_edges=False, create_using=None): - """Return a graph from numpy matrix. + """Returns a graph from numpy matrix. The numpy matrix is interpreted as an adjacency matrix for the graph. @@ -461,23 +565,26 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None): An adjacency matrix representation of a graph parallel_edges : Boolean - If this is True, `create_using` is a multigraph, and `A` is an + If True, `create_using` is a multigraph, and `A` is an integer matrix, then entry *(i, j)* in the matrix is interpreted as the - number of parallel edges joining vertices *i* and *j* in the graph. If it - is False, then the entries in the adjacency matrix are interpreted as + number of parallel edges joining vertices *i* and *j* in the graph. + If False, then the entries in the adjacency matrix are interpreted as the weight of a single edge joining the vertices. - create_using : NetworkX graph - Use specified graph for result. The default is Graph() + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- - If `create_using` is an instance of :class:`networkx.MultiGraph` or + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. + + If `create_using` is :class:`networkx.MultiGraph` or :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the entries of `A` are of type :class:`int`, then this function returns a - multigraph (of the same type as `create_using`) with parallel edges. + multigraph (constructed from `create_using`) with parallel edges. - If `create_using` is an undirected multigraph, then only the edges + If `create_using` indicates an undirected multigraph, then only the edges indicated by the upper triangle of the matrix `A` will be added to the graph. 
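For illustration, a minimal doctest-style sketch of the upper-triangle rule stated in the notes above (only the documented `from_numpy_matrix` behaviour is assumed; the matrix values are hypothetical):

    >>> import numpy as np
    >>> A = np.array([[0, 2], [2, 0]])
    >>> G = nx.from_numpy_matrix(A, parallel_edges=True, create_using=nx.MultiGraph)
    >>> # The symmetric entry (1, 0) lies below the diagonal, so only the
    >>> # (0, 1) entry contributes: two parallel edges, not four.
    >>> G.number_of_edges()
    2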
@@ -497,23 +604,23 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None): Simple integer weights on edges: >>> import numpy as np - >>> A = np.matrix([[1, 1], [2, 1]]) + >>> A = np.array([[1, 1], [2, 1]]) >>> G = nx.from_numpy_matrix(A) - If `create_using` is a multigraph and the matrix has only integer entries, - the entries will be interpreted as weighted edges joining the vertices - (without creating parallel edges): + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): - >>> A = np.matrix([[1, 1], [1, 2]]) - >>> G = nx.from_numpy_matrix(A, create_using=nx.MultiGraph()) + >>> A = np.array([[1, 1], [1, 2]]) + >>> G = nx.from_numpy_matrix(A, create_using=nx.MultiGraph) >>> G[1][1] AtlasView({0: {'weight': 2}}) - If `create_using` is a multigraph and the matrix has only integer entries - but `parallel_edges` is True, then the entries will be interpreted as - the number of parallel edges joining those two vertices: + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: - >>> A = np.matrix([[1, 1], [1, 2]]) + >>> A = np.array([[1, 1], [1, 2]]) >>> temp = nx.MultiGraph() >>> G = nx.from_numpy_matrix(A, parallel_edges=True, create_using=temp) >>> G[1][1] @@ -521,55 +628,62 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None): User defined compound data type on edges: - >>> dt = [('weight', float), ('cost', int)] - >>> A = np.matrix([[(1.0, 2)]], dtype=dt) + >>> dt = [("weight", float), ("cost", int)] + >>> A = np.array([[(1.0, 2)]], dtype=dt) >>> G = nx.from_numpy_matrix(A) >>> list(G.edges()) [(0, 0)] - >>> G[0][0]['cost'] + >>> G[0][0]["cost"] 2 - >>> G[0][0]['weight'] + >>> G[0][0]["weight"] 1.0 """ # This should never fail if you have created a numpy matrix with numpy... import numpy as np - kind_to_python_type = {'f': float, - 'i': int, - 'u': int, - 'b': bool, - 'c': complex, - 'S': str, - 'V': 'void'} - try: # Python 3.x - blurb = chr(1245) # just to trigger the exception - kind_to_python_type['U'] = str - except ValueError: # Python 2.7 - kind_to_python_type['U'] = unicode - G = _prep_create_using(create_using) + + kind_to_python_type = { + "f": float, + "i": int, + "u": int, + "b": bool, + "c": complex, + "S": str, + "V": "void", + } + kind_to_python_type["U"] = str + G = nx.empty_graph(0, create_using) n, m = A.shape if n != m: - raise nx.NetworkXError("Adjacency matrix is not square.", - "nx,ny=%s" % (A.shape,)) + raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}") dt = A.dtype try: python_type = kind_to_python_type[dt.kind] - except: - raise TypeError("Unknown numpy data type: %s" % dt) + except Exception as e: + raise TypeError(f"Unknown numpy data type: {dt}") from e # Make sure we get even the isolated nodes of the graph. G.add_nodes_from(range(n)) # Get a list of all the entries in the matrix with nonzero entries. These - # coordinates will become the edges in the graph. - edges = zip(*(np.asarray(A).nonzero())) + # coordinates become edges in the graph. 
(convert to int from np.int64) + edges = ((int(e[0]), int(e[1])) for e in zip(*np.asarray(A).nonzero())) # handle numpy constructed data type - if python_type is 'void': + if python_type == "void": # Sort the fields by their offset, then by dtype, then by name. - fields = sorted((offset, dtype, name) for name, (dtype, offset) in - A.dtype.fields.items()) - triples = ((u, v, {name: kind_to_python_type[dtype.kind](val) - for (_, dtype, name), val in zip(fields, A[u, v])}) - for u, v in edges) + fields = sorted( + (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items() + ) + triples = ( + ( + u, + v, + { + name: kind_to_python_type[dtype.kind](val) + for (_, dtype, name), val in zip(fields, A[u, v]) + }, + ) + for u, v in edges + ) # If the entries in the adjacency matrix are integers, the graph is a # multigraph, and parallel_edges is True, then create parallel edges, each # with weight 1, for each entry in the adjacency matrix. Otherwise, create @@ -583,11 +697,11 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None): # for d in range(A[u, v]): # G.add_edge(u, v, weight=1) # - triples = chain(((u, v, dict(weight=1)) for d in range(A[u, v])) - for (u, v) in edges) + triples = chain( + ((u, v, {"weight": 1}) for d in range(A[u, v])) for (u, v) in edges + ) else: # basic data type - triples = ((u, v, dict(weight=python_type(A[u, v]))) - for u, v in edges) + triples = ((u, v, dict(weight=python_type(A[u, v]))) for u, v in edges) # If we are creating an undirected multigraph, only add the edges from the # upper triangle of the matrix. Otherwise, add all the edges. This relies # on the fact that the vertices created in the @@ -602,14 +716,14 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def to_numpy_recarray(G, nodelist=None, dtype=None, order=None): - """Return the graph adjacency matrix as a NumPy recarray. + """Returns the graph adjacency matrix as a NumPy recarray. Parameters ---------- G : graph - The NetworkX graph used to construct the NumPy matrix. + The NetworkX graph used to construct the NumPy recarray. nodelist : list, optional The rows and columns are ordered according to the nodes in `nodelist`. @@ -632,25 +746,27 @@ def to_numpy_recarray(G, nodelist=None, dtype=None, order=None): Notes ----- - When `nodelist` does not contain every node in `G`, the matrix is built - from the subgraph of `G` that is induced by the nodes in `nodelist`. + When `nodelist` does not contain every node in `G`, the adjacency + matrix is built from the subgraph of `G` that is induced by the nodes in + `nodelist`. Examples -------- >>> G = nx.Graph() >>> G.add_edge(1, 2, weight=7.0, cost=5) - >>> A = nx.to_numpy_recarray(G, dtype=[('weight', float), ('cost', int)]) + >>> A = nx.to_numpy_recarray(G, dtype=[("weight", float), ("cost", int)]) >>> print(A.weight) - [[ 0. 7.] - [ 7. 0.]] + [[0. 7.] + [7. 0.]] >>> print(A.cost) [[0 5] [5 0]] """ if dtype is None: - dtype = [('weight', float)] + dtype = [("weight", float)] import numpy as np + if nodelist is None: nodelist = list(G) nodeset = set(nodelist) @@ -674,14 +790,13 @@ def to_numpy_recarray(G, nodelist=None, dtype=None, order=None): return M.view(np.recarray) -def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, - weight='weight', format='csr'): - """Return the graph adjacency matrix as a SciPy sparse matrix. 
+def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, weight="weight", format="csr"): + """Returns the graph adjacency matrix as a SciPy sparse matrix. Parameters ---------- G : graph - The NetworkX graph used to construct the NumPy matrix. + The NetworkX graph used to construct the sparse matrix. nodelist : list, optional The rows and columns are ordered according to the nodes in `nodelist`. @@ -707,17 +822,17 @@ def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, Notes ----- + For directed graphs, matrix entry i,j corresponds to an edge from i to j. + The matrix entries are populated using the edge attribute held in parameter weight. When an edge does not have that attribute, the value of the entry is 1. For multiple edges the matrix values are the sums of the edge weights. - When `nodelist` does not contain every node in `G`, the matrix is built - from the subgraph of `G` that is induced by the nodes in `nodelist`. - - Uses coo_matrix format. To convert to other formats specify the - format= keyword. + When `nodelist` does not contain every node in `G`, the adjacency matrix + is built from the subgraph of `G` that is induced by the nodes in + `nodelist`. The convention used for self-loop edges in graphs is to assign the diagonal matrix entry value to the weight attribute of the edge @@ -757,6 +872,7 @@ def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, https://docs.scipy.org/doc/scipy/reference/sparse.html """ from scipy import sparse + if nodelist is None: nodelist = list(G) nlen = len(nodelist) @@ -768,9 +884,13 @@ def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, raise nx.NetworkXError(msg) index = dict(zip(nodelist, range(nlen))) - coefficients = zip(*((index[u], index[v], d.get(weight, 1)) - for u, v, d in G.edges(nodelist, data=True) - if u in index and v in index)) + coefficients = zip( + *( + (index[u], index[v], d.get(weight, 1)) + for u, v, d in G.edges(nodelist, data=True) + if u in index and v in index + ) + ) try: row, col, data = coefficients except ValueError: @@ -778,8 +898,7 @@ def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, row, col, data = [], [], [] if G.is_directed(): - M = sparse.coo_matrix((data, (row, col)), - shape=(nlen, nlen), dtype=dtype) + M = sparse.coo_matrix((data, (row, col)), shape=(nlen, nlen), dtype=dtype) else: # symmetrize matrix d = data + data @@ -787,19 +906,25 @@ def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, c = col + row # selfloop entries get double counted when symmetrizing # so we subtract the data on the diagonal - selfloops = list(nx.selfloop_edges(G, data=True)) + selfloops = list(nx.selfloop_edges(G.subgraph(nodelist), data=True)) if selfloops: - diag_index, diag_data = zip(*((index[u], -d.get(weight, 1)) - for u, v, d in selfloops - if u in index and v in index)) + diag_index, diag_data = zip( + *( + (index[u], -d.get(weight, 1)) + for u, v, d in selfloops + if u in index and v in index + ) + ) d += diag_data r += diag_index c += diag_index M = sparse.coo_matrix((d, (r, c)), shape=(nlen, nlen), dtype=dtype) try: return M.asformat(format) - except AttributeError: - raise nx.NetworkXError("Unknown sparse matrix format: %s" % format) + # From Scipy 1.1.0, asformat will throw a ValueError instead of an + # AttributeError if the format if not recognized. + except (AttributeError, ValueError) as e: + raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from e def _csr_gen_triples(A): @@ -851,18 +976,19 @@ def _generate_weighted_edges(A): `A` is a SciPy sparse matrix (in any format). 
""" - if A.format == 'csr': + if A.format == "csr": return _csr_gen_triples(A) - if A.format == 'csc': + if A.format == "csc": return _csc_gen_triples(A) - if A.format == 'dok': + if A.format == "dok": return _dok_gen_triples(A) # If A is in any other format (including COO), convert it to COO format. return _coo_gen_triples(A.tocoo()) -def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None, - edge_attribute='weight'): +def from_scipy_sparse_matrix( + A, parallel_edges=False, create_using=None, edge_attribute="weight" +): """Creates a new graph from an adjacency matrix given as a SciPy sparse matrix. @@ -874,12 +1000,12 @@ def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None, parallel_edges : Boolean If this is True, `create_using` is a multigraph, and `A` is an integer matrix, then entry *(i, j)* in the matrix is interpreted as the - number of parallel edges joining vertices *i* and *j* in the graph. If it - is False, then the entries in the adjacency matrix are interpreted as + number of parallel edges joining vertices *i* and *j* in the graph. + If it is False, then the entries in the matrix are interpreted as the weight of a single edge joining the vertices. - create_using: NetworkX graph - Use specified graph for result. The default is Graph() + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. edge_attribute: string Name of edge attribute to store matrix numeric value. The data will @@ -887,14 +1013,16 @@ def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None, Notes ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. - If `create_using` is an instance of :class:`networkx.MultiGraph` or + If `create_using` is :class:`networkx.MultiGraph` or :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the entries of `A` are of type :class:`int`, then this function returns a - multigraph (of the same type as `create_using`) with parallel edges. + multigraph (constructed from `create_using`) with parallel edges. In this case, `edge_attribute` will be ignored. - If `create_using` is an undirected multigraph, then only the edges + If `create_using` indicates an undirected multigraph, then only the edges indicated by the upper triangle of the matrix `A` will be added to the graph. 
@@ -904,31 +1032,31 @@ def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None, >>> A = sp.sparse.eye(2, 2, 1) >>> G = nx.from_scipy_sparse_matrix(A) - If `create_using` is a multigraph and the matrix has only integer entries, - the entries will be interpreted as weighted edges joining the vertices - (without creating parallel edges): + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): >>> A = sp.sparse.csr_matrix([[1, 1], [1, 2]]) - >>> G = nx.from_scipy_sparse_matrix(A, create_using=nx.MultiGraph()) + >>> G = nx.from_scipy_sparse_matrix(A, create_using=nx.MultiGraph) >>> G[1][1] AtlasView({0: {'weight': 2}}) - If `create_using` is a multigraph and the matrix has only integer entries - but `parallel_edges` is True, then the entries will be interpreted as - the number of parallel edges joining those two vertices: + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: >>> A = sp.sparse.csr_matrix([[1, 1], [1, 2]]) - >>> G = nx.from_scipy_sparse_matrix(A, parallel_edges=True, - ... create_using=nx.MultiGraph()) + >>> G = nx.from_scipy_sparse_matrix( + ... A, parallel_edges=True, create_using=nx.MultiGraph + ... ) >>> G[1][1] AtlasView({0: {'weight': 1}, 1: {'weight': 1}}) """ - G = _prep_create_using(create_using) + G = nx.empty_graph(0, create_using) n, m = A.shape if n != m: - raise nx.NetworkXError( - "Adjacency matrix is not square. nx,ny=%s" % (A.shape,)) + raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}") # Make sure we get even the isolated nodes of the graph. G.add_nodes_from(range(n)) # Create an iterable over (u, v, w) triples and for each triple, add an @@ -939,7 +1067,7 @@ def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None, # with weight 1, for each entry in the adjacency matrix. Otherwise, create # one edge for each positive entry in the adjacency matrix and set the # weight of that edge to be the entry in the matrix. - if A.dtype.kind in ('i', 'u') and G.is_multigraph() and parallel_edges: + if A.dtype.kind in ("i", "u") and G.is_multigraph() and parallel_edges: chain = itertools.chain.from_iterable # The following line is equivalent to: # @@ -962,9 +1090,16 @@ def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None, return G -def to_numpy_array(G, nodelist=None, dtype=None, order=None, - multigraph_weight=sum, weight='weight', nonedge=0.0): - """Return the graph adjacency matrix as a NumPy array. +def to_numpy_array( + G, + nodelist=None, + dtype=None, + order=None, + multigraph_weight=sum, + weight="weight", + nonedge=0.0, +): + """Returns the graph adjacency matrix as a NumPy array. Parameters ---------- @@ -1012,6 +1147,8 @@ def to_numpy_array(G, nodelist=None, dtype=None, order=None, Notes ----- + For directed graphs, entry i,j corresponds to an edge from i to j. + Entries in the adjacency matrix are assigned to the weight edge attribute. When an edge does not have a weight attribute, the value of the entry is set to the number 1. For multiple (parallel) edges, the values of the @@ -1028,17 +1165,13 @@ def to_numpy_array(G, nodelist=None, dtype=None, order=None, resulting NumPy array can be modified as follows: >>> import numpy as np - >>> try: - ... 
np.set_printoptions(legacy="1.13") - ... except TypeError: - ... pass >>> G = nx.Graph([(1, 1)]) >>> A = nx.to_numpy_array(G) >>> A - array([[ 1.]]) + array([[1.]]) >>> A[np.diag_indices_from(A)] *= 2 >>> A - array([[ 2.]]) + array([[2.]]) Examples -------- @@ -1052,9 +1185,9 @@ def to_numpy_array(G, nodelist=None, dtype=None, order=None, >>> G.add_edge(2, 2) 1 >>> nx.to_numpy_array(G, nodelist=[0, 1, 2]) - array([[ 0., 2., 0.], - [ 1., 0., 0.], - [ 0., 0., 4.]]) + array([[0., 2., 0.], + [1., 0., 0.], + [0., 0., 4.]]) """ import numpy as np @@ -1109,8 +1242,8 @@ def to_numpy_array(G, nodelist=None, dtype=None, order=None, operator = {sum: np.nansum, min: np.nanmin, max: np.nanmax} try: op = operator[multigraph_weight] - except: - raise ValueError('multigraph_weight must be sum, min, or max') + except Exception as e: + raise ValueError("multigraph_weight must be sum, min, or max") from e for u, v, attrs in G.edges(data=True): if (u in nodeset) and (v in nodeset): @@ -1137,7 +1270,7 @@ def to_numpy_array(G, nodelist=None, dtype=None, order=None, def from_numpy_array(A, parallel_edges=False, create_using=None): - """Return a graph from NumPy array. + """Returns a graph from NumPy array. The NumPy array is interpreted as an adjacency matrix for the graph. @@ -1148,23 +1281,25 @@ def from_numpy_array(A, parallel_edges=False, create_using=None): parallel_edges : Boolean If this is True, `create_using` is a multigraph, and `A` is an - integer array, then entry *(i, j)* in the adjacency matrix is - interpreted as the number of parallel edges joining vertices *i* - and *j* in the graph. If it is False, then the entries in the - adjacency matrix are interpreted as the weight of a single edge - joining the vertices. + integer array, then entry *(i, j)* in the array is interpreted as the + number of parallel edges joining vertices *i* and *j* in the graph. + If it is False, then the entries in the array are interpreted as + the weight of a single edge joining the vertices. - create_using : NetworkX graph - Use specified graph for result. The default is Graph() + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- - If `create_using` is an instance of :class:`networkx.MultiGraph` or + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. + + If `create_using` is :class:`networkx.MultiGraph` or :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the entries of `A` are of type :class:`int`, then this function returns a multigraph (of the same type as `create_using`) with parallel edges. - If `create_using` is an undirected multigraph, then only the edges + If `create_using` indicates an undirected multigraph, then only the edges indicated by the upper triangle of the array `A` will be added to the graph. 
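As a hedged sketch of the directed case mentioned in the notes (the array here is illustrative only):

    >>> import numpy as np
    >>> A = np.array([[0, 1], [0, 0]])
    >>> G = nx.from_numpy_array(A, create_using=nx.DiGraph)
    >>> # Entry (0, 1) yields the single directed edge 0 -> 1.
    >>> list(G.edges())
    [(0, 1)]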
@@ -1187,20 +1322,21 @@ def from_numpy_array(A, parallel_edges=False, create_using=None): >>> A = np.array([[1, 1], [2, 1]]) >>> G = nx.from_numpy_array(A) >>> G.edges(data=True) - EdgeDataView([(0, 0, {'weight': 1}), (0, 1, {'weight': 2}), (1, 1, {'weight': 1})]) + EdgeDataView([(0, 0, {'weight': 1}), (0, 1, {'weight': 2}), \ +(1, 1, {'weight': 1})]) - If `create_using` is a multigraph and the array has only integer entries, - the entries will be interpreted as weighted edges joining the vertices - (without creating parallel edges): + If `create_using` indicates a multigraph and the array has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): >>> A = np.array([[1, 1], [1, 2]]) - >>> G = nx.from_numpy_array(A, create_using=nx.MultiGraph()) + >>> G = nx.from_numpy_array(A, create_using=nx.MultiGraph) >>> G[1][1] AtlasView({0: {'weight': 2}}) - If `create_using` is a multigraph and the array has only integer entries - but `parallel_edges` is True, then the entries will be interpreted as - the number of parallel edges joining those two vertices: + If `create_using` indicates a multigraph and the array has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: >>> A = np.array([[1, 1], [1, 2]]) >>> temp = nx.MultiGraph() @@ -1210,33 +1346,17 @@ def from_numpy_array(A, parallel_edges=False, create_using=None): User defined compound data type on edges: - >>> dt = [('weight', float), ('cost', int)] + >>> dt = [("weight", float), ("cost", int)] >>> A = np.array([[(1.0, 2)]], dtype=dt) >>> G = nx.from_numpy_array(A) >>> G.edges() EdgeView([(0, 0)]) - >>> G[0][0]['cost'] + >>> G[0][0]["cost"] 2 - >>> G[0][0]['weight'] + >>> G[0][0]["weight"] 1.0 """ - return from_numpy_matrix(A, parallel_edges=parallel_edges, - create_using=create_using) - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") - try: - import pandas - except: - raise SkipTest("Pandas not available") + return from_numpy_matrix( + A, parallel_edges=parallel_edges, create_using=create_using + ) diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py index 1513cd7..e02de9a 100644 --- a/networkx/drawing/layout.py +++ b/networkx/drawing/layout.py @@ -1,13 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Richard Penney -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg , -# Dan Schult """ ****** Layout @@ -25,18 +15,24 @@ Warning: Most layout routines have only been tested in 2-dimensions. 
""" -from __future__ import division import networkx as nx from networkx.utils import random_state -__all__ = ['circular_layout', - 'kamada_kawai_layout', - 'random_layout', - 'rescale_layout', - 'shell_layout', - 'spring_layout', - 'spectral_layout', - 'fruchterman_reingold_layout'] +__all__ = [ + "bipartite_layout", + "circular_layout", + "kamada_kawai_layout", + "random_layout", + "rescale_layout", + "rescale_layout_dict", + "shell_layout", + "spring_layout", + "spectral_layout", + "planar_layout", + "fruchterman_reingold_layout", + "spiral_layout", + "multipartite_layout", +] def _process_params(G, center, dim): @@ -61,7 +57,7 @@ def _process_params(G, center, dim): @random_state(3) -def random_layout(G, center=None, dim=2, random_state=None): +def random_layout(G, center=None, dim=2, seed=None): """Position nodes uniformly at random in the unit square. For every node, a position is generated by choosing each of dim @@ -80,10 +76,10 @@ def random_layout(G, center=None, dim=2, random_state=None): dim : int Dimension of layout. - random_state : int, RandomState instance or None optional (default=None) + seed : int, RandomState instance or None optional (default=None) Set the random state for deterministic node layouts. - If int, `random_state` is the seed used by the random number generator, - if numpy.random.RandomState instance, `random_state` is the random + If int, `seed` is the seed used by the random number generator, + if numpy.random.RandomState instance, `seed` is the random number generator, if None, the random number generator is the RandomState instance used by numpy.random. @@ -102,8 +98,7 @@ def random_layout(G, center=None, dim=2, random_state=None): import numpy as np G, center = _process_params(G, center, dim) - shape = (len(G), dim) - pos = random_state.rand(*shape) + center + pos = seed.rand(len(G), dim) + center pos = pos.astype(np.float32) pos = dict(zip(G, pos)) @@ -129,12 +124,18 @@ def circular_layout(G, scale=1, center=None, dim=2): Dimension of layout. If dim>2, the remaining dimensions are set to zero in the returned positions. + If dim<2, a ValueError is raised. Returns ------- pos : dict A dictionary of positions keyed by node + Raises + ------- + ValueError + If dim < 2 + Examples -------- >>> G = nx.path_graph(4) @@ -148,6 +149,9 @@ def circular_layout(G, scale=1, center=None, dim=2): """ import numpy as np + if dim < 2: + raise ValueError("cannot handle dimensions < 2") + G, center = _process_params(G, center, dim) paddims = max(0, (dim - 2)) @@ -160,15 +164,16 @@ def circular_layout(G, scale=1, center=None, dim=2): # Discard the extra angle since it matches 0 radians. theta = np.linspace(0, 1, len(G) + 1)[:-1] * 2 * np.pi theta = theta.astype(np.float32) - pos = np.column_stack([np.cos(theta), np.sin(theta), - np.zeros((len(G), paddims))]) + pos = np.column_stack( + [np.cos(theta), np.sin(theta), np.zeros((len(G), paddims))] + ) pos = rescale_layout(pos, scale=scale) + center pos = dict(zip(G, pos)) return pos -def shell_layout(G, nlist=None, scale=1, center=None, dim=2): +def shell_layout(G, nlist=None, rotate=None, scale=1, center=None, dim=2): """Position nodes in concentric circles. Parameters @@ -179,6 +184,11 @@ def shell_layout(G, nlist=None, scale=1, center=None, dim=2): nlist : list of lists List of node lists for each shell. + rotate : angle in radians (default=pi/len(nlist)) + Angle by which to rotate the starting position of each shell + relative to the starting position of the previous shell. + To recreate behavior before v2.5 use rotate=0. 
+ scale : number (default: 1) Scale factor for positions. @@ -187,12 +197,18 @@ def shell_layout(G, nlist=None, scale=1, center=None, dim=2): dim : int Dimension of layout, currently only dim=2 is supported. + Other dimension values result in a ValueError. Returns ------- pos : dict A dictionary of positions keyed by node + Raises + ------- + ValueError + If dim != 2 + Examples -------- >>> G = nx.path_graph(4) @@ -207,6 +223,9 @@ def shell_layout(G, nlist=None, scale=1, center=None, dim=2): """ import numpy as np + if dim != 2: + raise ValueError("can only handle 2 dimensions") + G, center = _process_params(G, center, dim) if len(G) == 0: @@ -218,40 +237,154 @@ def shell_layout(G, nlist=None, scale=1, center=None, dim=2): # draw the whole graph in one shell nlist = [list(G)] + radius_bump = scale / len(nlist) + if len(nlist[0]) == 1: # single node at center radius = 0.0 else: # else start at r=1 - radius = 1.0 + radius = radius_bump + if rotate is None: + rotate = np.pi / len(nlist) + first_theta = rotate npos = {} for nodes in nlist: - # Discard the extra angle since it matches 0 radians. - theta = np.linspace(0, 1, len(nodes) + 1)[:-1] * 2 * np.pi - theta = theta.astype(np.float32) - pos = np.column_stack([np.cos(theta), np.sin(theta)]) - pos = rescale_layout(pos, scale=scale * radius / len(nlist)) + center + # Discard the last angle (endpoint=False) since 2*pi matches 0 radians + theta = ( + np.linspace(0, 2 * np.pi, len(nodes), endpoint=False, dtype=np.float32) + + first_theta + ) + pos = radius * np.column_stack([np.cos(theta), np.sin(theta)]) + center npos.update(zip(nodes, pos)) - radius += 1.0 + radius += radius_bump + first_theta += rotate return npos +def bipartite_layout( + G, nodes, align="vertical", scale=1, center=None, aspect_ratio=4 / 3 +): + """Position nodes in two straight lines. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + nodes : list or container + Nodes in one node set of the bipartite graph. + This set will be placed on left or top. + + align : string (default='vertical') + The alignment of nodes. Vertical or horizontal. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + aspect_ratio : number (default=4/3): + The ratio of the width to the height of the layout. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node. + + Examples + -------- + >>> G = nx.bipartite.gnmk_random_graph(3, 5, 10, seed=123) + >>> top = nx.bipartite.sets(G)[0] + >>> pos = nx.bipartite_layout(G, top) + + Notes + ----- + This algorithm currently only works in two dimensions and does not + try to minimize edge crossings. 
+ + """ + + import numpy as np + + G, center = _process_params(G, center=center, dim=2) + if len(G) == 0: + return {} + + height = 1 + width = aspect_ratio * height + offset = (width / 2, height / 2) + + top = set(nodes) + bottom = set(G) - top + nodes = list(top) + list(bottom) + + if align == "vertical": + left_xs = np.repeat(0, len(top)) + right_xs = np.repeat(width, len(bottom)) + left_ys = np.linspace(0, height, len(top)) + right_ys = np.linspace(0, height, len(bottom)) + + top_pos = np.column_stack([left_xs, left_ys]) - offset + bottom_pos = np.column_stack([right_xs, right_ys]) - offset + + pos = np.concatenate([top_pos, bottom_pos]) + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(nodes, pos)) + return pos + + if align == "horizontal": + top_ys = np.repeat(height, len(top)) + bottom_ys = np.repeat(0, len(bottom)) + top_xs = np.linspace(0, width, len(top)) + bottom_xs = np.linspace(0, width, len(bottom)) + + top_pos = np.column_stack([top_xs, top_ys]) - offset + bottom_pos = np.column_stack([bottom_xs, bottom_ys]) - offset + + pos = np.concatenate([top_pos, bottom_pos]) + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(nodes, pos)) + return pos + + msg = "align must be either vertical or horizontal." + raise ValueError(msg) + + @random_state(10) -def fruchterman_reingold_layout(G, - k=None, - pos=None, - fixed=None, - iterations=50, - threshold=1e-4, - weight='weight', - scale=1, - center=None, - dim=2, - random_state=None): +def fruchterman_reingold_layout( + G, + k=None, + pos=None, + fixed=None, + iterations=50, + threshold=1e-4, + weight="weight", + scale=1, + center=None, + dim=2, + seed=None, +): """Position nodes using Fruchterman-Reingold force-directed algorithm. + The algorithm simulates a force-directed representation of the network + treating edges as springs holding nodes close, while treating nodes + as repelling objects, sometimes called an anti-gravity force. + Simulation continues until the positions are close to an equilibrium. + + There are some hard-coded values: minimal distance between + nodes (0.01) and "temperature" of 0.1 to ensure nodes don't fly away. + During the simulation, `k` helps determine the distance between nodes, + though `scale` and `center` determine the size and place after + rescaling occurs at the end of the simulation. + + Fixing some nodes doesn't allow them to move in the simulation. + It also turns off the rescaling feature at the simulation's end. + In addition, setting `scale` to `None` turns off rescaling. + Parameters ---------- G : NetworkX graph or list of nodes @@ -269,6 +402,7 @@ def fruchterman_reingold_layout(G, fixed : list or None optional (default=None) Nodes to keep fixed at initial position. + ValueError raised if `fixed` specified and `pos` not. iterations : int optional (default=50) Maximum number of iterations taken @@ -281,8 +415,9 @@ def fruchterman_reingold_layout(G, The edge attribute that holds the numerical value used for the edge weight. If None, then all edge weights are 1. - scale : number (default: 1) + scale : number or None (default: 1) Scale factor for positions. Not used unless `fixed is None`. + If scale is None, no rescaling is performed. center : array-like or None Coordinate pair around which to center the layout. @@ -291,10 +426,10 @@ def fruchterman_reingold_layout(G, dim : int Dimension of layout. 
- random_state : int, RandomState instance or None optional (default=None) + seed : int, RandomState instance or None optional (default=None) Set the random state for deterministic node layouts. - If int, `random_state` is the seed used by the random number generator, - if numpy.random.RandomState instance, `random_state` is the random + If int, `seed` is the seed used by the random number generator, + if numpy.random.RandomState instance, `seed` is the random number generator, if None, the random number generator is the RandomState instance used by numpy.random. @@ -317,22 +452,27 @@ def fruchterman_reingold_layout(G, G, center = _process_params(G, center, dim) if fixed is not None: - nfixed = dict(zip(G, range(len(G)))) - fixed = np.asarray([nfixed[v] for v in fixed]) + if pos is None: + raise ValueError("nodes are fixed without positions given") + for node in fixed: + if node not in pos: + raise ValueError("nodes are fixed without positions given") + nfixed = {node: i for i, node in enumerate(G)} + fixed = np.asarray([nfixed[node] for node in fixed]) if pos is not None: # Determine size of existing domain to adjust initial positions dom_size = max(coord for pos_tup in pos.values() for coord in pos_tup) if dom_size == 0: dom_size = 1 - shape = (len(G), dim) - pos_arr = random_state.rand(*shape) * dom_size + center + pos_arr = seed.rand(len(G), dim) * dom_size + center for i, n in enumerate(G): if n in pos: pos_arr[i] = np.asarray(pos[n]) else: pos_arr = None + dom_size = 1 if len(G) == 0: return {} @@ -343,23 +483,24 @@ def fruchterman_reingold_layout(G, # Sparse matrix if len(G) < 500: # sparse solver for large graphs raise ValueError - A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='f') + A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype="f") if k is None and fixed is not None: # We must adjust k by domain size for layouts not near 1x1 nnodes, _ = A.shape k = dom_size / np.sqrt(nnodes) - pos = _sparse_fruchterman_reingold(A, k, pos_arr, fixed, - iterations, threshold, - dim, random_state) - except: - A = nx.to_numpy_matrix(G, weight=weight) + pos = _sparse_fruchterman_reingold( + A, k, pos_arr, fixed, iterations, threshold, dim, seed + ) + except ValueError: + A = nx.to_numpy_array(G, weight=weight) if k is None and fixed is not None: # We must adjust k by domain size for layouts not near 1x1 nnodes, _ = A.shape k = dom_size / np.sqrt(nnodes) - pos = _fruchterman_reingold(A, k, pos_arr, fixed, iterations, - threshold, dim, random_state) - if fixed is None: + pos = _fruchterman_reingold( + A, k, pos_arr, fixed, iterations, threshold, dim, seed + ) + if fixed is None and scale is not None: pos = rescale_layout(pos, scale=scale) + center pos = dict(zip(G, pos)) return pos @@ -369,28 +510,22 @@ def fruchterman_reingold_layout(G, @random_state(7) -def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50, - threshold=1e-4, dim=2, random_state=None): +def _fruchterman_reingold( + A, k=None, pos=None, fixed=None, iterations=50, threshold=1e-4, dim=2, seed=None +): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() - try: - import numpy as np - except ImportError: - msg = "_fruchterman_reingold() requires numpy: http://scipy.org/ " - raise ImportError(msg) + import numpy as np try: nnodes, _ = A.shape - except AttributeError: + except AttributeError as e: msg = "fruchterman_reingold() takes an adjacency matrix as input" - raise nx.NetworkXError(msg) - - # make sure we have an array 
instead of a matrix - A = np.asarray(A) + raise nx.NetworkXError(msg) from e if pos is None: # random initial positions - pos = np.asarray(random_state.rand(nnodes, dim), dtype=A.dtype) + pos = np.asarray(seed.rand(nnodes, dim), dtype=A.dtype) else: # make sure positions are of same type as matrix pos = pos.astype(A.dtype) @@ -418,13 +553,13 @@ def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50, # enforce minimum distance of 0.01 np.clip(distance, 0.01, None, out=distance) # displacement "force" - displacement = np.einsum('ijk,ij->ik', - delta, - (k * k / distance**2 - A * distance / k)) + displacement = np.einsum( + "ijk,ij->ik", delta, (k * k / distance ** 2 - A * distance / k) + ) # update positions length = np.linalg.norm(displacement, axis=-1) length = np.where(length < 0.01, 0.1, length) - delta_pos = np.einsum('ij,i->ij', displacement, t / length) + delta_pos = np.einsum("ij,i->ij", displacement, t / length) if fixed is not None: # don't change positions of fixed nodes delta_pos[fixed] = 0.0 @@ -438,36 +573,33 @@ def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50, @random_state(7) -def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None, - iterations=50, threshold=1e-4, dim=2, - random_state=None): +def _sparse_fruchterman_reingold( + A, k=None, pos=None, fixed=None, iterations=50, threshold=1e-4, dim=2, seed=None +): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() # Sparse version - try: - import numpy as np - except ImportError: - m = "_sparse_fruchterman_reingold() requires numpy: http://scipy.org/" - raise ImportError(m) + import numpy as np + try: nnodes, _ = A.shape - except AttributeError: + except AttributeError as e: msg = "fruchterman_reingold() takes an adjacency matrix as input" - raise nx.NetworkXError(msg) + raise nx.NetworkXError(msg) from e try: - from scipy.sparse import spdiags, coo_matrix - except ImportError: + from scipy.sparse import coo_matrix + except ImportError as e: msg = "_sparse_fruchterman_reingold() scipy numpy: http://scipy.org/ " - raise ImportError(msg) + raise ImportError(msg) from e # make sure we have a LIst of Lists representation try: A = A.tolil() - except: + except AttributeError: A = (coo_matrix(A)).tolil() if pos is None: # random initial positions - pos = np.asarray(random_state.rand(nnodes, dim), dtype=A.dtype) + pos = np.asarray(seed.rand(nnodes, dim), dtype=A.dtype) else: # make sure positions are of same type as matrix pos = pos.astype(A.dtype) @@ -496,16 +628,17 @@ def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None, # difference between this row's node position and all others delta = (pos[i] - pos).T # distance between points - distance = np.sqrt((delta**2).sum(axis=0)) + distance = np.sqrt((delta ** 2).sum(axis=0)) # enforce minimum distance of 0.01 distance = np.where(distance < 0.01, 0.01, distance) # the adjacency matrix row Ai = np.asarray(A.getrowview(i).toarray()) # displacement "force" - displacement[:, i] +=\ - (delta * (k * k / distance**2 - Ai * distance / k)).sum(axis=1) + displacement[:, i] += ( + delta * (k * k / distance ** 2 - Ai * distance / k) + ).sum(axis=1) # update positions - length = np.sqrt((displacement**2).sum(axis=0)) + length = np.sqrt((displacement ** 2).sum(axis=0)) length = np.where(length < 0.01, 0.1, length) delta_pos = (displacement * t / length).T pos += delta_pos @@ -517,12 +650,9 @@ def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None, 
return pos -def kamada_kawai_layout(G, dist=None, - pos=None, - weight='weight', - scale=1, - center=None, - dim=2): +def kamada_kawai_layout( + G, dist=None, pos=None, weight="weight", scale=1, center=None, dim=2 +): """Position nodes using Kamada-Kawai path-length cost-function. Parameters @@ -530,7 +660,7 @@ def kamada_kawai_layout(G, dist=None, G : NetworkX graph or list of nodes A position will be assigned to every node in G. - dist : float (default=None) + dist : dict (default=None) A two-level dictionary of optimal distances between nodes, indexed by source and destination node. If None, the distance is computed using shortest_path_length(). @@ -538,7 +668,7 @@ def kamada_kawai_layout(G, dist=None, pos : dict or None optional (default=None) Initial positions for nodes as a dictionary with node as keys and values as a coordinate list or tuple. If None, then use - circular_layout(). + circular_layout() for dim >= 2 and a linear layout for dim == 1. weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for @@ -563,14 +693,12 @@ def kamada_kawai_layout(G, dist=None, >>> G = nx.path_graph(4) >>> pos = nx.kamada_kawai_layout(G) """ - try: - import numpy as np - except ImportError: - msg = 'Kamada-Kawai layout requires numpy: http://scipy.org' - raise ImportError(msg) + import numpy as np G, center = _process_params(G, center, dim) nNodes = len(G) + if nNodes == 0: + return {} if dist is None: dist = dict(nx.shortest_path_length(G, weight=weight)) @@ -585,7 +713,12 @@ def kamada_kawai_layout(G, dist=None, dist_mtx[row][col] = rdist[nc] if pos is None: - pos = circular_layout(G, dim=dim) + if dim >= 3: + pos = random_layout(G, dim=dim) + elif dim == 2: + pos = circular_layout(G, dim=dim) + else: + pos = {n: pt for n, pt in zip(G, np.linspace(0, 1, len(G)))} pos_arr = np.array([pos[n] for n in G]) pos = _kamada_kawai_solve(dist_mtx, pos_arr, dim) @@ -600,18 +733,18 @@ def _kamada_kawai_solve(dist_mtx, pos_arr, dim): # and starting locations. 
import numpy as np - try: - from scipy.optimize import minimize - except ImportError: - msg = 'Kamada-Kawai layout requires scipy: http://scipy.org' - raise ImportError(msg) + from scipy.optimize import minimize meanwt = 1e-3 - costargs = (np, 1 / (dist_mtx + np.eye(dist_mtx.shape[0]) * 1e-3), - meanwt, dim) + costargs = (np, 1 / (dist_mtx + np.eye(dist_mtx.shape[0]) * 1e-3), meanwt, dim) - optresult = minimize(_kamada_kawai_costfn, pos_arr.ravel(), - method='L-BFGS-B', args=costargs, jac=True) + optresult = minimize( + _kamada_kawai_costfn, + pos_arr.ravel(), + method="L-BFGS-B", + args=costargs, + jac=True, + ) return optresult.x.reshape((-1, dim)) @@ -623,16 +756,15 @@ def _kamada_kawai_costfn(pos_vec, np, invdist, meanweight, dim): delta = pos_arr[:, np.newaxis, :] - pos_arr[np.newaxis, :, :] nodesep = np.linalg.norm(delta, axis=-1) - direction = np.einsum('ijk,ij->ijk', - delta, - 1 / (nodesep + np.eye(nNodes) * 1e-3)) + direction = np.einsum("ijk,ij->ijk", delta, 1 / (nodesep + np.eye(nNodes) * 1e-3)) offset = nodesep * invdist - 1.0 offset[np.diag_indices(nNodes)] = 0 cost = 0.5 * np.sum(offset ** 2) - grad = (np.einsum('ij,ij,ijk->ik', invdist, offset, direction) - - np.einsum('ij,ij,ijk->jk', invdist, offset, direction)) + grad = np.einsum("ij,ij,ijk->ik", invdist, offset, direction) - np.einsum( + "ij,ij,ijk->jk", invdist, offset, direction + ) # Additional parabolic term to encourage mean position to be near origin: sumpos = np.sum(pos_arr, axis=0) @@ -642,9 +774,14 @@ def _kamada_kawai_costfn(pos_vec, np, invdist, meanweight, dim): return (cost, grad.ravel()) -def spectral_layout(G, weight='weight', scale=1, center=None, dim=2): +def spectral_layout(G, weight="weight", scale=1, center=None, dim=2): """Position nodes using the eigenvectors of the graph Laplacian. + Using the unnormalized Laplacian, the layout shows possible clusters of + nodes which are an approximation of the ratio cut. If dim is the number of + dimensions then the positions are the entries of the dim eigenvectors + corresponding to the ascending eigenvalues starting from the second one. 
+ Parameters ---------- G : NetworkX graph or list of nodes @@ -698,20 +835,20 @@ def spectral_layout(G, weight='weight', scale=1, center=None, dim=2): # Sparse matrix if len(G) < 500: # dense solver is faster for small graphs raise ValueError - A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='d') + A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype="d") # Symmetrize directed graphs if G.is_directed(): A = A + np.transpose(A) pos = _sparse_spectral(A, dim) except (ImportError, ValueError): # Dense matrix - A = nx.to_numpy_matrix(G, weight=weight) + A = nx.to_numpy_array(G, weight=weight) # Symmetrize directed graphs if G.is_directed(): - A = A + np.transpose(A) + A += A.T pos = _spectral(A, dim) - pos = rescale_layout(pos, scale) + center + pos = rescale_layout(pos, scale=scale) + center pos = dict(zip(G, pos)) return pos @@ -719,27 +856,21 @@ def spectral_layout(G, weight='weight', scale=1, center=None, dim=2): def _spectral(A, dim=2): # Input adjacency matrix A # Uses dense eigenvalue solver from numpy - try: - import numpy as np - except ImportError: - msg = "spectral_layout() requires numpy: http://scipy.org/ " - raise ImportError(msg) + import numpy as np + try: nnodes, _ = A.shape - except AttributeError: + except AttributeError as e: msg = "spectral() takes an adjacency matrix as input" - raise nx.NetworkXError(msg) + raise nx.NetworkXError(msg) from e - # form Laplacian matrix - # make sure we have an array instead of a matrix - A = np.asarray(A) - I = np.identity(nnodes, dtype=A.dtype) - D = I * np.sum(A, axis=1) # diagonal of degrees + # form Laplacian matrix where D is diagonal of degrees + D = np.identity(nnodes, dtype=A.dtype) * np.sum(A, axis=1) L = D - A eigenvalues, eigenvectors = np.linalg.eig(L) # sort and keep smallest nonzero - index = np.argsort(eigenvalues)[1:dim + 1] # 0 index is zero eigenvalue + index = np.argsort(eigenvalues)[1 : dim + 1] # 0 index is zero eigenvalue return np.real(eigenvectors[:, index]) @@ -747,18 +878,15 @@ def _sparse_spectral(A, dim=2): # Input adjacency matrix A # Uses sparse eigenvalue solver from scipy # Could use multilevel methods here, see Koren "On spectral graph drawing" - try: - import numpy as np - from scipy.sparse import spdiags - from scipy.sparse.linalg.eigen import eigsh - except ImportError: - msg = "_sparse_spectral() requires scipy & numpy: http://scipy.org/ " - raise ImportError(msg) + import numpy as np + from scipy.sparse import spdiags + from scipy.sparse.linalg.eigen import eigsh + try: nnodes, _ = A.shape - except AttributeError: + except AttributeError as e: msg = "sparse_spectral() takes an adjacency matrix as input" - raise nx.NetworkXError(msg) + raise nx.NetworkXError(msg) from e # form Laplacian matrix data = np.asarray(A.sum(axis=1).T) @@ -769,13 +897,244 @@ def _sparse_spectral(A, dim=2): # number of Lanczos vectors for ARPACK solver.What is the right scaling? ncv = max(2 * k + 1, int(np.sqrt(nnodes))) # return smallest k eigenvalues and eigenvectors - eigenvalues, eigenvectors = eigsh(L, k, which='SM', ncv=ncv) + eigenvalues, eigenvectors = eigsh(L, k, which="SM", ncv=ncv) index = np.argsort(eigenvalues)[1:k] # 0 index is zero eigenvalue return np.real(eigenvectors[:, index]) +def planar_layout(G, scale=1, center=None, dim=2): + """Position nodes without edge intersections. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. If G is of type + nx.PlanarEmbedding, the positions are selected accordingly. 
+ + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int + Dimension of layout. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Raises + ------ + NetworkXException + If G is not planar + + Examples + -------- + >>> G = nx.path_graph(4) + >>> pos = nx.planar_layout(G) + """ + import numpy as np + + if dim != 2: + raise ValueError("can only handle 2 dimensions") + + G, center = _process_params(G, center, dim) + + if len(G) == 0: + return {} + + if isinstance(G, nx.PlanarEmbedding): + embedding = G + else: + is_planar, embedding = nx.check_planarity(G) + if not is_planar: + raise nx.NetworkXException("G is not planar.") + pos = nx.combinatorial_embedding_to_pos(embedding) + node_list = list(embedding) + pos = np.row_stack([pos[x] for x in node_list]) + pos = pos.astype(np.float64) + pos = rescale_layout(pos, scale=scale) + center + return dict(zip(node_list, pos)) + + +def spiral_layout(G, scale=1, center=None, dim=2, resolution=0.35, equidistant=False): + """Position nodes in a spiral layout. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + scale : number (default: 1) + Scale factor for positions. + center : array-like or None + Coordinate pair around which to center the layout. + dim : int + Dimension of layout, currently only dim=2 is supported. + Other dimension values result in a ValueError. + resolution : float + The compactness of the spiral layout returned. + Lower values result in more compressed spiral layouts. + equidistant : bool + If True, nodes will be plotted equidistant from each other. + Returns + ------- + pos : dict + A dictionary of positions keyed by node + Raises + ------- + ValueError + If dim != 2 + + Examples + -------- + >>> G = nx.path_graph(4) + >>> pos = nx.spiral_layout(G) + + Notes + ----- + This algorithm currently only works in two dimensions. + + """ + import numpy as np + + if dim != 2: + raise ValueError("can only handle 2 dimensions") + + G, center = _process_params(G, center, dim) + + if len(G) == 0: + return {} + if len(G) == 1: + return {nx.utils.arbitrary_element(G): center} + + pos = [] + if equidistant: + chord = 1 + step = 0.5 + theta = resolution + for _ in range(len(G)): + r = step * theta + theta += chord / r + pos.append([np.cos(theta) * r, np.sin(theta) * r]) + + else: + # set the starting angle and step + step = 1 + angle = 0.0 + dist = 0.0 + # set the radius for the spiral to the number of nodes in the graph + radius = len(G) + + while dist * np.hypot(np.cos(angle), np.sin(angle)) < radius: + pos.append([dist * np.cos(angle), dist * np.sin(angle)]) + dist += step + angle += resolution + + pos = rescale_layout(np.array(pos), scale=scale) + center + + pos = dict(zip(G, pos)) + + return pos + + +def multipartite_layout(G, subset_key="subset", align="vertical", scale=1, center=None): + """Position nodes in layers of straight lines. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + subset_key : string (default='subset') + Key of node data to be used as layer subset. + + align : string (default='vertical') + The alignment of nodes. Vertical or horizontal. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node. 
+ + Examples + -------- + >>> G = nx.complete_multipartite_graph(28, 16, 10) + >>> pos = nx.multipartite_layout(G) + + Notes + ----- + This algorithm currently only works in two dimensions and does not + try to minimize edge crossings. + + Network does not need to be a complete multipartite graph. As long as nodes + have subset_key data, they will be placed in the corresponding layers. + + """ + import numpy as np + + G, center = _process_params(G, center=center, dim=2) + if len(G) == 0: + return {} + + layers = {} + for v, data in G.nodes(data=True): + try: + layer = data[subset_key] + except KeyError: + msg = "all nodes must have subset_key (default='subset') as data" + raise ValueError(msg) + layers[layer] = [v] + layers.get(layer, []) + + pos = None + nodes = [] + if align == "vertical": + width = len(layers) + for i, layer in layers.items(): + height = len(layer) + xs = np.repeat(i, height) + ys = np.arange(0, height, dtype=float) + offset = ((width - 1) / 2, (height - 1) / 2) + layer_pos = np.column_stack([xs, ys]) - offset + if pos is None: + pos = layer_pos + else: + pos = np.concatenate([pos, layer_pos]) + nodes.extend(layer) + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(nodes, pos)) + return pos + + if align == "horizontal": + height = len(layers) + for i, layer in layers.items(): + width = len(layer) + xs = np.arange(0, width, dtype=float) + ys = np.repeat(i, width) + offset = ((width - 1) / 2, (height - 1) / 2) + layer_pos = np.column_stack([xs, ys]) - offset + if pos is None: + pos = layer_pos + else: + pos = np.concatenate([pos, layer_pos]) + nodes.extend(layer) + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(nodes, pos)) + return pos + + msg = "align must be either vertical or horizontal." + raise ValueError(msg) + + def rescale_layout(pos, scale=1): - """Return scaled position array to (-scale, scale) in all axes. + """Returns scaled position array to (-scale, scale) in all axes. The function acts on NumPy arrays which hold position information. Each position is one row of the array. The dimension of the space @@ -799,6 +1158,9 @@ def rescale_layout(pos, scale=1): pos : numpy array scaled positions. Each row is a position. + See Also + -------- + rescale_layout_dict """ # Find max length over all dimensions lim = 0 # max coordinate for all axes @@ -812,14 +1174,38 @@ def rescale_layout(pos, scale=1): return pos -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") +def rescale_layout_dict(pos, scale=1): + """Return a dictionary of scaled positions keyed by node + + Parameters + ---------- + pos : A dictionary of positions keyed by node + + scale : number (default: 1) + The size of the resulting extent in all directions. 
+ + Returns + ------- + pos : A dictionary of positions keyed by node + + Examples + -------- + >>> pos = {0: (0, 0), 1: (1, 1), 2: (0.5, 0.5)} + >>> nx.rescale_layout_dict(pos) + {0: (-1.0, -1.0), 1: (1.0, 1.0), 2: (0.0, 0.0)} + + >>> pos = {0: (0, 0), 1: (-1, 1), 2: (-0.5, 0.5)} + >>> nx.rescale_layout_dict(pos, scale=2) + {0: (2.0, -2.0), 1: (-2.0, 2.0), 2: (0.0, 0.0)} + + See Also + -------- + rescale_layout + """ + import numpy as np + + if not pos: # empty_graph + return {} + pos_v = np.array(list(pos.values())) + pos_v = rescale_layout(pos_v, scale=scale) + return {k: tuple(v) for k, v in zip(pos.keys(), pos_v)} diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py index d58952b..c508cf9 100644 --- a/networkx/drawing/nx_agraph.py +++ b/networkx/drawing/nx_agraph.py @@ -1,11 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """ *************** Graphviz AGraph @@ -27,31 +19,34 @@ import tempfile import networkx as nx -__all__ = ['from_agraph', 'to_agraph', - 'write_dot', 'read_dot', - 'graphviz_layout', - 'pygraphviz_layout', - 'view_pygraphviz'] +__all__ = [ + "from_agraph", + "to_agraph", + "write_dot", + "read_dot", + "graphviz_layout", + "pygraphviz_layout", + "view_pygraphviz", +] def from_agraph(A, create_using=None): - """Return a NetworkX Graph or DiGraph from a PyGraphviz graph. + """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph. Parameters ---------- A : PyGraphviz AGraph A graph created with PyGraphviz - create_using : NetworkX graph class instance - The output is created using the given graph class instance + create_using : NetworkX graph constructor, optional (default=None) + Graph type to create. If graph instance, then cleared before populated. + If `None`, then the appropriate Graph type is inferred from `A`. Examples -------- >>> K5 = nx.complete_graph(5) >>> A = nx.nx_agraph.to_agraph(K5) >>> G = nx.nx_agraph.from_agraph(A) - >>> G = nx.nx_agraph.from_agraph(A) - Notes ----- @@ -69,14 +64,14 @@ def from_agraph(A, create_using=None): if create_using is None: if A.is_directed(): if A.is_strict(): - create_using = nx.DiGraph() + create_using = nx.DiGraph else: - create_using = nx.MultiDiGraph() + create_using = nx.MultiDiGraph else: if A.is_strict(): - create_using = nx.Graph() + create_using = nx.Graph else: - create_using = nx.MultiGraph() + create_using = nx.MultiGraph # assign defaults N = nx.empty_graph(0, create_using) @@ -98,21 +93,21 @@ def from_agraph(A, create_using=None): str_attr = {str(k): v for k, v in attr.items()} if not N.is_multigraph(): if e.name is not None: - str_attr['key'] = e.name + str_attr["key"] = e.name N.add_edge(u, v, **str_attr) else: N.add_edge(u, v, key=e.name, **str_attr) # add default attributes for graph, nodes, and edges # hang them on N.graph_attr - N.graph['graph'] = dict(A.graph_attr) - N.graph['node'] = dict(A.node_attr) - N.graph['edge'] = dict(A.edge_attr) + N.graph["graph"] = dict(A.graph_attr) + N.graph["node"] = dict(A.node_attr) + N.graph["edge"] = dict(A.edge_attr) return N def to_agraph(N): - """Return a pygraphviz graph from a NetworkX graph N. + """Returns a pygraphviz graph from a NetworkX graph N. 
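A sketch of the create_using behavior documented for from_agraph above;
passing a graph constructor bypasses the directed/strict inference
(pygraphviz must be installed):

    import networkx as nx

    K3 = nx.complete_graph(3)
    A = nx.nx_agraph.to_agraph(K3)
    H = nx.nx_agraph.from_agraph(A, create_using=nx.Graph)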
Parameters ---------- @@ -133,43 +128,44 @@ def to_agraph(N): """ try: import pygraphviz - except ImportError: - raise ImportError('requires pygraphviz ', - 'http://pygraphviz.github.io/') + except ImportError as e: + raise ImportError("requires pygraphviz " "http://pygraphviz.github.io/") from e directed = N.is_directed() strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed) # default graph attributes - A.graph_attr.update(N.graph.get('graph', {})) - A.node_attr.update(N.graph.get('node', {})) - A.edge_attr.update(N.graph.get('edge', {})) + A.graph_attr.update(N.graph.get("graph", {})) + A.node_attr.update(N.graph.get("node", {})) + A.edge_attr.update(N.graph.get("edge", {})) - A.graph_attr.update(N.graph) + A.graph_attr.update( + (k, v) for k, v in N.graph.items() if k not in ("graph", "node", "edge") + ) # add nodes for n, nodedata in N.nodes(data=True): A.add_node(n) - if nodedata is not None: - a = A.get_node(n) - a.attr.update({k: str(v) for k, v in nodedata.items()}) + # Add node data + a = A.get_node(n) + a.attr.update({k: str(v) for k, v in nodedata.items()}) # loop over edges if N.is_multigraph(): for u, v, key, edgedata in N.edges(data=True, keys=True): - str_edgedata = {k: str(v) for k, v in edgedata.items() if k != 'key'} + str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"} A.add_edge(u, v, key=str(key)) - if edgedata is not None: - a = A.get_edge(u, v) - a.attr.update(str_edgedata) + # Add edge data + a = A.get_edge(u, v) + a.attr.update(str_edgedata) else: for u, v, edgedata in N.edges(data=True): str_edgedata = {k: str(v) for k, v in edgedata.items()} A.add_edge(u, v) - if edgedata is not None: - a = A.get_edge(u, v) - a.attr.update(str_edgedata) + # Add edge data + a = A.get_edge(u, v) + a.attr.update(str_edgedata) return A @@ -184,11 +180,6 @@ def write_dot(G, path): path : filename Filename or file handle to write """ - try: - import pygraphviz - except ImportError: - raise ImportError('requires pygraphviz ', - 'http://pygraphviz.github.io/') A = to_agraph(G) A.write(path) A.clear() @@ -196,7 +187,7 @@ def write_dot(G, path): def read_dot(path): - """Return a NetworkX graph from a dot file on path. + """Returns a NetworkX graph from a dot file on path. Parameters ---------- @@ -205,14 +196,17 @@ def read_dot(path): """ try: import pygraphviz - except ImportError: - raise ImportError('read_dot() requires pygraphviz ', - 'http://pygraphviz.github.io/') + except ImportError as e: + raise ImportError( + "read_dot() requires pygraphviz " "http://pygraphviz.github.io/" + ) from e A = pygraphviz.AGraph(file=path) - return from_agraph(A) + gr = from_agraph(A) + A.clear() + return gr -def graphviz_layout(G, prog='neato', root=None, args=''): +def graphviz_layout(G, prog="neato", root=None, args=""): """Create node positions for G using Graphviz. Parameters @@ -226,24 +220,24 @@ def graphviz_layout(G, prog='neato', root=None, args=''): args : string, optional Extra arguments to Graphviz layout program - Returns : dictionary + Returns + ------- Dictionary of x, y, positions keyed by node. Examples -------- >>> G = nx.petersen_graph() >>> pos = nx.nx_agraph.graphviz_layout(G) - >>> pos = nx.nx_agraph.graphviz_layout(G, prog='dot') + >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot") Notes ----- This is a wrapper for pygraphviz_layout. 
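Because graphviz_layout simply forwards its arguments, the two entry points
are interchangeable (this equality is asserted by test_graphviz_alias in the
test suite below):

    import networkx as nx

    G = nx.petersen_graph()
    assert nx.nx_agraph.graphviz_layout(G) == nx.nx_agraph.pygraphviz_layout(G)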
- """ return pygraphviz_layout(G, prog=prog, root=root, args=args) -def pygraphviz_layout(G, prog='neato', root=None, args=''): +def pygraphviz_layout(G, prog="neato", root=None, args=""): """Create node positions for G using Graphviz. Parameters @@ -257,40 +251,54 @@ def pygraphviz_layout(G, prog='neato', root=None, args=''): args : string, optional Extra arguments to Graphviz layout program - Returns : dictionary + Returns + ------- + node_pos : dict Dictionary of x, y, positions keyed by node. Examples -------- >>> G = nx.petersen_graph() >>> pos = nx.nx_agraph.graphviz_layout(G) - >>> pos = nx.nx_agraph.graphviz_layout(G, prog='dot') + >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot") + + Notes + ----- + If you use complex node objects, they may have the same string + representation and GraphViz could treat them as the same node. + The layout may assign both nodes a single location. See Issue #1568 + If this occurs in your case, consider relabeling the nodes just + for the layout computation using something similar to:: + + >>> H = nx.convert_node_labels_to_integers(G, label_attribute="node_label") + >>> H_layout = nx.nx_agraph.pygraphviz_layout(G, prog="dot") + >>> G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()} """ try: import pygraphviz - except ImportError: - raise ImportError('requires pygraphviz ', - 'http://pygraphviz.github.io/') + except ImportError as e: + raise ImportError("requires pygraphviz " "http://pygraphviz.github.io/") from e if root is not None: - args += "-Groot=%s" % root + args += f"-Groot={root}" A = to_agraph(G) A.layout(prog=prog, args=args) node_pos = {} for n in G: node = pygraphviz.Node(A, n) try: - xx, yy = node.attr["pos"].split(',') - node_pos[n] = (float(xx), float(yy)) + xs = node.attr["pos"].split(",") + node_pos[n] = tuple(float(x) for x in xs) except: print("no position for node", n) node_pos[n] = (0.0, 0.0) return node_pos -@nx.utils.open_file(5, 'w') -def view_pygraphviz(G, edgelabel=None, prog='dot', args='', - suffix='', path=None): +@nx.utils.open_file(5, "w+b") +def view_pygraphviz( + G, edgelabel=None, prog="dot", args="", suffix="", path=None, show=True +): """Views the graph G using the specified layout algorithm. Parameters @@ -313,6 +321,10 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', path : str, None The filename used to save the image. If None, save to a temporary file. File formats are the same as those from pygraphviz.agraph.draw. + show : bool, default = True + Whether to display the graph with `networkx.utils.default_opener`, + default is `True`. If `False`, the rendered graph is still available + at `path`. Returns ------- @@ -331,8 +343,6 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', if not len(G): raise nx.NetworkXException("An empty graph cannot be drawn.") - import pygraphviz - # If we are providing default values for graphviz, these must be set # before any nodes or edges are added to the PyGraphviz graph object. # The reason for this is that default values only affect incoming objects. @@ -340,18 +350,20 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', # then they inherit no value and are set only if explicitly set. # to_agraph() uses these values. - attrs = ['edge', 'node', 'graph'] + attrs = ["edge", "node", "graph"] for attr in attrs: if attr not in G.graph: G.graph[attr] = {} # These are the default values. 
- edge_attrs = {'fontsize': '10'} - node_attrs = {'style': 'filled', - 'fillcolor': '#0000FF40', - 'height': '0.75', - 'width': '0.75', - 'shape': 'circle'} + edge_attrs = {"fontsize": "10"} + node_attrs = { + "style": "filled", + "fillcolor": "#0000FF40", + "height": "0.75", + "width": "0.75", + "shape": "circle", + } graph_attrs = {} def update_attrs(which, attrs): @@ -370,23 +382,25 @@ def clean_attrs(which, added): del G.graph[which] # Update all default values - update_attrs('edge', edge_attrs) - update_attrs('node', node_attrs) - update_attrs('graph', graph_attrs) + update_attrs("edge", edge_attrs) + update_attrs("node", node_attrs) + update_attrs("graph", graph_attrs) # Convert to agraph, so we inherit default values A = to_agraph(G) # Remove the default values we added to the original graph. - clean_attrs('edge', edge_attrs) - clean_attrs('node', node_attrs) - clean_attrs('graph', graph_attrs) + clean_attrs("edge", edge_attrs) + clean_attrs("node", node_attrs) + clean_attrs("graph", graph_attrs) # If the user passed in an edgelabel, we update the labels for all edges. if edgelabel is not None: - if not hasattr(edgelabel, '__call__'): + if not hasattr(edgelabel, "__call__"): + def func(data): - return ''.join([" ", str(data[edgelabel]), " "]) + return "".join([" ", str(data[edgelabel]), " "]) + else: func = edgelabel @@ -395,29 +409,35 @@ def func(data): for u, v, key, data in G.edges(keys=True, data=True): # PyGraphviz doesn't convert the key to a string. See #339 edge = A.get_edge(u, v, str(key)) - edge.attr['label'] = str(func(data)) + edge.attr["label"] = str(func(data)) else: for u, v, data in G.edges(data=True): edge = A.get_edge(u, v) - edge.attr['label'] = str(func(data)) + edge.attr["label"] = str(func(data)) if path is None: - ext = 'png' + ext = "png" if suffix: - suffix = '_%s.%s' % (suffix, ext) + suffix = f"_{suffix}.{ext}" else: - suffix = '.%s' % (ext,) + suffix = f".{ext}" path = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) else: # Assume the decorator worked and it is a file-object. pass - display_pygraphviz(A, path=path, prog=prog, args=args) + # Write graph to file + A.draw(path=path, format=None, prog=prog, args=args) + path.close() + + # Show graph in a new window (depends on platform configuration) + if show: + nx.utils.default_opener(path.name) return path.name, A -def display_pygraphviz(graph, path, format=None, prog=None, args=''): +def display_pygraphviz(graph, path, format=None, prog=None, args=""): """Internal function to display a graph in OS dependent manner. Parameters @@ -441,6 +461,14 @@ def display_pygraphviz(graph, path, format=None, prog=None, args=''): calls if you experience problems. """ + import warnings + + warnings.warn( + "display_pygraphviz is deprecated and will be removed in NetworkX 3.0. " + "To view a graph G using pygraphviz, use nx.nx_agraph.view_pygraphviz(G). 
" + "To view a graph from file, consider nx.utils.default_opener(filename).", + DeprecationWarning, + ) if format is None: filename = path.name format = os.path.splitext(filename)[1].lower()[1:] @@ -453,12 +481,3 @@ def display_pygraphviz(graph, path, format=None, prog=None, args=''): graph.draw(path, format, prog, args) path.close() nx.utils.default_opener(filename) - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import pygraphviz - except: - raise SkipTest("pygraphviz not available") diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py index 3ba631d..1591e78 100644 --- a/networkx/drawing/nx_pydot.py +++ b/networkx/drawing/nx_pydot.py @@ -10,42 +10,24 @@ See Also -------- pydot: https://github.com/erocarrera/pydot -Graphviz: http://www.research.att.com/sw/tools/graphviz/ +Graphviz: https://www.graphviz.org DOT Language: http://www.graphviz.org/doc/info/lang.html """ -# Author: Aric Hagberg (aric.hagberg@gmail.com) - -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Cecil Curry -# All rights reserved. -# BSD license. from locale import getpreferredencoding -from networkx.utils import open_file, make_str -from pkg_resources import parse_version +from networkx.utils import open_file import networkx as nx -__all__ = ['write_dot', 'read_dot', 'graphviz_layout', 'pydot_layout', - 'to_pydot', 'from_pydot'] +__all__ = [ + "write_dot", + "read_dot", + "graphviz_layout", + "pydot_layout", + "to_pydot", + "from_pydot", +] -# Minimum required version of pydot, which broke backwards API compatibility in -# non-trivial ways and is thus a hard NetworkX requirement. Note that, although -# pydot 1.2.0 was the first to do so, pydot 1.2.3 resolves a critical long- -# standing Python 2.x issue required for sane NetworkX operation. See also: -# https://github.com/erocarrera/pydot/blob/master/ChangeLog -PYDOT_VERSION_MIN = '1.2.3' -# 2.x/3.x compatibility -try: - basestring -except NameError: - basestring = str - unicode = str - - -@open_file(1, mode='w') +@open_file(1, mode="w") def write_dot(G, path): """Write NetworkX graph G to Graphviz dot format on path. @@ -56,9 +38,9 @@ def write_dot(G, path): return -@open_file(0, mode='r') +@open_file(0, mode="r") def read_dot(path): - """Return a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the + """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the dot file with the passed path. If this file contains multiple graphs, only the first such graph is @@ -79,7 +61,8 @@ def read_dot(path): Use `G = nx.Graph(read_dot(path))` to return a :class:`Graph` instead of a :class:`MultiGraph`. """ - pydot = _import_pydot() + import pydot + data = path.read() # List of one or more "pydot.Dot" instances deserialized from this file. @@ -90,7 +73,7 @@ def read_dot(path): def from_pydot(P): - """Return a NetworkX graph from a Pydot graph. + """Returns a NetworkX graph from a Pydot graph. 
Parameters ---------- @@ -106,7 +89,7 @@ def from_pydot(P): -------- >>> K5 = nx.complete_graph(5) >>> A = nx.nx_pydot.to_pydot(K5) - >>> G = nx.nx_pydot.from_pydot(A) # return MultiGraph + >>> G = nx.nx_pydot.from_pydot(A) # return MultiGraph # make a Graph instead of MultiGraph >>> G = nx.Graph(nx.nx_pydot.from_pydot(A)) @@ -117,7 +100,7 @@ def from_pydot(P): else: multiedges = True - if P.get_type() == 'graph': # undirected + if P.get_type() == "graph": # undirected if multiedges: N = nx.MultiGraph() else: @@ -130,13 +113,13 @@ def from_pydot(P): # assign defaults name = P.get_name().strip('"') - if name != '': + if name != "": N.name = name # add nodes, attributes to N.node_attr for p in P.get_node_list(): n = p.get_name().strip('"') - if n in ('node', 'graph', 'edge'): + if n in ("node", "graph", "edge"): continue N.add_node(n, **p.get_attributes()) @@ -148,16 +131,16 @@ def from_pydot(P): s = [] d = [] - if isinstance(u, basestring): + if isinstance(u, str): s.append(u.strip('"')) else: - for unodes in u['nodes']: + for unodes in u["nodes"]: s.append(unodes.strip('"')) - if isinstance(v, basestring): + if isinstance(v, str): d.append(v.strip('"')) else: - for vnodes in v['nodes']: + for vnodes in v["nodes"]: d.append(vnodes.strip('"')) for source_node in s: @@ -167,20 +150,20 @@ def from_pydot(P): # add default attributes for graph, nodes, edges pattr = P.get_attributes() if pattr: - N.graph['graph'] = pattr + N.graph["graph"] = pattr try: - N.graph['node'] = P.get_node_defaults()[0] - except: # IndexError,TypeError: + N.graph["node"] = P.get_node_defaults()[0] + except (IndexError, TypeError): pass # N.graph['node']={} try: - N.graph['edge'] = P.get_edge_defaults()[0] - except: # IndexError,TypeError: + N.graph["edge"] = P.get_edge_defaults()[0] + except (IndexError, TypeError): pass # N.graph['edge']={} return N def to_pydot(N): - """Return a pydot graph from a NetworkX graph N. + """Returns a pydot graph from a NetworkX graph N. 
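A round-trip sketch through pydot (pydot must be installed); note that
from_pydot reads node names from the dot source, so labels come back as
strings:

    import networkx as nx

    K5 = nx.complete_graph(5)
    P = nx.nx_pydot.to_pydot(K5)
    H = nx.nx_pydot.from_pydot(P)  # MultiGraph with string node labels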
Parameters ---------- @@ -196,84 +179,97 @@ def to_pydot(N): ----- """ - pydot = _import_pydot() + import pydot # set Graphviz graph type if N.is_directed(): - graph_type = 'digraph' + graph_type = "digraph" else: - graph_type = 'graph' + graph_type = "graph" strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() name = N.name - graph_defaults = N.graph.get('graph', {}) - if name is '': - P = pydot.Dot('', graph_type=graph_type, strict=strict, - **graph_defaults) + graph_defaults = N.graph.get("graph", {}) + if name == "": + P = pydot.Dot("", graph_type=graph_type, strict=strict, **graph_defaults) else: - P = pydot.Dot('"%s"' % name, graph_type=graph_type, strict=strict, - **graph_defaults) + P = pydot.Dot( + f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults + ) try: - P.set_node_defaults(**N.graph['node']) + P.set_node_defaults(**N.graph["node"]) except KeyError: pass try: - P.set_edge_defaults(**N.graph['edge']) + P.set_edge_defaults(**N.graph["edge"]) except KeyError: pass for n, nodedata in N.nodes(data=True): - str_nodedata = dict((k, make_str(v)) for k, v in nodedata.items()) - p = pydot.Node(make_str(n), **str_nodedata) + str_nodedata = {k: str(v) for k, v in nodedata.items()} + p = pydot.Node(str(n), **str_nodedata) P.add_node(p) if N.is_multigraph(): for u, v, key, edgedata in N.edges(data=True, keys=True): - str_edgedata = dict((k, make_str(v)) for k, v in edgedata.items() if k != 'key') - edge = pydot.Edge(make_str(u), make_str(v), - key=make_str(key), **str_edgedata) + str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"} + edge = pydot.Edge(str(u), str(v), key=str(key), **str_edgedata) P.add_edge(edge) else: for u, v, edgedata in N.edges(data=True): - str_edgedata = dict((k, make_str(v)) for k, v in edgedata.items()) - edge = pydot.Edge(make_str(u), make_str(v), **str_edgedata) + str_edgedata = {k: str(v) for k, v in edgedata.items()} + edge = pydot.Edge(str(u), str(v), **str_edgedata) P.add_edge(edge) return P -def graphviz_layout(G, prog='neato', root=None, **kwds): +def graphviz_layout(G, prog="neato", root=None): """Create node positions using Pydot and Graphviz. Returns a dictionary of positions keyed by node. + Parameters + ---------- + G : NetworkX Graph + The graph for which the layout is computed. + prog : string (default: 'neato') + The name of the GraphViz program to use for layout. + Options depend on GraphViz version but may include: + 'dot', 'twopi', 'fdp', 'sfdp', 'circo' + root : Node from G or None (default: None) + The node of G from which to start some layout algorithms. + + Returns + ------- + Dictionary of (x, y) positions keyed by node. + Examples -------- >>> G = nx.complete_graph(4) >>> pos = nx.nx_pydot.graphviz_layout(G) - >>> pos = nx.nx_pydot.graphviz_layout(G, prog='dot') + >>> pos = nx.nx_pydot.graphviz_layout(G, prog="dot") Notes ----- This is a wrapper for pydot_layout. """ - return pydot_layout(G=G, prog=prog, root=root, **kwds) + return pydot_layout(G=G, prog=prog, root=root) -# FIXME: Document the "root" parameter. -# FIXME: Why does this function accept a variadic dictionary of keyword arguments -# (i.e., "**kwds") but fail to do anything with those arguments? This is probably -# wrong, as unrecognized keyword arguments will be silently ignored. -def pydot_layout(G, prog='neato', root=None, **kwds): +def pydot_layout(G, prog="neato", root=None): """Create node positions using :mod:`pydot` and Graphviz. Parameters -------- G : Graph NetworkX graph to be laid out. 
- prog : optional[str] - Basename of the GraphViz command with which to layout this graph. - Defaults to `neato`, the default GraphViz command for undirected graphs. + prog : string (default: 'neato') + Name of the GraphViz command to use for layout. + Options depend on GraphViz version but may include: + 'dot', 'twopi', 'fdp', 'sfdp', 'circo' + root : Node from G or None (default: None) + The node of G from which to start some layout algorithms. Returns -------- @@ -284,27 +280,41 @@ def pydot_layout(G, prog='neato', root=None, **kwds): -------- >>> G = nx.complete_graph(4) >>> pos = nx.nx_pydot.pydot_layout(G) - >>> pos = nx.nx_pydot.pydot_layout(G, prog='dot') + >>> pos = nx.nx_pydot.pydot_layout(G, prog="dot") + + Notes + ----- + If you use complex node objects, they may have the same string + representation and GraphViz could treat them as the same node. + The layout may assign both nodes a single location. See Issue #1568 + If this occurs in your case, consider relabeling the nodes just + for the layout computation using something similar to: + + H = nx.convert_node_labels_to_integers(G, label_attribute='node_label') + H_layout = nx.nx_pydot.pydot_layout(G, prog='dot') + G_layout = {H.nodes[n]['node_label']: p for n, p in H_layout.items()} + """ - pydot = _import_pydot() + import pydot + P = to_pydot(G) if root is not None: - P.set("root", make_str(root)) + P.set("root", str(root)) # List of low-level bytes comprising a string in the dot language converted # from the passed graph with the passed external GraphViz command. D_bytes = P.create_dot(prog=prog) - # Unique string decoded from these bytes with the preferred locale encoding. - D = unicode(D_bytes, encoding=getpreferredencoding()) + # Unique string decoded from these bytes with the preferred locale encoding + D = str(D_bytes, encoding=getpreferredencoding()) if D == "": # no data returned - print("Graphviz layout with %s failed" % (prog)) + print(f"Graphviz layout with {prog} failed") print() print("To debug what happened try:") print("P = nx.nx_pydot.to_pydot(G)") - print("P.write_dot(\"file.dot\")") - print("And then run %s on file.dot" % (prog)) + print('P.write_dot("file.dot")') + print(f"And then run {prog} on file.dot") return # List of one or more "pydot.Dot" instances deserialized from this string. @@ -316,7 +326,7 @@ def pydot_layout(G, prog='neato', root=None, **kwds): node_pos = {} for n in G.nodes(): - pydot_node = pydot.Node(make_str(n)).get_name() + pydot_node = pydot.Node(str(n)).get_name() node = Q.get_node(pydot_node) if isinstance(node, list): @@ -326,43 +336,3 @@ def pydot_layout(G, prog='neato', root=None, **kwds): xx, yy = pos.split(",") node_pos[n] = (float(xx), float(yy)) return node_pos - - -def _import_pydot(): - ''' - Import and return the `pydot` module if the currently installed version of - this module satisfies NetworkX requirements _or_ raise an exception. - - Returns - -------- - :mod:`pydot` - Imported `pydot` module object. - - Raises - -------- - ImportError - If the `pydot` module is either unimportable _or_ importable but of - insufficient version. - ''' - - import pydot - - # If the currently installed version of pydot is older than this minimum, - # raise an exception. The pkg_resources.parse_version() function bundled - # with setuptools is commonly regarded to be the most robust means of - # comparing version strings. (Your mileage may vary.) 
- if parse_version(pydot.__version__) < parse_version(PYDOT_VERSION_MIN): - raise ImportError( - 'pydot %s < %s' % (pydot.__version__, PYDOT_VERSION_MIN)) - - return pydot - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - return _import_pydot() - except ImportError: - raise SkipTest("pydot not available") diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py index fe8c14d..e4ff1f9 100644 --- a/networkx/drawing/nx_pylab.py +++ b/networkx/drawing/nx_pylab.py @@ -1,11 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """ ********** Matplotlib @@ -21,24 +13,33 @@ pygraphviz: http://pygraphviz.github.io/ """ +from numbers import Number import networkx as nx -from networkx.utils import is_string_like -from networkx.drawing.layout import shell_layout, \ - circular_layout, kamada_kawai_layout, spectral_layout, \ - spring_layout, random_layout - -__all__ = ['draw', - 'draw_networkx', - 'draw_networkx_nodes', - 'draw_networkx_edges', - 'draw_networkx_labels', - 'draw_networkx_edge_labels', - 'draw_circular', - 'draw_kamada_kawai', - 'draw_random', - 'draw_spectral', - 'draw_spring', - 'draw_shell'] +from networkx.drawing.layout import ( + shell_layout, + circular_layout, + kamada_kawai_layout, + spectral_layout, + spring_layout, + random_layout, + planar_layout, +) + +__all__ = [ + "draw", + "draw_networkx", + "draw_networkx_nodes", + "draw_networkx_edges", + "draw_networkx_labels", + "draw_networkx_edge_labels", + "draw_circular", + "draw_kamada_kawai", + "draw_random", + "draw_spectral", + "draw_spring", + "draw_planar", + "draw_shell", +] def draw(G, pos=None, ax=None, **kwds): @@ -83,16 +84,13 @@ def draw(G, pos=None, ax=None, **kwds): Notes ----- This function has the same name as pylab.draw and pyplot.draw - so beware when using - - >>> from networkx import * + so beware when using `from networkx import *` since you might overwrite the pylab.draw function. With pyplot use >>> import matplotlib.pyplot as plt - >>> import networkx as nx >>> G = nx.dodecahedral_graph() >>> nx.draw(G) # networkx draw() >>> plt.draw() # pyplot draw() @@ -102,8 +100,8 @@ def draw(G, pos=None, ax=None, **kwds): """ try: import matplotlib.pyplot as plt - except ImportError: - raise ImportError("Matplotlib required for draw()") + except ImportError as e: + raise ImportError("Matplotlib required for draw()") from e except RuntimeError: print("Matplotlib unable to open display") raise @@ -112,22 +110,19 @@ def draw(G, pos=None, ax=None, **kwds): cf = plt.gcf() else: cf = ax.get_figure() - cf.set_facecolor('w') + cf.set_facecolor("w") if ax is None: if cf._axstack() is None: ax = cf.add_axes((0, 0, 1, 1)) else: ax = cf.gca() - if 'with_labels' not in kwds: - kwds['with_labels'] = 'labels' in kwds + if "with_labels" not in kwds: + kwds["with_labels"] = "labels" in kwds - try: - draw_networkx(G, pos=pos, ax=ax, **kwds) - ax.set_axis_off() - plt.draw_if_interactive() - except: - raise + draw_networkx(G, pos=pos, ax=ax, **kwds) + ax.set_axis_off() + plt.draw_if_interactive() return @@ -179,18 +174,18 @@ def draw_networkx(G, pos=None, arrows=True, with_labels=True, **kwds): Size of nodes. If an array is specified it must be the same length as nodelist. - node_color : color string, or array of floats, (default='r') - Node color. Can be a single color format string, - or a sequence of colors with the same length as nodelist. 
- If numeric values are specified they will be mapped to - colors using the cmap and vmin,vmax parameters. See + node_color : color or array of colors (default='#1f78b4') + Node color. Can be a single color or a sequence of colors with the same + length as nodelist. Color can be string, or rgb (or rgba) tuple of + floats from 0-1. If numeric values are specified they will be + mapped to colors using the cmap and vmin,vmax parameters. See matplotlib.scatter for more details. node_shape : string, optional (default='o') The shape of the node. Specification is as matplotlib.scatter marker, one of 'so^>v>> nx.draw(G, pos=nx.spring_layout(G)) # use spring layout >>> import matplotlib.pyplot as plt - >>> limits = plt.axis('off') # turn of axis + >>> limits = plt.axis("off") # turn of axis Also see the NetworkX drawing examples at https://networkx.github.io/documentation/latest/auto_examples/index.html @@ -265,36 +264,97 @@ def draw_networkx(G, pos=None, arrows=True, with_labels=True, **kwds): """ try: import matplotlib.pyplot as plt - except ImportError: - raise ImportError("Matplotlib required for draw()") + except ImportError as e: + raise ImportError("Matplotlib required for draw()") from e except RuntimeError: print("Matplotlib unable to open display") raise + valid_node_kwds = ( + "nodelist", + "node_size", + "node_color", + "node_shape", + "alpha", + "cmap", + "vmin", + "vmax", + "ax", + "linewidths", + "edgecolors", + "label", + ) + + valid_edge_kwds = ( + "edgelist", + "width", + "edge_color", + "style", + "alpha", + "arrowstyle", + "arrowsize", + "edge_cmap", + "edge_vmin", + "edge_vmax", + "ax", + "label", + "node_size", + "nodelist", + "node_shape", + "connectionstyle", + "min_source_margin", + "min_target_margin", + ) + + valid_label_kwds = ( + "labels", + "font_size", + "font_color", + "font_family", + "font_weight", + "alpha", + "bbox", + "ax", + "horizontalalignment", + "verticalalignment", + ) + + valid_kwds = valid_node_kwds + valid_edge_kwds + valid_label_kwds + + if any([k not in valid_kwds for k in kwds]): + invalid_args = ", ".join([k for k in kwds if k not in valid_kwds]) + raise ValueError(f"Received invalid argument(s): {invalid_args}") + + node_kwds = {k: v for k, v in kwds.items() if k in valid_node_kwds} + edge_kwds = {k: v for k, v in kwds.items() if k in valid_edge_kwds} + label_kwds = {k: v for k, v in kwds.items() if k in valid_label_kwds} + if pos is None: pos = nx.drawing.spring_layout(G) # default to spring layout - node_collection = draw_networkx_nodes(G, pos, **kwds) - edge_collection = draw_networkx_edges(G, pos, arrows=arrows, **kwds) + draw_networkx_nodes(G, pos, **node_kwds) + draw_networkx_edges(G, pos, arrows=arrows, **edge_kwds) if with_labels: - draw_networkx_labels(G, pos, **kwds) + draw_networkx_labels(G, pos, **label_kwds) plt.draw_if_interactive() -def draw_networkx_nodes(G, pos, - nodelist=None, - node_size=300, - node_color='r', - node_shape='o', - alpha=1.0, - cmap=None, - vmin=None, - vmax=None, - ax=None, - linewidths=None, - edgecolors=None, - label=None, - **kwds): +def draw_networkx_nodes( + G, + pos, + nodelist=None, + node_size=300, + node_color="#1f78b4", + node_shape="o", + alpha=None, + cmap=None, + vmin=None, + vmax=None, + ax=None, + linewidths=None, + edgecolors=None, + label=None, +): """Draw the nodes of the graph G. This draws only the nodes of the graph G. @@ -318,11 +378,11 @@ def draw_networkx_nodes(G, pos, Size of nodes (default=300). If an array is specified it must be the same length as nodelist. 
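A sketch of the keyword validation added to draw_networkx above: unknown
keyword arguments now raise a ValueError instead of being silently ignored
(matplotlib must be installed):

    import networkx as nx

    G = nx.path_graph(3)
    try:
        nx.draw_networkx(G, node_colour="red")  # misspelled keyword
    except ValueError as err:
        print(err)  # Received invalid argument(s): node_colour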
- node_color : color string, or array of floats - Node color. Can be a single color format string (default='r'), - or a sequence of colors with the same length as nodelist. - If numeric values are specified they will be mapped to - colors using the cmap and vmin,vmax parameters. See + node_color : color or array of colors (default='#1f78b4') + Node color. Can be a single color or a sequence of colors with the same + length as nodelist. Color can be string, or rgb (or rgba) tuple of + floats from 0-1. If numeric values are specified they will be + mapped to colors using the cmap and vmin,vmax parameters. See matplotlib.scatter for more details. node_shape : string @@ -330,7 +390,7 @@ def draw_networkx_nodes(G, pos, marker, one of 'so^>v 1: + shrink_source = shrink_target = to_marker_edge(node_size, node_shape) + + if shrink_source < min_source_margin: + shrink_source = min_source_margin + + if shrink_target < min_target_margin: + shrink_target = min_target_margin + + if len(arrow_colors) == len(edge_pos): arrow_color = arrow_colors[i] - else: + elif len(arrow_colors) == 1: arrow_color = arrow_colors[0] - if len(lw) > 1: - line_width = lw[i] + else: # Cycle through colors + arrow_color = arrow_colors[i % len(arrow_colors)] + + if np.iterable(width): + if len(width) == len(edge_pos): + line_width = width[i] + else: + line_width = width[i % len(width)] else: - line_width = lw[0] - arrow = FancyArrowPatch((x1, y1), (x2, y2), - arrowstyle=arrowstyle, - shrinkA=shrink_source, - shrinkB=shrink_target, - mutation_scale=mutation_scale, - color=arrow_color, - linewidth=line_width, - zorder=1) # arrows go behind nodes + line_width = width + + arrow = FancyArrowPatch( + (x1, y1), + (x2, y2), + arrowstyle=arrowstyle, + shrinkA=shrink_source, + shrinkB=shrink_target, + mutation_scale=mutation_scale, + color=arrow_color, + linewidth=line_width, + connectionstyle=connectionstyle, + linestyle=style, + zorder=1, + ) # arrows go behind nodes # There seems to be a bug in matplotlib to make collections of # FancyArrowPatch instances. Until fixed, the patches are added @@ -697,24 +775,37 @@ def to_marker_edge(marker_size, marker): w = maxx - minx h = maxy - miny - padx, pady = 0.05 * w, 0.05 * h + padx, pady = 0.05 * w, 0.05 * h corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady) ax.update_datalim(corners) ax.autoscale_view() + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + return arrow_collection -def draw_networkx_labels(G, pos, - labels=None, - font_size=12, - font_color='k', - font_family='sans-serif', - font_weight='normal', - alpha=1.0, - bbox=None, - ax=None, - **kwds): +def draw_networkx_labels( + G, + pos, + labels=None, + font_size=12, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, +): """Draw node labels on the graph G. Parameters @@ -728,6 +819,8 @@ def draw_networkx_labels(G, pos, labels : dictionary, optional (default=None) Node labels in a dictionary keyed by node of text labels + Node-keys in labels should appear as keys in `pos`. 
+ If needed use: `{n:lab for n,lab in labels.items() if n in pos}` font_size : int Font size for text labels (default=12) @@ -741,12 +834,19 @@ def draw_networkx_labels(G, pos, font_weight : string Font weight (default='normal') - alpha : float - The text transparency (default=1.0) + alpha : float or None + The text transparency (default=None) + + horizontalalignment : {'center', 'right', 'left'} + Horizontal alignment (default='center') + + verticalalignment : {'center', 'top', 'bottom', 'baseline', 'center_baseline'} + Vertical alignment (default='center') ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. + Returns ------- dict @@ -770,9 +870,8 @@ def draw_networkx_labels(G, pos, """ try: import matplotlib.pyplot as plt - import matplotlib.cbook as cb - except ImportError: - raise ImportError("Matplotlib required for draw()") + except ImportError as e: + raise ImportError("Matplotlib required for draw()") from e except RuntimeError: print("Matplotlib unable to open display") raise @@ -781,47 +880,58 @@ def draw_networkx_labels(G, pos, ax = plt.gca() if labels is None: - labels = dict((n, n) for n in G.nodes()) - - # set optional alignment - horizontalalignment = kwds.get('horizontalalignment', 'center') - verticalalignment = kwds.get('verticalalignment', 'center') + labels = {n: n for n in G.nodes()} text_items = {} # there is no text collection so we'll fake one for n, label in labels.items(): (x, y) = pos[n] - if not is_string_like(label): + if not isinstance(label, str): label = str(label) # this makes "1" and 1 labeled the same - t = ax.text(x, y, - label, - size=font_size, - color=font_color, - family=font_family, - weight=font_weight, - alpha=alpha, - horizontalalignment=horizontalalignment, - verticalalignment=verticalalignment, - transform=ax.transData, - bbox=bbox, - clip_on=True, - ) + t = ax.text( + x, + y, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + transform=ax.transData, + bbox=bbox, + clip_on=True, + ) text_items[n] = t + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + return text_items -def draw_networkx_edge_labels(G, pos, - edge_labels=None, - label_pos=0.5, - font_size=10, - font_color='k', - font_family='sans-serif', - font_weight='normal', - alpha=1.0, - bbox=None, - ax=None, - rotate=True, - **kwds): +def draw_networkx_edge_labels( + G, + pos, + edge_labels=None, + label_pos=0.5, + font_size=10, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, + rotate=True, +): """Draw edge labels. Parameters @@ -836,8 +946,8 @@ def draw_networkx_edge_labels(G, pos, ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. 
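Following the draw_networkx_labels guidance above, a sketch that labels only
a subset of nodes by keeping the label keys inside pos (matplotlib must be
installed):

    import networkx as nx

    G = nx.path_graph(4)
    pos = nx.spring_layout(G)
    labels = {n: f"n{n}" for n in G if n % 2 == 0}  # label even nodes only
    nx.draw_networkx_labels(G, pos, labels=labels)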
- alpha : float - The text transparency (default=1.0) + alpha : float or None + The text transparency (default=None) edge_labels : dictionary Edge labels in a dictionary keyed by edge two-tuple of text @@ -865,6 +975,15 @@ def draw_networkx_edge_labels(G, pos, clip_on : bool Turn on clipping at axis boundaries (default=True) + horizontalalignment : {'center', 'right', 'left'} + Horizontal alignment (default='center') + + verticalalignment : {'center', 'top', 'bottom', 'baseline', 'center_baseline'} + Vertical alignment (default='center') + + ax : Matplotlib Axes object, optional + Draw the graph in the specified Matplotlib axes. + Returns ------- dict @@ -889,8 +1008,8 @@ def draw_networkx_edge_labels(G, pos, try: import matplotlib.pyplot as plt import numpy as np - except ImportError: - raise ImportError("Matplotlib required for draw()") + except ImportError as e: + raise ImportError("Matplotlib required for draw()") from e except RuntimeError: print("Matplotlib unable to open display") raise @@ -905,8 +1024,10 @@ def draw_networkx_edge_labels(G, pos, for (n1, n2), label in labels.items(): (x1, y1) = pos[n1] (x2, y2) = pos[n2] - (x, y) = (x1 * label_pos + x2 * (1.0 - label_pos), - y1 * label_pos + y2 * (1.0 - label_pos)) + (x, y) = ( + x1 * label_pos + x2 * (1.0 - label_pos), + y1 * label_pos + y2 * (1.0 - label_pos), + ) if rotate: # in degrees @@ -914,44 +1035,49 @@ def draw_networkx_edge_labels(G, pos, # make label orientation "right-side-up" if angle > 90: angle -= 180 - if angle < - 90: + if angle < -90: angle += 180 # transform data coordinate angle to screen coordinate angle xy = np.array((x, y)) - trans_angle = ax.transData.transform_angles(np.array((angle,)), - xy.reshape((1, 2)))[0] + trans_angle = ax.transData.transform_angles( + np.array((angle,)), xy.reshape((1, 2)) + )[0] else: trans_angle = 0.0 # use default box of white with white border if bbox is None: - bbox = dict(boxstyle='round', - ec=(1.0, 1.0, 1.0), - fc=(1.0, 1.0, 1.0), - ) - if not is_string_like(label): + bbox = dict(boxstyle="round", ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0)) + if not isinstance(label, str): label = str(label) # this makes "1" and 1 labeled the same - # set optional alignment - horizontalalignment = kwds.get('horizontalalignment', 'center') - verticalalignment = kwds.get('verticalalignment', 'center') - - t = ax.text(x, y, - label, - size=font_size, - color=font_color, - family=font_family, - weight=font_weight, - alpha=alpha, - horizontalalignment=horizontalalignment, - verticalalignment=verticalalignment, - rotation=trans_angle, - transform=ax.transData, - bbox=bbox, - zorder=1, - clip_on=True, - ) + t = ax.text( + x, + y, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + rotation=trans_angle, + transform=ax.transData, + bbox=bbox, + zorder=1, + clip_on=True, + ) text_items[(n1, n2)] = t + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + return text_items @@ -1004,7 +1130,12 @@ def draw_random(G, **kwargs): def draw_spectral(G, **kwargs): - """Draw the graph G with a spectral layout. + """Draw the graph G with a spectral 2D layout. + + Using the unnormalized Laplacian, the layout shows possible clusters of + nodes which are an approximation of the ratio cut. 
The positions are the + entries of the second and third eigenvectors corresponding to the + ascending eigenvalues starting from the second one. Parameters ---------- @@ -1048,12 +1179,28 @@ def draw_shell(G, **kwargs): with the exception of the pos parameter which is not used by this function. """ - nlist = kwargs.get('nlist', None) + nlist = kwargs.get("nlist", None) if nlist is not None: - del(kwargs['nlist']) + del kwargs["nlist"] draw(G, shell_layout(G, nlist=nlist), **kwargs) +def draw_planar(G, **kwargs): + """Draw a planar networkx graph with planar layout. + + Parameters + ---------- + G : graph + A planar networkx graph + + kwargs : optional keywords + See networkx.draw_networkx() for a description of optional keywords, + with the exception of the pos parameter which is not used by this + function. + """ + draw(G, planar_layout(G), **kwargs) + + def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): """Apply an alpha (or list of alphas) to the colors provided. @@ -1092,19 +1239,18 @@ def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): Array containing RGBA format values for each of the node colours. """ - import numbers from itertools import islice, cycle try: import numpy as np from matplotlib.colors import colorConverter import matplotlib.cm as cm - except ImportError: - raise ImportError("Matplotlib required for draw()") + except ImportError as e: + raise ImportError("Matplotlib required for draw()") from e # If we have been provided with a list of numbers as long as elem_list, # apply the color mapping. - if len(colors) == len(elem_list) and isinstance(colors[0], numbers.Number): + if len(colors) == len(elem_list) and isinstance(colors[0], Number): mapper = cm.ScalarMappable(cmap=cmap) mapper.set_clim(vmin, vmax) rgba_colors = mapper.to_rgba(colors) @@ -1115,8 +1261,7 @@ def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): try: rgba_colors = np.array([colorConverter.to_rgba(colors)]) except ValueError: - rgba_colors = np.array([colorConverter.to_rgba(color) - for color in colors]) + rgba_colors = np.array([colorConverter.to_rgba(color) for color in colors]) # Set the final column of the rgba_colors to have the relevant alpha values try: # If alpha is longer than the number of colors, resize to the number of @@ -1124,23 +1269,11 @@ def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): # rgba_colors) is the same as the number of elements, resize the array, # to avoid it being interpreted as a colormap by scatter() if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list): - rgba_colors.resize((len(elem_list), 4)) + rgba_colors = np.resize(rgba_colors, (len(elem_list), 4)) rgba_colors[1:, 0] = rgba_colors[0, 0] rgba_colors[1:, 1] = rgba_colors[0, 1] rgba_colors[1:, 2] = rgba_colors[0, 2] - rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) + rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) except TypeError: rgba_colors[:, -1] = alpha return rgba_colors - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import matplotlib as mpl - mpl.use('PS', warn=False) - import matplotlib.pyplot as plt - except: - raise SkipTest("matplotlib not available") diff --git a/networkx/drawing/tests/__init__.py b/networkx/drawing/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/drawing/tests/test_agraph.py b/networkx/drawing/tests/test_agraph.py index e9f7926..7972d67 100644 --- 
a/networkx/drawing/tests/test_agraph.py +++ b/networkx/drawing/tests/test_agraph.py @@ -1,33 +1,28 @@ """Unit tests for PyGraphviz interface.""" import os import tempfile -from nose import SkipTest -from nose.tools import assert_true, assert_equal, assert_raises -from networkx.testing import assert_edges_equal, assert_nodes_equal +import pytest + +pygraphviz = pytest.importorskip("pygraphviz") -import networkx as nx +from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal + +import networkx as nx -class TestAGraph(object): - @classmethod - def setupClass(cls): - global pygraphviz - try: - import pygraphviz - except ImportError: - raise SkipTest('PyGraphviz not available.') +class TestAGraph: def build_graph(self, G): - edges = [('A', 'B'), ('A', 'C'), ('A', 'C'), ('B', 'C'), ('A', 'D')] + edges = [("A", "B"), ("A", "C"), ("A", "C"), ("B", "C"), ("A", "D")] G.add_edges_from(edges) - G.add_node('E') - G.graph['metal'] = 'bronze' + G.add_node("E") + G.graph["metal"] = "bronze" return G def assert_equal(self, G1, G2): assert_nodes_equal(G1.nodes(), G2.nodes()) assert_edges_equal(G1.edges(), G2.edges()) - assert_equal(G1.graph['metal'], G2.graph['metal']) + assert G1.graph["metal"] == G2.graph["metal"] def agraph_checks(self, G): G = self.build_graph(G) @@ -42,19 +37,40 @@ def agraph_checks(self, G): self.assert_equal(H, Hin) (fd, fname) = tempfile.mkstemp() - with open(fname, 'w') as fh: + with open(fname, "w") as fh: nx.drawing.nx_agraph.write_dot(H, fh) - with open(fname, 'r') as fh: + with open(fname) as fh: Hin = nx.nx_agraph.read_dot(fh) os.unlink(fname) self.assert_equal(H, Hin) def test_from_agraph_name(self): - G = nx.Graph(name='test') + G = nx.Graph(name="test") + A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A) + assert G.name == "test" + + @pytest.mark.parametrize( + "graph_class", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) + ) + def test_from_agraph_create_using(self, graph_class): + G = nx.path_graph(3) A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A, create_using=graph_class) + assert isinstance(H, graph_class) + + def test_from_agraph_named_edges(self): + # Create an AGraph from an existing (non-multi) Graph + G = nx.Graph() + G.add_nodes_from([0, 1]) + A = nx.nx_agraph.to_agraph(G) + # Add edge (+ name, given by key) to the AGraph + A.add_edge(0, 1, key="foo") + # Verify a.name roundtrips out to 'key' in from_agraph H = nx.nx_agraph.from_agraph(A) - assert_equal(G.name, 'test') + assert isinstance(H, nx.Graph) + assert ("0", "1", {"key": "foo"}) in H.edges(data=True) def test_undirected(self): self.agraph_checks(nx.Graph()) @@ -68,17 +84,68 @@ def test_multi_undirected(self): def test_multi_directed(self): self.agraph_checks(nx.MultiDiGraph()) + def test_to_agraph_with_nodedata(self): + G = nx.Graph() + G.add_node(1, color="red") + A = nx.nx_agraph.to_agraph(G) + assert dict(A.nodes()[0].attr) == {"color": "red"} + + @pytest.mark.parametrize("graph_class", (nx.Graph, nx.MultiGraph)) + def test_to_agraph_with_edgedata(self, graph_class): + G = graph_class() + G.add_nodes_from([0, 1]) + G.add_edge(0, 1, color="yellow") + A = nx.nx_agraph.to_agraph(G) + assert dict(A.edges()[0].attr) == {"color": "yellow"} + + def test_view_pygraphviz_path(self, tmp_path): + G = nx.complete_graph(3) + input_path = str(tmp_path / "graph.png") + out_path, A = nx.nx_agraph.view_pygraphviz(G, path=input_path, show=False) + assert out_path == input_path + # Ensure file is not empty + with open(input_path, "rb") as fh: + data = 
fh.read() + assert len(data) > 0 + + def test_view_pygraphviz_file_suffix(self, tmp_path): + G = nx.complete_graph(3) + path, A = nx.nx_agraph.view_pygraphviz(G, suffix=1, show=False) + assert path[-6:] == "_1.png" + def test_view_pygraphviz(self): G = nx.Graph() # "An empty graph cannot be drawn." - assert_raises(nx.NetworkXException, nx.nx_agraph.view_pygraphviz, G) + pytest.raises(nx.NetworkXException, nx.nx_agraph.view_pygraphviz, G) G = nx.barbell_graph(4, 6) - nx.nx_agraph.view_pygraphviz(G) + nx.nx_agraph.view_pygraphviz(G, show=False) - def test_view_pygraphviz_edgelable(self): + def test_view_pygraphviz_edgelabel(self): G = nx.Graph() G.add_edge(1, 2, weight=7) G.add_edge(2, 3, weight=8) - nx.nx_agraph.view_pygraphviz(G, edgelabel='weight') + path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel="weight", show=False) + for edge in A.edges(): + assert edge.attr["weight"] in ("7", "8") + + def test_view_pygraphviz_callable_edgelabel(self): + G = nx.complete_graph(3) + + def foo_label(data): + return "foo" + + path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel=foo_label, show=False) + for edge in A.edges(): + assert edge.attr["label"] == "foo" + + def test_view_pygraphviz_multigraph_edgelabels(self): + G = nx.MultiGraph() + G.add_edge(0, 1, key=0, name="left_fork") + G.add_edge(0, 1, key=1, name="right_fork") + path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel="name", show=False) + edges = A.edges() + assert len(edges) == 2 + for edge in edges: + assert edge.attr["label"].strip() in ("left_fork", "right_fork") def test_graph_with_reserved_keywords(self): # test attribute/keyword clash case for #1582 @@ -86,7 +153,97 @@ def test_graph_with_reserved_keywords(self): # edges: u,v G = nx.Graph() G = self.build_graph(G) - G.node['E']['n'] = 'keyword' - G.edges[('A', 'B')]['u'] = 'keyword' - G.edges[('A', 'B')]['v'] = 'keyword' + G.nodes["E"]["n"] = "keyword" + G.edges[("A", "B")]["u"] = "keyword" + G.edges[("A", "B")]["v"] = "keyword" + A = nx.nx_agraph.to_agraph(G) + + def test_view_pygraphviz_no_added_attrs_to_input(self): + G = nx.complete_graph(2) + path, A = nx.nx_agraph.view_pygraphviz(G, show=False) + assert G.graph == {} + + @pytest.mark.xfail(reason="known bug in clean_attrs") + def test_view_pygraphviz_leaves_input_graph_unmodified(self): + G = nx.complete_graph(2) + # Add entries to graph dict that to_agraph handles specially + G.graph["node"] = {"width": "0.80"} + G.graph["edge"] = {"fontsize": "14"} + path, A = nx.nx_agraph.view_pygraphviz(G, show=False) + assert G.graph == {"node": {"width": "0.80"}, "edge": {"fontsize": "14"}} + + def test_graph_with_AGraph_attrs(self): + G = nx.complete_graph(2) + # Add entries to graph dict that to_agraph handles specially + G.graph["node"] = {"width": "0.80"} + G.graph["edge"] = {"fontsize": "14"} + path, A = nx.nx_agraph.view_pygraphviz(G, show=False) + # Ensure user-specified values are not lost + assert dict(A.node_attr)["width"] == "0.80" + assert dict(A.edge_attr)["fontsize"] == "14" + + def test_round_trip_empty_graph(self): + G = nx.Graph() A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A) + # assert_graphs_equal(G, H) + AA = nx.nx_agraph.to_agraph(H) + HH = nx.nx_agraph.from_agraph(AA) + assert_graphs_equal(H, HH) + G.graph["graph"] = {} + G.graph["node"] = {} + G.graph["edge"] = {} + assert_graphs_equal(G, HH) + + @pytest.mark.xfail(reason="integer->string node conversion in round trip") + def test_round_trip_integer_nodes(self): + G = nx.complete_graph(3) + A = nx.nx_agraph.to_agraph(G) + H = 
nx.nx_agraph.from_agraph(A)
+        assert_graphs_equal(G, H)
+
+    def test_graphviz_alias(self):
+        G = self.build_graph(nx.Graph())
+        pos_graphviz = nx.nx_agraph.graphviz_layout(G)
+        pos_pygraphviz = nx.nx_agraph.pygraphviz_layout(G)
+        assert pos_graphviz == pos_pygraphviz
+
+    @pytest.mark.parametrize("root", range(5))
+    def test_pygraphviz_layout_root(self, root):
+        # NOTE: test depends on layout prog being deterministic
+        G = nx.complete_graph(5)
+        A = nx.nx_agraph.to_agraph(G)
+        # Get layout with root arg is not None
+        pygv_layout = nx.nx_agraph.pygraphviz_layout(G, prog="circo", root=root)
+        # Equivalent layout directly on AGraph
+        A.layout(args=f"-Groot={root}", prog="circo")
+        # Parse AGraph layout
+        a1_pos = tuple(float(v) for v in dict(A.get_node("1").attr)["pos"].split(","))
+        assert pygv_layout[1] == a1_pos
+
+    def test_2d_layout(self):
+        G = nx.Graph()
+        G = self.build_graph(G)
+        G.graph["dimen"] = 2
+        pos = nx.nx_agraph.pygraphviz_layout(G, prog="neato")
+        pos = list(pos.values())
+        assert len(pos) == 5
+        assert len(pos[0]) == 2
+
+    def test_3d_layout(self):
+        G = nx.Graph()
+        G = self.build_graph(G)
+        G.graph["dimen"] = 3
+        pos = nx.nx_agraph.pygraphviz_layout(G, prog="neato")
+        pos = list(pos.values())
+        assert len(pos) == 5
+        assert len(pos[0]) == 3
+
+    def test_display_pygraphviz_deprecation_warning(self):
+        G = nx.complete_graph(2)
+        path_name, A = nx.nx_agraph.view_pygraphviz(G, show=False)
+        # Monkeypatch default_opener to prevent window opening
+        nx.utils.default_opener = lambda x: None
+        with pytest.warns(DeprecationWarning, match="display_pygraphviz is deprecated"):
+            with open(path_name, "wb") as fh:
+                nx.nx_agraph.display_pygraphviz(A, fh, prog="dot")
diff --git a/networkx/drawing/tests/test_layout.py b/networkx/drawing/tests/test_layout.py
index 89ff918..b94d1b8 100644
--- a/networkx/drawing/tests/test_layout.py
+++ b/networkx/drawing/tests/test_layout.py
@@ -1,35 +1,43 @@
 """Unit tests for layout functions."""
-from nose import SkipTest
-from nose.tools import assert_almost_equal, assert_equal, \
-    assert_false, assert_raises
 import networkx as nx
+from networkx.testing import almost_equal
+import pytest
 
-class TestLayout(object):
-    numpy = 1  # nosetests attribute, use nosetests -a 'not numpy' to skip test
-    scipy = None
+numpy = pytest.importorskip("numpy")
+scipy = pytest.importorskip("scipy")
+
+class TestLayout:
     @classmethod
-    def setupClass(cls):
-        global numpy, scipy
-        try:
-            import numpy
-        except ImportError:
-            raise SkipTest('NumPy not available.')
-        try:
-            import scipy
-        except ImportError:
-            pass  # Almost all tests still viable
-
-    def setUp(self):
-        self.Gi = nx.grid_2d_graph(5, 5)
-        self.Gs = nx.Graph()
-        nx.add_path(self.Gs, 'abcdef')
-        self.bigG = nx.grid_2d_graph(25, 25)  # bigger than 500 nodes for sparse
+    def setup_class(cls):
+        cls.Gi = nx.grid_2d_graph(5, 5)
+        cls.Gs = nx.Graph()
+        nx.add_path(cls.Gs, "abcdef")
+        cls.bigG = nx.grid_2d_graph(25, 25)  # > 500 nodes for sparse
+
+    @staticmethod
+    def collect_node_distances(positions):
+        distances = []
+        prev_val = None
+        for k in positions:
+            if prev_val is not None:
+                diff = positions[k] - prev_val
+                distances.append(numpy.dot(diff, diff) ** 0.5)
+            prev_val = positions[k]
+        return distances
+
+    def test_spring_fixed_without_pos(self):
+        G = nx.path_graph(4)
+        pytest.raises(ValueError, nx.spring_layout, G, fixed=[0])
+        pos = {0: (1, 1), 2: (0, 0)}
+        pytest.raises(ValueError, nx.spring_layout, G, fixed=[0, 1], pos=pos)
+        nx.spring_layout(G, fixed=[0, 2], pos=pos)  # No ValueError
+
     def
test_spring_init_pos(self): # Tests GH #2448 import math + G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 0), (2, 3)]) @@ -37,44 +45,53 @@ def test_spring_init_pos(self): fixed_pos = [0] pos = nx.fruchterman_reingold_layout(G, pos=init_pos, fixed=fixed_pos) has_nan = any(math.isnan(c) for coords in pos.values() for c in coords) - assert_false(has_nan, 'values should not be nan') + assert not has_nan, "values should not be nan" def test_smoke_empty_graph(self): G = [] - vpos = nx.random_layout(G) - vpos = nx.circular_layout(G) - vpos = nx.spring_layout(G) - vpos = nx.fruchterman_reingold_layout(G) - vpos = nx.spectral_layout(G) - vpos = nx.shell_layout(G) - if self.scipy is not None: - vpos = nx.kamada_kawai_layout(G) + nx.random_layout(G) + nx.circular_layout(G) + nx.planar_layout(G) + nx.spring_layout(G) + nx.fruchterman_reingold_layout(G) + nx.spectral_layout(G) + nx.shell_layout(G) + nx.bipartite_layout(G, G) + nx.spiral_layout(G) + nx.multipartite_layout(G) + nx.kamada_kawai_layout(G) def test_smoke_int(self): G = self.Gi - vpos = nx.random_layout(G) - vpos = nx.circular_layout(G) - vpos = nx.spring_layout(G) - vpos = nx.fruchterman_reingold_layout(G) - vpos = nx.fruchterman_reingold_layout(self.bigG) - vpos = nx.spectral_layout(G) - vpos = nx.spectral_layout(G.to_directed()) - vpos = nx.spectral_layout(self.bigG) - vpos = nx.spectral_layout(self.bigG.to_directed()) - vpos = nx.shell_layout(G) - if self.scipy is not None: - vpos = nx.kamada_kawai_layout(G) + nx.random_layout(G) + nx.circular_layout(G) + nx.planar_layout(G) + nx.spring_layout(G) + nx.fruchterman_reingold_layout(G) + nx.fruchterman_reingold_layout(self.bigG) + nx.spectral_layout(G) + nx.spectral_layout(G.to_directed()) + nx.spectral_layout(self.bigG) + nx.spectral_layout(self.bigG.to_directed()) + nx.shell_layout(G) + nx.spiral_layout(G) + nx.kamada_kawai_layout(G) + nx.kamada_kawai_layout(G, dim=1) + nx.kamada_kawai_layout(G, dim=3) def test_smoke_string(self): G = self.Gs - vpos = nx.random_layout(G) - vpos = nx.circular_layout(G) - vpos = nx.spring_layout(G) - vpos = nx.fruchterman_reingold_layout(G) - vpos = nx.spectral_layout(G) - vpos = nx.shell_layout(G) - if self.scipy is not None: - vpos = nx.kamada_kawai_layout(G) + nx.random_layout(G) + nx.circular_layout(G) + nx.planar_layout(G) + nx.spring_layout(G) + nx.fruchterman_reingold_layout(G) + nx.spectral_layout(G) + nx.shell_layout(G) + nx.spiral_layout(G) + nx.kamada_kawai_layout(G) + nx.kamada_kawai_layout(G, dim=1) + nx.kamada_kawai_layout(G, dim=3) def check_scale_and_center(self, pos, scale, center): center = numpy.array(center) @@ -97,8 +114,20 @@ def test_scale_and_center_arg(self): sc(nx.spectral_layout(G, scale=2, center=c), scale=2, center=c) sc(nx.circular_layout(G, scale=2, center=c), scale=2, center=c) sc(nx.shell_layout(G, scale=2, center=c), scale=2, center=c) - if self.scipy is not None: - sc(nx.kamada_kawai_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.spiral_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.kamada_kawai_layout(G, scale=2, center=c), scale=2, center=c) + + c = (2, 3, 5) + sc(nx.kamada_kawai_layout(G, dim=3, scale=2, center=c), scale=2, center=c) + + def test_planar_layout_non_planar_input(self): + G = nx.complete_graph(9) + pytest.raises(nx.NetworkXException, nx.planar_layout, G) + + def test_smoke_planar_layout_embedding_input(self): + embedding = nx.PlanarEmbedding() + embedding.set_data({0: [1, 2], 1: [0, 2], 2: [0, 1]}) + nx.planar_layout(embedding) def test_default_scale_and_center(self): sc = 
self.check_scale_and_center @@ -110,36 +139,49 @@ def test_default_scale_and_center(self): sc(nx.spectral_layout(G), scale=1, center=c) sc(nx.circular_layout(G), scale=1, center=c) sc(nx.shell_layout(G), scale=1, center=c) - if self.scipy is not None: - sc(nx.kamada_kawai_layout(G), scale=1, center=c) + sc(nx.spiral_layout(G), scale=1, center=c) + sc(nx.kamada_kawai_layout(G), scale=1, center=c) + + c = (0, 0, 0) + sc(nx.kamada_kawai_layout(G, dim=3), scale=1, center=c) + + def test_circular_planar_and_shell_dim_error(self): + G = nx.path_graph(4) + pytest.raises(ValueError, nx.circular_layout, G, dim=1) + pytest.raises(ValueError, nx.shell_layout, G, dim=1) + pytest.raises(ValueError, nx.shell_layout, G, dim=3) + pytest.raises(ValueError, nx.planar_layout, G, dim=1) + pytest.raises(ValueError, nx.planar_layout, G, dim=3) def test_adjacency_interface_numpy(self): - A = nx.to_numpy_matrix(self.Gs) + A = nx.to_numpy_array(self.Gs) pos = nx.drawing.layout._fruchterman_reingold(A) - assert_equal(pos.shape, (6, 2)) + assert pos.shape == (6, 2) pos = nx.drawing.layout._fruchterman_reingold(A, dim=3) - assert_equal(pos.shape, (6, 3)) + assert pos.shape == (6, 3) + pos = nx.drawing.layout._sparse_fruchterman_reingold(A) + assert pos.shape == (6, 2) def test_adjacency_interface_scipy(self): - try: - import scipy - except ImportError: - raise SkipTest('scipy not available.') - A = nx.to_scipy_sparse_matrix(self.Gs, dtype='d') + A = nx.to_scipy_sparse_matrix(self.Gs, dtype="d") pos = nx.drawing.layout._sparse_fruchterman_reingold(A) - assert_equal(pos.shape, (6, 2)) + assert pos.shape == (6, 2) pos = nx.drawing.layout._sparse_spectral(A) - assert_equal(pos.shape, (6, 2)) + assert pos.shape == (6, 2) pos = nx.drawing.layout._sparse_fruchterman_reingold(A, dim=3) - assert_equal(pos.shape, (6, 3)) + assert pos.shape == (6, 3) def test_single_nodes(self): G = nx.path_graph(1) vpos = nx.shell_layout(G) - assert_false(vpos[0].any()) - G = nx.path_graph(3) - vpos = nx.shell_layout(G, [[0], [1, 2]]) - assert_false(vpos[0].any()) + assert not vpos[0].any() + G = nx.path_graph(4) + vpos = nx.shell_layout(G, [[0], [1, 2], [3]]) + assert not vpos[0].any() + assert vpos[3].any() # ensure node 3 not at origin (#3188) + assert numpy.linalg.norm(vpos[3]) <= 1 # ensure node 3 fits (#3753) + vpos = nx.shell_layout(G, [[0], [1, 2], [3]], rotate=0) + assert numpy.linalg.norm(vpos[3]) <= 1 # ensure node 3 fits (#3753) def test_smoke_initial_pos_fruchterman_reingold(self): pos = nx.circular_layout(self.Gi) @@ -148,53 +190,122 @@ def test_smoke_initial_pos_fruchterman_reingold(self): def test_fixed_node_fruchterman_reingold(self): # Dense version (numpy based) pos = nx.circular_layout(self.Gi) - npos = nx.fruchterman_reingold_layout(self.Gi, pos=pos, fixed=[(0, 0)]) - assert_equal(tuple(pos[(0, 0)]), tuple(npos[(0, 0)])) + npos = nx.spring_layout(self.Gi, pos=pos, fixed=[(0, 0)]) + assert tuple(pos[(0, 0)]) == tuple(npos[(0, 0)]) # Sparse version (scipy based) pos = nx.circular_layout(self.bigG) - npos = nx.fruchterman_reingold_layout(self.bigG, pos=pos, fixed=[(0, 0)]) + npos = nx.spring_layout(self.bigG, pos=pos, fixed=[(0, 0)]) for axis in range(2): - assert_almost_equal(pos[(0, 0)][axis], npos[(0, 0)][axis]) + assert almost_equal(pos[(0, 0)][axis], npos[(0, 0)][axis]) def test_center_parameter(self): G = nx.path_graph(1) - vpos = nx.random_layout(G, center=(1, 1)) + nx.random_layout(G, center=(1, 1)) vpos = nx.circular_layout(G, center=(1, 1)) - assert_equal(tuple(vpos[0]), (1, 1)) + assert tuple(vpos[0]) == (1, 1) + 
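The ``center`` contract exercised here is the degenerate single-node case: every layout should place the lone node exactly at ``center``. A doctest-style sketch of the same invariant (assuming NetworkX 2.5 behavior):

>>> G = nx.path_graph(1)
>>> all(tuple(layout(G, center=(1, 1))[0]) == (1, 1)
...     for layout in (nx.circular_layout, nx.spring_layout, nx.spiral_layout))
True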
vpos = nx.planar_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) vpos = nx.spring_layout(G, center=(1, 1)) - assert_equal(tuple(vpos[0]), (1, 1)) + assert tuple(vpos[0]) == (1, 1) vpos = nx.fruchterman_reingold_layout(G, center=(1, 1)) - assert_equal(tuple(vpos[0]), (1, 1)) + assert tuple(vpos[0]) == (1, 1) vpos = nx.spectral_layout(G, center=(1, 1)) - assert_equal(tuple(vpos[0]), (1, 1)) + assert tuple(vpos[0]) == (1, 1) vpos = nx.shell_layout(G, center=(1, 1)) - assert_equal(tuple(vpos[0]), (1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.spiral_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) def test_center_wrong_dimensions(self): G = nx.path_graph(1) - assert_raises(ValueError, nx.random_layout, G, center=(1, 1, 1)) - assert_raises(ValueError, nx.circular_layout, G, center=(1, 1, 1)) - assert_raises(ValueError, nx.spring_layout, G, center=(1, 1, 1)) - assert_raises(ValueError, nx.fruchterman_reingold_layout, G, center=(1, 1, 1)) - assert_raises(ValueError, nx.fruchterman_reingold_layout, G, dim=3, center=(1, 1)) - assert_raises(ValueError, nx.spectral_layout, G, center=(1, 1, 1)) - assert_raises(ValueError, nx.spectral_layout, G, dim=3, center=(1, 1)) - assert_raises(ValueError, nx.shell_layout, G, center=(1, 1, 1)) + assert id(nx.spring_layout) == id(nx.fruchterman_reingold_layout) + pytest.raises(ValueError, nx.random_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.circular_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.planar_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spring_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spring_layout, G, dim=3, center=(1, 1)) + pytest.raises(ValueError, nx.spectral_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spectral_layout, G, dim=3, center=(1, 1)) + pytest.raises(ValueError, nx.shell_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spiral_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.kamada_kawai_layout, G, center=(1, 1, 1)) def test_empty_graph(self): G = nx.empty_graph() vpos = nx.random_layout(G, center=(1, 1)) - assert_equal(vpos, {}) + assert vpos == {} vpos = nx.circular_layout(G, center=(1, 1)) - assert_equal(vpos, {}) + assert vpos == {} + vpos = nx.planar_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.bipartite_layout(G, G) + assert vpos == {} vpos = nx.spring_layout(G, center=(1, 1)) - assert_equal(vpos, {}) + assert vpos == {} vpos = nx.fruchterman_reingold_layout(G, center=(1, 1)) - assert_equal(vpos, {}) + assert vpos == {} vpos = nx.spectral_layout(G, center=(1, 1)) - assert_equal(vpos, {}) + assert vpos == {} vpos = nx.shell_layout(G, center=(1, 1)) - assert_equal(vpos, {}) + assert vpos == {} + vpos = nx.spiral_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.multipartite_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.kamada_kawai_layout(G, center=(1, 1)) + assert vpos == {} + + def test_bipartite_layout(self): + G = nx.complete_bipartite_graph(3, 5) + top, bottom = nx.bipartite.sets(G) + + vpos = nx.bipartite_layout(G, top) + assert len(vpos) == len(G) + + top_x = vpos[list(top)[0]][0] + bottom_x = vpos[list(bottom)[0]][0] + for node in top: + assert vpos[node][0] == top_x + for node in bottom: + assert vpos[node][0] == bottom_x + + vpos = nx.bipartite_layout( + G, top, align="horizontal", center=(2, 2), scale=2, aspect_ratio=1 + ) + assert len(vpos) == len(G) + + top_y = vpos[list(top)[0]][1] + bottom_y = vpos[list(bottom)[0]][1] + for node in top: + assert vpos[node][1] == top_y 
+ for node in bottom: + assert vpos[node][1] == bottom_y + + pytest.raises(ValueError, nx.bipartite_layout, G, top, align="foo") + + def test_multipartite_layout(self): + sizes = (0, 5, 7, 2, 8) + G = nx.complete_multipartite_graph(*sizes) + + vpos = nx.multipartite_layout(G) + assert len(vpos) == len(G) + + start = 0 + for n in sizes: + end = start + n + assert all(vpos[start][0] == vpos[i][0] for i in range(start + 1, end)) + start += n + + vpos = nx.multipartite_layout(G, align="horizontal", scale=2, center=(2, 2)) + assert len(vpos) == len(G) + + start = 0 + for n in sizes: + end = start + n + assert all(vpos[start][1] == vpos[i][1] for i in range(start + 1, end)) + start += n + + pytest.raises(ValueError, nx.multipartite_layout, G, align="foo") def test_kamada_kawai_costfn_1d(self): costfn = nx.drawing.layout._kamada_kawai_costfn @@ -204,30 +315,22 @@ def test_kamada_kawai_costfn_1d(self): cost, grad = costfn(pos, numpy, invdist, meanweight=0, dim=1) - assert_almost_equal(cost, ((3 / 2.0 - 1) ** 2)) - assert_almost_equal(grad[0], -0.5) - assert_almost_equal(grad[1], 0.5) + assert almost_equal(cost, ((3 / 2.0 - 1) ** 2)) + assert almost_equal(grad[0], -0.5) + assert almost_equal(grad[1], 0.5) - def test_kamada_kawai_costfn_2d(self): + def check_kamada_kawai_costfn(self, pos, invdist, meanwt, dim): costfn = nx.drawing.layout._kamada_kawai_costfn - pos = numpy.array([[1.3, -3.2], - [2.7, -0.3], - [5.1, 2.5]]) - invdist = 1 / numpy.array([[0.1, 2.1, 1.7], - [2.1, 0.2, 0.6], - [1.7, 0.6, 0.3]]) - meanwt = 0.3 - - cost, grad = costfn(pos.ravel(), numpy, invdist, - meanweight=meanwt, dim=2) + cost, grad = costfn(pos.ravel(), numpy, invdist, meanweight=meanwt, dim=dim) expected_cost = 0.5 * meanwt * numpy.sum(numpy.sum(pos, axis=0) ** 2) for i in range(pos.shape[0]): for j in range(i + 1, pos.shape[0]): - expected_cost += (numpy.linalg.norm(pos[i] - pos[j]) * invdist[i][j] - 1.0) ** 2 + diff = numpy.linalg.norm(pos[i] - pos[j]) + expected_cost += (diff * invdist[i][j] - 1.0) ** 2 - assert_almost_equal(cost, expected_cost) + assert almost_equal(cost, expected_cost) dx = 1e-4 for nd in range(pos.shape[0]): @@ -236,12 +339,68 @@ def test_kamada_kawai_costfn_2d(self): pos0 = pos.flatten() pos0[idx] += dx - cplus = costfn(pos0, numpy, invdist, - meanweight=meanwt, dim=pos.shape[1])[0] + cplus = costfn( + pos0, numpy, invdist, meanweight=meanwt, dim=pos.shape[1] + )[0] pos0[idx] -= 2 * dx - cminus = costfn(pos0, numpy, invdist, - meanweight=meanwt, dim=pos.shape[1])[0] + cminus = costfn( + pos0, numpy, invdist, meanweight=meanwt, dim=pos.shape[1] + )[0] + + assert almost_equal(grad[idx], (cplus - cminus) / (2 * dx), places=5) + + def test_kamada_kawai_costfn(self): + invdist = 1 / numpy.array([[0.1, 2.1, 1.7], [2.1, 0.2, 0.6], [1.7, 0.6, 0.3]]) + meanwt = 0.3 + + # 2d + pos = numpy.array([[1.3, -3.2], [2.7, -0.3], [5.1, 2.5]]) + + self.check_kamada_kawai_costfn(pos, invdist, meanwt, 2) + + # 3d + pos = numpy.array([[0.9, 8.6, -8.7], [-10, -0.5, -7.1], [9.1, -8.1, 1.6]]) - assert_almost_equal(grad[idx], (cplus - cminus) / (2 * dx), - places=5) + self.check_kamada_kawai_costfn(pos, invdist, meanwt, 3) + + def test_spiral_layout(self): + + G = self.Gs + + # a lower value of resolution should result in a more compact layout + # intuitively, the total distance from the start and end nodes + # via each node in between (transiting through each) will be less, + # assuming rescaling does not occur on the computed node positions + pos_standard = nx.spiral_layout(G, resolution=0.35) + pos_tighter = 
nx.spiral_layout(G, resolution=0.34) + distances = self.collect_node_distances(pos_standard) + distances_tighter = self.collect_node_distances(pos_tighter) + assert sum(distances) > sum(distances_tighter) + + # return near-equidistant points after the first value if set to true + pos_equidistant = nx.spiral_layout(G, equidistant=True) + distances_equidistant = self.collect_node_distances(pos_equidistant) + for d in range(1, len(distances_equidistant) - 1): + # test similarity to two decimal places + assert almost_equal( + distances_equidistant[d], distances_equidistant[d + 1], 2 + ) + + def test_rescale_layout_dict(self): + G = nx.empty_graph() + vpos = nx.random_layout(G, center=(1, 1)) + assert nx.rescale_layout_dict(vpos) == {} + + G = nx.empty_graph(2) + vpos = {0: (0.0, 0.0), 1: (1.0, 1.0)} + s_vpos = nx.rescale_layout_dict(vpos) + norm = numpy.linalg.norm + assert norm([sum(x) for x in zip(*s_vpos.values())]) < 1e-6 + + G = nx.empty_graph(3) + vpos = {0: (0, 0), 1: (1, 1), 2: (0.5, 0.5)} + s_vpos = nx.rescale_layout_dict(vpos) + assert s_vpos == {0: (-1, -1), 1: (1, 1), 2: (0, 0)} + s_vpos = nx.rescale_layout_dict(vpos, scale=2) + assert s_vpos == {0: (-2, -2), 1: (2, 2), 2: (0, 0)} diff --git a/networkx/drawing/tests/test_pydot.py b/networkx/drawing/tests/test_pydot.py index a02f3e6..04a2d79 100644 --- a/networkx/drawing/tests/test_pydot.py +++ b/networkx/drawing/tests/test_pydot.py @@ -1,65 +1,51 @@ """Unit tests for pydot drawing functions.""" -try: - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO -except ImportError: - from io import StringIO -import sys +from io import StringIO import tempfile -from nose.tools import assert_equal, assert_is_instance, assert_true import networkx as nx from networkx.testing import assert_graphs_equal +import pytest -class TestPydot(object): - @classmethod - def setupClass(cls): - ''' - Fixture defining the `pydot` global to be the `pydot` module if both - importable and of sufficient version _or_ skipping this test. - ''' - global pydot - pydot = nx.nx_pydot.setup_module(sys.modules[__name__]) - assert pydot is not None +pydot = pytest.importorskip("pydot") + +class TestPydot: def pydot_checks(self, G, prog): - ''' + """ Validate :mod:`pydot`-based usage of the passed NetworkX graph with the passed basename of an external GraphViz command (e.g., `dot`, `neato`). - ''' + """ # Set the name of this graph to... "G". Failing to do so will # subsequently trip an assertion expecting this name. - G.graph['name'] = 'G' + G.graph["name"] = "G" # Add arbitrary nodes and edges to the passed empty graph. - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'C'), ('A', 'D')]) - G.add_node('E') + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("A", "D")]) + G.add_node("E") # Validate layout of this graph with the passed GraphViz command. graph_layout = nx.nx_pydot.pydot_layout(G, prog=prog) - assert_is_instance(graph_layout, dict) + assert isinstance(graph_layout, dict) # Convert this graph into a "pydot.Dot" instance. P = nx.nx_pydot.to_pydot(G) # Convert this "pydot.Dot" instance back into a graph of the same type. - G2 = G.fresh_copy().__class__(nx.nx_pydot.from_pydot(P)) + G2 = G.__class__(nx.nx_pydot.from_pydot(P)) # Validate the original and resulting graphs to be the same. assert_graphs_equal(G, G2) - # Serialize this "pydot.Dot" instance to a temporary file in dot format. 
+ # Serialize this "pydot.Dot" instance to a temporary file in dot format fname = tempfile.mktemp() P.write_raw(fname) # Deserialize a list of new "pydot.Dot" instances back from this file. - Pin_list = pydot.graph_from_dot_file(path=fname, encoding='utf-8') + Pin_list = pydot.graph_from_dot_file(path=fname, encoding="utf-8") # Validate this file to contain only one graph. - assert_equal(len(Pin_list), 1) + assert len(Pin_list) == 1 # The single "pydot.Dot" instance deserialized from this file. Pin = Pin_list[0] @@ -71,36 +57,36 @@ def pydot_checks(self, G, prog): n2 = sorted([p.get_name() for p in Pin.get_node_list()]) # Validate these instances to contain the same nodes. - assert_equal(n1, n2) + assert n1 == n2 # Sorted list of all edges in the original "pydot.Dot" instance. - e1 = sorted([ - (e.get_source(), e.get_destination()) for e in P.get_edge_list()]) + e1 = sorted([(e.get_source(), e.get_destination()) for e in P.get_edge_list()]) # Sorted list of all edges in the original "pydot.Dot" instance. - e2 = sorted([ - (e.get_source(), e.get_destination()) for e in Pin.get_edge_list()]) + e2 = sorted( + [(e.get_source(), e.get_destination()) for e in Pin.get_edge_list()] + ) # Validate these instances to contain the same edges. - assert_equal(e1, e2) + assert e1 == e2 # Deserialize a new graph of the same type back from this file. Hin = nx.nx_pydot.read_dot(fname) - Hin = G.fresh_copy().__class__(Hin) + Hin = G.__class__(Hin) # Validate the original and resulting graphs to be the same. assert_graphs_equal(G, Hin) def test_undirected(self): - self.pydot_checks(nx.Graph(), prog='neato') + self.pydot_checks(nx.Graph(), prog="neato") def test_directed(self): - self.pydot_checks(nx.DiGraph(), prog='dot') + self.pydot_checks(nx.DiGraph(), prog="dot") def test_read_write(self): G = nx.MultiGraph() - G.graph['name'] = 'G' - G.add_edge('1', '2', key='0') # read assumes strings + G.graph["name"] = "G" + G.add_edge("1", "2", key="0") # read assumes strings fh = StringIO() nx.nx_pydot.write_dot(G, fh) fh.seek(0) diff --git a/networkx/drawing/tests/test_pylab.py b/networkx/drawing/tests/test_pylab.py index 5735417..00b3ddd 100644 --- a/networkx/drawing/tests/test_pylab.py +++ b/networkx/drawing/tests/test_pylab.py @@ -1,97 +1,199 @@ """Unit tests for matplotlib drawing functions.""" import os import itertools -from nose import SkipTest +import pytest + +mpl = pytest.importorskip("matplotlib") +mpl.use("PS") +plt = pytest.importorskip("matplotlib.pyplot") +plt.rcParams["text.usetex"] = False + import networkx as nx -class TestPylab(object): +class TestPylab: @classmethod - def setupClass(cls): - global plt - try: - import matplotlib as mpl - mpl.use('PS', warn=False) - import matplotlib.pyplot as plt - plt.rcParams['text.usetex'] = False - except ImportError: - raise SkipTest('matplotlib not available.') - except RuntimeError: - raise SkipTest('matplotlib not available.') - - def setUp(self): - self.G = nx.barbell_graph(4, 6) + def setup_class(cls): + cls.G = nx.barbell_graph(4, 6) def test_draw(self): try: - functions = [nx.draw_circular, - nx.draw_kamada_kawai, - nx.draw_random, - nx.draw_spectral, - nx.draw_spring, - nx.draw_shell] - options = [{ - 'node_color': 'black', - 'node_size': 100, - 'width': 3, - }] + functions = [ + nx.draw_circular, + nx.draw_kamada_kawai, + nx.draw_planar, + nx.draw_random, + nx.draw_spectral, + nx.draw_spring, + nx.draw_shell, + ] + options = [{"node_color": "black", "node_size": 100, "width": 3}] for function, option in itertools.product(functions, options): 
function(self.G, **option) - plt.savefig('test.ps') + plt.savefig("test.ps") finally: try: - os.unlink('test.ps') + os.unlink("test.ps") + except OSError: + pass + + def test_draw_shell_nlist(self): + try: + nlist = [list(range(4)), list(range(4, 10)), list(range(10, 14))] + nx.draw_shell(self.G, nlist=nlist) + plt.savefig("test.ps") + finally: + try: + os.unlink("test.ps") except OSError: pass def test_edge_colormap(self): colors = range(self.G.number_of_edges()) - nx.draw_spring(self.G, edge_color=colors, width=4, - edge_cmap=plt.cm.Blues, with_labels=True) - plt.show() + nx.draw_spring( + self.G, edge_color=colors, width=4, edge_cmap=plt.cm.Blues, with_labels=True + ) + # plt.show() def test_arrows(self): nx.draw_spring(self.G.to_directed()) - plt.show() + # plt.show() def test_edge_colors_and_widths(self): - nx.draw_random(self.G, edgelist=[(0, 1), (0, 2)], width=[1, 2], edge_colors=['r', 'b']) + pos = nx.circular_layout(self.G) + for G in (self.G, self.G.to_directed()): + nx.draw_networkx_nodes(G, pos, node_color=[(1.0, 1.0, 0.2, 0.5)]) + nx.draw_networkx_labels(G, pos) + # edge with default color and width + nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1)], width=None, edge_color=None + ) + # edges with global color strings and widths in lists + nx.draw_networkx_edges( + G, pos, edgelist=[(0, 2), (0, 3)], width=[3], edge_color=["r"] + ) + # edges with color strings and widths for each edge + nx.draw_networkx_edges( + G, pos, edgelist=[(0, 2), (0, 3)], width=[1, 3], edge_color=["r", "b"] + ) + # edges with fewer color strings and widths than edges + nx.draw_networkx_edges( + G, + pos, + edgelist=[(1, 2), (1, 3), (2, 3), (3, 4)], + width=[1, 3], + edge_color=["g", "m", "c"], + ) + # edges with more color strings and widths than edges + nx.draw_networkx_edges( + G, + pos, + edgelist=[(3, 4)], + width=[1, 2, 3, 4], + edge_color=["r", "b", "g", "k"], + ) + # with rgb tuple and 3 edges - is interpreted with cmap + nx.draw_networkx_edges( + G, pos, edgelist=[(4, 5), (5, 6), (6, 7)], edge_color=(1.0, 0.4, 0.3) + ) + # with rgb tuple in list + nx.draw_networkx_edges( + G, pos, edgelist=[(7, 8), (8, 9)], edge_color=[(0.4, 1.0, 0.0)] + ) + # with rgba tuple and 4 edges - is interpreted with cmap + nx.draw_networkx_edges( + G, + pos, + edgelist=[(9, 10), (10, 11), (10, 12), (10, 13)], + edge_color=(0.0, 1.0, 1.0, 0.5), + ) + # with rgba tuple in list + nx.draw_networkx_edges( + G, + pos, + edgelist=[(9, 10), (10, 11), (10, 12), (10, 13)], + edge_color=[(0.0, 1.0, 1.0, 0.5)], + ) + # with color string and global alpha + nx.draw_networkx_edges( + G, pos, edgelist=[(11, 12), (11, 13)], edge_color="purple", alpha=0.2 + ) + # with color string in a list + nx.draw_networkx_edges( + G, pos, edgelist=[(11, 12), (11, 13)], edge_color=["purple"] + ) + # with single edge and hex color string + nx.draw_networkx_edges(G, pos, edgelist=[(12, 13)], edge_color="#1f78b4f0") + + # edge_color as numeric using vmin, vmax + nx.draw_networkx_edges( + G, + pos, + edgelist=[(7, 8), (8, 9)], + edge_color=[0.2, 0.5], + edge_vmin=0.1, + edge_vmax=0.6, + ) + + # plt.show() def test_labels_and_colors(self): G = nx.cubical_graph() pos = nx.spring_layout(G) # positions for all nodes # nodes - nx.draw_networkx_nodes(G, pos, - nodelist=[0, 1, 2, 3], - node_color='r', - node_size=500, - alpha=0.8) - nx.draw_networkx_nodes(G, pos, - nodelist=[4, 5, 6, 7], - node_color='b', - node_size=500, - alpha=0.8) + nx.draw_networkx_nodes( + G, pos, nodelist=[0, 1, 2, 3], node_color="r", node_size=500, alpha=0.75 + ) +
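# ``alpha`` may be a scalar (one value for all nodes, as in the call above) or a
# per-node sequence (as in the next call, where [0.25, 0.5, 0.75, 1.0] fades
# nodes 4..7 progressively).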
nx.draw_networkx_nodes( + G, + pos, + nodelist=[4, 5, 6, 7], + node_color="b", + node_size=500, + alpha=[0.25, 0.5, 0.75, 1.0], + ) # edges nx.draw_networkx_edges(G, pos, width=1.0, alpha=0.5) - nx.draw_networkx_edges(G, pos, - edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], - width=8, alpha=0.5, edge_color='r') - nx.draw_networkx_edges(G, pos, - edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], - width=8, alpha=0.5, edge_color='b') + nx.draw_networkx_edges( + G, + pos, + edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], + width=8, + alpha=0.5, + edge_color="r", + ) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + width=8, + alpha=0.5, + edge_color="b", + ) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + min_source_margin=0.5, + min_target_margin=0.75, + width=8, + edge_color="b", + ) # some math labels labels = {} - labels[0] = r'$a$' - labels[1] = r'$b$' - labels[2] = r'$c$' - labels[3] = r'$d$' - labels[4] = r'$\alpha$' - labels[5] = r'$\beta$' - labels[6] = r'$\gamma$' - labels[7] = r'$\delta$' + labels[0] = r"$a$" + labels[1] = r"$b$" + labels[2] = r"$c$" + labels[3] = r"$d$" + labels[4] = r"$\alpha$" + labels[5] = r"$\beta$" + labels[6] = r"$\gamma$" + labels[7] = r"$\delta$" nx.draw_networkx_labels(G, pos, labels, font_size=16) - plt.show() + nx.draw_networkx_edge_labels(G, pos, edge_labels=None, rotate=False) + nx.draw_networkx_edge_labels(G, pos, edge_labels={(4, 5): "4-5"}) + # plt.show() def test_axes(self): fig, ax = plt.subplots() @@ -101,6 +203,34 @@ def test_empty_graph(self): G = nx.Graph() nx.draw(G) + def test_draw_empty_nodes_return_values(self): + # See Issue #3833 + from matplotlib.collections import PathCollection, LineCollection + + G = nx.Graph([(1, 2), (2, 3)]) + DG = nx.DiGraph([(1, 2), (2, 3)]) + pos = nx.circular_layout(G) + assert isinstance(nx.draw_networkx_nodes(G, pos, nodelist=[]), PathCollection) + assert isinstance(nx.draw_networkx_nodes(DG, pos, nodelist=[]), PathCollection) + + # drawing empty edges either return an empty LineCollection or empty list. + assert isinstance( + nx.draw_networkx_edges(G, pos, edgelist=[], arrows=True), LineCollection + ) + assert isinstance( + nx.draw_networkx_edges(G, pos, edgelist=[], arrows=False), LineCollection + ) + assert isinstance( + nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=False), LineCollection + ) + assert nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=True) == [] + + def test_multigraph_edgelist_tuples(self): + # See Issue #3295 + G = nx.path_graph(3, create_using=nx.MultiDiGraph) + nx.draw_networkx(G, edgelist=[(0, 1, 0)]) + nx.draw_networkx(G, edgelist=[(0, 1, 0)], node_size=[10, 20, 0]) + def test_alpha_iter(self): pos = nx.random_layout(self.G) # with fewer alpha elements than nodes @@ -116,3 +246,12 @@ def test_alpha_iter(self): alpha.append(1) plt.subplot(133) nx.draw_networkx_nodes(self.G, pos, alpha=alpha) + + def test_error_invalid_kwds(self): + with pytest.raises(ValueError, match="Received invalid argument"): + nx.draw(self.G, foo="bar") + + def test_np_edgelist(self): + # see issue #4129 + np = pytest.importorskip("numpy") + nx.draw_networkx(self.G, edgelist=np.array([(0, 2), (0, 3)])) diff --git a/networkx/exception.py b/networkx/exception.py index aea55d9..96694cc 100644 --- a/networkx/exception.py +++ b/networkx/exception.py @@ -1,16 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Authors: -# Aric Hagberg -# Pieter Swart -# Dan Schult -# Loïc Séguin-C. """ ********** Exceptions @@ -20,20 +7,20 @@ """ __all__ = [ - 'HasACycle', - 'NodeNotFound', - 'PowerIterationFailedConvergence', - 'ExceededMaxIterations', - 'AmbiguousSolution', - 'NetworkXAlgorithmError', - 'NetworkXException', - 'NetworkXError', - 'NetworkXNoCycle', - 'NetworkXNoPath', - 'NetworkXNotImplemented', - 'NetworkXPointlessConcept', - 'NetworkXUnbounded', - 'NetworkXUnfeasible', + "HasACycle", + "NodeNotFound", + "PowerIterationFailedConvergence", + "ExceededMaxIterations", + "AmbiguousSolution", + "NetworkXAlgorithmError", + "NetworkXException", + "NetworkXError", + "NetworkXNoCycle", + "NetworkXNoPath", + "NetworkXNotImplemented", + "NetworkXPointlessConcept", + "NetworkXUnbounded", + "NetworkXUnfeasible", ] @@ -132,7 +119,7 @@ class PowerIterationFailedConvergence(ExceededMaxIterations): """ def __init__(self, num_iterations, *args, **kw): - msg = 'power iteration failed to converge within {} iterations' - exception_message = msg.format(num_iterations) - superinit = super(PowerIterationFailedConvergence, self).__init__ + msg = f"power iteration failed to converge within {num_iterations} iterations" + exception_message = msg + superinit = super().__init__ superinit(self, exception_message, *args, **kw) diff --git a/networkx/generators/__init__.py b/networkx/generators/__init__.py index 4eb54ad..a9cb1e1 100644 --- a/networkx/generators/__init__.py +++ b/networkx/generators/__init__.py @@ -4,6 +4,7 @@ """ from networkx.generators.atlas import * from networkx.generators.classic import * +from networkx.generators.cographs import * from networkx.generators.community import * from networkx.generators.degree_seq import * from networkx.generators.directed import * @@ -11,7 +12,9 @@ from networkx.generators.ego import * from networkx.generators.expanders import * from networkx.generators.geometric import * +from networkx.generators.internet_as_graphs import * from networkx.generators.intersection import * +from networkx.generators.interval_graph import * from networkx.generators.joint_degree_seq import * from networkx.generators.lattice import * from networkx.generators.line import * @@ -21,6 +24,8 @@ from networkx.generators.random_graphs import * from networkx.generators.small import * from networkx.generators.social import * +from networkx.generators.sudoku import * +from networkx.generators.spectral_graph_forge import * from networkx.generators.stochastic import * from networkx.generators.trees import * from networkx.generators.triads import * diff --git a/networkx/generators/atlas.py b/networkx/generators/atlas.py index 5450077..c5104e1 100644 --- a/networkx/generators/atlas.py +++ b/networkx/generators/atlas.py @@ -1,12 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: -# Pieter Swart """ Generators for the small graph atlas. """ @@ -17,7 +8,7 @@ import networkx as nx -__all__ = ['graph_atlas', 'graph_atlas_g'] +__all__ = ["graph_atlas", "graph_atlas_g"] #: The total number of graphs in the atlas. 
#: @@ -56,11 +47,11 @@ #: #: with gzip.open('atlas.dat.gz', 'wb') as f: #: for i, G in enumerate(graph_atlas_g()): -#: f.write(bytes('GRAPH {}\n'.format(i), encoding='utf-8')) -#: f.write(bytes('NODES {}\n'.format(len(G)), encoding='utf-8')) +#: f.write(bytes(f'GRAPH {i}\n', encoding='utf-8')) +#: f.write(bytes(f'NODES {len(G)}\n', encoding='utf-8')) #: write_edgelist(G, f, data=False) #: -ATLAS_FILE = os.path.join(THIS_DIR, 'atlas.dat.gz') +ATLAS_FILE = os.path.join(THIS_DIR, "atlas.dat.gz") def _generate_graphs(): @@ -70,9 +61,9 @@ def _generate_graphs(): This function reads the file given in :data:`.ATLAS_FILE`. """ - with gzip.open(ATLAS_FILE, 'rb') as f: + with gzip.open(ATLAS_FILE, "rb") as f: line = f.readline() - while line and line.startswith(b'GRAPH'): + while line and line.startswith(b"GRAPH"): # The first two lines of each entry tell us the index of the # graph in the list and the number of nodes in the graph. # They look like this: @@ -87,11 +78,11 @@ def _generate_graphs(): # GRAPH line (or until the end of the file). edgelist = [] line = f.readline() - while line and not line.startswith(b'GRAPH'): + while line and not line.startswith(b"GRAPH"): edgelist.append(line.rstrip()) line = f.readline() G = nx.Graph() - G.name = 'G{}'.format(graph_index) + G.name = f"G{graph_index}" G.add_nodes_from(range(num_nodes)) G.add_edges_from(tuple(map(int, e.split())) for e in edgelist) yield G @@ -131,12 +122,12 @@ def graph_atlas(i): """ if not (0 <= i < NUM_GRAPHS): - raise ValueError('index must be between 0 and {}'.format(NUM_GRAPHS)) + raise ValueError(f"index must be between 0 and {NUM_GRAPHS}") return next(islice(_generate_graphs(), i, None)) def graph_atlas_g(): - """Return the list of all graphs with up to seven nodes named in the + """Returns the list of all graphs with up to seven nodes named in the Graph Atlas. The graphs are listed in increasing order by diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index 22741a3..90de3e0 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -1,12 +1,3 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Pieter Swart (swart@lanl.gov) """Generators for some classic graphs. The typical graph generator is called as follows: @@ -18,42 +9,47 @@ in this module return a Graph class (i.e. a simple, undirected graph). 
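For reference, the two atlas accessors patched above can be sanity-checked as follows (a doctest-style sketch; the counts assume the standard Atlas ordering, in which index 7 is the triangle and the list holds all 1253 graphs on up to seven nodes):

>>> G = nx.graph_atlas(7)
>>> G.number_of_nodes(), G.number_of_edges()
(3, 3)
>>> len(nx.graph_atlas_g())
1253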
""" -from __future__ import division import itertools import networkx as nx from networkx.classes import Graph from networkx.exception import NetworkXError -from networkx.utils import accumulate +from itertools import accumulate from networkx.utils import nodes_or_number from networkx.utils import pairwise -__all__ = ['balanced_tree', - 'barbell_graph', - 'complete_graph', - 'complete_multipartite_graph', - 'circular_ladder_graph', - 'circulant_graph', - 'cycle_graph', - 'dorogovtsev_goltsev_mendes_graph', - 'empty_graph', - 'full_rary_tree', - 'ladder_graph', - 'lollipop_graph', - 'null_graph', - 'path_graph', - 'star_graph', - 'trivial_graph', - 'turan_graph', - 'wheel_graph'] +__all__ = [ + "balanced_tree", + "barbell_graph", + "binomial_tree", + "complete_graph", + "complete_multipartite_graph", + "circular_ladder_graph", + "circulant_graph", + "cycle_graph", + "dorogovtsev_goltsev_mendes_graph", + "empty_graph", + "full_rary_tree", + "ladder_graph", + "lollipop_graph", + "null_graph", + "path_graph", + "star_graph", + "trivial_graph", + "turan_graph", + "wheel_graph", +] # ------------------------------------------------------------------- # Some Classic Graphs # ------------------------------------------------------------------- + def _tree_edges(n, r): + if n == 0: + return # helper function for trees # yields edges in rooted tree at 0 with n nodes and branching ratio r nodes = iter(range(n)) @@ -84,9 +80,8 @@ def full_rary_tree(r, n, create_using=None): branching factor of the tree n : int Number of nodes in the tree - create_using : Graph, optional (default None) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -104,7 +99,7 @@ def full_rary_tree(r, n, create_using=None): def balanced_tree(r, h, create_using=None): - """Return the perfectly balanced `r`-ary tree of height `h`. + """Returns the perfectly balanced `r`-ary tree of height `h`. Parameters ---------- @@ -115,9 +110,8 @@ def balanced_tree(r, h, create_using=None): h : int Height of the tree. - create_using : Graph, optional (default None) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -150,7 +144,7 @@ def balanced_tree(r, h, create_using=None): def barbell_graph(m1, m2, create_using=None): - """Return the Barbell Graph: two complete graphs connected by a path. + """Returns the Barbell Graph: two complete graphs connected by a path. For $m1 > 1$ and $m2 >= 0$. @@ -170,25 +164,24 @@ def barbell_graph(m1, m2, create_using=None): and Jim Fill's e-text on Random Walks on Graphs. 
""" - if create_using is not None and create_using.is_directed(): - raise NetworkXError("Directed Graph not supported") if m1 < 2: - raise NetworkXError( - "Invalid graph description, m1 should be >=2") + raise NetworkXError("Invalid graph description, m1 should be >=2") if m2 < 0: - raise NetworkXError( - "Invalid graph description, m2 should be >=0") + raise NetworkXError("Invalid graph description, m2 should be >=0") # left barbell G = complete_graph(m1, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") # connecting path G.add_nodes_from(range(m1, m1 + m2 - 1)) if m2 > 1: G.add_edges_from(pairwise(range(m1, m1 + m2))) # right barbell - G.add_edges_from((u, v) for u in range(m1 + m2, 2 * m1 + m2) - for v in range(u + 1, 2 * m1 + m2)) + G.add_edges_from( + (u, v) for u in range(m1 + m2, 2 * m1 + m2) for v in range(u + 1, 2 * m1 + m2) + ) # connect it up G.add_edge(m1 - 1, m1) if m2 > 0: @@ -196,6 +189,34 @@ def barbell_graph(m1, m2, create_using=None): return G +def binomial_tree(n): + """Returns the Binomial Tree of order n. + + The binomial tree of order 0 consists of a single vertex. A binomial tree of order k + is defined recursively by linking two binomial trees of order k-1: the root of one is + the leftmost child of the root of the other. + + Parameters + ---------- + n : int + Order of the binomial tree. + + Returns + ------- + G : NetworkX graph + A binomial tree of $2^n$ vertices and $2^n - 1$ edges. + + """ + G = nx.empty_graph(1) + N = 1 + for i in range(n): + edges = [(u + N, v + N) for (u, v) in G.edges] + G.add_edges_from(edges) + G.add_edge(0, N) + N *= 2 + return G + + @nodes_or_number(0) def complete_graph(n, create_using=None): """ Return the complete graph `K_n` with n nodes. @@ -205,9 +226,8 @@ def complete_graph(n, create_using=None): n : int or iterable container of nodes If n is an integer, nodes are from range(n). If n is a container of nodes, those nodes appear in the graph. - create_using : Graph, optional (default None) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Examples -------- @@ -236,7 +256,7 @@ def complete_graph(n, create_using=None): def circular_ladder_graph(n, create_using=None): - """Return the circular ladder graph $CL_n$ of length n. + """Returns the circular ladder graph $CL_n$ of length n. $CL_n$ consists of two concentric n-cycles in which each of the n pairs of concentric nodes are joined by an edge. @@ -265,9 +285,8 @@ def circulant_graph(n, offsets, create_using=None): The number of vertices the generated graph is to contain. offsets : list of integers A list of vertex offsets, $x_1$ up to $x_m$, as described above. - create_using : Graph, optional (default None) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Examples -------- @@ -278,8 +297,17 @@ def circulant_graph(n, offsets, create_using=None): >>> import networkx >>> G = networkx.generators.classic.circulant_graph(10, [1]) >>> edges = [ - ... (0, 9), (0, 1), (1, 2), (2, 3), (3, 4), - ... (4, 5), (5, 6), (6, 7), (7, 8), (8, 9)] + ... (0, 9), + ... (0, 1), + ... (1, 2), + ... (2, 3), + ... 
(3, 4), + ... (4, 5), + ... (5, 6), + ... (6, 7), + ... (7, 8), + ... (8, 9), + ... ] ... >>> sorted(edges) == sorted(G.edges()) True @@ -289,8 +317,17 @@ def circulant_graph(n, offsets, create_using=None): >>> G = networkx.generators.classic.circulant_graph(5, [1, 2]) >>> edges = [ - ... (0, 1), (0, 2), (0, 3), (0, 4), (1, 2), - ... (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + ... (0, 1), + ... (0, 2), + ... (0, 3), + ... (0, 4), + ... (1, 2), + ... (1, 3), + ... (1, 4), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... ] ... >>> sorted(edges) == sorted(G.edges()) True @@ -306,7 +343,7 @@ def circulant_graph(n, offsets, create_using=None): @nodes_or_number(0) def cycle_graph(n, create_using=None): - """Return the cycle graph $C_n$ of cyclically connected nodes. + """Returns the cycle graph $C_n$ of cyclically connected nodes. $C_n$ is a path with its two end-nodes connected. @@ -315,9 +352,8 @@ def cycle_graph(n, create_using=None): n : int or iterable container of nodes If n is an integer, nodes are from `range(n)`. If n is a container of nodes, those nodes appear in the graph. - create_using : Graph, optional (default Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- @@ -332,22 +368,22 @@ def cycle_graph(n, create_using=None): def dorogovtsev_goltsev_mendes_graph(n, create_using=None): - """Return the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph. + """Returns the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph. n is the generation. See: arXiv:/cond-mat/0112143 by Dorogovtsev, Goltsev and Mendes. """ - if create_using is not None: - if create_using.is_directed(): - raise NetworkXError("Directed Graph not supported") - if create_using.is_multigraph(): - raise NetworkXError("Multigraph not supported") G = empty_graph(0, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + if G.is_multigraph(): + raise NetworkXError("Multigraph not supported") + G.add_edge(0, 1) if n == 0: return G - new_node = 2 # next node to be added + new_node = 2 # next node to be added for i in range(1, n + 1): # iterate over number of generations. last_generation_edges = list(G.edges()) number_of_edges_in_last_generation = len(last_generation_edges) @@ -359,17 +395,25 @@ def dorogovtsev_goltsev_mendes_graph(n, create_using=None): @nodes_or_number(0) -def empty_graph(n=0, create_using=None): - """Return the empty graph with n nodes and zero edges. +def empty_graph(n=0, create_using=None, default=nx.Graph): + """Returns the empty graph with n nodes and zero edges. Parameters ---------- n : int or iterable container of nodes (default = 0) If n is an integer, nodes are from `range(n)`. If n is a container of nodes, those nodes appear in the graph. - create_using : Graph, optional (default Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : Graph Instance, Constructor or None + Indicator of type of graph to return. + If a Graph-type instance, then clear and use it. + If None, use the `default` constructor. + If a constructor, call it to create an empty graph. + default : Graph constructor (optional, default = nx.Graph) + The constructor to use if create_using is None. + If None, then nx.Graph is used. 
+ This is used when passing an unknown `create_using` value + through your home-grown function to `empty_graph` and + you want a default constructor other than nx.Graph. Examples -------- @@ -386,18 +430,20 @@ def empty_graph(n=0, create_using=None): Notes ----- - The variable create_using should point to a "graph"-like object that + The variable create_using should be a Graph Constructor or a + "graph"-like object. Constructors, e.g. `nx.Graph` or `nx.MultiGraph` + will be used to create the returned graph. "graph"-like objects will be cleared (nodes and edges will be removed) and refitted as an empty "graph" with nodes specified in n. This capability is useful for specifying the class-nature of the resulting empty "graph" (i.e. Graph, DiGraph, MyWeirdGraphClass, etc.). - The variable create_using has two main uses: + The variable create_using has three main uses: Firstly, the variable create_using can be used to create an empty digraph, multigraph, etc. For example, >>> n = 10 - >>> G = nx.empty_graph(n, create_using=nx.DiGraph()) + >>> G = nx.empty_graph(n, create_using=nx.DiGraph) will create an empty digraph on n nodes. @@ -407,15 +453,34 @@ def empty_graph(n=0, create_using=None): will empty G (i.e. delete all nodes and edges using G.clear()) and then add n nodes and zero edges, and return the modified graph. + Thirdly, when constructing your home-grown graph creation function + you can use empty_graph to construct the graph by passing a user + defined create_using to empty_graph. In this case, if you want the + default constructor to be other than nx.Graph, specify `default`. + + >>> def mygraph(n, create_using=None): + ... G = nx.empty_graph(n, create_using, nx.MultiGraph) + ... G.add_edges_from([(0, 1), (0, 1)]) + ... return G + >>> G = mygraph(3) + >>> G.is_multigraph() + True + >>> G = mygraph(3, nx.Graph) + >>> G.is_multigraph() + False + See also create_empty_copy(G). """ if create_using is None: - # default empty graph is a simple graph - G = Graph() - else: + G = default() + elif hasattr(create_using, "_adj"): + # create_using is a NetworkX style Graph + create_using.clear() G = create_using - G.clear() + else: + # try create_using as constructor + G = create_using() n_name, nodes = n G.add_nodes_from(nodes) @@ -423,7 +488,7 @@ def empty_graph(n=0, create_using=None): def ladder_graph(n, create_using=None): - """Return the Ladder graph of length n. + """Returns the Ladder graph of length n. This is two paths of n nodes, with each pair connected by a single edge. @@ -431,9 +496,9 @@ def ladder_graph(n, create_using=None): Node labels are the integers 0 to 2*n - 1. """ - if create_using is not None and create_using.is_directed(): - raise NetworkXError("Directed Graph not supported") G = empty_graph(2 * n, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") G.add_edges_from(pairwise(range(n))) G.add_edges_from(pairwise(range(n, 2 * n))) G.add_edges_from((v, v + n) for v in range(n)) @@ -442,7 +507,7 @@ def ladder_graph(n, create_using=None): @nodes_or_number([0, 1]) def lollipop_graph(m, n, create_using=None): - """Return the Lollipop Graph; `K_m` connected to `P_n`. + """Returns the Lollipop Graph; `K_m` connected to `P_n`. This is the Barbell Graph without the right barbell. 
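Concretely, $K_4$ plus a three-node tail has $6$ clique edges, $2$ tail edges and $1$ connector (doctest-style sketch):

>>> G = nx.lollipop_graph(4, 3)
>>> G.number_of_nodes(), G.number_of_edges()
(7, 9)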
@@ -454,9 +519,8 @@ def lollipop_graph(m, n, create_using=None): The nodes for m appear in the complete graph $K_m$ and the nodes for n appear in the path $P_n$ - create_using : Graph, optional (default Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- @@ -473,17 +537,15 @@ def lollipop_graph(m, n, create_using=None): N = len(n_nodes) if isinstance(m, int): n_nodes = [len(m_nodes) + i for i in n_nodes] - if create_using is not None and create_using.is_directed(): - raise NetworkXError("Directed Graph not supported") if M < 2: - raise NetworkXError( - "Invalid graph description, m should be >=2") + raise NetworkXError("Invalid graph description, m should be >=2") if N < 0: - raise NetworkXError( - "Invalid graph description, n should be >=0") + raise NetworkXError("Invalid graph description, n should be >=0") # the ball G = complete_graph(m_nodes, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") # the stick G.add_nodes_from(n_nodes) if N > 1: @@ -495,7 +557,7 @@ def lollipop_graph(m, n, create_using=None): def null_graph(create_using=None): - """Return the Null graph with no nodes or edges. + """Returns the Null graph with no nodes or edges. See empty_graph for the use of create_using. @@ -506,16 +568,15 @@ def null_graph(create_using=None): @nodes_or_number(0) def path_graph(n, create_using=None): - """Return the Path graph `P_n` of linearly connected nodes. + """Returns the Path graph `P_n` of linearly connected nodes. Parameters ---------- n : int or iterable If an integer, node labels are 0 to n with center 0. If an iterable of nodes, the center is the first. - create_using : Graph, optional (default Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. """ n_name, nodes = n @@ -535,9 +596,8 @@ def star_graph(n, create_using=None): n : int or iterable If an integer, node labels are 0 to n with center 0. If an iterable of nodes, the center is the first. - create_using : Graph, optional (default Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- @@ -607,15 +667,14 @@ def wheel_graph(n, create_using=None): n : int or iterable If an integer, node labels are 0 to n with center 0. If an iterable of nodes, the center is the first. - create_using : Graph, optional (default Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. - Node labels are the integers 0 to n - 1. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Node labels are the integers 0 to n - 1. 
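For instance, the 5-node wheel is a hub of degree $4$ joined to a 4-cycle of degree-3 rim nodes (doctest-style sketch):

>>> G = nx.wheel_graph(5)
>>> sorted(d for _, d in G.degree())
[3, 3, 3, 3, 4]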
""" n_name, nodes = n if n_name == 0: - G = empty_graph(0, create_using=create_using) + G = empty_graph(0, create_using) return G G = star_graph(nodes, create_using) if len(G) > 2: @@ -649,9 +708,8 @@ def complete_multipartite_graph(*subset_sizes): Creating a complete tripartite graph, with subsets of one, two, and three vertices, respectively. - >>> import networkx as nx >>> G = nx.complete_multipartite_graph(1, 2, 3) - >>> [G.nodes[u]['subset'] for u in G] + >>> [G.nodes[u]["subset"] for u in G] [0, 1, 1, 2, 2, 2] >>> list(G.edges(0)) [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)] @@ -660,8 +718,8 @@ def complete_multipartite_graph(*subset_sizes): >>> list(G.edges(4)) [(4, 0), (4, 1), (4, 2)] - >>> G = nx.complete_multipartite_graph('a', 'bc', 'def') - >>> [G.nodes[u]['subset'] for u in sorted(G)] + >>> G = nx.complete_multipartite_graph("a", "bc", "def") + >>> [G.nodes[u]["subset"] for u in sorted(G)] [0, 1, 1, 2, 2, 2] Notes @@ -698,8 +756,8 @@ def complete_multipartite_graph(*subset_sizes): try: for (i, subset) in enumerate(subsets): G.add_nodes_from(subset, subset=i) - except TypeError: - raise NetworkXError("Arguments must be all ints or all iterables") + except TypeError as e: + raise NetworkXError("Arguments must be all ints or all iterables") from e # Across subsets, all vertices should be adjacent. # We can use itertools.combinations() because undirected. diff --git a/networkx/generators/cographs.py b/networkx/generators/cographs.py new file mode 100644 index 0000000..e876358 --- /dev/null +++ b/networkx/generators/cographs.py @@ -0,0 +1,66 @@ +r"""Generators for cographs + +A cograph is a graph containing no path on four vertices. +Cographs or $P_4$-free graphs can be obtained from a single vertex +by disjoint union and complementation operations. + +References +---------- +.. [0] D.G. Corneil, H. Lerchs, L.Stewart Burlingham, + "Complement reducible graphs", + Discrete Applied Mathematics, Volume 3, Issue 3, 1981, Pages 163-174, + ISSN 0166-218X. +""" +import networkx as nx +from networkx.utils import py_random_state + +__all__ = ["random_cograph"] + + +@py_random_state(1) +def random_cograph(n, seed=None): + r"""Returns a random cograph with $2 ^ n$ nodes. + + A cograph is a graph containing no path on four vertices. + Cographs or $P_4$-free graphs can be obtained from a single vertex + by disjoint union and complementation operations. + + This generator starts off from a single vertex and performes disjoint + union and full join operations on itself. + The decision on which operation will take place is random. + + Parameters + ---------- + n : int + The order of the cograph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : A random graph containing no path on four vertices. + + See Also + -------- + full_join + union + + References + ---------- + .. [1] D.G. Corneil, H. Lerchs, L.Stewart Burlingham, + "Complement reducible graphs", + Discrete Applied Mathematics, Volume 3, Issue 3, 1981, Pages 163-174, + ISSN 0166-218X. 
+ """ + R = nx.empty_graph(1) + + for i in range(n): + RR = nx.relabel_nodes(R.copy(), lambda x: x + len(R)) + + if seed.randint(0, 1) == 0: + R = nx.full_join(R, RR) + else: + R = nx.disjoint_union(R, RR) + + return R diff --git a/networkx/generators/community.py b/networkx/generators/community.py index de4efa8..6c4ac04 100644 --- a/networkx/generators/community.py +++ b/networkx/generators/community.py @@ -1,22 +1,50 @@ """Generators for classes of graphs used in studying social networks.""" import itertools import math -import random import networkx as nx -# Copyright(C) 2011, 2015 by -# Ben Edwards -# Aric Hagberg -# Konstantinos Karakatsanis -# All rights reserved. -# BSD license. -__author__ = """\n""".join(['Ben Edwards (bedwards@cs.unm.edu)', - 'Aric Hagberg (hagberg@lanl.gov)', - 'Konstantinos Karakatsanis ' - '']) -__all__ = ['caveman_graph', 'connected_caveman_graph', - 'relaxed_caveman_graph', 'random_partition_graph', - 'planted_partition_graph', 'gaussian_random_partition_graph', - 'ring_of_cliques', 'windmill_graph'] +from networkx.utils import py_random_state + +# Accommodates for both SciPy and non-SciPy implementations.. +try: + from scipy.special import zeta as _zeta + + def zeta(x, q, tolerance): + return _zeta(x, q) + + +except ImportError: + + def zeta(x, q, tolerance): + """The Hurwitz zeta function, or the Riemann zeta function of two + arguments. + + ``x`` must be greater than one and ``q`` must be positive. + + This function repeatedly computes subsequent partial sums until + convergence, as decided by ``tolerance``. + """ + z = 0 + z_prev = -float("inf") + k = 0 + while abs(z - z_prev) > tolerance: + z_prev = z + z += 1 / ((k + q) ** x) + k += 1 + return z + + +__all__ = [ + "caveman_graph", + "connected_caveman_graph", + "relaxed_caveman_graph", + "random_partition_graph", + "planted_partition_graph", + "gaussian_random_partition_graph", + "ring_of_cliques", + "windmill_graph", + "stochastic_block_model", + "LFR_benchmark_graph", +] def caveman_graph(l, k): @@ -77,13 +105,18 @@ def connected_caveman_graph(l, k): l : int number of cliques k : int - size of cliques + size of cliques (k at least 2 or NetworkXError is raised) Returns ------- G : NetworkX Graph connected caveman graph + Raises + ------ + NetworkXError + If the size of cliques `k` is smaller than 2. + Notes ----- This returns an undirected graph, it can be converted to a directed @@ -101,6 +134,11 @@ def connected_caveman_graph(l, k): .. [1] Watts, D. J. 'Networks, Dynamics, and the Small-World Phenomenon.' Amer. J. Soc. 105, 493-527, 1999. """ + if k < 2: + raise nx.NetworkXError( + "The size of cliques in a connected caveman graph " "must be at least 2." + ) + G = nx.caveman_graph(l, k) for start in range(0, l * k, k): G.remove_edge(start, start + 1) @@ -108,8 +146,9 @@ def connected_caveman_graph(l, k): return G +@py_random_state(3) def relaxed_caveman_graph(l, k, p, seed=None): - """Return a relaxed caveman graph. + """Returns a relaxed caveman graph. A relaxed caveman graph starts with `l` cliques of size `k`. Edges are then randomly rewired with probability `p` to link different cliques. @@ -122,8 +161,9 @@ def relaxed_caveman_graph(l, k, p, seed=None): Size of cliques p : float Probabilty of rewiring each edge. - seed : int,optional - Seed for random number generator(default=None) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
Returns ------- @@ -132,7 +172,7 @@ def relaxed_caveman_graph(l, k, p, seed=None): Raises ------ - NetworkXError: + NetworkXError If p is not in [0,1] Examples @@ -145,13 +185,11 @@ def relaxed_caveman_graph(l, k, p, seed=None): Physics Reports Volume 486, Issues 3-5, February 2010, Pages 75-174. https://arxiv.org/abs/0906.0612 """ - if seed is not None: - random.seed(seed) G = nx.caveman_graph(l, k) nodes = list(G) for (u, v) in G.edges(): - if random.random() < p: # rewire the edge - x = random.choice(nodes) + if seed.random() < p: # rewire the edge + x = seed.choice(nodes) if G.has_edge(u, x): continue G.remove_edge(u, v) @@ -159,8 +197,9 @@ def relaxed_caveman_graph(l, k, p, seed=None): return G +@py_random_state(3) def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): - """Return the random partition graph with a partition of sizes. + """Returns the random partition graph with a partition of sizes. A partition graph is a graph of communities with sizes defined by s in sizes. Nodes in the same group are connected with probability @@ -177,8 +216,9 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): probability of edges between groups directed : boolean optional, default=False Whether to create a directed graph - seed : int optional, default None - A seed for the random number generator + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -192,10 +232,10 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): Examples -------- - >>> G = nx.random_partition_graph([10,10,10],.25,.01) + >>> G = nx.random_partition_graph([10, 10, 10], 0.25, 0.01) >>> len(G) 30 - >>> partition = G.graph['partition'] + >>> partition = G.graph["partition"] >>> len(partition) 3 @@ -213,75 +253,31 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): """ # Use geometric method for O(n+m) complexity algorithm # partition = nx.community_sets(nx.get_node_attributes(G, 'affiliation')) - if seed is not None: - random.seed(seed) if not 0.0 <= p_in <= 1.0: raise nx.NetworkXError("p_in must be in [0,1]") if not 0.0 <= p_out <= 1.0: raise nx.NetworkXError("p_out must be in [0,1]") - if directed: - G = nx.DiGraph() - else: - G = nx.Graph() - G.graph['partition'] = [] - n = sum(sizes) - G.add_nodes_from(range(n)) - # start with len(sizes) groups of gnp random graphs with parameter p_in - # graphs are unioned together with node labels starting at - # 0, sizes[0], sizes[0]+sizes[1], ... 
- next_group = {} # maps node key (int) to first node in next group - start = 0 - group = 0 - for n in sizes: - edges = ((u + start, v + start) - for u, v in - nx.fast_gnp_random_graph(n, p_in, directed=directed).edges()) - G.add_edges_from(edges) - next_group.update(dict.fromkeys(range(start, start + n), start + n)) - G.graph['partition'].append(set(range(start, start + n))) - group += 1 - start += n - # handle edge cases - if p_out == 0: - return G - if p_out == 1: - for n in next_group: - targets = range(next_group[n], len(G)) - G.add_edges_from(zip([n] * len(targets), targets)) - if directed: - G.add_edges_from(zip(targets, [n] * len(targets))) - return G - # connect each node in group randomly with the nodes not in group - # use geometric method like fast_gnp_random_graph() - lp = math.log(1.0 - p_out) - n = len(G) - if directed: - for u in range(n): - v = 0 - while v < n: - lr = math.log(1.0 - random.random()) - v += int(lr / lp) - # skip over nodes in the same group as v, including self loops - if next_group.get(v, n) == next_group[u]: - v = next_group[u] - if v < n: - G.add_edge(u, v) - v += 1 - else: - for u in range(n - 1): - v = next_group[u] # start with next node not in this group - while v < n: - lr = math.log(1.0 - random.random()) - v += int(lr / lp) - if v < n: - G.add_edge(u, v) - v += 1 - return G + # create connection matrix + num_blocks = len(sizes) + p = [[p_out for s in range(num_blocks)] for r in range(num_blocks)] + for r in range(num_blocks): + p[r][r] = p_in + return stochastic_block_model( + sizes, + p, + nodelist=None, + seed=seed, + directed=directed, + selfloops=False, + sparse=True, + ) + +@py_random_state(4) def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): - """Return the planted l-partition graph. + """Returns the planted l-partition graph. This model partitions a graph with n=l*k vertices in l groups with k vertices each. Vertices of the same @@ -298,8 +294,9 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): probability of connecting vertices within a group p_out : float probability of connected vertices between groups - seed : int,optional - Seed for random number generator(default=None) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. directed : bool,optional (default=False) If True return a directed graph @@ -310,12 +307,12 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): Raises ------ - NetworkXError: + NetworkXError If p_in,p_out are not in [0,1] or Examples -------- - >>> G = nx.planted_partition_graph(4, 3, 0.5, 0.1,seed=42) + >>> G = nx.planted_partition_graph(4, 3, 0.5, 0.1, seed=42) See Also -------- @@ -330,11 +327,11 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): .. [2] Santo Fortunato 'Community Detection in Graphs' Physical Reports Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612 """ - return random_partition_graph([k] * l, p_in, p_out, seed, directed) + return random_partition_graph([k] * l, p_in, p_out, seed=seed, directed=directed) -def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, - seed=None): +@py_random_state(6) +def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=None): """Generate a Gaussian random partition graph. 
A Gaussian random partition graph is created by creating k partitions @@ -356,8 +353,9 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, Probability of inter cluster connection. directed : boolean, optional default=False Whether to create a directed graph or not - seed : int - Seed value for random number generator + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -382,7 +380,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, Examples -------- - >>> G = nx.gaussian_random_partition_graph(100,10,10,.25,.1) + >>> G = nx.gaussian_random_partition_graph(100, 10, 10, 0.25, 0.1) >>> len(G) 100 @@ -397,7 +395,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, assigned = 0 sizes = [] while True: - size = int(random.normalvariate(s, float(s) / v + 0.5)) + size = int(seed.gauss(s, float(s) / v + 0.5)) if size < 1: # how to handle 0 or negative sizes? continue if assigned + size >= n: @@ -405,7 +403,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, break assigned += size sizes.append(size) - return random_partition_graph(sizes, p_in, p_out, directed, seed) + return random_partition_graph(sizes, p_in, p_out, seed=seed, directed=directed) def ring_of_cliques(num_cliques, clique_size): @@ -417,14 +415,14 @@ def ring_of_cliques(num_cliques, clique_size): Parameters ---------- num_cliques : int - Number of cliques + Number of cliques clique_size : int - Size of cliques + Size of cliques Returns ------- G : NetworkX Graph - ring of cliques graph + ring of cliques graph Raises ------ @@ -447,18 +445,19 @@ def ring_of_cliques(num_cliques, clique_size): simply adds the link without removing any link from the cliques. """ if num_cliques < 2: - raise nx.NetworkXError('A ring of cliques must have at least ' - 'two cliques') + raise nx.NetworkXError("A ring of cliques must have at least " "two cliques") if clique_size < 2: - raise nx.NetworkXError('The cliques must have at least two nodes') + raise nx.NetworkXError("The cliques must have at least two nodes") G = nx.Graph() for i in range(num_cliques): - edges = itertools.combinations(range(i * clique_size, i * clique_size + - clique_size), 2) + edges = itertools.combinations( + range(i * clique_size, i * clique_size + clique_size), 2 + ) G.add_edges_from(edges) - G.add_edge(i * clique_size + 1, (i + 1) * clique_size % - (num_cliques * clique_size)) + G.add_edge( + i * clique_size + 1, (i + 1) * clique_size % (num_cliques * clique_size) + ) return G @@ -474,14 +473,14 @@ def windmill_graph(n, k): Parameters ---------- n : int - Number of cliques + Number of cliques k : int - Size of cliques + Size of cliques Returns ------- G : NetworkX Graph - windmill graph with n cliques of size k + windmill graph with n cliques of size k Raises ------ @@ -500,13 +499,566 @@ def windmill_graph(n, k): are in the opposite order as the parameters of this method. 
""" if n < 2: - msg = 'A windmill graph must have at least two cliques' + msg = "A windmill graph must have at least two cliques" raise nx.NetworkXError(msg) if k < 2: - raise nx.NetworkXError('The cliques must have at least two nodes') + raise nx.NetworkXError("The cliques must have at least two nodes") - G = nx.disjoint_union_all(itertools.chain([nx.complete_graph(k)], - (nx.complete_graph(k - 1) - for _ in range(n - 1)))) + G = nx.disjoint_union_all( + itertools.chain( + [nx.complete_graph(k)], (nx.complete_graph(k - 1) for _ in range(n - 1)) + ) + ) G.add_edges_from((0, i) for i in range(k, G.number_of_nodes())) return G + + +@py_random_state(3) +def stochastic_block_model( + sizes, p, nodelist=None, seed=None, directed=False, selfloops=False, sparse=True +): + """Returns a stochastic block model graph. + + This model partitions the nodes in blocks of arbitrary sizes, and places + edges between pairs of nodes independently, with a probability that depends + on the blocks. + + Parameters + ---------- + sizes : list of ints + Sizes of blocks + p : list of list of floats + Element (r,s) gives the density of edges going from the nodes + of group r to nodes of group s. + p must match the number of groups (len(sizes) == len(p)), + and it must be symmetric if the graph is undirected. + nodelist : list, optional + The block tags are assigned according to the node identifiers + in nodelist. If nodelist is None, then the ordering is the + range [0,sum(sizes)-1]. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : boolean optional, default=False + Whether to create a directed graph or not. + selfloops : boolean optional, default=False + Whether to include self-loops or not. + sparse: boolean optional, default=True + Use the sparse heuristic to speed up the generator. + + Returns + ------- + g : NetworkX Graph or DiGraph + Stochastic block model graph of size sum(sizes) + + Raises + ------ + NetworkXError + If probabilities are not in [0,1]. + If the probability matrix is not square (directed case). + If the probability matrix is not symmetric (undirected case). + If the sizes list does not match nodelist or the probability matrix. + If nodelist contains duplicate. + + Examples + -------- + >>> sizes = [75, 75, 300] + >>> probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + >>> g = nx.stochastic_block_model(sizes, probs, seed=0) + >>> len(g) + 450 + >>> H = nx.quotient_graph(g, g.graph["partition"], relabel=True) + >>> for v in H.nodes(data=True): + ... print(round(v[1]["density"], 3)) + ... + 0.245 + 0.348 + 0.405 + >>> for v in H.edges(data=True): + ... print(round(1.0 * v[2]["weight"] / (sizes[v[0]] * sizes[v[1]]), 3)) + ... + 0.051 + 0.022 + 0.07 + + See Also + -------- + random_partition_graph + planted_partition_graph + gaussian_random_partition_graph + gnp_random_graph + + References + ---------- + .. [1] Holland, P. W., Laskey, K. B., & Leinhardt, S., + "Stochastic blockmodels: First steps", + Social networks, 5(2), 109-137, 1983. 
+ """ + # Check if dimensions match + if len(sizes) != len(p): + raise nx.NetworkXException("'sizes' and 'p' do not match.") + # Check for probability symmetry (undirected) and shape (directed) + for row in p: + if len(p) != len(row): + raise nx.NetworkXException("'p' must be a square matrix.") + if not directed: + p_transpose = [list(i) for i in zip(*p)] + for i in zip(p, p_transpose): + for j in zip(i[0], i[1]): + if abs(j[0] - j[1]) > 1e-08: + raise nx.NetworkXException("'p' must be symmetric.") + # Check for probability range + for row in p: + for prob in row: + if prob < 0 or prob > 1: + raise nx.NetworkXException("Entries of 'p' not in [0,1].") + # Check for nodelist consistency + if nodelist is not None: + if len(nodelist) != sum(sizes): + raise nx.NetworkXException("'nodelist' and 'sizes' do not match.") + if len(nodelist) != len(set(nodelist)): + raise nx.NetworkXException("nodelist contains duplicate.") + else: + nodelist = range(0, sum(sizes)) + + # Setup the graph conditionally to the directed switch. + block_range = range(len(sizes)) + if directed: + g = nx.DiGraph() + block_iter = itertools.product(block_range, block_range) + else: + g = nx.Graph() + block_iter = itertools.combinations_with_replacement(block_range, 2) + # Split nodelist in a partition (list of sets). + size_cumsum = [sum(sizes[0:x]) for x in range(0, len(sizes) + 1)] + g.graph["partition"] = [ + set(nodelist[size_cumsum[x] : size_cumsum[x + 1]]) + for x in range(0, len(size_cumsum) - 1) + ] + # Setup nodes and graph name + for block_id, nodes in enumerate(g.graph["partition"]): + for node in nodes: + g.add_node(node, block=block_id) + + g.name = "stochastic_block_model" + + # Test for edge existence + parts = g.graph["partition"] + for i, j in block_iter: + if i == j: + if directed: + if selfloops: + edges = itertools.product(parts[i], parts[i]) + else: + edges = itertools.permutations(parts[i], 2) + else: + edges = itertools.combinations(parts[i], 2) + if selfloops: + edges = itertools.chain(edges, zip(parts[i], parts[i])) + for e in edges: + if seed.random() < p[i][j]: + g.add_edge(*e) + else: + edges = itertools.product(parts[i], parts[j]) + if sparse: + if p[i][j] == 1: # Test edges cases p_ij = 0 or 1 + for e in edges: + g.add_edge(*e) + elif p[i][j] > 0: + while True: + try: + logrand = math.log(seed.random()) + skip = math.floor(logrand / math.log(1 - p[i][j])) + # consume "skip" edges + next(itertools.islice(edges, skip, skip), None) + e = next(edges) + g.add_edge(*e) # __safe + except StopIteration: + break + else: + for e in edges: + if seed.random() < p[i][j]: + g.add_edge(*e) # __safe + return g + + +def _zipf_rv_below(gamma, xmin, threshold, seed): + """Returns a random value chosen from the bounded Zipf distribution. + + Repeatedly draws values from the Zipf distribution until the + threshold is met, then returns that value. + """ + result = nx.utils.zipf_rv(gamma, xmin, seed) + while result > threshold: + result = nx.utils.zipf_rv(gamma, xmin, seed) + return result + + +def _powerlaw_sequence(gamma, low, high, condition, length, max_iters, seed): + """Returns a list of numbers obeying a constrained power law distribution. + + ``gamma`` and ``low`` are the parameters for the Zipf distribution. + + ``high`` is the maximum allowed value for values draw from the Zipf + distribution. For more information, see :func:`_zipf_rv_below`. + + ``condition`` and ``length`` are Boolean-valued functions on + lists. 
While generating the list, random values are drawn and + appended to the list until ``length`` is satisfied by the created + list. Once ``condition`` is satisfied, the sequence generated in + this way is returned. + + ``max_iters`` indicates the number of times to generate a list + satisfying ``length``. If the number of iterations exceeds this + value, :exc:`~networkx.exception.ExceededMaxIterations` is raised. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + """ + for i in range(max_iters): + seq = [] + while not length(seq): + seq.append(_zipf_rv_below(gamma, low, high, seed)) + if condition(seq): + return seq + raise nx.ExceededMaxIterations("Could not create power law sequence") + + +# TODO Needs documentation. +def _generate_min_degree(gamma, average_degree, max_degree, tolerance, max_iters): + """Returns a minimum degree from the given average degree.""" + min_deg_top = max_degree + min_deg_bot = 1 + min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot + itrs = 0 + mid_avg_deg = 0 + while abs(mid_avg_deg - average_degree) > tolerance: + if itrs > max_iters: + raise nx.ExceededMaxIterations("Could not match average_degree") + mid_avg_deg = 0 + for x in range(int(min_deg_mid), max_degree + 1): + mid_avg_deg += (x ** (-gamma + 1)) / zeta(gamma, min_deg_mid, tolerance) + if mid_avg_deg > average_degree: + min_deg_top = min_deg_mid + min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot + else: + min_deg_bot = min_deg_mid + min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot + itrs += 1 + # return int(min_deg_mid + 0.5) + return round(min_deg_mid) + + +def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed): + """Returns a list of sets, each of which represents a community. + + ``degree_seq`` is the degree sequence that must be met by the + graph. + + ``community_sizes`` is the community size distribution that must be + met by the generated list of sets. + + ``mu`` is a float in the interval [0, 1] indicating the fraction of + intra-community edges incident to each node. + + ``max_iters`` is the number of times to try to add a node to a + community. This must be greater than the length of + ``degree_seq``, otherwise this function will always fail. If + the number of iterations exceeds this value, + :exc:`~networkx.exception.ExceededMaxIterations` is raised. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + The communities returned by this are sets of integers in the set {0, + ..., *n* - 1}, where *n* is the length of ``degree_seq``. + + """ + # This assumes the nodes in the graph will be natural numbers. + result = [set() for _ in community_sizes] + n = len(degree_seq) + free = list(range(n)) + for i in range(max_iters): + v = free.pop() + c = seed.choice(range(len(community_sizes))) + # s = int(degree_seq[v] * (1 - mu) + 0.5) + s = round(degree_seq[v] * (1 - mu)) + # If the community is large enough, add the node to the chosen + # community. Otherwise, return it to the list of unaffiliated + # nodes. + if s < community_sizes[c]: + result[c].add(v) + else: + free.append(v) + # If the community is too big, remove a node from it. 
+ if len(result[c]) > community_sizes[c]: + free.append(result[c].pop()) + if not free: + return result + msg = "Could not assign communities; try increasing min_community" + raise nx.ExceededMaxIterations(msg) + + +@py_random_state(11) +def LFR_benchmark_graph( + n, + tau1, + tau2, + mu, + average_degree=None, + min_degree=None, + max_degree=None, + min_community=None, + max_community=None, + tol=1.0e-7, + max_iters=500, + seed=None, +): + r"""Returns the LFR benchmark graph. + + This algorithm proceeds as follows: + + 1) Find a degree sequence with a power law distribution, and minimum + value ``min_degree``, which has approximate average degree + ``average_degree``. This is accomplished by either + + a) specifying ``min_degree`` and not ``average_degree``, + b) specifying ``average_degree`` and not ``min_degree``, in which + case a suitable minimum degree will be found. + + ``max_degree`` can also be specified, otherwise it will be set to + ``n``. Each node *u* will have `\mu \mathrm{deg}(u)` edges + joining it to nodes in communities other than its own and `(1 - + \mu) \mathrm{deg}(u)` edges joining it to nodes in its own + community. + 2) Generate community sizes according to a power law distribution + with exponent ``tau2``. If ``min_community`` and + ``max_community`` are not specified they will be selected to be + ``min_degree`` and ``max_degree``, respectively. Community sizes + are generated until the sum of their sizes equals ``n``. + 3) Each node will be randomly assigned a community with the + condition that the community is large enough for the node's + intra-community degree, `(1 - \mu) \mathrm{deg}(u)` as + described in step 2. If a community grows too large, a random node + will be selected for reassignment to a new community, until all + nodes have been assigned a community. + 4) Each node *u* then adds `(1 - \mu) \mathrm{deg}(u)` + intra-community edges and `\mu \mathrm{deg}(u)` inter-community + edges. + + Parameters + ---------- + n : int + Number of nodes in the created graph. + + tau1 : float + Power law exponent for the degree distribution of the created + graph. This value must be strictly greater than one. + + tau2 : float + Power law exponent for the community size distribution in the + created graph. This value must be strictly greater than one. + + mu : float + Fraction of intra-community edges incident to each node. This + value must be in the interval [0, 1]. + + average_degree : float + Desired average degree of nodes in the created graph. This value + must be in the interval [0, *n*]. Exactly one of this and + ``min_degree`` must be specified, otherwise a + :exc:`NetworkXError` is raised. + + min_degree : int + Minimum degree of nodes in the created graph. This value must be + in the interval [0, *n*]. Exactly one of this and + ``average_degree`` must be specified, otherwise a + :exc:`NetworkXError` is raised. + + max_degree : int + Maximum degree of nodes in the created graph. If not specified, + this is set to ``n``, the total number of nodes in the graph. + + min_community : int + Minimum size of communities in the graph. If not specified, this + is set to ``min_degree``. + + max_community : int + Maximum size of communities in the graph. If not specified, this + is set to ``n``, the total number of nodes in the graph. + + tol : float + Tolerance when comparing floats, specifically when comparing + average degree values. 
+ + max_iters : int + Maximum number of iterations to try to create the community sizes, + degree distribution, and community affiliations. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : NetworkX graph + The LFR benchmark graph generated according to the specified + parameters. + + Each node in the graph has a node attribute ``'community'`` that + stores the community (that is, the set of nodes) that includes + it. + + Raises + ------ + NetworkXError + If any of the parameters do not meet their upper and lower bounds: + + - ``tau1`` and ``tau2`` must be strictly greater than 1. + - ``mu`` must be in [0, 1]. + - ``max_degree`` must be in {1, ..., *n*}. + - ``min_community`` and ``max_community`` must be in {0, ..., + *n*}. + + If not exactly one of ``average_degree`` and ``min_degree`` is + specified. + + If ``min_degree`` is not specified and a suitable ``min_degree`` + cannot be found. + + ExceededMaxIterations + If a valid degree sequence cannot be created within + ``max_iters`` number of iterations. + + If a valid set of community sizes cannot be created within + ``max_iters`` number of iterations. + + If a valid community assignment cannot be created within ``10 * + n * max_iters`` number of iterations. + + Examples + -------- + Basic usage:: + + >>> from networkx.generators.community import LFR_benchmark_graph + >>> n = 250 + >>> tau1 = 3 + >>> tau2 = 1.5 + >>> mu = 0.1 + >>> G = LFR_benchmark_graph( + ... n, tau1, tau2, mu, average_degree=5, min_community=20, seed=10 + ... ) + + Continuing the example above, you can get the communities from the + node attributes of the graph:: + + >>> communities = {frozenset(G.nodes[v]["community"]) for v in G} + + Notes + ----- + This algorithm differs slightly from the original way it was + presented in [1]. + + 1) Rather than connecting the graph via a configuration model then + rewiring to match the intra-community and inter-community + degrees, we do this wiring explicitly at the end, which should be + equivalent. + 2) The code posted on the author's website [2] calculates the random + power law distributed variables and their average using + continuous approximations, whereas we use the discrete + distributions here as both degree and community size are + discrete. + + Though the authors describe the algorithm as quite robust, testing + during development indicates that a somewhat narrower parameter set + is likely to successfully produce a graph. Some suggestions have + been provided in the event of exceptions. + + References + ---------- + .. [1] "Benchmark graphs for testing community detection algorithms", + Andrea Lancichinetti, Santo Fortunato, and Filippo Radicchi, + Phys. Rev. E 78, 046110 2008 + .. [2] http://santo.fortunato.googlepages.com/inthepress2 + + """ + # Perform some basic parameter validation. + if not tau1 > 1: + raise nx.NetworkXError("tau1 must be greater than one") + if not tau2 > 1: + raise nx.NetworkXError("tau2 must be greater than one") + if not 0 <= mu <= 1: + raise nx.NetworkXError("mu must be in the interval [0, 1]") + + # Validate parameters for generating the degree sequence. 
+ if max_degree is None: + max_degree = n + elif not 0 < max_degree <= n: + raise nx.NetworkXError("max_degree must be in the interval (0, n]") + if not ((min_degree is None) ^ (average_degree is None)): + raise nx.NetworkXError( + "Must assign exactly one of min_degree and" " average_degree" + ) + if min_degree is None: + min_degree = _generate_min_degree( + tau1, average_degree, max_degree, tol, max_iters + ) + + # Generate a degree sequence with a power law distribution. + low, high = min_degree, max_degree + + def condition(seq): + return sum(seq) % 2 == 0 + + def length(seq): + return len(seq) >= n + + deg_seq = _powerlaw_sequence(tau1, low, high, condition, length, max_iters, seed) + + # Validate parameters for generating the community size sequence. + if min_community is None: + min_community = min(deg_seq) + if max_community is None: + max_community = max(deg_seq) + + # Generate a community size sequence with a power law distribution. + # + # TODO The original code incremented the number of iterations each + # time a new Zipf random value was drawn from the distribution. This + # differed from the way the number of iterations was incremented in + # `_powerlaw_degree_sequence`, so this code was changed to match + # that one. As a result, this code is allowed many more chances to + # generate a valid community size sequence. + low, high = min_community, max_community + + def condition(seq): + return sum(seq) == n + + def length(seq): + return sum(seq) >= n + + comms = _powerlaw_sequence(tau2, low, high, condition, length, max_iters, seed) + + # Generate the communities based on the given degree sequence and + # community sizes. + max_iters *= 10 * n + communities = _generate_communities(deg_seq, comms, mu, max_iters, seed) + + # Finally, generate the benchmark graph based on the given + # communities, joining nodes according to the intra- and + # inter-community degrees. + G = nx.Graph() + G.add_nodes_from(range(n)) + for c in communities: + for u in c: + while G.degree(u) < round(deg_seq[u] * (1 - mu)): + v = seed.choice(list(c)) + G.add_edge(u, v) + while G.degree(u) < deg_seq[u]: + v = seed.choice(range(n)) + if v not in c: + G.add_edge(u, v) + G.nodes[u]["community"] = c + return G diff --git a/networkx/generators/degree_seq.py b/networkx/generators/degree_seq.py index df57ac1..968498f 100644 --- a/networkx/generators/degree_seq.py +++ b/networkx/generators/degree_seq.py @@ -1,43 +1,25 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (aric.hagberg@gmail.com) -# Pieter Swart (swart@lanl.gov) -# Dan Schult (dschult@colgate.edu) -# Joel Miller (joel.c.miller.research@gmail.com) -# Nathan Lemons (nlemons@gmail.com) -# Brian Cloteaux (brian.cloteaux@nist.gov) """Generate graphs with a given degree sequence or expected degree sequence. """ -from __future__ import division import heapq from itertools import chain from itertools import combinations -# In Python 3, the function is `zip_longest`, in Python 2 `izip_longest`. 
-try: - from itertools import zip_longest -except ImportError: - from itertools import izip_longest as zip_longest +from itertools import zip_longest import math from operator import itemgetter -import random import networkx as nx -from networkx.utils import random_weighted_sample - -__all__ = ['configuration_model', - 'directed_configuration_model', - 'expected_degree_graph', - 'havel_hakimi_graph', - 'directed_havel_hakimi_graph', - 'degree_sequence_tree', - 'random_degree_sequence_graph'] +from networkx.utils import random_weighted_sample, py_random_state + +__all__ = [ + "configuration_model", + "directed_configuration_model", + "expected_degree_graph", + "havel_hakimi_graph", + "directed_havel_hakimi_graph", + "degree_sequence_tree", + "random_degree_sequence_graph", +] chaini = chain.from_iterable @@ -74,16 +56,16 @@ def _to_stublist(degree_sequence): return list(chaini([n] * d for n, d in enumerate(degree_sequence))) -def _configuration_model(deg_sequence, create_using, directed=False, - in_deg_sequence=None, seed=None): +def _configuration_model( + deg_sequence, create_using, directed=False, in_deg_sequence=None, seed=None +): """Helper function for generating either undirected or directed configuration model graphs. ``deg_sequence`` is a list of nonnegative integers representing the degree of the node whose label is the index of the list element. - ``create_using`` is a NetworkX graph instance. For more information - on this keyword argument, see :func:`~networkx.empty_graph`. + ``create_using`` is described in :func:`~networkx.empty_graph`. ``directed`` and ``in_deg_sequence`` are required if you want the returned graph to be generated using the directed configuration @@ -99,7 +81,7 @@ def _configuration_model(deg_sequence, create_using, directed=False, ``deg_sequence`` and ``in_deg_sequence`` need not be the same length. - ``seed`` is the seed for the random number generator. + ``seed`` is a random.Random or numpy.random.RandomState instance. This function returns a graph, directed if and only if ``directed`` is ``True``, generated according to the configuration model @@ -108,16 +90,14 @@ functions. """ - if seed is not None: - random.seed(seed) n = len(deg_sequence) - G = nx.empty_graph(n, create_using=create_using) + G = nx.empty_graph(n, create_using) # If empty, return the null graph immediately. if n == 0: return G # Build a list of available degree-repeated nodes. For example, # for degree sequence [3, 2, 1, 1, 1], the "stub list" is - # initially [1, 1, 1, 2, 2, 3, 4, 5], that is, node 1 has degree + # initially [0, 0, 0, 1, 1, 2, 3, 4], that is, node 0 has degree # 3 and thus is repeated 3 times, etc. # # Also, shuffle the stub list in order to get a random sequence of @@ -130,8 +110,8 @@ def _configuration_model(deg_sequence, create_using, directed=False, out_stublist = _to_stublist(out_deg) in_stublist = _to_stublist(in_deg) - random.shuffle(out_stublist) - random.shuffle(in_stublist) + seed.shuffle(out_stublist) + seed.shuffle(in_stublist) else: stublist = _to_stublist(deg_sequence) # Choose a random balanced bipartition of the stublist, which # corresponds to a random pairing of nodes. In this implementation, we # shuffle the list and then split it in half.
n = len(stublist) half = n // 2 - random.shuffle(stublist) + seed.shuffle(stublist) out_stublist, in_stublist = stublist[:half], stublist[half:] G.add_edges_from(zip(out_stublist, in_stublist)) return G +@py_random_state(2) def configuration_model(deg_sequence, create_using=None, seed=None): - """Return a random graph with the given degree sequence. + """Returns a random graph with the given degree sequence. The configuration model generates a random pseudograph (graph with parallel edges and self loops) by randomly assigning edges to @@ -156,10 +137,11 @@ def configuration_model(deg_sequence, create_using=None, seed=None): ---------- deg_sequence : list of nonnegative integers Each list entry corresponds to the degree of a node. - create_using : graph, optional (default MultiGraph) - Return graph of this type. The instance will be cleared. - seed : hashable object, optional - Seed for random number generator. + create_using : NetworkX graph constructor, optional (default MultiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -233,23 +215,23 @@ def configuration_model(deg_sequence, create_using=None, seed=None): """ if sum(deg_sequence) % 2 != 0: - msg = 'Invalid degree sequence: sum of degrees must be even, not odd' + msg = "Invalid degree sequence: sum of degrees must be even, not odd" raise nx.NetworkXError(msg) - if create_using is None: - create_using = nx.MultiGraph() - elif create_using.is_directed(): - raise nx.NetworkXNotImplemented('not implemented for directed graphs') + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXNotImplemented("not implemented for directed graphs") - G = _configuration_model(deg_sequence, create_using, seed=seed) + G = _configuration_model(deg_sequence, G, seed=seed) return G -def directed_configuration_model(in_degree_sequence, - out_degree_sequence, - create_using=None, seed=None): - """Return a directed_random graph with the given degree sequences. +@py_random_state(3) +def directed_configuration_model( + in_degree_sequence, out_degree_sequence, create_using=None, seed=None +): + """Returns a directed random graph with the given degree sequences. The configuration model generates a random directed pseudograph (graph with parallel edges and self loops) by randomly assigning @@ -261,10 +243,11 @@ def directed_configuration_model(in_degree_sequence, Each list entry corresponds to the in-degree of a node. out_degree_sequence : list of nonnegative integers Each list entry corresponds to the out-degree of a node. - create_using : graph, optional (default MultiDiGraph) - Return graph of this type. The instance will be cleared. - seed : hashable object, optional - Seed for random number generator. + create_using : NetworkX graph constructor, optional (default MultiDiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`.
Returns ------- @@ -328,21 +311,27 @@ def directed_configuration_model(in_degree_sequence, """ if sum(in_degree_sequence) != sum(out_degree_sequence): - msg = 'Invalid degree sequences: sequences must have equal sums' + msg = "Invalid degree sequences: sequences must have equal sums" raise nx.NetworkXError(msg) if create_using is None: - create_using = nx.MultiDiGraph() + create_using = nx.MultiDiGraph - G = _configuration_model(out_degree_sequence, create_using, directed=True, - in_deg_sequence=in_degree_sequence, seed=seed) + G = _configuration_model( + out_degree_sequence, + create_using, + directed=True, + in_deg_sequence=in_degree_sequence, + seed=seed, + ) name = "directed configuration_model {} nodes {} edges" return G +@py_random_state(1) def expected_degree_graph(w, seed=None, selfloops=True): - r"""Return a random graph with given expected degrees. + r"""Returns a random graph with given expected degrees. Given a sequence of expected degrees $W=(w_0,w_1,\ldots,w_{n-1})$ of length $n$ this algorithm assigns an edge between node $u$ and @@ -358,8 +347,9 @@ def expected_degree_graph(w, seed=None, selfloops=True): The list of expected degrees. selfloops: bool (default=True) Set to False to remove the possibility of self-loop edges. - seed : hashable object, optional - The seed for the random number generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -367,8 +357,8 @@ def expected_degree_graph(w, seed=None, selfloops=True): Examples -------- - >>> z=[10 for i in range(100)] - >>> G=nx.expected_degree_graph(z) + >>> z = [10 for i in range(100)] + >>> G = nx.expected_degree_graph(z) Notes ----- @@ -419,8 +409,6 @@ def expected_degree_graph(w, seed=None, selfloops=True): if n == 0 or max(w) == 0: return G - if seed is not None: - random.seed(seed) rho = 1 / sum(w) # Sort the weights in decreasing order. The original order of the # weights dictates the order of the (integer) node labels, so we @@ -439,11 +427,11 @@ def expected_degree_graph(w, seed=None, selfloops=True): p = min(seq[v] * factor, 1) while v < n and p > 0: if p != 1: - r = random.random() + r = seed.random() v += int(math.floor(math.log(r, 1 - p))) if v < n: q = min(seq[v] * factor, 1) - if random.random() < q / p: + if seed.random() < q / p: G.add_edge(mapping[u], mapping[v]) v += 1 p = q @@ -451,15 +439,15 @@ def expected_degree_graph(w, seed=None, selfloops=True): def havel_hakimi_graph(deg_sequence, create_using=None): - """Return a simple graph with given degree sequence constructed + """Returns a simple graph with given degree sequence constructed using the Havel-Hakimi algorithm. Parameters ---------- deg_sequence: list of integers Each integer corresponds to the degree of a node (need not be sorted). - create_using : graph, optional (default Graph) - Return graph of this type. The instance will be cleared. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Directed graphs are not allowed. Raises @@ -490,12 +478,12 @@ def havel_hakimi_graph(deg_sequence, create_using=None): and Factors Discrete Mathematics, 6(1), pp. 
79-88 (1973) """ if not nx.is_graphical(deg_sequence): - raise nx.NetworkXError('Invalid degree sequence') - if create_using is not None and create_using.is_directed(): - raise nx.NetworkXError("Directed graphs are not supported") + raise nx.NetworkXError("Invalid degree sequence") p = len(deg_sequence) G = nx.empty_graph(p, create_using) + if G.is_directed(): + raise nx.NetworkXError("Directed graphs are not supported") num_degs = [[] for i in range(p)] dmax, dsum, n = 0, 0, 0 for d in deg_sequence: @@ -516,7 +504,7 @@ def havel_hakimi_graph(deg_sequence, create_using=None): # If there are not enough stubs to connect to, then the sequence is # not graphical if dmax > n - 1: - raise nx.NetworkXError('Non-graphical integer sequence') + raise nx.NetworkXError("Non-graphical integer sequence") # Remove largest stub in list source = num_degs[dmax].pop() @@ -542,19 +530,17 @@ def havel_hakimi_graph(deg_sequence, create_using=None): return G -def directed_havel_hakimi_graph(in_deg_sequence, - out_deg_sequence, - create_using=None): - """Return a directed graph with the given degree sequences. +def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): + """Returns a directed graph with the given degree sequences. Parameters ---------- in_deg_sequence : list of integers - Each list entry corresponds to the in-degree of a node. + Each list entry corresponds to the in-degree of a node. out_deg_sequence : list of integers - Each list entry corresponds to the out-degree of a node. - create_using : graph, optional (default DiGraph) - Return graph of this type. The instance will be cleared. + Each list entry corresponds to the out-degree of a node. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -582,18 +568,15 @@ def directed_havel_hakimi_graph(in_deg_sequence, Algorithms for Constructing Graphs and Digraphs with Given Valences and Factors Discrete Mathematics, 6(1), pp. 79-88 (1973) """ - assert(nx.utils.is_list_of_ints(in_deg_sequence)) - assert(nx.utils.is_list_of_ints(out_deg_sequence)) - - if create_using is None: - create_using = nx.DiGraph() + in_deg_sequence = nx.utils.make_list_of_ints(in_deg_sequence) + out_deg_sequence = nx.utils.make_list_of_ints(out_deg_sequence) # Process the sequences and form two heaps to store degree pairs with # either zero or nonzero out degrees sumin, sumout = 0, 0 nin, nout = len(in_deg_sequence), len(out_deg_sequence) maxn = max(nin, nout) - G = nx.empty_graph(maxn, create_using) + G = nx.empty_graph(maxn, create_using, default=nx.DiGraph) if maxn == 0: return G maxin = 0 @@ -606,7 +589,8 @@ def directed_havel_hakimi_graph(in_deg_sequence, in_deg = in_deg_sequence[n] if in_deg < 0 or out_deg < 0: raise nx.NetworkXError( - 'Invalid degree sequences. Sequence values must be positive.') + "Invalid degree sequences. Sequence values must be positive." + ) sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg) if in_deg > 0: stubheap.append((-1 * out_deg, -1 * in_deg, n)) @@ -614,7 +598,8 @@ def directed_havel_hakimi_graph(in_deg_sequence, zeroheap.append((-1 * out_deg, n)) if sumin != sumout: raise nx.NetworkXError( - 'Invalid degree sequences. Sequences must have equal sums.') + "Invalid degree sequences. Sequences must have equal sums." 
+ ) heapq.heapify(stubheap) heapq.heapify(zeroheap) @@ -625,7 +610,7 @@ def directed_havel_hakimi_graph(in_deg_sequence, (freeout, freein, target) = heapq.heappop(stubheap) freein *= -1 if freein > len(stubheap) + len(zeroheap): - raise nx.NetworkXError('Non-digraphical integer sequence') + raise nx.NetworkXError("Non-digraphical integer sequence") # Attach arcs from the nodes with the most stubs mslen = 0 @@ -636,7 +621,7 @@ def directed_havel_hakimi_graph(in_deg_sequence, else: (stubout, stubin, stubsource) = heapq.heappop(stubheap) if stubout == 0: - raise nx.NetworkXError('Non-digraphical integer sequence') + raise nx.NetworkXError("Non-digraphical integer sequence") G.add_edge(stubsource, target) # Check if source is now totally connected if stubout + 1 < 0 or stubin < 0: @@ -666,13 +651,16 @@ def degree_sequence_tree(deg_sequence, create_using=None): # The sum of the degree sequence must be even (for any undirected graph). degree_sum = sum(deg_sequence) if degree_sum % 2 != 0: - msg = 'Invalid degree sequence: sum of degrees must be even, not odd' + msg = "Invalid degree sequence: sum of degrees must be even, not odd" raise nx.NetworkXError(msg) if len(deg_sequence) - degree_sum // 2 != 1: - msg = ('Invalid degree sequence: tree must have number of nodes equal' - ' to one less than the number of edges') + msg = ( + "Invalid degree sequence: tree must have number of nodes equal" + " to one less than the number of edges" + ) raise nx.NetworkXError(msg) - if create_using is not None and create_using.is_directed(): + G = nx.empty_graph(0, create_using) + if G.is_directed(): raise nx.NetworkXError("Directed Graph not supported") # Sort all degrees greater than 1 in decreasing order. @@ -682,7 +670,7 @@ def degree_sequence_tree(deg_sequence, create_using=None): # make path graph as backbone n = len(deg) + 2 - G = nx.path_graph(n, create_using) + nx.add_path(G, range(n)) last = n # add the leaves @@ -698,8 +686,9 @@ def degree_sequence_tree(deg_sequence, create_using=None): return G +@py_random_state(1) def random_degree_sequence_graph(sequence, seed=None, tries=10): - r"""Return a simple random graph with the given degree sequence. + r"""Returns a simple random graph with the given degree sequence. If the maximum degree $d_m$ in the sequence is $O(m^{1/4})$ then the algorithm produces almost uniform random graphs in $O(m d_m)$ time @@ -709,8 +698,9 @@ def random_degree_sequence_graph(sequence, seed=None, tries=10): ---------- sequence : list of integers Sequence of degrees - seed : hashable object, optional - Seed for random number generator + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
tries : int, optional Maximum number of tries to create a graph @@ -746,27 +736,26 @@ def random_degree_sequence_graph(sequence, seed=None, tries=10): Examples -------- >>> sequence = [1, 2, 2, 3] - >>> G = nx.random_degree_sequence_graph(sequence) + >>> G = nx.random_degree_sequence_graph(sequence, seed=42) >>> sorted(d for n, d in G.degree()) [1, 2, 2, 3] """ - DSRG = DegreeSequenceRandomGraph(sequence, seed=seed) + DSRG = DegreeSequenceRandomGraph(sequence, seed) for try_n in range(tries): try: return DSRG.generate() except nx.NetworkXUnfeasible: pass - raise nx.NetworkXError('failed to generate graph in %d tries' % tries) + raise nx.NetworkXError(f"failed to generate graph in {tries} tries") -class DegreeSequenceRandomGraph(object): +class DegreeSequenceRandomGraph: # class to generate random graphs with a given degree sequence # use random_degree_sequence_graph() - def __init__(self, degree, seed=None): + def __init__(self, degree, rng): if not nx.is_graphical(degree): - raise nx.NetworkXUnfeasible('degree sequence is not graphical') - if seed is not None: - random.seed(seed) + raise nx.NetworkXUnfeasible("degree sequence is not graphical") + self.rng = rng self.degree = list(degree) # node labels are integers 0,...,n-1 self.m = sum(self.degree) / 2.0 # number of edges @@ -816,7 +805,7 @@ def p(self, u, v): def q(self, u, v): # remaining degree probability - norm = float(max(self.remaining_degree.values()))**2 + norm = float(max(self.remaining_degree.values())) ** 2 return self.remaining_degree[u] * self.remaining_degree[v] / norm def suitable_edge(self): @@ -830,25 +819,27 @@ def suitable_edge(self): def phase1(self): # choose node pairs from (degree) weighted distribution - while sum(self.remaining_degree.values()) >= 2 * self.dmax**2: - u, v = sorted(random_weighted_sample(self.remaining_degree, 2)) + rem_deg = self.remaining_degree + while sum(rem_deg.values()) >= 2 * self.dmax ** 2: + u, v = sorted(random_weighted_sample(rem_deg, 2, self.rng)) if self.graph.has_edge(u, v): continue - if random.random() < self.p(u, v): # accept edge + if self.rng.random() < self.p(u, v): # accept edge self.graph.add_edge(u, v) self.update_remaining(u, v) def phase2(self): # choose remaining nodes uniformly at random and use rejection sampling - while len(self.remaining_degree) >= 2 * self.dmax: - norm = float(max(self.remaining_degree.values()))**2 + remaining_deg = self.remaining_degree + rng = self.rng + while len(remaining_deg) >= 2 * self.dmax: while True: - u, v = sorted(random.sample(self.remaining_degree.keys(), 2)) + u, v = sorted(rng.sample(remaining_deg.keys(), 2)) if self.graph.has_edge(u, v): continue - if random.random() < self.q(u, v): + if rng.random() < self.q(u, v): break - if random.random() < self.p(u, v): # accept edge + if rng.random() < self.p(u, v): # accept edge self.graph.add_edge(u, v) self.update_remaining(u, v) @@ -856,15 +847,17 @@ def phase3(self): # build potential remaining edges and choose with rejection sampling potential_edges = combinations(self.remaining_degree, 2) # build auxiliary graph of potential edges not already in graph - H = nx.Graph([(u, v) for (u, v) in potential_edges - if not self.graph.has_edge(u, v)]) + H = nx.Graph( + [(u, v) for (u, v) in potential_edges if not self.graph.has_edge(u, v)] + ) + rng = self.rng while self.remaining_degree: if not self.suitable_edge(): - raise nx.NetworkXUnfeasible('no suitable edges left') + raise nx.NetworkXUnfeasible("no suitable edges left") while True: - u, v = sorted(random.choice(list(H.edges()))) - if 
random.random() < self.q(u, v): + u, v = sorted(rng.choice(list(H.edges()))) + if rng.random() < self.q(u, v): break - if random.random() < self.p(u, v): # accept edge + if rng.random() < self.p(u, v): # accept edge self.graph.add_edge(u, v) self.update_remaining(u, v, aux_graph=H) diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py index 3b0784e..0e6009b 100644 --- a/networkx/generators/directed.py +++ b/networkx/generators/directed.py @@ -1,35 +1,29 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Copyright (C) 2009 by Willem Ligtenberg -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Willem Ligtenberg (W.P.A.Ligtenberg@tue.nl) """ Generators for some directed graphs, including growing network (GN) graphs and scale-free graphs. """ -from __future__ import division from collections import Counter -import random import networkx as nx from networkx.generators.classic import empty_graph from networkx.utils import discrete_sequence from networkx.utils import weighted_choice +from networkx.utils import py_random_state -__all__ = ['gn_graph', 'gnc_graph', 'gnr_graph', 'random_k_out_graph', - 'scale_free_graph'] +__all__ = [ + "gn_graph", + "gnc_graph", + "gnr_graph", + "random_k_out_graph", + "scale_free_graph", +] +@py_random_state(3) def gn_graph(n, kernel=None, create_using=None, seed=None): - """Return the growing network (GN) digraph with `n` nodes. + """Returns the growing network (GN) digraph with `n` nodes. The GN graph is built by adding nodes one at a time with a link to one previously added node. The target node for the link is chosen with @@ -44,10 +38,11 @@ def gn_graph(n, kernel=None, create_using=None, seed=None): The number of nodes for the generated graph. kernel : function The attachment kernel. - create_using : graph, optional (default DiGraph) - Return graph of this type. The instance will be cleared. - seed : hashable object, optional - The seed for the random number generator. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Examples -------- @@ -67,18 +62,14 @@ def gn_graph(n, kernel=None, create_using=None, seed=None): Organization of Growing Random Networks, Phys. Rev. E, 63, 066123, 2001. 
""" - if create_using is None: - create_using = nx.DiGraph() - elif not create_using.is_directed(): - raise nx.NetworkXError("Directed Graph required in create_using") + G = empty_graph(1, create_using, default=nx.DiGraph) + if not G.is_directed(): + raise nx.NetworkXError("create_using must indicate a Directed Graph") if kernel is None: - def kernel(x): return x - - if seed is not None: - random.seed(seed) - G = empty_graph(1, create_using) + def kernel(x): + return x if n == 1: return G @@ -90,15 +81,16 @@ def kernel(x): return x # compute distribution from kernel and degree dist = [kernel(d) for d in ds] # choose target from discrete distribution - target = discrete_sequence(1, distribution=dist)[0] + target = discrete_sequence(1, distribution=dist, seed=seed)[0] G.add_edge(source, target) ds.append(1) # the source has only one link (degree one) ds[target] += 1 # add one to the target link degree return G +@py_random_state(3) def gnr_graph(n, p, create_using=None, seed=None): - """Return the growing network with redirection (GNR) digraph with `n` + """Returns the growing network with redirection (GNR) digraph with `n` nodes and redirection probability `p`. The GNR graph is built by adding nodes one at a time with a link to one @@ -114,10 +106,11 @@ def gnr_graph(n, p, create_using=None, seed=None): The number of nodes for the generated graph. p : float The redirection probability. - create_using : graph, optional (default DiGraph) - Return graph of this type. The instance will be cleared. - seed : hashable object, optional - The seed for the random number generator. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Examples -------- @@ -133,30 +126,24 @@ def gnr_graph(n, p, create_using=None, seed=None): Organization of Growing Random Networks, Phys. Rev. E, 63, 066123, 2001. """ - if create_using is None: - create_using = nx.DiGraph() - elif not create_using.is_directed(): - raise nx.NetworkXError("Directed Graph required in create_using") - - if seed is not None: - random.seed(seed) - - G = empty_graph(1, create_using) + G = empty_graph(1, create_using, default=nx.DiGraph) + if not G.is_directed(): + raise nx.NetworkXError("create_using must indicate a Directed Graph") if n == 1: return G for source in range(1, n): - target = random.randrange(0, source) - if random.random() < p and target != 0: + target = seed.randrange(0, source) + if seed.random() < p and target != 0: target = next(G.successors(target)) G.add_edge(source, target) - return G +@py_random_state(2) def gnc_graph(n, create_using=None, seed=None): - """Return the growing network with copying (GNC) digraph with `n` nodes. + """Returns the growing network with copying (GNC) digraph with `n` nodes. The GNC graph is built by adding nodes one at a time with a link to one previously added node (chosen uniformly at random) and to all of that @@ -166,10 +153,11 @@ def gnc_graph(n, create_using=None, seed=None): ---------- n : int The number of nodes for the generated graph. - create_using : graph, optional (default DiGraph) - Return graph of this type. The instance will be cleared. - seed : hashable object, optional - The seed for the random number generator. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. 
+ seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. References ---------- @@ -177,30 +165,32 @@ Network Growth by Copying, Phys. Rev. E, 71, 036118, 2005. """ - if create_using is None: - create_using = nx.DiGraph() - elif not create_using.is_directed(): - raise nx.NetworkXError("Directed Graph required in create_using") - - if seed is not None: - random.seed(seed) - - G = empty_graph(1, create_using) + G = empty_graph(1, create_using, default=nx.DiGraph) + if not G.is_directed(): + raise nx.NetworkXError("create_using must indicate a Directed Graph") if n == 1: return G for source in range(1, n): - target = random.randrange(0, source) + target = seed.randrange(0, source) for succ in G.successors(target): G.add_edge(source, succ) G.add_edge(source, target) - return G -def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, - delta_out=0, create_using=None, seed=None): +@py_random_state(7) +def scale_free_graph( + n, + alpha=0.41, + beta=0.54, + gamma=0.05, + delta_in=0.2, + delta_out=0, + create_using=None, + seed=None, +): """Returns a scale-free directed graph. Parameters @@ -219,13 +209,16 @@ def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, Probability for adding a new node connected to an existing node chosen randomly according to the out-degree distribution. delta_in : float - Bias for choosing ndoes from in-degree distribution. + Bias for choosing nodes from in-degree distribution. delta_out : float - Bias for choosing ndoes from out-degree distribution. - create_using : graph, optional (default MultiDiGraph) - Use this graph instance to start the process (default=3-cycle). - seed : integer, optional - Seed for random number generator + Bias for choosing nodes from out-degree distribution. + create_using : NetworkX graph constructor, optional + The default is a MultiDiGraph 3-cycle. + If a graph instance, use it without clearing first. + If a graph constructor, call it to construct an empty graph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Examples -------- @@ -248,40 +241,37 @@ def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, def _choose_node(G, distribution, delta, psum): cumsum = 0.0 # normalization - r = random.random() + r = seed.random() for n, d in distribution: cumsum += (d + delta) / psum if r < cumsum: break return n - if create_using is None: + if create_using is None or not hasattr(create_using, "_adj"): # start with 3-cycle - G = nx.MultiDiGraph() + G = nx.empty_graph(3, create_using, default=nx.MultiDiGraph) G.add_edges_from([(0, 1), (1, 2), (2, 0)]) else: - # keep existing graph structure?
G = create_using - if not (G.is_directed() and G.is_multigraph()): - raise nx.NetworkXError("MultiDiGraph required in create_using") + if not (G.is_directed() and G.is_multigraph()): + raise nx.NetworkXError("MultiDiGraph required in create_using") if alpha <= 0: - raise ValueError('alpha must be >= 0.') + raise ValueError("alpha must be > 0.") if beta <= 0: - raise ValueError('beta must be >= 0.') + raise ValueError("beta must be > 0.") if gamma <= 0: - raise ValueError('beta must be >= 0.') + raise ValueError("gamma must be > 0.") - if alpha + beta + gamma != 1.0: - raise ValueError('alpha+beta+gamma must equal 1.') + if abs(alpha + beta + gamma - 1.0) >= 1e-9: + raise ValueError("alpha+beta+gamma must equal 1.") - # seed random number generated (uses None as default) - random.seed(seed) number_of_edges = G.number_of_edges() while len(G) < n: psum_in = number_of_edges + delta_in * len(G) psum_out = number_of_edges + delta_out * len(G) - r = random.random() + r = seed.random() # random choice in alpha,beta,gamma ranges if r < alpha: # alpha @@ -303,12 +293,11 @@ def _choose_node(G, distribution, delta, psum): w = len(G) G.add_edge(v, w) number_of_edges += 1 - return G -def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, - seed=None): +@py_random_state(4) +def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, seed=None): """Returns a random `k`-out graph with uniform attachment. A random `k`-out graph with uniform attachment is a multidigraph @@ -333,9 +322,9 @@ def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, neighbors are chosen without replacement and the returned graph will be a directed graph. - seed: int - If provided, this is used as the seed for the random number - generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -364,15 +353,13 @@ def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, set to positive infinity. """ - random.seed(seed) - if with_replacement: create_using = nx.MultiDiGraph() def sample(v, nodes): if not self_loops: nodes = nodes - {v} - return (random.choice(list(nodes)) for i in range(k)) + return (seed.choice(list(nodes)) for i in range(k)) else: create_using = nx.DiGraph() @@ -380,15 +367,16 @@ def sample(v, nodes): def sample(v, nodes): if not self_loops: nodes = nodes - {v} - return random.sample(nodes, k) + return seed.sample(nodes, k) - G = nx.empty_graph(n, create_using=create_using) + G = nx.empty_graph(n, create_using) nodes = set(G) for u in G: G.add_edges_from((u, v) for v in sample(u, nodes)) return G +@py_random_state(4) def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): """Returns a random `k`-out graph with preferential attachment. @@ -427,9 +415,9 @@ def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): self_loops : bool If True, self-loops are allowed when generating the graph. - seed: int - If provided, this is used as the seed for the random number - generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
Returns ------- @@ -457,19 +445,18 @@ def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): """ if alpha < 0: - raise ValueError('alpha must be positive') - random.seed(seed) - G = nx.empty_graph(n, create_using=nx.MultiDiGraph()) + raise ValueError("alpha must be positive") + G = nx.empty_graph(n, create_using=nx.MultiDiGraph) weights = Counter({v: alpha for v in G}) for i in range(k * n): - u = random.choice([v for v, d in G.out_degree() if d < k]) + u = seed.choice([v for v, d in G.out_degree() if d < k]) # If self-loops are not allowed, make the source node `u` have # weight zero. if not self_loops: adjustment = Counter({u: weights[u]}) else: adjustment = Counter() - v = weighted_choice(weights - adjustment) + v = weighted_choice(weights - adjustment, seed=seed) G.add_edge(u, v) weights[v] += 1 return G diff --git a/networkx/generators/duplication.py b/networkx/generators/duplication.py index e02a03c..3bc8cbf 100644 --- a/networkx/generators/duplication.py +++ b/networkx/generators/duplication.py @@ -1,15 +1,3 @@ -# duplication.py - functions for generating graphs by duplicating nodes -# -# Copyright 2016-2018 NetworkX developers. -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions for generating graphs based on the "duplication" method. These graph generators start with a small initial graph then duplicate nodes and (partially) duplicate their edges. These graph generators are generally inspired by biological networks. """ -import random - import networkx as nx +from networkx.utils import py_random_state from networkx.exception import NetworkXError -__all__ = ['partial_duplication_graph', 'duplication_divergence_graph'] +__all__ = ["partial_duplication_graph", "duplication_divergence_graph"] +@py_random_state(4) def partial_duplication_graph(N, n, p, q, seed=None): - """Return a random graph using the partial duplication model. + """Returns a random graph using the partial duplication model. Parameters ---------- @@ -45,8 +33,9 @@ def partial_duplication_graph(N, n, p, q, seed=None): The probability of joining the source node to the duplicate node. Must be a number between zero and one, inclusive. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Notes ----- @@ -76,29 +65,28 @@ def partial_duplication_graph(N, n, p, q, seed=None): raise NetworkXError(msg) if n > N: raise NetworkXError("partial duplication graph must have n <= N.") - if seed is not None: - random.seed(seed) G = nx.complete_graph(n) for new_node in range(n, N): - # Add a new vertex, v, to the graph. - G.add_node(new_node) - # Pick a random vertex, u, already in the graph. - src_node = random.randint(0, new_node) + src_node = seed.randint(0, new_node - 1) - # Join v and u with probability q. - if random.random() < q: - G.add_edge(new_node, src_node) + # Add a new vertex, v, to the graph. + G.add_node(new_node) # For each neighbor of u... for neighbor_node in list(nx.all_neighbors(G, src_node)): # Add the neighbor to v with probability p. - if random.random() < p: + if seed.random() < p: G.add_edge(new_node, neighbor_node) + + # Join v and u with probability q.
+ if seed.random() < q: + G.add_edge(new_node, src_node) return G +@py_random_state(2) def duplication_divergence_graph(n, p, seed=None): """Returns an undirected graph using the duplication-divergence model. @@ -112,8 +100,9 @@ def duplication_divergence_graph(n, p, seed=None): The desired number of nodes in the graph. p : float The probability for retaining the edge of the replicated node. - seed : int, optional - A seed for the random number generator of :mod:`random` (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -140,13 +129,11 @@ def duplication_divergence_graph(n, p, seed=None): """ if p > 1 or p < 0: - msg = "NetworkXError p={0} is not in [0,1].".format(p) + msg = f"NetworkXError p={p} is not in [0,1]." raise nx.NetworkXError(msg) if n < 2: - msg = 'n must be greater than or equal to 2' + msg = "n must be greater than or equal to 2" raise nx.NetworkXError(msg) - if seed is not None: - random.seed(seed) G = nx.Graph() @@ -155,13 +142,13 @@ def duplication_divergence_graph(n, p, seed=None): i = 2 while i < n: # Choose a random node from current graph to duplicate. - random_node = random.choice(list(G)) + random_node = seed.choice(list(G)) # Make the replica. G.add_node(i) # flag indicates whether at least one edge is connected on the replica. flag = False for nbr in G.neighbors(random_node): - if random.random() < p: + if seed.random() < p: # Link retention step. G.add_edge(i, nbr) flag = True diff --git a/networkx/generators/ego.py b/networkx/generators/ego.py index 2a417ce..cca7dfa 100644 --- a/networkx/generators/ego.py +++ b/networkx/generators/ego.py @@ -1,15 +1,7 @@ """ Ego graph. """ -# Copyright (C) 2010 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -__author__ = """\n""".join(['Drew Conway ', - 'Aric Hagberg ']) -__all__ = ['ego_graph'] +__all__ = ["ego_graph"] import networkx as nx @@ -51,17 +43,18 @@ def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None): """ if undirected: if distance is not None: - sp, _ = nx.single_source_dijkstra(G.to_undirected(), - n, cutoff=radius, - weight=distance) + sp, _ = nx.single_source_dijkstra( + G.to_undirected(), n, cutoff=radius, weight=distance + ) else: - sp = dict(nx.single_source_shortest_path_length(G.to_undirected(), - n, cutoff=radius)) + sp = dict( + nx.single_source_shortest_path_length( + G.to_undirected(), n, cutoff=radius + ) + ) else: if distance is not None: - sp, _ = nx.single_source_dijkstra(G, - n, cutoff=radius, - weight=distance) + sp, _ = nx.single_source_dijkstra(G, n, cutoff=radius, weight=distance) else: sp = dict(nx.single_source_shortest_path_length(G, n, cutoff=radius)) diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index f37808c..88c49ea 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -1,14 +1,10 @@ -# -*- coding: utf-8 -*- -# Copyright 2014 "cheebee7i". -# Copyright 2014 "alexbrc". -# Copyright 2014 Jeffrey Finkelstein . """Provides explicit constructions of expander graphs. 
""" import itertools import networkx as nx -__all__ = ['margulis_gabber_galil_graph', 'chordal_cycle_graph'] +__all__ = ["margulis_gabber_galil_graph", "chordal_cycle_graph", "paley_graph"] # Other discrete torus expanders can be constructed by using the following edge @@ -44,7 +40,7 @@ # (x, (y + (2*x + 2)) % n), # def margulis_gabber_galil_graph(n, create_using=None): - """Return the Margulis-Gabber-Galil undirected MultiGraph on `n^2` nodes. + r"""Returns the Margulis-Gabber-Galil undirected MultiGraph on `n^2` nodes. The undirected MultiGraph is regular with degree `8`. Nodes are integer pairs. The second-largest eigenvalue of the adjacency matrix of the graph @@ -54,9 +50,8 @@ def margulis_gabber_galil_graph(n, create_using=None): ---------- n : int Determines the number of nodes in the graph: `n^2`. - create_using : graph-like - A graph-like object that receives the constructed edges. If None, - then a :class:`~networkx.MultiGraph` instance is used. + create_using : NetworkX graph constructor, optional (default MultiGraph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -69,24 +64,25 @@ def margulis_gabber_galil_graph(n, create_using=None): If the graph is directed or not a multigraph. """ - if create_using is None: - create_using = nx.MultiGraph() - elif create_using.is_directed() or not create_using.is_multigraph(): + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed() or not G.is_multigraph(): msg = "`create_using` must be an undirected multigraph." raise nx.NetworkXError(msg) - G = create_using - G.clear() for (x, y) in itertools.product(range(n), repeat=2): - for (u, v) in (((x + 2 * y) % n, y), ((x + (2 * y + 1)) % n, y), - (x, (y + 2 * x) % n), (x, (y + (2 * x + 1)) % n)): + for (u, v) in ( + ((x + 2 * y) % n, y), + ((x + (2 * y + 1)) % n, y), + (x, (y + 2 * x) % n), + (x, (y + (2 * x + 1)) % n), + ): G.add_edge((x, y), (u, v)) - G.graph['name'] = "margulis_gabber_galil_graph({0})".format(n) + G.graph["name"] = f"margulis_gabber_galil_graph({n})" return G def chordal_cycle_graph(p, create_using=None): - """Return the chordal cycle graph on `p` nodes. + """Returns the chordal cycle graph on `p` nodes. The returned graph is a cycle graph on `p` nodes with chords joining each vertex `x` to its inverse modulo `p`. This graph is a (mildly explicit) @@ -101,9 +97,8 @@ def chordal_cycle_graph(p, create_using=None): The number of vertices in the graph. This also indicates where the chordal edges in the cycle will be created. - create_using : graph-like - A graph-like object that receives the constructed edges. If None, - then a :class:`~networkx.MultiGraph` instance is used. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -114,8 +109,7 @@ def chordal_cycle_graph(p, create_using=None): ------ NetworkXError - If the graph provided in `create_using` is directed or not a - multigraph. + If `create_using` indicates directed or not a multigraph. References ---------- @@ -125,13 +119,11 @@ def chordal_cycle_graph(p, create_using=None): Birkhäuser Verlag, Basel, 1994. """ - if create_using is None: - create_using = nx.MultiGraph() - elif create_using.is_directed() or not create_using.is_multigraph(): + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed() or not G.is_multigraph(): msg = "`create_using` must be an undirected multigraph." 
raise nx.NetworkXError(msg)
-    G = create_using
-    G.clear()
+
     for x in range(p):
         left = (x - 1) % p
         right = (x + 1) % p
@@ -148,5 +140,63 @@ def chordal_cycle_graph(p, create_using=None):
         chord = pow(x, p - 2, p) if x > 0 else 0
         for y in (left, right, chord):
             G.add_edge(x, y)
-    G.graph['name'] = "chordal_cycle_graph({0})".format(p)
+    G.graph["name"] = f"chordal_cycle_graph({p})"
+    return G
+
+
+def paley_graph(p, create_using=None):
+    """Returns the Paley (p-1)/2-regular graph on p nodes.
+
+    The returned graph is a graph on Z/pZ with edges between x and y
+    if and only if x-y is a nonzero square in Z/pZ.
+
+    If p = 1 mod 4, -1 is a square in Z/pZ and therefore x-y is a square if
+    and only if y-x is also a square, i.e. the edges in the Paley graph are
+    symmetric.
+
+    If p = 3 mod 4, -1 is not a square in Z/pZ and therefore either x-y or
+    y-x is a square in Z/pZ, but not both.
+
+    Note that a more general definition of Paley graphs extends this
+    construction to graphs over q=p^n vertices, by using the finite field
+    F_q instead of Z/pZ. This construction requires computing squares in
+    general finite fields and is not what is implemented here (i.e.
+    paley_graph(25) does not return the true Paley graph associated with
+    5^2).
+
+    Parameters
+    ----------
+    p : int, an odd prime number.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+       Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : graph
+        The constructed directed graph.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is a multigraph.
+
+    References
+    ----------
+    Chapter 13 in B. Bollobas, Random Graphs. Second edition.
+    Cambridge Studies in Advanced Mathematics, 73.
+    Cambridge University Press, Cambridge (2001).
+    """
+    G = nx.empty_graph(0, create_using, default=nx.DiGraph)
+    if G.is_multigraph():
+        msg = "`create_using` cannot be a multigraph."
+        raise nx.NetworkXError(msg)
+
+    # Compute the squares in Z/pZ.
+    # Make it a set to uniquify (there are exactly (p-1)/2 squares in Z/pZ
+    # when p is prime).
+    square_set = {(x ** 2) % p for x in range(1, p) if (x ** 2) % p != 0}
+
+    for x in range(p):
+        for x2 in square_set:
+            G.add_edge(x, (x + x2) % p)
+    G.graph["name"] = f"paley({p})"
     return G
diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py
index 88d8256..31b870f 100644
--- a/networkx/generators/geometric.py
+++ b/networkx/generators/geometric.py
@@ -1,28 +1,11 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2004-2018 by
-#    Aric Hagberg
-#    Dan Schult
-#    Pieter Swart
-#    All rights reserved.
-#    BSD license.
-#
-# Authors: Aric Hagberg (hagberg@lanl.gov)
-#          Dan Schult (dschult@colgate.edu)
-#          Ben Edwards (BJEdwards@gmail.com)
-#          Arya McCarthy (admccarthy@smu.edu)
-#          Cole MacLean (maclean.cole@gmail.com)
-
 """Generators for geometric graphs.
""" -from __future__ import division from bisect import bisect_left -from itertools import combinations -from itertools import product +from itertools import accumulate, combinations, product from math import sqrt import math -import random -from random import uniform + try: from scipy.spatial import cKDTree as KDTree except ImportError: @@ -31,11 +14,16 @@ _is_scipy_available = True import networkx as nx -from networkx.utils import nodes_or_number +from networkx.utils import nodes_or_number, py_random_state -__all__ = ['geographical_threshold_graph', 'waxman_graph', - 'navigable_small_world_graph', 'random_geometric_graph', - 'soft_random_geometric_graph', 'thresholded_random_geometric_graph'] +__all__ = [ + "geographical_threshold_graph", + "waxman_graph", + "navigable_small_world_graph", + "random_geometric_graph", + "soft_random_geometric_graph", + "thresholded_random_geometric_graph", +] def euclidean(x, y): @@ -54,7 +42,7 @@ def _fast_edges(G, radius, p): Requires scipy to be installed. """ - pos = nx.get_node_attributes(G, 'pos') + pos = nx.get_node_attributes(G, "pos") nodes, coords = list(zip(*pos.items())) kdtree = KDTree(coords) # Cannot provide generator. edge_indexes = kdtree.query_pairs(radius, p) @@ -70,14 +58,15 @@ def _slow_edges(G, radius, p): """ # TODO This can be parallelized. edges = [] - for (u, pu), (v, pv) in combinations(G.nodes(data='pos'), 2): + for (u, pu), (v, pv) in combinations(G.nodes(data="pos"), 2): if sum(abs(a - b) ** p for a, b in zip(pu, pv)) <= radius ** p: edges.append((u, v)) return edges +@py_random_state(5) @nodes_or_number(0) -def random_geometric_graph(n, radius, dim=2, pos=None, p=2): +def random_geometric_graph(n, radius, dim=2, pos=None, p=2, seed=None): """Returns a random geometric graph in the unit cube of dimensions `dim`. The random geometric graph model places `n` nodes uniformly at @@ -101,10 +90,13 @@ def random_geometric_graph(n, radius, dim=2, pos=None, p=2): Which Minkowski distance metric to use. `p` has to meet the condition ``1 <= p <= infinity``. - If this argument is not specified, the :math:`L^2` metric + If this argument is not specified, the :math:`L^2` metric (the Euclidean distance metric), p = 2 is used. This should not be confused with the `p` of an Erdős-Rényi random graph, which represents probability. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -157,8 +149,8 @@ def random_geometric_graph(n, radius, dim=2, pos=None, p=2): # If no positions are provided, choose uniformly random vectors in # Euclidean space of the specified dimension. if pos is None: - pos = {v: [random.random() for i in range(dim)] for v in nodes} - nx.set_node_attributes(G, pos, 'pos') + pos = {v: [seed.random() for i in range(dim)] for v in nodes} + nx.set_node_attributes(G, pos, "pos") if _is_scipy_available: edges = _fast_edges(G, radius, p) @@ -169,18 +161,22 @@ def random_geometric_graph(n, radius, dim=2, pos=None, p=2): return G +@py_random_state(6) @nodes_or_number(0) -def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None): - """Returns a soft random geometric graph in the unit cube of dimensions `dim`. +def soft_random_geometric_graph( + n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None +): + r"""Returns a soft random geometric graph in the unit cube. The soft random geometric graph [1] model places `n` nodes uniformly at - random in the unit cube. 
Two nodes of distance, dist, computed by the `p`-Minkowski - distance metric are joined by an edge with probability `p_dist` if the computed - distance metric value of the nodes is at most `radius`, otherwise - they are not joined. + random in the unit cube in dimension `dim`. Two nodes of distance, `dist`, + computed by the `p`-Minkowski distance metric are joined by an edge with + probability `p_dist` if the computed distance metric value of the nodes + is at most `radius`, otherwise they are not joined. - Edges within `radius` of each other are determined using a KDTree when SciPy - is available. This reduces the time complexity from :math:`O(n^2)` to :math:`O(n)`. + Edges within `radius` of each other are determined using a KDTree when + SciPy is available. This reduces the time complexity from :math:`O(n^2)` + to :math:`O(n)`. Parameters ---------- @@ -193,25 +189,28 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None): pos : dict, optional A dictionary keyed by node with node positions as values. p : float, optional - Which Minkowski distance metric to use. `p` has to meet the condition - ``1 <= p <= infinity``. + Which Minkowski distance metric to use. + `p` has to meet the condition ``1 <= p <= infinity``. - If this argument is not specified, the :math:`L^2` metric (the Euclidean - distance metric), p = 2 is used. + If this argument is not specified, the :math:`L^2` metric + (the Euclidean distance metric), p = 2 is used. This should not be confused with the `p` of an Erdős-Rényi random graph, which represents probability. p_dist : function, optional - A probability density function computing the probability of - connecting two nodes that are of distance, dist, computed by the - Minkowski distance metric. The probability density function, `p_dist`, must - be any function that takes the metric value as input + A probability density function computing the probability of + connecting two nodes that are of distance, dist, computed by the + Minkowski distance metric. The probability density function, `p_dist`, + must be any function that takes the metric value as input and outputs a single probability value between 0-1. The scipy.stats - package has many probability distribution functions implemented and tools - for custom probability distribution definitions [2], and passing the .pdf - method of scipy.stats distributions can be used here. If the probability - function, `p_dist`, is not supplied, the default function is an exponential - distribution with rate parameter :math:`\lambda=1`. + package has many probability distribution functions implemented and + tools for custom probability distribution definitions [2], and passing + the .pdf method of scipy.stats distributions can be used here. If the + probability function, `p_dist`, is not supplied, the default function + is an exponential distribution with rate parameter :math:`\lambda=1`. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -231,9 +230,9 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None): Custom Graph: Create a soft random geometric graph on 100 uniformly distributed nodes - where nodes are joined by an edge with probability computed from an exponential - distribution with rate parameter :math:`\lambda=1` if their Euclidean distance - is at most 0.2. 
+ where nodes are joined by an edge with probability computed from an + exponential distribution with rate parameter :math:`\lambda=1` if their + Euclidean distance is at most 0.2. Notes ----- @@ -245,7 +244,7 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None): For example, to use a 2D Gaussian distribution of node positions with mean (0, 0) and standard deviation 2 - The scipy.stats package can be used to define the probaility distribution + The scipy.stats package can be used to define the probability distribution with the .pdf method used as `p_dist`. :: @@ -254,26 +253,26 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None): >>> import math >>> n = 100 >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)} - >>> def p_dist(dist): return math.exp(-dist) + >>> p_dist = lambda dist: math.exp(-dist) >>> G = nx.soft_random_geometric_graph(n, 0.2, pos=pos, p_dist=p_dist) References ---------- .. [1] Penrose, Mathew D. "Connectivity of soft random geometric graphs." The Annals of Applied Probability 26.2 (2016): 986-1028. - [2] scipy.stats - https://docs.scipy.org/doc/scipy/reference/tutorial/stats.html + [2] scipy.stats - + https://docs.scipy.org/doc/scipy/reference/tutorial/stats.html """ - n_name, nodes = n G = nx.Graph() - G.name = 'soft_random_geometric_graph({}, {}, {})'.format(n, radius, dim) + G.name = f"soft_random_geometric_graph({n}, {radius}, {dim})" G.add_nodes_from(nodes) # If no positions are provided, choose uniformly random vectors in # Euclidean space of the specified dimension. if pos is None: - pos = {v: [random.random() for i in range(dim)] for v in nodes} - nx.set_node_attributes(G, pos, 'pos') + pos = {v: [seed.random() for i in range(dim)] for v in nodes} + nx.set_node_attributes(G, pos, "pos") # if p_dist function not supplied the default function is an exponential # distribution with rate parameter :math:`\lambda=1`. @@ -285,12 +284,13 @@ def p_dist(dist): def should_join(pair): u, v = pair u_pos, v_pos = pos[u], pos[v] - dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1 / p) - # Check if dist is <= radius parameter. This check is redundant if scipy - # is available and _fast_edges routine is used, but provides the check in case - # scipy is not available and all edge combinations need to be checked + dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos))) ** (1 / p) + # Check if dist <= radius parameter. This check is redundant if scipy + # is available and _fast_edges routine is used, but provides the + # check in case scipy is not available and all edge combinations + # need to be checked if dist <= radius: - return random.random() < p_dist(dist) + return seed.random() < p_dist(dist) else: return False @@ -303,9 +303,11 @@ def should_join(pair): return G +@py_random_state(7) @nodes_or_number(0) -def geographical_threshold_graph(n, theta, dim=2, pos=None, - weight=None, metric=None, p_dist=None): +def geographical_threshold_graph( + n, theta, dim=2, pos=None, weight=None, metric=None, p_dist=None, seed=None +): r"""Returns a geographical threshold graph. The geographical threshold graph model places $n$ nodes uniformly at @@ -314,7 +316,7 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None, .. 
math::

-       (w_u + w_v)h(r) \ge \theta
+        (w_u + w_v)h(r) \ge \theta

    where `r` is the distance between `u` and `v`, h(r) is a probability of
    connection as a function of `r`, and :math:`\theta` is the threshold
    value.
@@ -350,16 +352,20 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None,

        .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29

    p_dist : function, optional
-        A probability density function computing the probability of
+        A probability density function computing the probability of
        connecting two nodes that are of distance, r, computed by metric.
        The probability density function, `p_dist`, must be any function
        that takes the metric value as input
-        and outputs a single probability value between 0-1. The scipy.stats
-        package has many probability distribution functions implemented and tools
-        for custom probability distribution definitions [2], and passing the .pdf
-        method of scipy.stats distributions can be used here. If the probability
+        and outputs a single probability value between 0-1.
+        The scipy.stats package has many probability distribution functions
+        implemented and tools for custom probability distribution
+        definitions [2], and passing the .pdf method of scipy.stats
+        distributions can be used here. If the probability
    function, `p_dist`, is not supplied, the default power-law
    function :math:`r^{-2}` is used.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.

    Returns
    -------
@@ -401,15 +407,6 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None,
    If node positions are not specified they are randomly assigned
    from the uniform distribution.

-    Starting in NetworkX 2.1 the parameter ``alpha`` is deprecated and replaced
-    with the customizable ``p_dist`` function parameter, which defaults to r^-2
-    if ``p_dist`` is not supplied. To reproduce networks of earlier NetworkX
-    versions, a custom function needs to be defined and passed as the ``p_dist``
-    parameter. For example, if the parameter ``alpha`` = 2 was used in NetworkX 2.0,
-    the custom function def custom_dist(r): r**-2 can be passed in versions >=2.1
-    as the parameter p_dist = custom_dist to produce an equivalent network. Note the
-    change in sign from +2 to -2 in this parameter change.
-
    References
    ----------
    .. [1] Masuda, N., Miwa, H., Konno, N.:
@@ -427,21 +424,22 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None,
    # If no weights are provided, choose them from an exponential
    # distribution.
    if weight is None:
-        weight = {v: random.expovariate(1) for v in G}
+        weight = {v: seed.expovariate(1) for v in G}
    # If no positions are provided, choose uniformly random vectors in
    # Euclidean space of the specified dimension.
    if pos is None:
-        pos = {v: [random.random() for i in range(dim)] for v in nodes}
+        pos = {v: [seed.random() for i in range(dim)] for v in nodes}
    # If no distance metric is provided, use Euclidean distance.
if metric is None: metric = euclidean - nx.set_node_attributes(G, weight, 'weight') - nx.set_node_attributes(G, pos, 'pos') + nx.set_node_attributes(G, weight, "weight") + nx.set_node_attributes(G, pos, "pos") # if p_dist is not supplied, use default r^-2 if p_dist is None: + def p_dist(r): - return r**-2 + return r ** -2 # Returns ``True`` if and only if the nodes whose attributes are # ``du`` and ``dv`` should be joined, according to the threshold @@ -456,10 +454,12 @@ def should_join(pair): return G +@py_random_state(6) @nodes_or_number(0) -def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), - metric=None): - r"""Return a Waxman random graph. +def waxman_graph( + n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), metric=None, seed=None +): + r"""Returns a Waxman random graph. The Waxman random graph model places `n` nodes uniformly at random in a rectangular domain. Each pair of nodes at distance `d` is @@ -508,6 +508,10 @@ def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29 + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- Graph @@ -544,8 +548,8 @@ def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), G.add_nodes_from(nodes) (xmin, ymin, xmax, ymax) = domain # Each node gets a uniformly random position in the given rectangle. - pos = {v: (uniform(xmin, xmax), uniform(ymin, ymax)) for v in G} - nx.set_node_attributes(G, pos, 'pos') + pos = {v: (seed.uniform(xmin, xmax), seed.uniform(ymin, ymax)) for v in G} + nx.set_node_attributes(G, pos, "pos") # If no distance metric is provided, use Euclidean distance. if metric is None: metric = euclidean @@ -558,20 +562,25 @@ def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), if L is None: L = max(metric(x, y) for x, y in combinations(pos.values(), 2)) - def dist(u, v): return metric(pos[u], pos[v]) + def dist(u, v): + return metric(pos[u], pos[v]) + else: - def dist(u, v): return random.random() * L + + def dist(u, v): + return seed.random() * L # `pair` is the pair of nodes to decide whether to join. def should_join(pair): - return random.random() < beta * math.exp(-dist(*pair) / (alpha * L)) + return seed.random() < beta * math.exp(-dist(*pair) / (alpha * L)) G.add_edges_from(filter(should_join, combinations(G, 2))) return G +@py_random_state(5) def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): - r"""Return a navigable small-world graph. + r"""Returns a navigable small-world graph. A navigable small-world graph is a directed grid with additional long-range connections that are chosen randomly. @@ -607,22 +616,22 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): connecting to a node at lattice distance $d$ is $1/d^r$. dim : int Dimension of grid - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. References ---------- .. [1] J. Kleinberg. The small-world phenomenon: An algorithmic perspective. Proc. 32nd ACM Symposium on Theory of Computing, 2000. 
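+
+    Examples
+    --------
+    A minimal usage sketch (node labels are (x, y) lattice tuples; the node
+    count below holds for any seed):
+
+    >>> G = nx.navigable_small_world_graph(3, seed=42)
+    >>> len(G)
+    9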
""" - if (p < 1): + if p < 1: raise nx.NetworkXException("p must be >= 1") - if (q < 0): + if q < 0: raise nx.NetworkXException("q must be >= 0") - if (r < 0): + if r < 0: raise nx.NetworkXException("r must be >= 1") - if seed is not None: - random.seed(seed) + G = nx.DiGraph() nodes = list(product(range(n), repeat=dim)) for p1 in nodes: @@ -633,27 +642,32 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): d = sum((abs(b - a) for a, b in zip(p1, p2))) if d <= p: G.add_edge(p1, p2) - probs.append(d**-r) - cdf = list(nx.utils.accumulate(probs)) + probs.append(d ** -r) + cdf = list(accumulate(probs)) for _ in range(q): - target = nodes[bisect_left(cdf, random.uniform(0, cdf[-1]))] + target = nodes[bisect_left(cdf, seed.uniform(0, cdf[-1]))] G.add_edge(p1, target) return G +@py_random_state(7) @nodes_or_number(0) -def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight=None, p=2): - """Returns a thresholded random geometric graph in the unit cube of dimensions `dim`. - - The thresholded random geometric graph [1] model places `n` nodes uniformly at - random in the unit cube. Each node `u` is assigned a weight - :math:`w_u`. Two nodes `u` and `v` are joined by an edge if they are within - the maximum connection distance, `radius` computed by the `p`-Minkowski distance - and the summation of weights :math:`w_u` + :math:`w_v` is greater than or equal +def thresholded_random_geometric_graph( + n, radius, theta, dim=2, pos=None, weight=None, p=2, seed=None +): + r"""Returns a thresholded random geometric graph in the unit cube. + + The thresholded random geometric graph [1] model places `n` nodes + uniformly at random in the unit cube of dimensions `dim`. Each node + `u` is assigned a weight :math:`w_u`. Two nodes `u` and `v` are + joined by an edge if they are within the maximum connection distance, + `radius` computed by the `p`-Minkowski distance and the summation of + weights :math:`w_u` + :math:`w_v` is greater than or equal to the threshold parameter `theta`. - Edges within `radius` of each other are determined using a KDTree when SciPy - is available. This reduces the time complexity from :math:`O(n^2)` to :math:`O(n)`. + Edges within `radius` of each other are determined using a KDTree when + SciPy is available. This reduces the time complexity from :math:`O(n^2)` + to :math:`O(n)`. Parameters ---------- @@ -673,11 +687,14 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight Which Minkowski distance metric to use. `p` has to meet the condition ``1 <= p <= infinity``. - If this argument is not specified, the :math:`L^2` metric (the Euclidean - distance metric), p = 2 is used. + If this argument is not specified, the :math:`L^2` metric + (the Euclidean distance metric), p = 2 is used. This should not be confused with the `p` of an Erdős-Rényi random graph, which represents probability. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -700,10 +717,10 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight Custom Graph: - Create a thresholded random geometric graph on 50 uniformly distributed nodes - where nodes are joined by an edge if their sum weights drawn from a exponential - distribution with rate = 5 are >= theta = 0.1 and their Euclidean distance - is at most 0.2. 
+    Create a thresholded random geometric graph on 50 uniformly distributed
+    nodes where nodes are joined by an edge if their sum weights drawn from
+    an exponential distribution with rate = 5 are >= theta = 0.1 and their
+    Euclidean distance is at most 0.2.

    Notes
    -----
@@ -727,7 +744,7 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight
    >>> n = 50
    >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)}
    >>> w = {i: random.expovariate(5.0) for i in range(n)}
-    >>> G = nx.thresholded_random_geometric_graph(n, 0.2, 0.1, pos=pos, weight=w)
+    >>> G = nx.thresholded_random_geometric_graph(n, 0.2, 0.1, 2, pos, w)

    References
    ----------
@@ -737,21 +754,20 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight
    n_name, nodes = n
    G = nx.Graph()
-    namestr = 'thresholded_random_geometric_graph({}, {}, {}, {})'
-    G.name = namestr.format(n, radius, theta, dim)
+    G.name = f"thresholded_random_geometric_graph({n}, {radius}, {theta}, {dim})"
    G.add_nodes_from(nodes)
    # If no weights are provided, choose them from an exponential
    # distribution.
    if weight is None:
-        weight = {v: random.expovariate(1) for v in G}
+        weight = {v: seed.expovariate(1) for v in G}
    # If no positions are provided, choose uniformly random vectors in
    # Euclidean space of the specified dimension.
    if pos is None:
-        pos = {v: [random.random() for i in range(dim)] for v in nodes}
+        pos = {v: [seed.random() for i in range(dim)] for v in nodes}
    # If no distance metric is provided, use Euclidean distance.
-    nx.set_node_attributes(G, weight, 'weight')
-    nx.set_node_attributes(G, pos, 'pos')
+    nx.set_node_attributes(G, weight, "weight")
+    nx.set_node_attributes(G, pos, "pos")

    # Returns ``True`` if and only if the nodes whose attributes are
    # ``du`` and ``dv`` should be joined, according to the threshold
@@ -761,10 +777,11 @@ def should_join(pair):
        u, v = pair
        u_weight, v_weight = weight[u], weight[v]
        u_pos, v_pos = pos[u], pos[v]
-        dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1 / p)
-        # Check if dist is <= radius parameter. This check is redundant if scipy
-        # is available and _fast_edges routine is used, but provides the check in case
-        # scipy is not available and all edge combinations need to be checked
+        dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos))) ** (1 / p)
+        # Check if dist is <= radius parameter. This check is redundant if
+        # scipy is available and _fast_edges routine is used, but provides
+        # the check in case scipy is not available and all edge combinations
+        # need to be checked
        if dist <= radius:
            return theta <= u_weight + v_weight
        else:
diff --git a/networkx/generators/harary_graph.py b/networkx/generators/harary_graph.py
new file mode 100644
index 0000000..7af21b3
--- /dev/null
+++ b/networkx/generators/harary_graph.py
@@ -0,0 +1,197 @@
+"""Generators for Harary graphs
+
+This module gives two generators for the Harary graph, which was
+introduced by the famous mathematician Frank Harary in his 1962 work [H]_.
+The first generator gives the Harary graph that maximizes the node
+connectivity with given number of nodes and given number of edges.
+The second generator gives the Harary graph that minimizes
+the number of edges in the graph with given node connectivity and
+number of nodes.
+
+References
+----------
+.. [H] Harary, F. "The Maximum Connectivity of a Graph."
+       Proc. Nat. Acad. Sci. USA 48, 1142-1146, 1962.
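+
+Examples
+--------
+A small illustrative sketch (the values n=10, m=12 are chosen only for
+illustration): the first generator then attains node connectivity
+floor(2*12/10) = 2:
+
+>>> import networkx as nx
+>>> G = nx.hnm_harary_graph(10, 12)
+>>> G.number_of_edges()
+12
+>>> nx.node_connectivity(G)
+2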
+ +""" + +import networkx as nx +from networkx.exception import NetworkXError + +__all__ = ["hnm_harary_graph", "hkn_harary_graph"] + + +def hnm_harary_graph(n, m, create_using=None): + """Returns the Harary graph with given numbers of nodes and edges. + + The Harary graph $H_{n,m}$ is the graph that maximizes node connectivity + with $n$ nodes and $m$ edges. + + This maximum node connectivity is known to be floor($2m/n$). [1]_ + + Parameters + ---------- + n: integer + The number of nodes the generated graph is to contain + + m: integer + The number of edges the generated graph is to contain + + create_using : NetworkX graph constructor, optional Graph type + to create (default=nx.Graph). If graph instance, then cleared + before populated. + + Returns + ------- + NetworkX graph + The Harary graph $H_{n,m}$. + + See Also + -------- + hkn_harary_graph + + Notes + ----- + This algorithm runs in $O(m)$ time. + It is implemented by following the Reference [2]_. + + References + ---------- + .. [1] F. T. Boesch, A. Satyanarayana, and C. L. Suffel, + "A Survey of Some Network Reliability Analysis and Synthesis Results," + Networks, pp. 99-107, 2009. + + .. [2] Harary, F. "The Maximum Connectivity of a Graph." + Proc. Nat. Acad. Sci. USA 48, 1142-1146, 1962. + """ + + if n < 1: + raise NetworkXError("The number of nodes must be >= 1!") + if m < n - 1: + raise NetworkXError("The number of edges must be >= n - 1 !") + if m > n * (n - 1) // 2: + raise NetworkXError("The number of edges must be <= n(n-1)/2") + + # Construct an empty graph with n nodes first + H = nx.empty_graph(n, create_using) + # Get the floor of average node degree + d = 2 * m // n + + # Test the parity of n and d + if (n % 2 == 0) or (d % 2 == 0): + # Start with a regular graph of d degrees + offset = d // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + if d & 1: + # in case d is odd; n must be even in this case + half = n // 2 + for i in range(0, half): + # add edges diagonally + H.add_edge(i, i + half) + # Get the remainder of 2*m modulo n + r = 2 * m % n + if r > 0: + # add remaining edges at offset+1 + for i in range(0, r // 2): + H.add_edge(i, i + offset + 1) + else: + # Start with a regular graph of (d - 1) degrees + offset = (d - 1) // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + half = n // 2 + for i in range(0, m - n * offset): + # add the remaining m - n*offset edges between i and i+half + H.add_edge(i, (i + half) % n) + + return H + + +def hkn_harary_graph(k, n, create_using=None): + """Returns the Harary graph with given node connectivity and node number. + + The Harary graph $H_{k,n}$ is the graph that minimizes the number of + edges needed with given node connectivity $k$ and node number $n$. + + This smallest number of edges is known to be ceil($kn/2$) [1]_. + + Parameters + ---------- + k: integer + The node connectivity of the generated graph + + n: integer + The number of nodes the generated graph is to contain + + create_using : NetworkX graph constructor, optional Graph type + to create (default=nx.Graph). If graph instance, then cleared + before populated. + + Returns + ------- + NetworkX graph + The Harary graph $H_{k,n}$. + + See Also + -------- + hnm_harary_graph + + Notes + ----- + This algorithm runs in $O(kn)$ time. + It is implemented by following the Reference [2]_. + + References + ---------- + .. [1] Weisstein, Eric W. "Harary Graph." 
From MathWorld--A Wolfram Web + Resource. http://mathworld.wolfram.com/HararyGraph.html. + + .. [2] Harary, F. "The Maximum Connectivity of a Graph." + Proc. Nat. Acad. Sci. USA 48, 1142-1146, 1962. + """ + + if k < 1: + raise NetworkXError("The node connectivity must be >= 1!") + if n < k + 1: + raise NetworkXError("The number of nodes must be >= k+1 !") + + # in case of connectivity 1, simply return the path graph + if k == 1: + H = nx.path_graph(n, create_using) + return H + + # Construct an empty graph with n nodes first + H = nx.empty_graph(n, create_using) + + # Test the parity of k and n + if (k % 2 == 0) or (n % 2 == 0): + # Construct a regular graph with k degrees + offset = k // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + if k & 1: + # odd degree; n must be even in this case + half = n // 2 + for i in range(0, half): + # add edges diagonally + H.add_edge(i, i + half) + else: + # Construct a regular graph with (k - 1) degrees + offset = (k - 1) // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + half = n // 2 + for i in range(0, half + 1): + # add half+1 edges between i and i+half + H.add_edge(i, (i + half) % n) + + return H diff --git a/networkx/generators/internet_as_graphs.py b/networkx/generators/internet_as_graphs.py new file mode 100644 index 0000000..77fdb26 --- /dev/null +++ b/networkx/generators/internet_as_graphs.py @@ -0,0 +1,442 @@ +"""Generates graphs resembling the Internet Autonomous System network""" + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = ["random_internet_as_graph"] + + +def uniform_int_from_avg(a, m, seed): + """ Pick a random integer with uniform probability. + + Returns a random integer uniformly taken from a distribution with + minimum value 'a' and average value 'm', X~U(a,b), E[X]=m, X in N where + b = 2*m - a. + + Notes + ----- + p = (b-floor(b))/2 + X = X1 + X2; X1~U(a,floor(b)), X2~B(p) + E[X] = E[X1] + E[X2] = (floor(b)+a)/2 + (b-floor(b))/2 = (b+a)/2 = m + """ + + from math import floor + + assert m >= a + b = 2 * m - a + p = (b - floor(b)) / 2 + X1 = int(round(seed.random() * (floor(b) - a) + a)) + if seed.random() < p: + X2 = 1 + else: + X2 = 0 + return X1 + X2 + + +def choose_pref_attach(degs, seed): + """ Pick a random value, with a probability given by its weight. + + Returns a random choice among degs keys, each of which has a + probability proportional to the corresponding dictionary value. + + Parameters + ---------- + degs: dictionary + It contains the possible values (keys) and the corresponding + probabilities (values) + seed: random state + + Returns + ------- + v: object + A key of degs or None if degs is empty + """ + + if len(degs) == 0: + return None + s = sum(degs.values()) + if s == 0: + return seed.choice(list(degs.keys())) + v = seed.random() * s + + nodes = list(degs.keys()) + i = 0 + acc = degs[nodes[i]] + while v > acc: + i += 1 + acc += degs[nodes[i]] + return nodes[i] + + +class AS_graph_generator: + """ Generates random internet AS graphs. + """ + + def __init__(self, n, seed): + """ Initializes variables. Immediate numbers are taken from [1]. + + Parameters + ---------- + n: integer + Number of graph nodes + seed: random state + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + GG: AS_graph_generator object + + References + ---------- + [1] A. Elmokashfi, A. Kvalbein and C. 
Dovrolis, "On the Scalability of + BGP: The Role of Topology Growth," in IEEE Journal on Selected Areas + in Communications, vol. 28, no. 8, pp. 1250-1261, October 2010. + """ + + self.seed = seed + self.n_t = min(n, int(round(self.seed.random() * 2 + 4))) # num of T nodes + self.n_m = int(round(0.15 * n)) # number of M nodes + self.n_cp = int(round(0.05 * n)) # number of CP nodes + self.n_c = max(0, n - self.n_t - self.n_m - self.n_cp) # number of C nodes + + self.d_m = 2 + (2.5 * n) / 10000 # average multihoming degree for M nodes + self.d_cp = 2 + (1.5 * n) / 10000 # avg multihoming degree for CP nodes + self.d_c = 1 + (5 * n) / 100000 # average multihoming degree for C nodes + + self.p_m_m = 1 + (2 * n) / 10000 # avg num of peer edges between M and M + self.p_cp_m = 0.2 + (2 * n) / 10000 # avg num of peer edges between CP, M + self.p_cp_cp = 0.05 + (2 * n) / 100000 # avg num of peer edges btwn CP, CP + + self.t_m = 0.375 # probability M's provider is T + self.t_cp = 0.375 # probability CP's provider is T + self.t_c = 0.125 # probability C's provider is T + + def t_graph(self): + """ Generates the core mesh network of tier one nodes of a AS graph. + + Returns + ------- + G: Networkx Graph + Core network + """ + + self.G = nx.Graph() + for i in range(self.n_t): + self.G.add_node(i, type="T") + for r in self.regions: + self.regions[r].add(i) + for j in self.G.nodes(): + if i != j: + self.add_edge(i, j, "peer") + self.customers[i] = set() + self.providers[i] = set() + return self.G + + def add_edge(self, i, j, kind): + if kind == "transit": + customer = str(i) + else: + customer = "none" + self.G.add_edge(i, j, type=kind, customer=customer) + + def choose_peer_pref_attach(self, node_list): + """ Pick a node with a probability weighted by its peer degree. + + Pick a node from node_list with preferential attachment + computed only on their peer degree + """ + + d = {} + for n in node_list: + d[n] = self.G.nodes[n]["peers"] + return choose_pref_attach(d, self.seed) + + def choose_node_pref_attach(self, node_list): + """ Pick a node with a probability weighted by its degree. + + Pick a node from node_list with preferential attachment + computed on their degree + """ + + degs = dict(self.G.degree(node_list)) + return choose_pref_attach(degs, self.seed) + + def add_customer(self, i, j): + """ Keep the dictionaries 'customers' and 'providers' consistent. + """ + + self.customers[j].add(i) + self.providers[i].add(j) + for z in self.providers[j]: + self.customers[z].add(i) + self.providers[i].add(z) + + def add_node(self, i, kind, reg2prob, avg_deg, t_edge_prob): + """ Add a node and its customer transit edges to the graph. + + Parameters + ---------- + i: object + Identifier of the new node + kind: string + Type of the new node. Options are: 'M' for middle node, 'CP' for + content provider and 'C' for customer. + reg2prob: float + Probability the new node can be in two different regions. + avg_deg: float + Average number of transit nodes of which node i is customer. 
+        t_edge_prob: float
+            Probability node i establishes a customer transit edge with a tier
+            one (T) node
+
+        Returns
+        -------
+        i: object
+            Identifier of the new node
+        """
+
+        regs = 1  # regions in which node resides
+        if self.seed.random() < reg2prob:  # node is in two regions
+            regs = 2
+        node_options = set()
+
+        self.G.add_node(i, type=kind, peers=0)
+        self.customers[i] = set()
+        self.providers[i] = set()
+        self.nodes[kind].add(i)
+        for r in self.seed.sample(list(self.regions), regs):
+            node_options = node_options.union(self.regions[r])
+            self.regions[r].add(i)
+
+        edge_num = uniform_int_from_avg(1, avg_deg, self.seed)
+
+        t_options = node_options.intersection(self.nodes["T"])
+        m_options = node_options.intersection(self.nodes["M"])
+        if i in m_options:
+            m_options.remove(i)
+        d = 0
+        while d < edge_num and (len(t_options) > 0 or len(m_options) > 0):
+            if len(m_options) == 0 or (
+                len(t_options) > 0 and self.seed.random() < t_edge_prob
+            ):  # add edge to a T node
+                j = self.choose_node_pref_attach(t_options)
+                t_options.remove(j)
+            else:
+                j = self.choose_node_pref_attach(m_options)
+                m_options.remove(j)
+            self.add_edge(i, j, "transit")
+            self.add_customer(i, j)
+            d += 1
+
+        return i
+
+    def add_m_peering_link(self, m, to_kind):
+        """ Add a peering link between two middle tier (M) nodes.
+
+        Target node j is drawn considering a preferential attachment based on
+        other M node peering degree.
+
+        Parameters
+        ----------
+        m: object
+            Node identifier
+        to_kind: string
+            type for target node j (must always be M)
+
+        Returns
+        -------
+        success: boolean
+        """
+
+        # candidates are of type 'M' and are not customers of m
+        node_options = self.nodes["M"].difference(self.customers[m])
+        # candidates are not providers of m
+        node_options = node_options.difference(self.providers[m])
+        # remove self
+        if m in node_options:
+            node_options.remove(m)
+
+        # remove candidates we are already connected to
+        for j in self.G.neighbors(m):
+            if j in node_options:
+                node_options.remove(j)
+
+        if len(node_options) > 0:
+            j = self.choose_peer_pref_attach(node_options)
+            self.add_edge(m, j, "peer")
+            self.G.nodes[m]["peers"] += 1
+            self.G.nodes[j]["peers"] += 1
+            return True
+        else:
+            return False
+
+    def add_cp_peering_link(self, cp, to_kind):
+        """ Add a peering link to a content provider (CP) node.
+
+        Target node j can be CP or M and it is drawn uniformly among the nodes
+        belonging to the same region as cp.
+
+        Parameters
+        ----------
+        cp: object
+            Node identifier
+        to_kind: string
+            type for target node j (must be M or CP)
+
+        Returns
+        -------
+        success: boolean
+        """
+
+        node_options = set()
+        for r in self.regions:  # options include nodes in the same region(s)
+            if cp in self.regions[r]:
+                node_options = node_options.union(self.regions[r])
+
+        # options are restricted to the indicated kind ('M' or 'CP')
+        node_options = self.nodes[to_kind].intersection(node_options)
+
+        # remove self
+        if cp in node_options:
+            node_options.remove(cp)
+
+        # remove nodes that are cp's providers
+        node_options = node_options.difference(self.providers[cp])
+
+        # remove nodes we are already connected to
+        for j in self.G.neighbors(cp):
+            if j in node_options:
+                node_options.remove(j)
+
+        if len(node_options) > 0:
+            j = self.seed.sample(node_options, 1)[0]
+            self.add_edge(cp, j, "peer")
+            self.G.nodes[cp]["peers"] += 1
+            self.G.nodes[j]["peers"] += 1
+            return True
+        else:
+            return False
+
+    def graph_regions(self, rn):
+        """ Initializes AS network regions.
+ + Parameters + ---------- + rn: integer + Number of regions + """ + + self.regions = {} + for i in range(rn): + self.regions["REG" + str(i)] = set() + + def add_peering_links(self, from_kind, to_kind): + """ Utility function to add peering links among node groups. + """ + peer_link_method = None + if from_kind == "M": + peer_link_method = self.add_m_peering_link + m = self.p_m_m + if from_kind == "CP": + peer_link_method = self.add_cp_peering_link + if to_kind == "M": + m = self.p_cp_m + else: + m = self.p_cp_cp + + for i in self.nodes[from_kind]: + num = uniform_int_from_avg(0, m, self.seed) + for _ in range(num): + peer_link_method(i, to_kind) + + def generate(self): + """ Generates a random AS network graph as described in [1]. + + Returns + ------- + G: Graph object + + Notes + ----- + The process steps are the following: first we create the core network + of tier one nodes, then we add the middle tier (M), the content + provider (CP) and the customer (C) nodes along with their transit edges + (link i,j means i is customer of j). Finally we add peering links + between M nodes, between M and CP nodes and between CP node couples. + For a detailed description of the algorithm, please refer to [1]. + + References + ---------- + [1] A. Elmokashfi, A. Kvalbein and C. Dovrolis, "On the Scalability of + BGP: The Role of Topology Growth," in IEEE Journal on Selected Areas + in Communications, vol. 28, no. 8, pp. 1250-1261, October 2010. + """ + + self.graph_regions(5) + self.customers = {} + self.providers = {} + self.nodes = {"T": set(), "M": set(), "CP": set(), "C": set()} + + self.t_graph() + self.nodes["T"] = set(list(self.G.nodes())) + + i = len(self.nodes["T"]) + for _ in range(self.n_m): + self.nodes["M"].add(self.add_node(i, "M", 0.2, self.d_m, self.t_m)) + i += 1 + for _ in range(self.n_cp): + self.nodes["CP"].add(self.add_node(i, "CP", 0.05, self.d_cp, self.t_cp)) + i += 1 + for _ in range(self.n_c): + self.nodes["C"].add(self.add_node(i, "C", 0, self.d_c, self.t_c)) + i += 1 + + self.add_peering_links("M", "M") + self.add_peering_links("CP", "M") + self.add_peering_links("CP", "CP") + + return self.G + + +@py_random_state(1) +def random_internet_as_graph(n, seed=None): + """ Generates a random undirected graph resembling the Internet AS network + + Parameters + ---------- + n: integer in [1000, 10000] + Number of graph nodes + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G: Networkx Graph object + A randomly generated undirected graph + + Notes + ----- + This algorithm returns an undirected graph resembling the Internet + Autonomous System (AS) network, it uses the approach by Elmokashfi et al. + [1] and it grants the properties described in the related paper [1]. + + Each node models an autonomous system, with an attribute 'type' specifying + its kind; tier-1 (T), mid-level (M), customer (C) or content-provider (CP). + Each edge models an ADV communication link (hence, bidirectional) with + attributes: + - type: transit|peer, the kind of commercial agreement between nodes; + - customer: , the identifier of the node acting as customer + ('none' if type is peer). + + References + ---------- + [1] A. Elmokashfi, A. Kvalbein and C. Dovrolis, "On the Scalability of + BGP: The Role of Topology Growth," in IEEE Journal on Selected Areas + in Communications, vol. 28, no. 8, pp. 1250-1261, October 2010. 
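+
+    Examples
+    --------
+    A minimal usage sketch; the node count is exact by construction, while
+    the edges and node types vary with the seed:
+
+    >>> G = nx.random_internet_as_graph(1000, seed=42)
+    >>> len(G)
+    1000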
+ """ + + GG = AS_graph_generator(n, seed) + G = GG.generate() + return G diff --git a/networkx/generators/intersection.py b/networkx/generators/intersection.py index caee1f7..ee1e048 100644 --- a/networkx/generators/intersection.py +++ b/networkx/generators/intersection.py @@ -1,27 +1,20 @@ -# -*- coding: utf-8 -*- """ Generators for random intersection graphs. """ -# Copyright (C) 2011 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -import random import networkx as nx from networkx.algorithms import bipartite +from networkx.utils import py_random_state -__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)']) - -__all__ = ['uniform_random_intersection_graph', - 'k_random_intersection_graph', - 'general_random_intersection_graph', - ] +__all__ = [ + "uniform_random_intersection_graph", + "k_random_intersection_graph", + "general_random_intersection_graph", +] +@py_random_state(3) def uniform_random_intersection_graph(n, m, p, seed=None): - """Return a uniform random intersection graph. + """Returns a uniform random intersection graph. Parameters ---------- @@ -31,8 +24,9 @@ def uniform_random_intersection_graph(n, m, p, seed=None): The number of nodes in the second bipartite set (attributes) p : float Probability of connecting nodes between bipartite sets - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. See Also -------- @@ -47,12 +41,13 @@ def uniform_random_intersection_graph(n, m, p, seed=None): An equivalence theorem relating the evolution of the g(n, m, p) and g(n, p) models. Random Struct. Algorithms 16, 2 (2000), 156–176. """ - G = bipartite.random_graph(n, m, p, seed=seed) + G = bipartite.random_graph(n, m, p, seed) return nx.projected_graph(G, range(n)) -def k_random_intersection_graph(n, m, k): - """Return a intersection graph with randomly chosen attribute sets for +@py_random_state(3) +def k_random_intersection_graph(n, m, k, seed=None): + """Returns a intersection graph with randomly chosen attribute sets for each node that are of equal size (k). Parameters @@ -63,8 +58,9 @@ def k_random_intersection_graph(n, m, k): The number of nodes in the second bipartite set (attributes) k : float Size of attribute set to assign to each node. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. See Also -------- @@ -79,13 +75,14 @@ def k_random_intersection_graph(n, m, k): G = nx.empty_graph(n + m) mset = range(n, n + m) for v in range(n): - targets = random.sample(mset, k) + targets = seed.sample(mset, k) G.add_edges_from(zip([v] * len(targets), targets)) return nx.projected_graph(G, range(n)) -def general_random_intersection_graph(n, m, p): - """Return a random intersection graph with independent probabilities +@py_random_state(3) +def general_random_intersection_graph(n, m, p, seed=None): + """Returns a random intersection graph with independent probabilities for connections between node and attribute sets. Parameters @@ -96,8 +93,9 @@ def general_random_intersection_graph(n, m, p): The number of nodes in the second bipartite set (attributes) p : list of floats of length m Probabilities for connecting nodes to each attribute - seed : int, optional - Seed for random number generator (default=None). 
+ seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. See Also -------- @@ -117,6 +115,6 @@ def general_random_intersection_graph(n, m, p): mset = range(n, n + m) for u in range(n): for v, q in zip(mset, p): - if random.random() < q: + if seed.random() < q: G.add_edge(u, v) return nx.projected_graph(G, range(n)) diff --git a/networkx/generators/interval_graph.py b/networkx/generators/interval_graph.py new file mode 100644 index 0000000..be15ba9 --- /dev/null +++ b/networkx/generators/interval_graph.py @@ -0,0 +1,69 @@ +""" +Generators for interval graph. +""" +from collections.abc import Sequence +import networkx as nx + +__all__ = ["interval_graph"] + + +def interval_graph(intervals): + """ Generates an interval graph for a list of intervals given. + + In graph theory, an interval graph is an undirected graph formed from a set + of closed intervals on the real line, with a vertex for each interval + and an edge between vertices whose intervals intersect. + It is the intersection graph of the intervals. + + More information can be found at: + https://en.wikipedia.org/wiki/Interval_graph + + Parameters + ---------- + intervals : a sequence of intervals, say (l, r) where l is the left end, + and r is the right end of the closed interval. + + Returns + ------- + G : networkx graph + + Examples + -------- + >>> intervals = [(-2, 3), [1, 4], (2, 3), (4, 6)] + >>> G = nx.interval_graph(intervals) + >>> sorted(G.edges) + [((-2, 3), (1, 4)), ((-2, 3), (2, 3)), ((1, 4), (2, 3)), ((1, 4), (4, 6))] + + Raises + -------- + :exc:`TypeError` + if `intervals` contains None or an element which is not + collections.abc.Sequence or not a length of 2. + :exc:`ValueError` + if `intervals` contains an interval such that min1 > max1 + where min1,max1 = interval + """ + intervals = list(intervals) + for interval in intervals: + if not (isinstance(interval, Sequence) and len(interval) == 2): + raise TypeError( + "Each interval must have length 2, and be a " + "collections.abc.Sequence such as tuple or list." + ) + if interval[0] > interval[1]: + raise ValueError( + f"Interval must have lower value first. " f"Got {interval}" + ) + + graph = nx.Graph() + + tupled_intervals = [tuple(interval) for interval in intervals] + graph.add_nodes_from(tupled_intervals) + + while tupled_intervals: + min1, max1 = interval1 = tupled_intervals.pop() + for interval2 in tupled_intervals: + min2, max2 = interval2 + if max1 >= min2 and max2 >= min1: + graph.add_edge(interval1, interval2) + return graph diff --git a/networkx/generators/joint_degree_seq.py b/networkx/generators/joint_degree_seq.py index 46f9a31..d1fcb00 100644 --- a/networkx/generators/joint_degree_seq.py +++ b/networkx/generators/joint_degree_seq.py @@ -1,22 +1,18 @@ -# Copyright (C) 2016-2018 by -# Minas Gjoka -# BSD license. -# -# Author: Minas Gjoka (minas.gjoka@gmail.com) -"""Generate graphs with a given joint degree """ -from __future__ import division - -import random +"""Generate graphs with a given joint degree and directed joint degree""" import networkx as nx +from networkx.utils import py_random_state -__all__ = ['is_valid_joint_degree', - 'joint_degree_graph'] +__all__ = [ + "is_valid_joint_degree", + "is_valid_directed_joint_degree", + "joint_degree_graph", + "directed_joint_degree_graph", +] def is_valid_joint_degree(joint_degrees): - """ Checks whether the given joint degree dictionary is realizable - as a simple graph. 
+ """ Checks whether the given joint degree dictionary is realizable. A *joint degree dictionary* is a dictionary of dictionaries, in which entry ``joint_degrees[k][l]`` is an integer representing the @@ -28,7 +24,7 @@ def is_valid_joint_degree(joint_degrees): - the total number of nodes of degree *k*, computed by ``sum(joint_degrees[k].values()) / k``, must be an integer, - the total number of edges joining nodes of degree *k* with - nodes of degree *l* cannot exceed the total number of possible edges, + nodes of degree *l* cannot exceed the total number of possible edges, - each diagonal entry ``joint_degrees[k][k]`` must be even (this is a convention assumed by the :func:`joint_degree_graph` function). @@ -68,8 +64,7 @@ def is_valid_joint_degree(joint_degrees): if not float(joint_degrees[k][l]).is_integer(): return False - if (k != l) and (joint_degrees[k][l] > - degree_count[k] * degree_count[l]): + if (k != l) and (joint_degrees[k][l] > degree_count[k] * degree_count[l]): return False elif k == l: if joint_degrees[k][k] > degree_count[k] * (degree_count[k] - 1): @@ -83,8 +78,7 @@ def is_valid_joint_degree(joint_degrees): def _neighbor_switch(G, w, unsat, h_node_residual, avoid_node_id=None): - """ Releases one free stub for saturated node ``w``, while preserving - joint degree in graph G. + """ Releases one free stub for ``w``, while preserving joint degree in G. Parameters ---------- @@ -146,6 +140,7 @@ def _neighbor_switch(G, w, unsat, h_node_residual, avoid_node_id=None): unsat.remove(w_prime) +@py_random_state(1) def joint_degree_graph(joint_degrees, seed=None): """ Generates a random simple graph with the given joint degree dictionary. @@ -154,8 +149,9 @@ def joint_degree_graph(joint_degrees, seed=None): joint_degrees : dictionary of dictionary of integers A joint degree dictionary in which entry ``joint_degrees[k][l]`` is the number of edges joining nodes of degree *k* with nodes of degree *l*. - seed : hashable object, optional - Seed for random number generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -188,26 +184,24 @@ def joint_degree_graph(joint_degrees, seed=None): References ---------- .. [1] M. Gjoka, B. Tillman, A. Markopoulou, "Construction of Simple - Graphs with a Target Joint Degree Matrix and Beyond", IEEE Infocom, '15. + Graphs with a Target Joint Degree Matrix and Beyond", IEEE Infocom, '15 Examples -------- - >>> import networkx as nx - >>> joint_degrees = {1: {4: 1}, - ... 2: {2: 2, 3: 2, 4: 2}, - ... 3: {2: 2, 4: 1}, - ... 4: {1: 1, 2: 2, 3: 1}} - >>> G=nx.joint_degree_graph(joint_degrees) + >>> joint_degrees = { + ... 1: {4: 1}, + ... 2: {2: 2, 3: 2, 4: 2}, + ... 3: {2: 2, 4: 1}, + ... 4: {1: 1, 2: 2, 3: 1}, + ... } + >>> G = nx.joint_degree_graph(joint_degrees) >>> """ if not is_valid_joint_degree(joint_degrees): - msg = 'Input joint degree dict not realizable as a simple graph' + msg = "Input joint degree dict not realizable as a simple graph" raise nx.NetworkXError(msg) - if seed is not None: - random.seed(seed) - # compute degree count from joint_degrees degree_count = {k: sum(l.values()) // k for k, l in joint_degrees.items() if k > 0} @@ -249,11 +243,11 @@ def joint_degree_graph(joint_degrees, seed=None): l_nodes = h_degree_nodelist[l] # k_unsat and l_unsat consist of nodes of degree k and l that - # are unsaturated i.e. 
nodes that have at least 1 available stub - k_unsat = set(v for v in k_nodes if h_node_residual[v] > 0) + # are unsaturated (nodes that have at least 1 available stub) + k_unsat = {v for v in k_nodes if h_node_residual[v] > 0} if k != l: - l_unsat = set(w for w in l_nodes if h_node_residual[w] > 0) + l_unsat = {w for w in l_nodes if h_node_residual[w] > 0} else: l_unsat = k_unsat n_edges_add = joint_degrees[k][l] // 2 @@ -261,8 +255,8 @@ def joint_degree_graph(joint_degrees, seed=None): while n_edges_add > 0: # randomly pick nodes v and w that have degrees k and l - v = k_nodes[random.randrange(k_size)] - w = l_nodes[random.randrange(l_size)] + v = k_nodes[seed.randrange(k_size)] + w = l_nodes[seed.randrange(l_size)] # if nodes v and w are disconnected then attempt to connect if not G.has_edge(v, w) and (v != w): @@ -276,8 +270,9 @@ def joint_degree_graph(joint_degrees, seed=None): if k != l: _neighbor_switch(G, w, l_unsat, h_node_residual) else: - _neighbor_switch(G, w, l_unsat, h_node_residual, - avoid_node_id=v) + _neighbor_switch( + G, w, l_unsat, h_node_residual, avoid_node_id=v + ) # add edge (v, w) and update data structures G.add_edge(v, w) @@ -290,3 +285,386 @@ def joint_degree_graph(joint_degrees, seed=None): if h_node_residual[w] == 0: l_unsat.discard(w) return G + + +def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): + """ Checks whether the given directed joint degree input is realizable. + + Parameters + ---------- + in_degrees : list of integers + in degree sequence contains the in degrees of nodes. + out_degrees : list of integers + out degree sequence contains the out degrees of nodes. + nkk : dictionary of dictionary of integers + directed joint degree dictionary. For nodes of out degree k (first + level of dict) and nodes of in degree l (second level of dict) + describes the number of edges. + + Returns + ------- + boolean + returns True if the given input is realizable, else returns False. + + Notes + ----- + Here is the list of conditions that the inputs (in/out degree sequences, + nkk) need to satisfy for simple directed graph realizability: + + - Condition 0: in_degrees and out_degrees have the same length + - Condition 1: nkk[k][l] is integer for all k,l + - Condition 2: sum(nkk[k])/k = number of nodes with partition id k, is an + integer and matches the degree sequence + - Condition 3: number of edges and non-chords between k and l cannot exceed + the maximum possible number of edges + + + References + ---------- + [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka, + "Construction of Directed 2K Graphs". In Proc. of KDD 2017. + """ + V = {} # number of nodes with in/out degree. + forbidden = {} + if len(in_degrees) != len(out_degrees): + return False + + for idx in range(0, len(in_degrees)): + i = in_degrees[idx] + o = out_degrees[idx] + V[(i, 0)] = V.get((i, 0), 0) + 1 + V[(o, 1)] = V.get((o, 1), 0) + 1 + + forbidden[(o, i)] = forbidden.get((o, i), 0) + 1 + + S = {} # number of edges going from in/out degree nodes. + for k in nkk: + for l in nkk[k]: + val = nkk[k][l] + if not float(val).is_integer(): # condition 1 + return False + + if val > 0: + S[(k, 1)] = S.get((k, 1), 0) + val + S[(l, 0)] = S.get((l, 0), 0) + val + # condition 3 + if val + forbidden.get((k, l), 0) > V[(k, 1)] * V[(l, 0)]: + return False + + for s in S: + if not float(S[s]) / s[0] == V[s]: # condition 2 + return False + + # if all conditions above have been satisfied then the input nkk is + # realizable as a simple graph.
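+    # Illustration (editorial aside, not part of the patch): for
+    # in_degrees=[0, 1, 1, 2], out_degrees=[1, 1, 1, 1] and
+    # nkk={1: {1: 2, 2: 2}}, the example input used for
+    # directed_joint_degree_graph below, condition 2 holds because
+    # S[(1, 1)] == 4 out-stubs leave the V[(1, 1)] == 4 nodes of out
+    # degree 1, and 4 / 1 == 4; the other checks pass likewise, so the
+    # function returns True.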
+ return True + + +def _directed_neighbor_switch( + G, w, unsat, h_node_residual_out, chords, h_partition_in, partition +): + """ Releases one free stub for node w, while preserving joint degree in G. + + Parameters + ---------- + G : networkx directed graph + graph within which the edge swap will take place. + w : integer + node id for which we need to perform a neighbor switch. + unsat: set of integers + set of node ids that have the same degree as w and are unsaturated. + h_node_residual_out: dict of integers + for a given node, keeps track of the remaining stubs to be added. + chords: set of tuples + keeps track of available positions to add edges. + h_partition_in: dict of integers + for a given node, keeps track of its partition id (in degree). + partition: integer + partition id to check if chords have to be updated. + + Notes + ----- + First, it selects node w_prime that (1) has the same degree as w and + (2) is unsaturated. Then, it selects node v, a neighbor of w, that is + not connected to w_prime and does an edge swap i.e. removes (w,v) and + adds (w_prime,v). If neighbor switch is not possible for w using + w_prime and v, then return w_prime; in [1] it's proven that + such unsaturated nodes can be used. + + References + ---------- + [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka, + "Construction of Directed 2K Graphs". In Proc. of KDD 2017. + """ + w_prime = unsat.pop() + unsat.add(w_prime) + # select node t, a neighbor of w, that is not connected to w_prime + w_neighbs = list(G.successors(w)) + # slightly faster declaring this variable + w_prime_neighbs = list(G.successors(w_prime)) + + for v in w_neighbs: + if (v not in w_prime_neighbs) and w_prime != v: + # removes (w,v), add (w_prime,v) and update data structures + G.remove_edge(w, v) + G.add_edge(w_prime, v) + + if h_partition_in[v] == partition: + chords.add((w, v)) + chords.discard((w_prime, v)) + + h_node_residual_out[w] += 1 + h_node_residual_out[w_prime] -= 1 + if h_node_residual_out[w_prime] == 0: + unsat.remove(w_prime) + return None + + # If neighbor switch didn't work, use unsaturated node + return w_prime + + +def _directed_neighbor_switch_rev( + G, w, unsat, h_node_residual_in, chords, h_partition_out, partition +): + """ The reverse of directed_neighbor_switch. + + Parameters + ---------- + G : networkx directed graph + graph within which the edge swap will take place. + w : integer + node id for which we need to perform a neighbor switch. + unsat: set of integers + set of node ids that have the same degree as w and are unsaturated. + h_node_residual_in: dict of integers + for a given node, keeps track of the remaining stubs to be added. + chords: set of tuples + keeps track of available positions to add edges. + h_partition_out: dict of integers + for a given node, keeps track of its partition id (out degree). + partition: integer + partition id to check if chords have to be updated. + + Notes + ----- + Same operation as directed_neighbor_switch except it handles this operation + for incoming edges instead of outgoing. + """ + w_prime = unsat.pop() + unsat.add(w_prime) + # slightly faster declaring these as variables. + w_neighbs = list(G.predecessors(w)) + w_prime_neighbs = list(G.predecessors(w_prime)) + # select node v, a neighbor of w, that is not connected to w_prime. + for v in w_neighbs: + if (v not in w_prime_neighbs) and w_prime != v: + # removes (v,w), add (v,w_prime) and update data structures. 
+ G.remove_edge(v, w) + G.add_edge(v, w_prime) + if h_partition_out[v] == partition: + chords.add((v, w)) + chords.discard((v, w_prime)) + + h_node_residual_in[w] += 1 + h_node_residual_in[w_prime] -= 1 + if h_node_residual_in[w_prime] == 0: + unsat.remove(w_prime) + return None + + # If neighbor switch didn't work, use the unsaturated node. + return w_prime + + +@py_random_state(3) +def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): + """ Generates a random simple directed graph with the given directed joint degree. + + Parameters + ---------- + in_degrees : list of integers + in degree sequence contains the in degrees of nodes. + out_degrees : list of integers + out degree sequence contains the out degrees of nodes. + nkk : dictionary of dictionary of integers + directed joint degree dictionary, for nodes of out degree k (first + level of dict) and nodes of in degree l (second level of dict) + describes the number of edges. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : Graph + A directed graph with the specified inputs. + + Raises + ------ + NetworkXError + If in_degrees, out_degrees and nkk are not realizable as a simple directed graph. + + + Notes + ----- + Similarly to the undirected version: + In each iteration of the "while loop" the algorithm picks two disconnected + nodes v and w, of degree k and l respectively, for which nkk[k][l] has + not reached its target yet i.e. (for given k,l): n_edges_add < nkk[k][l]. + It then adds edge (v,w) and always increases the number of edges in graph G + by one. + + The intelligence of the algorithm lies in the fact that it is always + possible to add an edge between disconnected nodes v and w, for which + nkk[degree(v)][degree(w)] has not reached its target, even if one or both + nodes do not have free stubs. If either node v or w does not have a free + stub, we perform a "neighbor switch", an edge rewiring move that releases a + free stub while keeping nkk the same. + + The difference for the directed version lies in the fact that neighbor + switches might not be able to rewire, but in these cases unsaturated nodes + can be used instead; see [1] for detailed description and + proofs. + + The algorithm continues for E (number of edges in the graph) iterations of + the "while loop", at which point all entries of the given nkk[k][l] have + reached their target values and the construction is complete. + + References + ---------- + [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka, + "Construction of Directed 2K Graphs". In Proc. of KDD 2017. + + Examples + -------- + >>> in_degrees = [0, 1, 1, 2] + >>> out_degrees = [1, 1, 1, 1] + >>> nkk = {1: {1: 2, 2: 2}} + >>> G = nx.directed_joint_degree_graph(in_degrees, out_degrees, nkk) + >>> + """ + if not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): + msg = "Input is not realizable as a simple graph" + raise nx.NetworkXError(msg) + + # start with an empty directed graph. + G = nx.DiGraph() + + # for a given group, keep the list of all node ids. + h_degree_nodelist_in = {} + h_degree_nodelist_out = {} + # for a given group, keep the list of all unsaturated node ids. + h_degree_nodelist_in_unsat = {} + h_degree_nodelist_out_unsat = {} + # for a given node, keep track of the remaining stubs to be added. + h_node_residual_out = {} + h_node_residual_in = {} + # for a given node, keep track of the partition id. + h_partition_out = {} + h_partition_in = {} + # keep track of non-chords between pairs of partition ids.
+ non_chords = {} + + # populate data structures + for idx, i in enumerate(in_degrees): + idx = int(idx) + if i > 0: + h_degree_nodelist_in.setdefault(i, []) + h_degree_nodelist_in_unsat.setdefault(i, set()) + h_degree_nodelist_in[i].append(idx) + h_degree_nodelist_in_unsat[i].add(idx) + h_node_residual_in[idx] = i + h_partition_in[idx] = i + + for idx, o in enumerate(out_degrees): + o = out_degrees[idx] + non_chords[(o, in_degrees[idx])] = non_chords.get((o, in_degrees[idx]), 0) + 1 + idx = int(idx) + if o > 0: + h_degree_nodelist_out.setdefault(o, []) + h_degree_nodelist_out_unsat.setdefault(o, set()) + h_degree_nodelist_out[o].append(idx) + h_degree_nodelist_out_unsat[o].add(idx) + h_node_residual_out[idx] = o + h_partition_out[idx] = o + + G.add_node(idx) + + nk_in = {} + nk_out = {} + for p in h_degree_nodelist_in: + nk_in[p] = len(h_degree_nodelist_in[p]) + for p in h_degree_nodelist_out: + nk_out[p] = len(h_degree_nodelist_out[p]) + + # iterate over every degree pair (k,l) and add the number of edges given + # for each pair. + for k in nkk: + for l in nkk[k]: + n_edges_add = nkk[k][l] + + if n_edges_add > 0: + # chords contains a random set of potential edges. + chords = set() + + k_len = nk_out[k] + l_len = nk_in[l] + chords_sample = seed.sample( + range(k_len * l_len), n_edges_add + non_chords.get((k, l), 0) + ) + + num = 0 + while len(chords) < n_edges_add: + i = h_degree_nodelist_out[k][chords_sample[num] % k_len] + j = h_degree_nodelist_in[l][chords_sample[num] // k_len] + num += 1 + if i != j: + chords.add((i, j)) + + # k_unsat and l_unsat consist of nodes of in/out degree k and l + # that are unsaturated i.e. those nodes that have at least one + # available stub + k_unsat = h_degree_nodelist_out_unsat[k] + l_unsat = h_degree_nodelist_in_unsat[l] + + while n_edges_add > 0: + v, w = chords.pop() + chords.add((v, w)) + + # if node v has no free stubs then do neighbor switch. + if h_node_residual_out[v] == 0: + _v = _directed_neighbor_switch( + G, + v, + k_unsat, + h_node_residual_out, + chords, + h_partition_in, + l, + ) + if _v is not None: + v = _v + + # if node w has no free stubs then do neighbor switch. + if h_node_residual_in[w] == 0: + _w = _directed_neighbor_switch_rev( + G, + w, + l_unsat, + h_node_residual_in, + chords, + h_partition_out, + k, + ) + if _w is not None: + w = _w + + # add edge (v,w) and update data structures. + G.add_edge(v, w) + h_node_residual_out[v] -= 1 + h_node_residual_in[w] -= 1 + n_edges_add -= 1 + chords.discard((v, w)) + + if h_node_residual_out[v] == 0: + k_unsat.discard(v) + if h_node_residual_in[w] == 0: + l_unsat.discard(w) + return G diff --git a/networkx/generators/lattice.py b/networkx/generators/lattice.py index f42c7b1..72bc925 100644 --- a/networkx/generators/lattice.py +++ b/networkx/generators/lattice.py @@ -1,15 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Pieter Swart (swart@lanl.gov) -# Joel Miller (jmiller@lanl.gov) -# Dan Schult (dschult@lanl.gov) """Functions for generating grid graphs and lattices The :func:`grid_2d_graph`, :func:`triangular_lattice_graph`, and @@ -24,26 +12,27 @@ .. 
_Triangular Tiling: https://en.wikipedia.org/wiki/Triangular_tiling """ -from __future__ import division from math import sqrt -from networkx.classes import Graph from networkx.classes import set_node_attributes from networkx.algorithms.minors import contracted_nodes from networkx.algorithms.operators.product import cartesian_product from networkx.exception import NetworkXError from networkx.relabel import relabel_nodes -from networkx.utils import flatten -from networkx.utils import is_list_of_ints -from networkx.utils import nodes_or_number -from networkx.utils import pairwise +from networkx.utils import flatten, nodes_or_number, pairwise, iterable from networkx.generators.classic import cycle_graph from networkx.generators.classic import empty_graph from networkx.generators.classic import path_graph +from itertools import repeat -__all__ = ['grid_2d_graph', 'grid_graph', 'hypercube_graph', - 'triangular_lattice_graph', 'hexagonal_lattice_graph'] +__all__ = [ + "grid_2d_graph", + "grid_graph", + "hypercube_graph", + "triangular_lattice_graph", + "hexagonal_lattice_graph", +] @nodes_or_number([0, 1]) @@ -58,13 +47,14 @@ def grid_2d_graph(m, n, periodic=False, create_using=None): If an integer, nodes are from `range(n)`. If a container, elements become the coordinate of the nodes. - periodic : bool (default: False) - If this is ``True`` the nodes on the grid boundaries are joined - to the corresponding nodes on the opposite grid boundaries. + periodic : bool or iterable + If `periodic` is True, both dimensions are periodic. If False, none + are periodic. If `periodic` is iterable, it should yield 2 bool + values indicating whether the 1st and 2nd axes, respectively, are + periodic. - create_using : NetworkX graph (default: Graph()) - If provided this graph is cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -76,19 +66,22 @@ def grid_2d_graph(m, n, periodic=False, create_using=None): row_name, rows = m col_name, cols = n G.add_nodes_from((i, j) for i in rows for j in cols) - G.add_edges_from(((i, j), (pi, j)) - for pi, i in pairwise(rows) for j in cols) - G.add_edges_from(((i, j), (i, pj)) - for i in rows for pj, j in pairwise(cols)) - if periodic is True: - if len(rows) > 2: - first = rows[0] - last = rows[-1] - G.add_edges_from(((first, j), (last, j)) for j in cols) - if len(cols) > 2: - first = cols[0] - last = cols[-1] - G.add_edges_from(((i, first), (i, last)) for i in rows) + G.add_edges_from(((i, j), (pi, j)) for pi, i in pairwise(rows) for j in cols) + G.add_edges_from(((i, j), (i, pj)) for i in rows for pj, j in pairwise(cols)) + + if iterable(periodic): + periodic_r, periodic_c = periodic + else: + periodic_r = periodic_c = periodic + + if periodic_r and len(rows) > 2: + first = rows[0] + last = rows[-1] + G.add_edges_from(((first, j), (last, j)) for j in cols) + if periodic_c and len(cols) > 2: + first = cols[0] + last = cols[-1] + G.add_edges_from(((i, first), (i, last)) for i in rows) # both directions for directed if G.is_directed(): G.add_edges_from((v, u) for u, v in G.edges()) @@ -109,9 +102,11 @@ def grid_graph(dim, periodic=False): that dimension. The dimension of the grid_graph is the length of `dim`. - periodic : bool - If `periodic is True` the nodes on the grid boundaries are joined - to the corresponding nodes on the opposite grid boundaries. 
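The hunk below generalizes `periodic` from a single flag to one flag per axis. As an illustrative doctest of the new behavior (editorial, not from the patch), wrapping only the first axis of a 3-by-4 grid turns it into a cylinder, adding one wrap edge per column:

>>> G = nx.grid_2d_graph(3, 4, periodic=(True, False))
>>> G.number_of_edges()  # 17 grid edges plus 4 row-wrap edges
21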
+ periodic : bool or iterable + If `periodic` is True, all dimensions are periodic. If False all + dimensions are not periodic. If `periodic` is iterable, it should + yield `dim` bool values each of which indicates whether the + corresponding axis is periodic. Returns ------- @@ -123,27 +118,25 @@ def grid_graph(dim, periodic=False): To produce a 2 by 3 by 4 grid graph, a graph on 24 nodes: >>> from networkx import grid_graph - >>> G = grid_graph(dim=[2, 3, 4]) + >>> G = grid_graph(dim=(2, 3, 4)) >>> len(G) 24 - >>> G = grid_graph(dim=[range(7, 9), range(3, 6)]) + >>> G = grid_graph(dim=(range(7, 9), range(3, 6))) >>> len(G) 6 """ - dlabel = "%s" % dim if not dim: - G = empty_graph(0) - return G + return empty_graph(0) + + if iterable(periodic): + func = (cycle_graph if p else path_graph for p in periodic) + else: + func = repeat(cycle_graph if periodic else path_graph) - func = cycle_graph if periodic else path_graph - G = func(dim[0]) + G = next(func)(dim[0]) for current_dim in dim[1:]: - # order matters: copy before it is cleared during the creation of Gnew - Gold = G.copy() - Gnew = func(current_dim) - # explicit: create_using = None - # This is so that we get a new graph of Gnew's class. - G = cartesian_product(Gnew, Gold) + Gnew = next(func)(current_dim) + G = cartesian_product(Gnew, G) # graph G is done but has labels of the form (1, (2, (3, 1))) so relabel H = relabel_nodes(G, flatten) return H @@ -175,9 +168,10 @@ def hypercube_graph(n): return G -def triangular_lattice_graph(m, n, periodic=False, with_positions=True, - create_using=None): - """Returns the $m$ by $n$ triangular lattice graph. +def triangular_lattice_graph( + m, n, periodic=False, with_positions=True, create_using=None +): + r"""Returns the $m$ by $n$ triangular lattice graph. The `triangular lattice graph`_ is a two-dimensional `grid graph`_ in which each square unit has a diagonal edge (each grid unit has a chord). @@ -222,10 +216,8 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, Periodic positions shift the nodes vertically in a nonlinear way so the edges don't overlap so much. - create_using : NetworkX graph - If specified, this must be an instance of a NetworkX graph - class. It will be cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Returns ------- @@ -237,8 +229,8 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, return H if periodic: if n < 5 or m < 3: - msg = "m > 2 and n > 4 required for periodic. m={}, n={}" - raise NetworkXError(msg.format(m, n)) + msg = f"m > 2 and n > 4 required for periodic. 
m={m}, n={n}" + raise NetworkXError(msg) N = (n + 1) // 2 # number of nodes in row rows = range(m + 1) @@ -247,10 +239,8 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, H.add_edges_from(((i, j), (i + 1, j)) for j in rows for i in cols[:N]) H.add_edges_from(((i, j), (i, j + 1)) for j in rows[:m] for i in cols) # add diagonals - H.add_edges_from(((i, j), (i + 1, j + 1)) - for j in rows[1:m:2] for i in cols[:N]) - H.add_edges_from(((i + 1, j), (i, j + 1)) - for j in rows[:m:2] for i in cols[:N]) + H.add_edges_from(((i, j), (i + 1, j + 1)) for j in rows[1:m:2] for i in cols[:N]) + H.add_edges_from(((i + 1, j), (i, j + 1)) for j in rows[:m:2] for i in cols[:N]) # identify boundary nodes if periodic if periodic is True: for i in cols: @@ -259,7 +249,7 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, H = contracted_nodes(H, (0, j), (N, j)) elif n % 2: # remove extra nodes - H.remove_nodes_from(((N, j) for j in rows[1::2])) + H.remove_nodes_from((N, j) for j in rows[1::2]) # Add position node attributes if with_positions: @@ -268,17 +258,17 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, xx = (0.5 * (j % 2) + i for i in cols for j in rows) h = sqrt(3) / 2 if periodic: - yy = (h * j + .01 * i * i for i in cols for j in rows) + yy = (h * j + 0.01 * i * i for i in cols for j in rows) else: yy = (h * j for i in cols for j in rows) - pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) - if (i, j) in H} - set_node_attributes(H, pos, 'pos') + pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) if (i, j) in H} + set_node_attributes(H, pos, "pos") return H -def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, - create_using=None): +def hexagonal_lattice_graph( + m, n, periodic=False, with_positions=True, create_using=None +): """Returns an `m` by `n` hexagonal lattice graph. The *hexagonal lattice graph* is a graph whose nodes and edges are @@ -316,10 +306,8 @@ def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, Periodic positions shift the nodes vertically in a nonlinear way so the edges don't overlap so much. - create_using : NetworkX graph - If specified, this must be an instance of a NetworkX graph - class. It will be cleared of nodes and edges and filled - with the new graph. Usually used to set the type of the graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. If graph is directed, edges will point up or right. Returns @@ -327,21 +315,19 @@ def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, NetworkX graph The *m* by *n* hexagonal lattice graph. 
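(Editorial illustration, not part of the patch: by inspection of the construction below, the non-periodic lattice appears to keep (2m + 2)(n + 1) - 2 nodes once the two degree-one corner nodes are trimmed, so

>>> G = nx.hexagonal_lattice_graph(2, 3)
>>> len(G)
22

and hexagonal_lattice_graph(1, 1) is a single hexagon on 6 nodes.)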
""" - G = create_using if create_using is not None else Graph() - G.clear() + G = empty_graph(0, create_using) if m == 0 or n == 0: return G if periodic and (n % 2 == 1 or m < 2 or n < 2): msg = "periodic hexagonal lattice needs m > 1, n > 1 and even n" raise NetworkXError(msg) - M = 2 * m # twice as many nodes as hexagons vertically + M = 2 * m # twice as many nodes as hexagons vertically rows = range(M + 2) cols = range(n + 1) # make lattice - col_edges = (((i, j), (i, j + 1)) for i in cols for j in rows[:M + 1]) - row_edges = (((i, j), (i + 1, j)) for i in cols[:n] for j in rows - if i % 2 == j % 2) + col_edges = (((i, j), (i, j + 1)) for i in cols for j in rows[: M + 1]) + row_edges = (((i, j), (i + 1, j)) for i in cols[:n] for j in rows if i % 2 == j % 2) G.add_edges_from(col_edges) G.add_edges_from(row_edges) # Remove corner nodes with one edge @@ -361,14 +347,13 @@ def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, # calc position in embedded space ii = (i for i in cols for j in rows) jj = (j for i in cols for j in rows) - xx = (0.5 + i + i // 2 + (j % 2) * ((i % 2) - .5) - for i in cols for j in rows) + xx = (0.5 + i + i // 2 + (j % 2) * ((i % 2) - 0.5) for i in cols for j in rows) h = sqrt(3) / 2 if periodic: - yy = (h * j + .01 * i * i for i in cols for j in rows) + yy = (h * j + 0.01 * i * i for i in cols for j in rows) else: yy = (h * j for i in cols for j in rows) # exclude nodes not in G pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) if (i, j) in G} - set_node_attributes(G, pos, 'pos') + set_node_attributes(G, pos, "pos") return G diff --git a/networkx/generators/line.py b/networkx/generators/line.py index eb3fb68..695f927 100644 --- a/networkx/generators/line.py +++ b/networkx/generators/line.py @@ -1,28 +1,16 @@ -# Copyright (C) 2013-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: James Clough -# Aric Hagberg -# Pieter Swart -# Dan Schult -# chebee7i """Functions for generating line graphs.""" from itertools import combinations from collections import defaultdict import networkx as nx -from networkx.utils import arbitrary_element -from networkx.utils.decorators import * +from networkx.utils import arbitrary_element, generate_unique_node +from networkx.utils.decorators import not_implemented_for -__all__ = ['line_graph', 'inverse_line_graph'] +__all__ = ["line_graph", "inverse_line_graph"] def line_graph(G, create_using=None): - """Returns the line graph of the graph or digraph `G`. + r"""Returns the line graph of the graph or digraph `G`. The line graph of a graph `G` has a node for each edge in `G` and an edge joining those nodes if the two edges in `G` share a common node. For @@ -39,6 +27,8 @@ def line_graph(G, create_using=None): ---------- G : graph A NetworkX Graph, DiGraph, MultiGraph, or MultiDigraph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
Returns ------- @@ -47,7 +37,6 @@ def line_graph(G, create_using=None): Examples -------- - >>> import networkx as nx >>> G = nx.star_graph(3) >>> L = nx.line_graph(G) >>> print(sorted(map(sorted, L.edges()))) # makes a 3-clique, K3 @@ -124,11 +113,15 @@ def _node_func(G): """ if G.is_multigraph(): + def sorted_node(u, v, key): return (u, v, key) if u <= v else (v, u, key) + else: + def sorted_node(u, v): return (u, v) if u <= v else (v, u) + return sorted_node @@ -137,11 +130,15 @@ def _edge_func(G): """ if G.is_multigraph(): + def get_edges(nbunch=None): return G.edges(nbunch, keys=True) + else: + def get_edges(nbunch=None): return G.edges(nbunch) + return get_edges @@ -160,7 +157,7 @@ def _sorted_edge(u, v): def _lg_directed(G, create_using=None): - """Return the line graph L of the (multi)digraph G. + """Returns the line graph L of the (multi)digraph G. Edges in G appear as nodes in L, represented as tuples of the form (u,v) or (u,v,key) if G is a multidigraph. A node in L corresponding to the edge @@ -170,14 +167,12 @@ def _lg_directed(G, create_using=None): ---------- G : digraph A directed graph or directed multigraph. - create_using : None - A digraph instance used to populate the line graph. + create_using : NetworkX graph constructor, optional + Graph type to create. If graph instance, then cleared before populated. + Default is to use the same graph class as `G`. """ - if create_using is None: - L = G.fresh_copy() - else: - L = create_using + L = nx.empty_graph(0, create_using, default=G.__class__) # Create a graph specific edge function. get_edges = _edge_func(G) @@ -192,7 +187,7 @@ def _lg_directed(G, create_using=None): def _lg_undirected(G, selfloops=False, create_using=None): - """Return the line graph L of the (multi)graph G. + """Returns the line graph L of the (multi)graph G. Edges in G appear as nodes in L, represented as sorted tuples of the form (u,v), or (u,v,key) if G is a multigraph. A node in L corresponding to @@ -206,8 +201,8 @@ def _lg_undirected(G, selfloops=False, create_using=None): selfloops : bool If `True`, then self-loops are included in the line graph. If `False`, they are excluded. - create_using : None - A graph instance used to populate the line graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. Notes ----- @@ -215,10 +210,7 @@ def _lg_undirected(G, selfloops=False, create_using=None): produce self-loops. """ - if create_using is None: - L = G.fresh_copy() - else: - L = create_using + L = nx.empty_graph(0, create_using, default=G.__class__) # Graph specific functions for edges and sorted nodes. get_edges = _edge_func(G) @@ -227,7 +219,7 @@ def _lg_undirected(G, selfloops=False, create_using=None): # Determine if we include self-loops or not. shift = 0 if selfloops else 1 - edges = set([]) + edges = set() for u in G: # Label nodes as a sorted tuple of nodes in original graph. nodes = [sorted_node(*x) for x in get_edges(u)] @@ -240,14 +232,14 @@ def _lg_undirected(G, selfloops=False, create_using=None): # especially important for multigraphs, we store the edges in # canonical form in a set. 
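            # (editorial gloss: _sorted_edge((2, 3), (0, 1)) == ((0, 1), (2, 3)),
            # so the same line-graph edge reached from both of its endpoints
            # in G is stored only once)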
for i, a in enumerate(nodes): - edges.update([_sorted_edge(a, b) for b in nodes[i + shift:]]) + edges.update([_sorted_edge(a, b) for b in nodes[i + shift :]]) L.add_edges_from(edges) return L -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def inverse_line_graph(G): """ Returns the inverse line graph of graph G. @@ -279,14 +271,40 @@ def inverse_line_graph(G): ----- This is an implementation of the Roussopoulos algorithm. + If G consists of multiple components, then the algorithm doesn't work. + You should invert every component separately: + + >>> K5 = nx.complete_graph(5) + >>> P4 = nx.Graph([("a", "b"), ("b", "c"), ("c", "d")]) + >>> G = nx.union(K5, P4) + >>> root_graphs = [] + >>> for comp in nx.connected_components(G): + ... root_graphs.append(nx.inverse_line_graph(G.subgraph(comp))) + >>> len(root_graphs) + 2 + References ---------- * Roussopoulos, N., "A max {m, n} algorithm for determining the graph H from its line graph G", Information Processing Letters 2, (1973), 108--112. """ - if G.number_of_edges() == 0 or G.number_of_nodes() == 0: - msg = "G is not a line graph (has zero vertices or edges)" + if G.number_of_nodes() == 0: + a = generate_unique_node() + H = nx.Graph() + H.add_node(a) + return H + elif G.number_of_nodes() == 1: + v = list(G)[0] + a = (v, 0) + b = (v, 1) + H = nx.Graph([(a, b)]) + return H + elif G.number_of_nodes() > 1 and G.number_of_edges() == 0: + msg = ( + "inverse_line_graph() doesn't work on an edgeless graph. " + "Please use this function on each component separately." + ) raise nx.NetworkXError(msg) starting_cell = _select_starting_cell(G) @@ -298,8 +316,7 @@ def inverse_line_graph(G): P_count[u] += 1 if max(P_count.values()) > 2: - msg = "G is not a line graph (vertex found in more " \ "than two partition cells)" + msg = "G is not a line graph (vertex found in more " "than two partition cells)" raise nx.NetworkXError(msg) W = tuple([(u,) for u in P_count if P_count[u] == 1]) H = nx.Graph() @@ -315,12 +332,12 @@ def _triangles(G, e): """ Return list of all triangles containing edge e""" u, v = e if u not in G: - raise nx.NetworkXError("Vertex %s not in graph" % u) - if v not in G.neighbors(u): - raise nx.NetworkXError("Edge (%s, %s) not in graph" % (u, v)) + raise nx.NetworkXError(f"Vertex {u} not in graph") + if v not in G[u]: + raise nx.NetworkXError(f"Edge ({u}, {v}) not in graph") triangle_list = [] - for x in G.neighbors(u): - if x in G.neighbors(v): + for x in G[u]: + if x in G[v]: triangle_list.append((u, v, x)) return triangle_list @@ -352,14 +369,14 @@ def _odd_triangle(G, T): """ for u in T: if u not in G.nodes(): - raise nx.NetworkXError("Vertex %s not in graph" % u) + raise nx.NetworkXError(f"Vertex {u} not in graph") for e in list(combinations(T, 2)): - if e[0] not in G.neighbors(e[1]): - raise nx.NetworkXError("Edge (%s, %s) not in graph" % (e[0], e[1])) + if e[0] not in G[e[1]]: + raise nx.NetworkXError(f"Edge ({e[0]}, {e[1]}) not in graph") T_neighbors = defaultdict(int) for t in T: - for v in G.neighbors(t): + for v in G[t]: if v not in T: T_neighbors[v] += 1 for v in T_neighbors: @@ -402,12 +419,14 @@ def _find_partition(G, starting_cell): # if u still has edges then we need to find its other cell # this other cell must be a complete subgraph or else G is # not a line graph - new_cell = [u] + list(G_partition.neighbors(u)) + new_cell = [u] + list(G_partition[u]) for u in new_cell: for v in new_cell: - if (u != v) and (v not in G.neighbors(u)):
- msg = "G is not a line graph" \ - "(partition cell not a complete subgraph)" + if (u != v) and (v not in G_partition[u]): + msg = ( + "G is not a line graph" + "(partition cell not a complete subgraph)" + ) raise nx.NetworkXError(msg) P.append(tuple(new_cell)) G_partition.remove_edges_from(list(combinations(new_cell, 2))) @@ -444,8 +463,8 @@ def _select_starting_cell(G, starting_edge=None): else: e = starting_edge if e[0] not in G[e[1]]: - msg = 'starting_edge (%s, %s) is not in the Graph' - raise nx.NetworkXError(msg % e) + msg = f"starting_edge ({e[0]}, {e[1]}) is not in the Graph" + raise nx.NetworkXError(msg) e_triangles = _triangles(G, e) r = len(e_triangles) if r == 0: @@ -481,25 +500,31 @@ def _select_starting_cell(G, starting_edge=None): # check if odd triangles containing e form complete subgraph # there must be exactly s+2 of them # and they must all be connected - triangle_nodes = set([]) + triangle_nodes = set() for T in odd_triangles: for x in T: triangle_nodes.add(x) if len(triangle_nodes) == s + 2: for u in triangle_nodes: for v in triangle_nodes: - if u != v and (v not in G.neighbors(u)): - msg = "G is not a line graph (odd triangles " \ - "do not form complete subgraph)" + if u != v and (v not in G[u]): + msg = ( + "G is not a line graph (odd triangles " + "do not form complete subgraph)" + ) raise nx.NetworkXError(msg) # otherwise then we can use this as the starting cell starting_cell = tuple(triangle_nodes) else: - msg = "G is not a line graph (odd triangles " \ - "do not form complete subgraph)" + msg = ( + "G is not a line graph (odd triangles " + "do not form complete subgraph)" + ) raise nx.NetworkXError(msg) else: - msg = "G is not a line graph (incorrect number of " \ - "odd triangles around starting edge)" + msg = ( + "G is not a line graph (incorrect number of " + "odd triangles around starting edge)" + ) raise nx.NetworkXError(msg) return starting_cell diff --git a/networkx/generators/mycielski.py b/networkx/generators/mycielski.py index 1e3cf4b..e5e7e57 100644 --- a/networkx/generators/mycielski.py +++ b/networkx/generators/mycielski.py @@ -1,10 +1,3 @@ -# Copyright (C) 2010-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - """Functions related to the Mycielski Operation and the Mycielskian family of graphs. @@ -13,11 +6,11 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['mycielskian', 'mycielski_graph'] +__all__ = ["mycielskian", "mycielski_graph"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def mycielskian(G, iterations=1): r"""Returns the Mycielskian of a simple, undirected graph G diff --git a/networkx/generators/nonisomorphic_trees.py b/networkx/generators/nonisomorphic_trees.py index a563063..b7655a7 100644 --- a/networkx/generators/nonisomorphic_trees.py +++ b/networkx/generators/nonisomorphic_trees.py @@ -6,17 +6,8 @@ the root. """ -# Copyright (C) 2013 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-__author__ = "\n".join(["Aric Hagberg (hagberg@lanl.gov)", - "Mridul Seth (seth.mridul@gmail.com)"]) - -__all__ = ['nonisomorphic_trees', - 'number_of_nonisomorphic_trees'] + +__all__ = ["nonisomorphic_trees", "number_of_nonisomorphic_trees"] import networkx as nx @@ -133,12 +124,12 @@ def _next_tree(candidate): new_left, new_rest = _split_tree(new_candidate) new_left_height = max(new_left) suffix = range(1, new_left_height + 2) - new_candidate[-len(suffix):] = suffix + new_candidate[-len(suffix) :] = suffix return new_candidate def _split_tree(layout): - """Return a tuple of two layouts, one containing the left + """Returns a tuple of two layouts, one containing the left subtree of the root vertex, and one containing the original tree with the left subtree removed.""" diff --git a/networkx/generators/random_clustered.py b/networkx/generators/random_clustered.py index 318865b..622fb91 100644 --- a/networkx/generators/random_clustered.py +++ b/networkx/generators/random_clustered.py @@ -1,22 +1,13 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Joel Miller (joel.c.miller.research@gmail.com) """Generate graphs with given degree and triangle sequence. """ -import random import networkx as nx +from networkx.utils import py_random_state -__all__ = ['random_clustered_graph'] +__all__ = ["random_clustered_graph"] -def random_clustered_graph(joint_degree_sequence, create_using=None, - seed=None): +@py_random_state(2) +def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None): r"""Generate a random graph with the given joint independent edge degree and triangle degree sequence. @@ -35,10 +26,11 @@ def random_clustered_graph(joint_degree_sequence, create_using=None, joint_degree_sequence : list of integer pairs Each list entry corresponds to the independent edge degree and triangle degree of a node. - create_using : graph, optional (default MultiGraph) - Return graph of this type. The instance will be cleared. - seed : hashable object, optional - The seed for the random number generator. + create_using : NetworkX graph constructor, optional (default MultiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -92,19 +84,13 @@ def random_clustered_graph(joint_degree_sequence, create_using=None, >>> G.remove_edges_from(nx.selfloop_edges(G)) """ - if create_using is None: - create_using = nx.MultiGraph() - elif create_using.is_directed(): - raise nx.NetworkXError("Directed Graph not supported") - - if seed is not None: - random.seed(seed) - # In Python 3, zip() returns an iterator. Make this into a list. 
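    # (editorial gloss: a concrete list is needed because len() is taken on
    # the next line and the degree pairs are indexed by node id when the
    # edge and triangle stubs are collected below)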
joint_degree_sequence = list(joint_degree_sequence) N = len(joint_degree_sequence) - G = nx.empty_graph(N, create_using) + G = nx.empty_graph(N, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") ilist = [] tlist = [] @@ -116,10 +102,10 @@ def random_clustered_graph(joint_degree_sequence, create_using=None, tlist.append(n) if len(ilist) % 2 != 0 or len(tlist) % 3 != 0: - raise nx.NetworkXError('Invalid degree sequence') + raise nx.NetworkXError("Invalid degree sequence") - random.shuffle(ilist) - random.shuffle(tlist) + seed.shuffle(ilist) + seed.shuffle(tlist) while ilist: G.add_edge(ilist.pop(), ilist.pop()) while tlist: diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py index ce75b66..124b5fd 100644 --- a/networkx/generators/random_graphs.py +++ b/networkx/generators/random_graphs.py @@ -1,50 +1,41 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """ Generators for random graphs. """ -from __future__ import division import itertools import math -import random import networkx as nx +from networkx.utils import py_random_state from .classic import empty_graph, path_graph, complete_graph from .degree_seq import degree_sequence_tree from collections import defaultdict -__all__ = ['fast_gnp_random_graph', - 'gnp_random_graph', - 'dense_gnm_random_graph', - 'gnm_random_graph', - 'erdos_renyi_graph', - 'binomial_graph', - 'newman_watts_strogatz_graph', - 'watts_strogatz_graph', - 'connected_watts_strogatz_graph', - 'random_regular_graph', - 'barabasi_albert_graph', - 'extended_barabasi_albert_graph', - 'powerlaw_cluster_graph', - 'random_lobster', - 'random_shell_graph', - 'random_powerlaw_tree', - 'random_powerlaw_tree_sequence', - 'random_kernel_graph'] - - -#------------------------------------------------------------------------- -# Some Famous Random Graphs -#------------------------------------------------------------------------- - - +__all__ = [ + "fast_gnp_random_graph", + "gnp_random_graph", + "dense_gnm_random_graph", + "gnm_random_graph", + "erdos_renyi_graph", + "binomial_graph", + "newman_watts_strogatz_graph", + "watts_strogatz_graph", + "connected_watts_strogatz_graph", + "random_regular_graph", + "barabasi_albert_graph", + "dual_barabasi_albert_graph", + "extended_barabasi_albert_graph", + "powerlaw_cluster_graph", + "random_lobster", + "random_shell_graph", + "random_powerlaw_tree", + "random_powerlaw_tree_sequence", + "random_kernel_graph", +] + + +@py_random_state(2) def fast_gnp_random_graph(n, p, seed=None, directed=False): """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or a binomial graph. @@ -55,8 +46,9 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): The number of nodes. p : float Probability for edge creation. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. directed : bool, optional (default=False) If True, this function returns a directed graph. 
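The body hunk that follows only reroutes the randomness of fast_gnp_random_graph through `seed`; the O(n + m) strategy itself is geometric skipping, where instead of testing every candidate edge with probability p, the loop samples how many candidates to skip before the next accepted one. A standalone restatement of that idea, as an illustrative sketch rather than patch code (the name `geometric_skips` is invented here):

import math
import random


def geometric_skips(num_candidates, p, rng=random):
    """Yield indices of successes among num_candidates Bernoulli(p) trials,
    jumping straight from one success to the next; requires 0 < p < 1."""
    lp = math.log(1.0 - p)
    v = -1
    while True:
        lr = math.log(1.0 - rng.random())
        v += 1 + int(lr / lp)  # geometric gap, the same formula as below
        if v >= num_candidates:
            return
        yield v


# Expect roughly 50 indices out of 1000 candidates at p = 0.05:
hits = list(geometric_skips(1000, 0.05, random.Random(1)))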
@@ -82,11 +74,8 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): """ G = empty_graph(n) - if seed is not None: - random.seed(seed) - if p <= 0 or p >= 1: - return nx.gnp_random_graph(n, p, directed=directed) + return nx.gnp_random_graph(n, p, seed=seed, directed=directed) w = -1 lp = math.log(1.0 - p) @@ -96,7 +85,7 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): # Nodes in graph are from 0,n-1 (start with v as the first node index). v = 0 while v < n: - lr = math.log(1.0 - random.random()) + lr = math.log(1.0 - seed.random()) w = w + 1 + int(lr / lp) if v == w: # avoid self loops w = w + 1 @@ -111,7 +100,7 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): # Nodes in graph are from 0,n-1 (start with v as the second node index). v = 1 while v < n: - lr = math.log(1.0 - random.random()) + lr = math.log(1.0 - seed.random()) w = w + 1 + int(lr / lp) while w >= v and v < n: w = w - v @@ -121,23 +110,22 @@ def fast_gnp_random_graph(n, p, seed=None, directed=False): return G +@py_random_state(2) def gnp_random_graph(n, p, seed=None, directed=False): """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or a binomial graph. The $G_{n,p}$ model chooses each of the possible edges with probability $p$. - The functions :func:`binomial_graph` and :func:`erdos_renyi_graph` are - aliases of this function. - Parameters ---------- n : int The number of nodes. p : float Probability for edge creation. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. directed : bool, optional (default=False) If True, this function returns a directed graph. @@ -150,14 +138,24 @@ def gnp_random_graph(n, p, seed=None, directed=False): This algorithm [2]_ runs in $O(n^2)$ time. For sparse graphs (that is, for small values of $p$), :func:`fast_gnp_random_graph` is a faster algorithm. + :func:`binomial_graph` and :func:`erdos_renyi_graph` are + aliases for :func:`gnp_random_graph`. + + >>> nx.binomial_graph is nx.gnp_random_graph + True + >>> nx.erdos_renyi_graph is nx.gnp_random_graph + True + References ---------- .. [1] P. Erdős and A. Rényi, On Random Graphs, Publ. Math. 6, 290 (1959). .. [2] E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959). """ if directed: + edges = itertools.permutations(range(n), 2) G = nx.DiGraph() else: + edges = itertools.combinations(range(n), 2) G = nx.Graph() G.add_nodes_from(range(n)) if p <= 0: @@ -165,16 +163,8 @@ def gnp_random_graph(n, p, seed=None, directed=False): if p >= 1: return complete_graph(n, create_using=G) - if seed is not None: - random.seed(seed) - - if G.is_directed(): - edges = itertools.permutations(range(n), 2) - else: - edges = itertools.combinations(range(n), 2) - for e in edges: - if random.random() < p: + if seed.random() < p: G.add_edge(*e) return G @@ -184,6 +174,7 @@ def gnp_random_graph(n, p, seed=None, directed=False): erdos_renyi_graph = gnp_random_graph +@py_random_state(2) def dense_gnm_random_graph(n, m, seed=None): """Returns a $G_{n,m}$ random graph. @@ -199,8 +190,9 @@ def dense_gnm_random_graph(n, m, seed=None): The number of nodes. m : int The number of edges. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
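A note on dense_gnm_random_graph's loop, which appears in the next hunk: it visits every candidate edge (u, v) exactly once in lexicographic order and accepts it with probability (m - k) / (mmax - t), the selection-sampling scheme (Knuth's Algorithm S) that yields a uniformly random m-subset of the mmax possible edges. The same scheme in isolation, as an illustrative sketch with invented names rather than patch code:

import random


def select_m(population, m, rng=random):
    """One-pass uniform sampling of m items (selection sampling)."""
    chosen = []
    remaining = len(population)
    for item in population:
        # accept with probability (m - len(chosen)) / remaining
        if rng.randrange(remaining) < m - len(chosen):
            chosen.append(item)
            if len(chosen) == m:
                break
        remaining -= 1
    return chosen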
See Also -------- @@ -226,15 +218,12 @@ def dense_gnm_random_graph(n, m, seed=None): if n == 1 or m >= mmax: return G - if seed is not None: - random.seed(seed) - u = 0 v = 1 t = 0 k = 0 while True: - if random.randrange(mmax - t) < m - k: + if seed.randrange(mmax - t) < m - k: G.add_edge(u, v) k += 1 if k == m: @@ -246,6 +235,7 @@ def dense_gnm_random_graph(n, m, seed=None): v = u + 1 +@py_random_state(2) def gnm_random_graph(n, m, seed=None, directed=False): """Returns a $G_{n,m}$ random graph. @@ -261,8 +251,9 @@ def gnm_random_graph(n, m, seed=None, directed=False): The number of nodes. m : int The number of edges. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. directed : bool, optional (default=False) If True return a directed graph @@ -277,9 +268,6 @@ def gnm_random_graph(n, m, seed=None, directed=False): G = nx.Graph() G.add_nodes_from(range(n)) - if seed is not None: - random.seed(seed) - if n == 1: return G max_edges = n * (n - 1) @@ -292,8 +280,8 @@ def gnm_random_graph(n, m, seed=None, directed=False): edge_count = 0 while edge_count < m: # generate random edge,u,v - u = random.choice(nlist) - v = random.choice(nlist) + u = seed.choice(nlist) + v = seed.choice(nlist) if u == v or G.has_edge(u, v): continue else: @@ -302,8 +290,9 @@ def gnm_random_graph(n, m, seed=None, directed=False): return G +@py_random_state(3) def newman_watts_strogatz_graph(n, k, p, seed=None): - """Return a Newman–Watts–Strogatz small-world graph. + """Returns a Newman–Watts–Strogatz small-world graph. Parameters ---------- @@ -314,8 +303,9 @@ def newman_watts_strogatz_graph(n, k, p, seed=None): topology. p : float The probability of adding a new edge for each edge. - seed : int, optional - The seed for the random number generator (the default is None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Notes ----- @@ -338,10 +328,13 @@ def newman_watts_strogatz_graph(n, k, p, seed=None): Physics Letters A, 263, 341, 1999. https://doi.org/10.1016/S0375-9601(99)00757-4 """ - if seed is not None: - random.seed(seed) - if k >= n: + if k > n: raise nx.NetworkXError("k>n, choose smaller k or larger n") + + # If k == n the returned graph is a complete graph + if k == n: + return nx.complete_graph(n) + G = empty_graph(n) nlist = list(G.nodes()) fromv = nlist @@ -354,12 +347,12 @@ def newman_watts_strogatz_graph(n, k, p, seed=None): # node w and add new edge u-w e = list(G.edges()) for (u, v) in e: - if random.random() < p: - w = random.choice(nlist) + if seed.random() < p: + w = seed.choice(nlist) # no self-loops and reject if edge u-w exists # is that the correct NWS model? while w == u or G.has_edge(u, w): - w = random.choice(nlist) + w = seed.choice(nlist) if G.degree(u) >= n - 1: break # skip this rewiring else: @@ -367,8 +360,9 @@ def newman_watts_strogatz_graph(n, k, p, seed=None): return G +@py_random_state(3) def watts_strogatz_graph(n, k, p, seed=None): - """Return a Watts–Strogatz small-world graph. + """Returns a Watts–Strogatz small-world graph. Parameters ---------- @@ -379,8 +373,9 @@ def watts_strogatz_graph(n, k, p, seed=None): topology. p : float The probability of rewiring each edge - seed : int, optional - Seed for random number generator (default=None) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`.
See Also -------- @@ -406,10 +401,12 @@ def watts_strogatz_graph(n, k, p, seed=None): Collective dynamics of small-world networks, Nature, 393, pp. 440--442, 1998. """ - if k >= n: - raise nx.NetworkXError("k>=n, choose smaller k or larger n") - if seed is not None: - random.seed(seed) + if k > n: + raise nx.NetworkXError("k>n, choose smaller k or larger n") + + # If k == n, the graph is complete not Watts-Strogatz + if k == n: + return nx.complete_graph(n) G = nx.Graph() nodes = list(range(n)) # nodes are labeled 0 to n-1 @@ -424,11 +421,11 @@ def watts_strogatz_graph(n, k, p, seed=None): targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list # inner loop in node order for u, v in zip(nodes, targets): - if random.random() < p: - w = random.choice(nodes) + if seed.random() < p: + w = seed.choice(nodes) # Enforce no self-loops or multiple edges while w == u or G.has_edge(u, w): - w = random.choice(nodes) + w = seed.choice(nodes) if G.degree(u) >= n - 1: break # skip this rewiring else: @@ -437,6 +434,7 @@ def watts_strogatz_graph(n, k, p, seed=None): return G +@py_random_state(4) def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): """Returns a connected Watts–Strogatz small-world graph. @@ -455,22 +453,40 @@ def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): The probability of rewiring each edge tries : int Number of attempts to generate a connected graph. - seed : int, optional - The seed for random number generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Notes + ----- + First create a ring over $n$ nodes [1]_. Then each node in the ring is joined + to its $k$ nearest neighbors (or $k - 1$ neighbors if $k$ is odd). + Then shortcuts are created by replacing some edges as follows: for each + edge $(u, v)$ in the underlying "$n$-ring with $k$ nearest neighbors" + with probability $p$ replace it with a new edge $(u, w)$ with uniformly + random choice of existing node $w$. + The entire process is repeated until a connected graph results. See Also -------- newman_watts_strogatz_graph() watts_strogatz_graph() + References + ---------- + .. [1] Duncan J. Watts and Steven H. Strogatz, + Collective dynamics of small-world networks, + Nature, 393, pp. 440--442, 1998. """ for i in range(tries): + # seed is an RNG so should change sequence each call G = watts_strogatz_graph(n, k, p, seed) if nx.is_connected(G): return G - raise nx.NetworkXError('Maximum number of tries exceeded') + raise nx.NetworkXError("Maximum number of tries exceeded") +@py_random_state(2) def random_regular_graph(d, n, seed=None): r"""Returns a random $d$-regular graph on $n$ nodes. @@ -482,8 +498,9 @@ def random_regular_graph(d, n, seed=None): The degree of each node. n : integer The number of nodes. The value of $n \times d$ must be even. - seed : hashable object - The seed for random number generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
Notes ----- @@ -521,9 +538,6 @@ def random_regular_graph(d, n, seed=None): if d == 0: return empty_graph(n) - if seed is not None: - random.seed(seed) - def _suitable(edges, potential_edges): # Helper subroutine to check if there are suitable edges remaining # If False, the generation of the graph has failed @@ -551,7 +565,7 @@ def _try_creation(): while stubs: potential_edges = defaultdict(lambda: 0) - random.shuffle(stubs) + seed.shuffle(stubs) stubiter = iter(stubs) for s1, s2 in zip(stubiter, stubiter): if s1 > s2: @@ -565,8 +579,11 @@ def _try_creation(): if not _suitable(edges, potential_edges): return None # failed to find suitable edge set - stubs = [node for node, potential in potential_edges.items() - for _ in range(potential)] + stubs = [ + node + for node, potential in potential_edges.items() + for _ in range(potential) + ] return edges # Even though a suitable edge set exists, @@ -582,19 +599,22 @@ def _try_creation(): return G -def _random_subset(seq, m): +def _random_subset(seq, m, rng): """ Return m unique elements from seq. This differs from random.sample which can return repeated elements if seq holds repeated elements. + + Note: rng is a random.Random or numpy.random.RandomState instance. """ targets = set() while len(targets) < m: - x = random.choice(seq) + x = rng.choice(seq) targets.add(x) return targets +@py_random_state(2) def barabasi_albert_graph(n, m, seed=None): """Returns a random graph according to the Barabási–Albert preferential attachment model. @@ -608,8 +628,9 @@ def barabasi_albert_graph(n, m, seed=None): Number of nodes m : int Number of edges to attach from a new node to existing nodes - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -627,10 +648,9 @@ def barabasi_albert_graph(n, m, seed=None): """ if m < 1 or m >= n: - raise nx.NetworkXError("Barabási–Albert network must have m >= 1" - " and m < n, m = %d, n = %d" % (m, n)) - if seed is not None: - random.seed(seed) + raise nx.NetworkXError( + f"Barabási–Albert network must have m >= 1 and m < n, m = {m}, n = {n}" + ) # Add m initial nodes (m0 in barabasi-speak) G = empty_graph(m) @@ -649,11 +669,100 @@ def barabasi_albert_graph(n, m, seed=None): repeated_nodes.extend([source] * m) # Now choose m unique nodes from the existing nodes # Pick uniformly from repeated_nodes (preferential attachment) - targets = _random_subset(repeated_nodes, m) + targets = _random_subset(repeated_nodes, m, seed) + source += 1 + return G + + +@py_random_state(4) +def dual_barabasi_albert_graph(n, m1, m2, p, seed=None): + """Returns a random graph according to the dual Barabási–Albert preferential + attachment model. + + A graph of $n$ nodes is grown by attaching new nodes each with either $m_1$ + edges (with probability $p$) or $m_2$ edges (with probability $1-p$) that + are preferentially attached to existing nodes with high degree. + + Parameters + ---------- + n : int + Number of nodes + m1 : int + Number of edges to attach from a new node to existing nodes with probability $p$ + m2 : int + Number of edges to attach from a new node to existing nodes with probability $1-p$ + p : float + The probability of attaching $m_1$ edges (as opposed to $m_2$ edges) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
+ + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If `m1` and `m2` do not satisfy ``1 <= m1,m2 < n`` or `p` does not satisfy ``0 <= p <= 1``. + + References + ---------- + .. [1] N. Moshiri "The dual-Barabasi-Albert model", arXiv:1810.10538. + """ + + if m1 < 1 or m1 >= n: + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have m1 >= 1 and m1 < n, m1 = {m1}, n = {n}" + ) + if m2 < 1 or m2 >= n: + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have m2 >= 1 and m2 < n, m2 = {m2}, n = {n}" + ) + if p < 0 or p > 1: + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have 0 <= p <= 1, p = {p}" + ) + + # For simplicity, if p == 0 or 1, just return BA + if p == 1: + return barabasi_albert_graph(n, m1, seed) + elif p == 0: + return barabasi_albert_graph(n, m2, seed) + + # Add max(m1,m2) initial nodes (m0 in barabasi-speak) + G = empty_graph(max(m1, m2)) + # Target nodes for new edges + targets = list(range(max(m1, m2))) + # List of existing nodes, with nodes repeated once for each adjacent edge + repeated_nodes = [] + # Start adding the remaining nodes. + source = max(m1, m2) + # Pick which m to use first time (m1 or m2) + if seed.random() < p: + m = m1 + else: + m = m2 + while source < n: + # Add edges to m nodes from the source. + G.add_edges_from(zip([source] * m, targets)) + # Add one node to the list for each new edge just created. + repeated_nodes.extend(targets) + # And the new node "source" has m edges to add to the list. + repeated_nodes.extend([source] * m) + # Pick which m to use next time (m1 or m2) + if seed.random() < p: + m = m1 + else: + m = m2 + # Now choose m unique nodes from the existing nodes + # Pick uniformly from repeated_nodes (preferential attachment) + targets = _random_subset(repeated_nodes, m, seed) + source += 1 + return G +@py_random_state(4) def extended_barabasi_albert_graph(n, m, p, q, seed=None): """Returns an extended Barabási–Albert model graph. @@ -671,7 +780,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): 3) With $(1 - p - q)$ probability, a new node with $m$ edges attached preferentially is added to the graph. - When $p = q = 0$, the model behaves just like the Barabási–Alber mo + When $p = q = 0$, the model behaves just like the Barabási–Albert model. Parameters ---------- @@ -683,8 +792,9 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): Probability value for adding an edge between existing nodes. p + q < 1 q : float Probability value of rewiring of existing edges. p + q < 1 - seed : int (optional, default: None) - Seed for random number generator + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -702,13 +812,11 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): Physical review letters, 85(24), 5234. """ if m < 1 or m >= n: - msg = "Extended Barabasi-Albert network needs m>=1 and m<n, m=%d, n=%d" - raise nx.NetworkXError(msg % (m, n)) + msg = f"Extended Barabasi-Albert network needs m>=1 and m<n, m={m}, n={n}" + raise nx.NetworkXError(msg) if p + q >= 1: - msg = "Extended Barabasi-Albert network needs p + q <= 1, p=%d, q=%d" - raise nx.NetworkXError(msg % (p, q)) + msg = f"Extended Barabasi-Albert network needs p + q <= 1, p={p}, q={q}" + raise nx.NetworkXError(msg) # Add m initial nodes (m0 in barabasi-speak) G = empty_graph(m) @@ -724,7 +832,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Start adding the other n-m nodes. The first node is m.
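    # (editorial gloss: each pass below makes exactly one of three moves;
    # with probability p it adds m new edges between existing nodes, with
    # probability q it rewires m existing edges, and otherwise it attaches
    # one new node with m preferentially chosen edges)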
new_node = m while new_node < n: - a_probability = random.random() + a_probability = seed.random() # Total number of edges of a Clique of all the nodes clique_degree = len(G) - 1 @@ -733,19 +841,19 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Adding m new edges, if there is room to add them if a_probability < p and G.size() <= clique_size - m: # Select the nodes where an edge can be added - elligible_nodes = [nd for nd, deg in G.degree() - if deg < clique_degree] + elligible_nodes = [nd for nd, deg in G.degree() if deg < clique_degree] for i in range(m): # Choosing a random source node from elligible_nodes - src_node = random.choice(elligible_nodes) + src_node = seed.choice(elligible_nodes) # Picking a possible node that is not 'src_node' or # neighbor with 'src_node', with preferential attachment prohibited_nodes = list(G[src_node]) prohibited_nodes.append(src_node) # This will raise an exception if the sequence is empty - dest_node = random.choice([nd for nd in attachment_preference - if nd not in prohibited_nodes]) + dest_node = seed.choice( + [nd for nd in attachment_preference if nd not in prohibited_nodes] + ) # Adding the new edge G.add_edge(src_node, dest_node) @@ -756,8 +864,10 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Adjusting the elligible nodes. Degree may be saturated. if G.degree(src_node) == clique_degree: elligible_nodes.remove(src_node) - if G.degree(dest_node) == clique_degree \ - and dest_node in elligible_nodes: + if ( + G.degree(dest_node) == clique_degree + and dest_node in elligible_nodes + ): elligible_nodes.remove(dest_node) # Rewiring m edges, if there are enough edges @@ -765,23 +875,23 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Selecting nodes that have at least 1 edge but that are not # fully connected to ALL other nodes (center of star). # These nodes are the pivot nodes of the edges to rewire - elligible_nodes = [nd for nd, deg in G.degree() - if 0 < deg < clique_degree] + elligible_nodes = [nd for nd, deg in G.degree() if 0 < deg < clique_degree] for i in range(m): # Choosing a random source node - node = random.choice(elligible_nodes) + node = seed.choice(elligible_nodes) # The available nodes do have a neighbor at least. neighbor_nodes = list(G[node]) # Choosing the other end that will get dettached - src_node = random.choice(neighbor_nodes) + src_node = seed.choice(neighbor_nodes) # Picking a target node that is not 'node' or # neighbor with 'node', with preferential attachment neighbor_nodes.append(node) - dest_node = random.choice([nd for nd in attachment_preference - if nd not in neighbor_nodes]) + dest_node = seed.choice( + [nd for nd in attachment_preference if nd not in neighbor_nodes] + ) # Rewire G.remove_edge(node, src_node) G.add_edge(node, dest_node) @@ -804,7 +914,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Adding new node with m edges else: # Select the edges' nodes by preferential attachment - targets = _random_subset(attachment_preference, m) + targets = _random_subset(attachment_preference, m, seed) G.add_edges_from(zip([new_node] * m, targets)) # Add one node to the list for each new edge just created. @@ -815,6 +925,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): return G +@py_random_state(3) def powerlaw_cluster_graph(n, m, p, seed=None): """Holme and Kim algorithm for growing graphs with powerlaw degree distribution and approximate average clustering. 
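(The recurring change across these hunks is the seeding convention: the
`@py_random_state(i)` decorator from `networkx.utils` replaces the old
`random.seed(seed)` calls by converting the `seed` argument at position `i`
into an RNG object before the body runs, so the body can call
`seed.random()`, `seed.choice(...)`, or `seed.shuffle(...)` directly. A
minimal standalone sketch of the same pattern follows; `my_random_path` is a
hypothetical helper written for illustration, not part of this patch.

    import networkx as nx
    from networkx.utils import py_random_state

    @py_random_state(1)  # normalize the argument at index 1 ("seed") to an RNG
    def my_random_path(n, seed=None):
        # Here `seed` is always a random.Random-compatible object, whether
        # the caller passed an int, None, or an RNG instance.
        G = nx.empty_graph(n)
        nodes = list(G)
        seed.shuffle(nodes)  # same call style as the patched generators
        G.add_edges_from(zip(nodes, nodes[1:]))  # a random path over all nodes
        return G

    G1 = my_random_path(10, seed=42)
    G2 = my_random_path(10, seed=42)
    assert sorted(G1.edges) == sorted(G2.edges)  # integer seeds reproduce exactly

This is why the hunks below can simply thread `seed` into helpers such as
`_random_subset(repeated_nodes, m, seed)` instead of touching global state.)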
@@ -827,8 +938,9 @@ def powerlaw_cluster_graph(n, m, p, seed=None):
         the number of random edges to add for each new node
     p : float,
         Probability of adding a triangle after adding a random edge
-    seed : int, optional
-        Seed for random number generator (default=None).
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
 
     Notes
     -----
@@ -862,33 +974,31 @@ def powerlaw_cluster_graph(n, m, p, seed=None):
     """
 
     if m < 1 or n < m:
-        raise nx.NetworkXError(
-            "NetworkXError must have m>1 and m<n, m=%d, n=%d" % (m, n))
+        raise nx.NetworkXError(f"NetworkXError must have m>1 and m<n, m={m}, n={n}")
     if p > 1 or p < 0:
-        raise nx.NetworkXError(
-            "NetworkXError p must be in [0,1], p=%f" % (p))
-    if seed is not None:
-        random.seed(seed)
+        raise nx.NetworkXError(f"NetworkXError p must be in [0,1], p={p}")
 
     G = empty_graph(m)  # add m initial nodes (m0 in barabasi-speak)
     repeated_nodes = list(G.nodes())  # list of existing nodes to sample from
     # with nodes repeated once for each adjacent edge
-    source = m               # next node is m
-    while source < n:        # Now add the other n-1 nodes
-        possible_targets = _random_subset(repeated_nodes, m)
+    source = m  # next node is m
+    while source < n:  # Now add the other n-1 nodes
+        possible_targets = _random_subset(repeated_nodes, m, seed)
         # do one preferential attachment for new node
         target = possible_targets.pop()
         G.add_edge(source, target)
         repeated_nodes.append(target)  # add one node to list for each new link
         count = 1
         while count < m:  # add m-1 more new links
-            if random.random() < p:  # clustering step: add triangle
-                neighborhood = [nbr for nbr in G.neighbors(target)
-                                if not G.has_edge(source, nbr)
-                                and not nbr == source]
+            if seed.random() < p:  # clustering step: add triangle
+                neighborhood = [
+                    nbr
+                    for nbr in G.neighbors(target)
+                    if not G.has_edge(source, nbr) and not nbr == source
+                ]
                 if neighborhood:  # if there is a neighbor without a link
-                    nbr = random.choice(neighborhood)
+                    nbr = seed.choice(neighborhood)
                     G.add_edge(source, nbr)  # add triangle
                     repeated_nodes.append(nbr)
                     count = count + 1
@@ -904,41 +1014,57 @@ def random_lobster(n, p1, p2, seed=None):
     return G
 
 
+@py_random_state(3)
 def random_lobster(n, p1, p2, seed=None):
     """Returns a random lobster graph.
 
-    A lobster is a tree that reduces to a caterpillar when pruning all
-    leaf nodes. A caterpillar is a tree that reduces to a path graph
-    when pruning all leaf nodes; setting `p2` to zero produces a caterpillar.
-
-    Parameters
-    ----------
-    n : int
-        The expected number of nodes in the backbone
-    p1 : float
-        Probability of adding an edge to the backbone
-    p2 : float
-        Probability of adding an edge one level beyond backbone
-    seed : int, optional
-        Seed for random number generator (default=None).
+    A lobster is a tree that reduces to a caterpillar when pruning all
+    leaf nodes. A caterpillar is a tree that reduces to a path graph
+    when pruning all leaf nodes; setting `p2` to zero produces a caterpillar.
+
+    This implementation iterates on the probabilities `p1` and `p2` to add
+    edges at levels 1 and 2, respectively. Graphs are therefore constructed
+    iteratively with uniform randomness at each level rather than being selected
+    uniformly at random from the set of all possible lobsters.
+
+    Parameters
+    ----------
+    n : int
+        The expected number of nodes in the backbone
+    p1 : float
+        Probability of adding an edge to the backbone
+    p2 : float
+        Probability of adding an edge one level beyond backbone
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+ + Raises + ------ + NetworkXError + If `p1` or `p2` parameters are >= 1 because the while loops would never finish. """ + p1, p2 = abs(p1), abs(p2) + if any([p >= 1 for p in [p1, p2]]): + raise nx.NetworkXError("Probability values for `p1` and `p2` must both be < 1.") + # a necessary ingredient in any self-respecting graph library - if seed is not None: - random.seed(seed) - llen = int(2 * random.random() * n + 0.5) + llen = int(2 * seed.random() * n + 0.5) L = path_graph(llen) # build caterpillar: add edges to path graph with probability p1 current_node = llen - 1 for n in range(llen): - if random.random() < p1: # add fuzzy caterpillar parts + while seed.random() < p1: # add fuzzy caterpillar parts current_node += 1 L.add_edge(n, current_node) - if random.random() < p2: # add crunchy lobster bits + cat_node = current_node + while seed.random() < p2: # add crunchy lobster bits current_node += 1 - L.add_edge(current_node - 1, current_node) + L.add_edge(cat_node, current_node) return L # voila, un lobster! +@py_random_state(1) def random_shell_graph(constructor, seed=None): """Returns a random shell graph for the constructor given. @@ -952,8 +1078,9 @@ def random_shell_graph(constructor, seed=None): inter-shell (next) edges to intra-shell edges. If `d` is zero, there will be no intra-shell edges, and if `d` is one there will be all possible intra-shell edges. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Examples -------- @@ -963,9 +1090,6 @@ def random_shell_graph(constructor, seed=None): """ G = empty_graph(0) - if seed is not None: - random.seed(seed) - glist = [] intra_edges = [] nnodes = 0 @@ -974,8 +1098,8 @@ def random_shell_graph(constructor, seed=None): inter_edges = int(m * d) intra_edges.append(m - inter_edges) g = nx.convert_node_labels_to_integers( - gnm_random_graph(n, inter_edges), - first_label=nnodes) + gnm_random_graph(n, inter_edges, seed=seed), first_label=nnodes + ) glist.append(g) nnodes += n G = nx.operators.union(G, g) @@ -987,8 +1111,8 @@ def random_shell_graph(constructor, seed=None): total_edges = intra_edges[gi] edge_count = 0 while edge_count < total_edges: - u = random.choice(nlist1) - v = random.choice(nlist2) + u = seed.choice(nlist1) + v = seed.choice(nlist2) if u == v or G.has_edge(u, v): continue else: @@ -997,6 +1121,7 @@ def random_shell_graph(constructor, seed=None): return G +@py_random_state(2) def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): """Returns a tree with a power law degree distribution. @@ -1006,8 +1131,9 @@ def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): The number of nodes. gamma : float Exponent of the power law. - seed : int, optional - Seed for random number generator (default=None). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. tries : int Number of attempts to adjust the sequence to make it a tree. @@ -1031,6 +1157,7 @@ def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): return G +@py_random_state(2) def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): """Returns a degree sequence for a tree with a power law distribution. @@ -1040,8 +1167,9 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): The number of nodes. gamma : float Exponent of the power law. - seed : int, optional - Seed for random number generator (default=None). 
+ seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. tries : int Number of attempts to adjust the sequence to make it a tree. @@ -1059,16 +1187,13 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): edges is one smaller than the number of nodes). """ - if seed is not None: - random.seed(seed) - # get trial sequence - z = nx.utils.powerlaw_sequence(n, exponent=gamma) + z = nx.utils.powerlaw_sequence(n, exponent=gamma, seed=seed) # round to integer values in the range [0,n] zseq = [min(n, max(int(round(s)), 0)) for s in z] # another sequence to swap values from - z = nx.utils.powerlaw_sequence(tries, exponent=gamma) + z = nx.utils.powerlaw_sequence(tries, exponent=gamma, seed=seed) # round to integer values in the range [0,n] swap = [min(n, max(int(round(s)), 0)) for s in z] @@ -1080,15 +1205,17 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): # operations. if 2 * n - sum(zseq) == 2: return zseq - index = random.randint(0, n - 1) + index = seed.randint(0, n - 1) zseq[index] = swap.pop() - raise nx.NetworkXError('Exceeded max (%d) attempts for a valid tree' - ' sequence.' % tries) + raise nx.NetworkXError( + f"Exceeded max ({tries}) attempts for a valid tree sequence." + ) +@py_random_state(3) def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): - r"""Return an random graph based on the specified kernel. + r"""Returns an random graph based on the specified kernel. The algorithm chooses each of the $[n(n-1)]/2$ possible edges with probability specified by a kernel $\kappa(x,y)$ [1]_. The kernel @@ -1106,8 +1233,9 @@ def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): Function that returns the root $b$ of the equation $F(y,a,b) = r$. If None, the root is found using :func:`scipy.optimize.brentq` (this requires SciPy). - seed : int, optional - Seed for random number generator (default=None) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Notes ----- @@ -1145,20 +1273,20 @@ def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): "Fast Generation of Sparse Random Kernel Graphs". PLoS ONE 10(9): e0135177, 2015. doi:10.1371/journal.pone.0135177 """ - if seed is not None: - random.seed(seed) if kernel_root is None: import scipy.optimize as optimize def kernel_root(y, a, r): def my_function(b): return kernel_integral(y, a, b) - r + return optimize.brentq(my_function, a, 1) + graph = nx.Graph() graph.add_nodes_from(range(n)) (i, j) = (1, 1) while i < n: - r = -math.log(1 - random.random()) # (1-random.random()) in (0, 1] + r = -math.log(1 - seed.random()) # (1-seed.random()) in (0, 1] if kernel_integral(i / n, j / n, 1) <= r: i, j = i + 1, i + 1 else: diff --git a/networkx/generators/small.py b/networkx/generators/small.py index 20c97ff..bcfb741 100644 --- a/networkx/generators/small.py +++ b/networkx/generators/small.py @@ -1,49 +1,44 @@ -# -*- coding: utf-8 -*- """ Various small and named graphs, together with some compact generators. """ -__author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
- -__all__ = ['make_small_graph', - 'LCF_graph', - 'bull_graph', - 'chvatal_graph', - 'cubical_graph', - 'desargues_graph', - 'diamond_graph', - 'dodecahedral_graph', - 'frucht_graph', - 'heawood_graph', - 'hoffman_singleton_graph', - 'house_graph', - 'house_x_graph', - 'icosahedral_graph', - 'krackhardt_kite_graph', - 'moebius_kantor_graph', - 'octahedral_graph', - 'pappus_graph', - 'petersen_graph', - 'sedgewick_maze_graph', - 'tetrahedral_graph', - 'truncated_cube_graph', - 'truncated_tetrahedron_graph', - 'tutte_graph'] + +__all__ = [ + "make_small_graph", + "LCF_graph", + "bull_graph", + "chvatal_graph", + "cubical_graph", + "desargues_graph", + "diamond_graph", + "dodecahedral_graph", + "frucht_graph", + "heawood_graph", + "hoffman_singleton_graph", + "house_graph", + "house_x_graph", + "icosahedral_graph", + "krackhardt_kite_graph", + "moebius_kantor_graph", + "octahedral_graph", + "pappus_graph", + "petersen_graph", + "sedgewick_maze_graph", + "tetrahedral_graph", + "truncated_cube_graph", + "truncated_tetrahedron_graph", + "tutte_graph", +] import networkx as nx -from networkx.generators.classic import empty_graph, cycle_graph, path_graph, complete_graph +from networkx.generators.classic import ( + empty_graph, + cycle_graph, + path_graph, + complete_graph, +) from networkx.exception import NetworkXError -#------------------------------------------------------------------------------ -# Tools for creating small graphs -#------------------------------------------------------------------------------ - def make_small_undirected_graph(graph_description, create_using=None): """ @@ -51,9 +46,10 @@ def make_small_undirected_graph(graph_description, create_using=None): See make_small_graph. """ - if create_using is not None and create_using.is_directed(): + G = empty_graph(0, create_using) + if G.is_directed(): raise NetworkXError("Directed Graph not supported") - return make_small_graph(graph_description, create_using) + return make_small_graph(graph_description, G) def make_small_graph(graph_description, create_using=None): @@ -71,21 +67,29 @@ def make_small_graph(graph_description, create_using=None): specifies the nodes connected to vertex j. e.g. the "square" graph C_4 can be obtained by - >>> G=nx.make_small_graph(["adjacencylist","C_4",4,[[2,4],[1,3],[2,4],[1,3]]]) + >>> G = nx.make_small_graph( + ... ["adjacencylist", "C_4", 4, [[2, 4], [1, 3], [2, 4], [1, 3]]] + ... ) or, since we do not need to add edges twice, - >>> G=nx.make_small_graph(["adjacencylist","C_4",4,[[2,4],[3],[4],[]]]) + >>> G = nx.make_small_graph(["adjacencylist", "C_4", 4, [[2, 4], [3], [4], []]]) If ltype="edgelist" then xlist is an edge list written as [[v1,w2],[v2,w2],...,[vk,wk]], where vj and wj integers in the range 1,..,n e.g. the "square" graph C_4 can be obtained by - >>> G=nx.make_small_graph(["edgelist","C_4",4,[[1,2],[3,4],[2,3],[4,1]]]) + >>> G = nx.make_small_graph( + ... ["edgelist", "C_4", 4, [[1, 2], [3, 4], [2, 3], [4, 1]]] + ... ) Use the create_using argument to choose the graph class/type. """ + + if graph_description[0] not in ("adjacencylist", "edgelist"): + raise NetworkXError("ltype must be either adjacencylist or edgelist") + ltype = graph_description[0] name = graph_description[1] n = graph_description[2] @@ -147,14 +151,13 @@ def LCF_graph(n, shift_list, repeats, create_using=None): and references. 
""" - if create_using is not None and create_using.is_directed(): - raise NetworkXError("Directed Graph not supported") - if n <= 0: return empty_graph(0, create_using) # start with the n-cycle G = cycle_graph(n, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") G.name = "LCF_graph" nodes = sorted(list(G)) @@ -166,50 +169,70 @@ def LCF_graph(n, shift_list, repeats, create_using=None): for i in range(n_extra_edges): shift = shift_list[i % len(shift_list)] # cycle through shift_list - v1 = nodes[i % n] # cycle repeatedly through nodes + v1 = nodes[i % n] # cycle repeatedly through nodes v2 = nodes[(i + shift) % n] G.add_edge(v1, v2) return G -#------------------------------------------------------------------------------- +# ------------------------------------------------------------------------------- # Various small and named graphs -#------------------------------------------------------------------------------- +# ------------------------------------------------------------------------------- + def bull_graph(create_using=None): - """Return the Bull graph. """ + """Returns the Bull graph. """ description = [ "adjacencylist", "Bull Graph", 5, - [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]] + [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]], ] G = make_small_undirected_graph(description, create_using) return G def chvatal_graph(create_using=None): - """Return the Chvátal graph.""" + """Returns the Chvátal graph.""" description = [ "adjacencylist", "Chvatal Graph", 12, - [[2, 5, 7, 10], [3, 6, 8], [4, 7, 9], [5, 8, 10], - [6, 9], [11, 12], [11, 12], [9, 12], - [11], [11, 12], [], []] + [ + [2, 5, 7, 10], + [3, 6, 8], + [4, 7, 9], + [5, 8, 10], + [6, 9], + [11, 12], + [11, 12], + [9, 12], + [11], + [11, 12], + [], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G def cubical_graph(create_using=None): - """Return the 3-regular Platonic Cubical graph.""" + """Returns the 3-regular Platonic Cubical graph.""" description = [ "adjacencylist", "Platonic Cubical Graph", 8, - [[2, 4, 5], [1, 3, 8], [2, 4, 7], [1, 3, 6], - [1, 6, 8], [4, 5, 7], [3, 6, 8], [2, 5, 7]] + [ + [2, 4, 5], + [1, 3, 8], + [2, 4, 7], + [1, 3, 6], + [1, 6, 8], + [4, 5, 7], + [3, 6, 8], + [2, 5, 7], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -223,12 +246,12 @@ def desargues_graph(create_using=None): def diamond_graph(create_using=None): - """Return the Diamond graph. """ + """Returns the Diamond graph. """ description = [ "adjacencylist", "Diamond Graph", 4, - [[2, 3], [1, 3, 4], [1, 2, 4], [2, 3]] + [[2, 3], [1, 3, 4], [1, 2, 4], [2, 3]], ] G = make_small_undirected_graph(description, create_using) return G @@ -242,15 +265,28 @@ def dodecahedral_graph(create_using=None): def frucht_graph(create_using=None): - """Return the Frucht Graph. + """Returns the Frucht Graph. The Frucht Graph is the smallest cubical graph whose automorphism group consists only of the identity element. 
""" G = cycle_graph(7, create_using) - G.add_edges_from([[0, 7], [1, 7], [2, 8], [3, 9], [4, 9], [5, 10], [6, 10], - [7, 11], [8, 11], [8, 9], [10, 11]]) + G.add_edges_from( + [ + [0, 7], + [1, 7], + [2, 8], + [3, 9], + [4, 9], + [5, 10], + [6, 10], + [7, 11], + [8, 11], + [8, 9], + [10, 11], + ] + ) G.name = "Frucht Graph" return G @@ -264,55 +300,65 @@ def heawood_graph(create_using=None): def hoffman_singleton_graph(): - '''Return the Hoffman-Singleton Graph.''' + """Return the Hoffman-Singleton Graph.""" G = nx.Graph() for i in range(5): for j in range(5): - G.add_edge(('pentagon', i, j), ('pentagon', i, (j - 1) % 5)) - G.add_edge(('pentagon', i, j), ('pentagon', i, (j + 1) % 5)) - G.add_edge(('pentagram', i, j), ('pentagram', i, (j - 2) % 5)) - G.add_edge(('pentagram', i, j), ('pentagram', i, (j + 2) % 5)) + G.add_edge(("pentagon", i, j), ("pentagon", i, (j - 1) % 5)) + G.add_edge(("pentagon", i, j), ("pentagon", i, (j + 1) % 5)) + G.add_edge(("pentagram", i, j), ("pentagram", i, (j - 2) % 5)) + G.add_edge(("pentagram", i, j), ("pentagram", i, (j + 2) % 5)) for k in range(5): - G.add_edge(('pentagon', i, j), - ('pentagram', k, (i * k + j) % 5)) + G.add_edge(("pentagon", i, j), ("pentagram", k, (i * k + j) % 5)) G = nx.convert_node_labels_to_integers(G) - G.name = 'Hoffman-Singleton Graph' + G.name = "Hoffman-Singleton Graph" return G def house_graph(create_using=None): - """Return the House graph (square with triangle on top).""" + """Returns the House graph (square with triangle on top).""" description = [ "adjacencylist", "House Graph", 5, - [[2, 3], [1, 4], [1, 4, 5], [2, 3, 5], [3, 4]] + [[2, 3], [1, 4], [1, 4, 5], [2, 3, 5], [3, 4]], ] G = make_small_undirected_graph(description, create_using) return G def house_x_graph(create_using=None): - """Return the House graph with a cross inside the house square.""" + """Returns the House graph with a cross inside the house square.""" description = [ "adjacencylist", "House-with-X-inside Graph", 5, - [[2, 3, 4], [1, 3, 4], [1, 2, 4, 5], [1, 2, 3, 5], [3, 4]] + [[2, 3, 4], [1, 3, 4], [1, 2, 4, 5], [1, 2, 3, 5], [3, 4]], ] G = make_small_undirected_graph(description, create_using) return G def icosahedral_graph(create_using=None): - """Return the Platonic Icosahedral graph.""" + """Returns the Platonic Icosahedral graph.""" description = [ "adjacencylist", "Platonic Icosahedral Graph", 12, - [[2, 6, 8, 9, 12], [3, 6, 7, 9], [4, 7, 9, 10], [5, 7, 10, 11], - [6, 7, 11, 12], [7, 12], [], [9, 10, 11, 12], - [10], [11], [12], []] + [ + [2, 6, 8, 9, 12], + [3, 6, 7, 9], + [4, 7, 9, 10], + [5, 7, 10, 11], + [6, 7, 11, 12], + [7, 12], + [], + [9, 10, 11, 12], + [10], + [11], + [12], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -333,27 +379,37 @@ def krackhardt_kite_graph(create_using=None): "adjacencylist", "Krackhardt Kite Social Network", 10, - [[2, 3, 4, 6], [1, 4, 5, 7], [1, 4, 6], [1, 2, 3, 5, 6, 7], [2, 4, 7], - [1, 3, 4, 7, 8], [2, 4, 5, 6, 8], [6, 7, 9], [8, 10], [9]] + [ + [2, 3, 4, 6], + [1, 4, 5, 7], + [1, 4, 6], + [1, 2, 3, 5, 6, 7], + [2, 4, 7], + [1, 3, 4, 7, 8], + [2, 4, 5, 6, 8], + [6, 7, 9], + [8, 10], + [9], + ], ] G = make_small_undirected_graph(description, create_using) return G def moebius_kantor_graph(create_using=None): - """Return the Moebius-Kantor graph.""" + """Returns the Moebius-Kantor graph.""" G = LCF_graph(16, [5, -5], 8, create_using) G.name = "Moebius-Kantor Graph" return G def octahedral_graph(create_using=None): - """Return the Platonic Octahedral graph.""" + """Returns the 
Platonic Octahedral graph.""" description = [ "adjacencylist", "Platonic Octahedral Graph", 6, - [[2, 3, 4, 5], [3, 4, 6], [5, 6], [5, 6], [6], []] + [[2, 3, 4, 5], [3, 4, 6], [5, 6], [5, 6], [6], []], ] G = make_small_undirected_graph(description, create_using) return G @@ -367,13 +423,23 @@ def pappus_graph(): def petersen_graph(create_using=None): - """Return the Petersen graph.""" + """Returns the Petersen graph.""" description = [ "adjacencylist", "Petersen Graph", 10, - [[2, 5, 6], [1, 3, 7], [2, 4, 8], [3, 5, 9], [4, 1, 10], [1, 8, 9], [2, 9, 10], - [3, 6, 10], [4, 6, 7], [5, 7, 8]] + [ + [2, 5, 6], + [1, 3, 7], + [2, 4, 8], + [3, 5, 9], + [4, 1, 10], + [1, 8, 9], + [2, 9, 10], + [3, 6, 10], + [4, 6, 7], + [5, 7, 8], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -405,46 +471,105 @@ def tetrahedral_graph(create_using=None): def truncated_cube_graph(create_using=None): - """Return the skeleton of the truncated cube.""" + """Returns the skeleton of the truncated cube.""" description = [ "adjacencylist", "Truncated Cube Graph", 24, - [[2, 3, 5], [12, 15], [4, 5], [7, 9], - [6], [17, 19], [8, 9], [11, 13], - [10], [18, 21], [12, 13], [15], - [14], [22, 23], [16], [20, 24], - [18, 19], [21], [20], [24], - [22], [23], [24], []] + [ + [2, 3, 5], + [12, 15], + [4, 5], + [7, 9], + [6], + [17, 19], + [8, 9], + [11, 13], + [10], + [18, 21], + [12, 13], + [15], + [14], + [22, 23], + [16], + [20, 24], + [18, 19], + [21], + [20], + [24], + [22], + [23], + [24], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G def truncated_tetrahedron_graph(create_using=None): - """Return the skeleton of the truncated Platonic tetrahedron.""" + """Returns the skeleton of the truncated Platonic tetrahedron.""" G = path_graph(12, create_using) -# G.add_edges_from([(1,3),(1,10),(2,7),(4,12),(5,12),(6,8),(9,11)]) + # G.add_edges_from([(1,3),(1,10),(2,7),(4,12),(5,12),(6,8),(9,11)]) G.add_edges_from([(0, 2), (0, 9), (1, 6), (3, 11), (4, 11), (5, 7), (8, 10)]) G.name = "Truncated Tetrahedron Graph" return G def tutte_graph(create_using=None): - """Return the Tutte graph.""" + """Returns the Tutte graph.""" description = [ "adjacencylist", "Tutte's Graph", 46, - [[2, 3, 4], [5, 27], [11, 12], [19, 20], [6, 34], - [7, 30], [8, 28], [9, 15], [10, 39], [11, 38], - [40], [13, 40], [14, 36], [15, 16], [35], - [17, 23], [18, 45], [19, 44], [46], [21, 46], - [22, 42], [23, 24], [41], [25, 28], [26, 33], - [27, 32], [34], [29], [30, 33], [31], - [32, 34], [33], [], [], [36, 39], - [37], [38, 40], [39], [], [], - [42, 45], [43], [44, 46], [45], [], []] + [ + [2, 3, 4], + [5, 27], + [11, 12], + [19, 20], + [6, 34], + [7, 30], + [8, 28], + [9, 15], + [10, 39], + [11, 38], + [40], + [13, 40], + [14, 36], + [15, 16], + [35], + [17, 23], + [18, 45], + [19, 44], + [46], + [21, 46], + [22, 42], + [23, 24], + [41], + [25, 28], + [26, 33], + [27, 32], + [34], + [29], + [30, 33], + [31], + [32, 34], + [33], + [], + [], + [36, 39], + [37], + [38, 40], + [39], + [], + [], + [42, 45], + [43], + [44, 46], + [45], + [], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G diff --git a/networkx/generators/social.py b/networkx/generators/social.py index ab1230b..6e24160 100644 --- a/networkx/generators/social.py +++ b/networkx/generators/social.py @@ -2,16 +2,17 @@ Famous social networks. 
""" import networkx as nx -__author__ = """\n""".join(['Jordi Torrents ', - 'Katy Bold ', - 'Aric Hagberg >> import networkx as nx >>> G = nx.karate_club_graph() - >>> G.nodes[5]['club'] + >>> G.nodes[5]["club"] 'Mr. Hi' - >>> G.nodes[9]['club'] + >>> G.nodes[9]["club"] 'Officer' References @@ -82,7 +82,7 @@ def karate_club_graph(): 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 1 0 1 0 1 1 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 0 0 1 1 1 0 1 1 0 0 1 1 1 1 1 1 1 0""" - for row, line in enumerate(zacharydat.split('\n')): + for row, line in enumerate(zacharydat.split("\n")): thisrow = [int(b) for b in line.split()] for col, entry in enumerate(thisrow): if entry == 1: @@ -90,12 +90,12 @@ def karate_club_graph(): # Add the name of each member's club as a node attribute. for v in G: - G.nodes[v]['club'] = 'Mr. Hi' if v in club1 else 'Officer' + G.nodes[v]["club"] = "Mr. Hi" if v in club1 else "Officer" return G def davis_southern_women_graph(): - """Return Davis Southern women social network. + """Returns Davis Southern women social network. This is a bipartite graph. @@ -106,138 +106,146 @@ def davis_southern_women_graph(): """ G = nx.Graph() # Top nodes - women = ["Evelyn Jefferson", - "Laura Mandeville", - "Theresa Anderson", - "Brenda Rogers", - "Charlotte McDowd", - "Frances Anderson", - "Eleanor Nye", - "Pearl Oglethorpe", - "Ruth DeSand", - "Verne Sanderson", - "Myra Liddel", - "Katherina Rogers", - "Sylvia Avondale", - "Nora Fayette", - "Helen Lloyd", - "Dorothy Murchison", - "Olivia Carleton", - "Flora Price"] + women = [ + "Evelyn Jefferson", + "Laura Mandeville", + "Theresa Anderson", + "Brenda Rogers", + "Charlotte McDowd", + "Frances Anderson", + "Eleanor Nye", + "Pearl Oglethorpe", + "Ruth DeSand", + "Verne Sanderson", + "Myra Liddel", + "Katherina Rogers", + "Sylvia Avondale", + "Nora Fayette", + "Helen Lloyd", + "Dorothy Murchison", + "Olivia Carleton", + "Flora Price", + ] G.add_nodes_from(women, bipartite=0) # Bottom nodes - events = ["E1", - "E2", - "E3", - "E4", - "E5", - "E6", - "E7", - "E8", - "E9", - "E10", - "E11", - "E12", - "E13", - "E14"] + events = [ + "E1", + "E2", + "E3", + "E4", + "E5", + "E6", + "E7", + "E8", + "E9", + "E10", + "E11", + "E12", + "E13", + "E14", + ] G.add_nodes_from(events, bipartite=1) - G.add_edges_from([("Evelyn Jefferson", "E1"), - ("Evelyn Jefferson", "E2"), - ("Evelyn Jefferson", "E3"), - ("Evelyn Jefferson", "E4"), - ("Evelyn Jefferson", "E5"), - ("Evelyn Jefferson", "E6"), - ("Evelyn Jefferson", "E8"), - ("Evelyn Jefferson", "E9"), - ("Laura Mandeville", "E1"), - ("Laura Mandeville", "E2"), - ("Laura Mandeville", "E3"), - ("Laura Mandeville", "E5"), - ("Laura Mandeville", "E6"), - ("Laura Mandeville", "E7"), - ("Laura Mandeville", "E8"), - ("Theresa Anderson", "E2"), - ("Theresa Anderson", "E3"), - ("Theresa Anderson", "E4"), - ("Theresa Anderson", "E5"), - ("Theresa Anderson", "E6"), - ("Theresa Anderson", "E7"), - ("Theresa Anderson", "E8"), - ("Theresa Anderson", "E9"), - ("Brenda Rogers", "E1"), - ("Brenda Rogers", "E3"), - ("Brenda Rogers", "E4"), - ("Brenda Rogers", "E5"), - ("Brenda Rogers", "E6"), - ("Brenda Rogers", "E7"), - ("Brenda Rogers", "E8"), - ("Charlotte McDowd", "E3"), - ("Charlotte McDowd", "E4"), - ("Charlotte McDowd", "E5"), - ("Charlotte McDowd", "E7"), - ("Frances Anderson", "E3"), - ("Frances Anderson", "E5"), - ("Frances Anderson", "E6"), - ("Frances Anderson", "E8"), - ("Eleanor Nye", "E5"), - ("Eleanor Nye", "E6"), - ("Eleanor Nye", "E7"), - ("Eleanor Nye", "E8"), - ("Pearl Oglethorpe", "E6"), - ("Pearl 
Oglethorpe", "E8"), - ("Pearl Oglethorpe", "E9"), - ("Ruth DeSand", "E5"), - ("Ruth DeSand", "E7"), - ("Ruth DeSand", "E8"), - ("Ruth DeSand", "E9"), - ("Verne Sanderson", "E7"), - ("Verne Sanderson", "E8"), - ("Verne Sanderson", "E9"), - ("Verne Sanderson", "E12"), - ("Myra Liddel", "E8"), - ("Myra Liddel", "E9"), - ("Myra Liddel", "E10"), - ("Myra Liddel", "E12"), - ("Katherina Rogers", "E8"), - ("Katherina Rogers", "E9"), - ("Katherina Rogers", "E10"), - ("Katherina Rogers", "E12"), - ("Katherina Rogers", "E13"), - ("Katherina Rogers", "E14"), - ("Sylvia Avondale", "E7"), - ("Sylvia Avondale", "E8"), - ("Sylvia Avondale", "E9"), - ("Sylvia Avondale", "E10"), - ("Sylvia Avondale", "E12"), - ("Sylvia Avondale", "E13"), - ("Sylvia Avondale", "E14"), - ("Nora Fayette", "E6"), - ("Nora Fayette", "E7"), - ("Nora Fayette", "E9"), - ("Nora Fayette", "E10"), - ("Nora Fayette", "E11"), - ("Nora Fayette", "E12"), - ("Nora Fayette", "E13"), - ("Nora Fayette", "E14"), - ("Helen Lloyd", "E7"), - ("Helen Lloyd", "E8"), - ("Helen Lloyd", "E10"), - ("Helen Lloyd", "E11"), - ("Helen Lloyd", "E12"), - ("Dorothy Murchison", "E8"), - ("Dorothy Murchison", "E9"), - ("Olivia Carleton", "E9"), - ("Olivia Carleton", "E11"), - ("Flora Price", "E9"), - ("Flora Price", "E11")]) - G.graph['top'] = women - G.graph['bottom'] = events + G.add_edges_from( + [ + ("Evelyn Jefferson", "E1"), + ("Evelyn Jefferson", "E2"), + ("Evelyn Jefferson", "E3"), + ("Evelyn Jefferson", "E4"), + ("Evelyn Jefferson", "E5"), + ("Evelyn Jefferson", "E6"), + ("Evelyn Jefferson", "E8"), + ("Evelyn Jefferson", "E9"), + ("Laura Mandeville", "E1"), + ("Laura Mandeville", "E2"), + ("Laura Mandeville", "E3"), + ("Laura Mandeville", "E5"), + ("Laura Mandeville", "E6"), + ("Laura Mandeville", "E7"), + ("Laura Mandeville", "E8"), + ("Theresa Anderson", "E2"), + ("Theresa Anderson", "E3"), + ("Theresa Anderson", "E4"), + ("Theresa Anderson", "E5"), + ("Theresa Anderson", "E6"), + ("Theresa Anderson", "E7"), + ("Theresa Anderson", "E8"), + ("Theresa Anderson", "E9"), + ("Brenda Rogers", "E1"), + ("Brenda Rogers", "E3"), + ("Brenda Rogers", "E4"), + ("Brenda Rogers", "E5"), + ("Brenda Rogers", "E6"), + ("Brenda Rogers", "E7"), + ("Brenda Rogers", "E8"), + ("Charlotte McDowd", "E3"), + ("Charlotte McDowd", "E4"), + ("Charlotte McDowd", "E5"), + ("Charlotte McDowd", "E7"), + ("Frances Anderson", "E3"), + ("Frances Anderson", "E5"), + ("Frances Anderson", "E6"), + ("Frances Anderson", "E8"), + ("Eleanor Nye", "E5"), + ("Eleanor Nye", "E6"), + ("Eleanor Nye", "E7"), + ("Eleanor Nye", "E8"), + ("Pearl Oglethorpe", "E6"), + ("Pearl Oglethorpe", "E8"), + ("Pearl Oglethorpe", "E9"), + ("Ruth DeSand", "E5"), + ("Ruth DeSand", "E7"), + ("Ruth DeSand", "E8"), + ("Ruth DeSand", "E9"), + ("Verne Sanderson", "E7"), + ("Verne Sanderson", "E8"), + ("Verne Sanderson", "E9"), + ("Verne Sanderson", "E12"), + ("Myra Liddel", "E8"), + ("Myra Liddel", "E9"), + ("Myra Liddel", "E10"), + ("Myra Liddel", "E12"), + ("Katherina Rogers", "E8"), + ("Katherina Rogers", "E9"), + ("Katherina Rogers", "E10"), + ("Katherina Rogers", "E12"), + ("Katherina Rogers", "E13"), + ("Katherina Rogers", "E14"), + ("Sylvia Avondale", "E7"), + ("Sylvia Avondale", "E8"), + ("Sylvia Avondale", "E9"), + ("Sylvia Avondale", "E10"), + ("Sylvia Avondale", "E12"), + ("Sylvia Avondale", "E13"), + ("Sylvia Avondale", "E14"), + ("Nora Fayette", "E6"), + ("Nora Fayette", "E7"), + ("Nora Fayette", "E9"), + ("Nora Fayette", "E10"), + ("Nora Fayette", "E11"), + ("Nora Fayette", "E12"), + ("Nora Fayette", 
"E13"), + ("Nora Fayette", "E14"), + ("Helen Lloyd", "E7"), + ("Helen Lloyd", "E8"), + ("Helen Lloyd", "E10"), + ("Helen Lloyd", "E11"), + ("Helen Lloyd", "E12"), + ("Dorothy Murchison", "E8"), + ("Dorothy Murchison", "E9"), + ("Olivia Carleton", "E9"), + ("Olivia Carleton", "E11"), + ("Flora Price", "E9"), + ("Flora Price", "E11"), + ] + ) + G.graph["top"] = women + G.graph["bottom"] = events return G def florentine_families_graph(): - """Return Florentine families graph. + """Returns Florentine families graph. References ---------- @@ -246,24 +254,291 @@ def florentine_families_graph(): Social Networks, Volume 8, Issue 3, September 1986, Pages 215-256 """ G = nx.Graph() - G.add_edge('Acciaiuoli', 'Medici') - G.add_edge('Castellani', 'Peruzzi') - G.add_edge('Castellani', 'Strozzi') - G.add_edge('Castellani', 'Barbadori') - G.add_edge('Medici', 'Barbadori') - G.add_edge('Medici', 'Ridolfi') - G.add_edge('Medici', 'Tornabuoni') - G.add_edge('Medici', 'Albizzi') - G.add_edge('Medici', 'Salviati') - G.add_edge('Salviati', 'Pazzi') - G.add_edge('Peruzzi', 'Strozzi') - G.add_edge('Peruzzi', 'Bischeri') - G.add_edge('Strozzi', 'Ridolfi') - G.add_edge('Strozzi', 'Bischeri') - G.add_edge('Ridolfi', 'Tornabuoni') - G.add_edge('Tornabuoni', 'Guadagni') - G.add_edge('Albizzi', 'Ginori') - G.add_edge('Albizzi', 'Guadagni') - G.add_edge('Bischeri', 'Guadagni') - G.add_edge('Guadagni', 'Lamberteschi') + G.add_edge("Acciaiuoli", "Medici") + G.add_edge("Castellani", "Peruzzi") + G.add_edge("Castellani", "Strozzi") + G.add_edge("Castellani", "Barbadori") + G.add_edge("Medici", "Barbadori") + G.add_edge("Medici", "Ridolfi") + G.add_edge("Medici", "Tornabuoni") + G.add_edge("Medici", "Albizzi") + G.add_edge("Medici", "Salviati") + G.add_edge("Salviati", "Pazzi") + G.add_edge("Peruzzi", "Strozzi") + G.add_edge("Peruzzi", "Bischeri") + G.add_edge("Strozzi", "Ridolfi") + G.add_edge("Strozzi", "Bischeri") + G.add_edge("Ridolfi", "Tornabuoni") + G.add_edge("Tornabuoni", "Guadagni") + G.add_edge("Albizzi", "Ginori") + G.add_edge("Albizzi", "Guadagni") + G.add_edge("Bischeri", "Guadagni") + G.add_edge("Guadagni", "Lamberteschi") + return G + + +def les_miserables_graph(): + """Returns coappearance network of characters in the novel Les Miserables. + + References + ---------- + .. [1] D. E. Knuth, 1993. + The Stanford GraphBase: a platform for combinatorial computing, + pp. 74-87. New York: AcM Press. 
+ """ + G = nx.Graph() + G.add_edge("Napoleon", "Myriel", weight=1) + G.add_edge("MlleBaptistine", "Myriel", weight=8) + G.add_edge("MmeMagloire", "Myriel", weight=10) + G.add_edge("MmeMagloire", "MlleBaptistine", weight=6) + G.add_edge("CountessDeLo", "Myriel", weight=1) + G.add_edge("Geborand", "Myriel", weight=1) + G.add_edge("Champtercier", "Myriel", weight=1) + G.add_edge("Cravatte", "Myriel", weight=1) + G.add_edge("Count", "Myriel", weight=2) + G.add_edge("OldMan", "Myriel", weight=1) + G.add_edge("Valjean", "Labarre", weight=1) + G.add_edge("Valjean", "MmeMagloire", weight=3) + G.add_edge("Valjean", "MlleBaptistine", weight=3) + G.add_edge("Valjean", "Myriel", weight=5) + G.add_edge("Marguerite", "Valjean", weight=1) + G.add_edge("MmeDeR", "Valjean", weight=1) + G.add_edge("Isabeau", "Valjean", weight=1) + G.add_edge("Gervais", "Valjean", weight=1) + G.add_edge("Listolier", "Tholomyes", weight=4) + G.add_edge("Fameuil", "Tholomyes", weight=4) + G.add_edge("Fameuil", "Listolier", weight=4) + G.add_edge("Blacheville", "Tholomyes", weight=4) + G.add_edge("Blacheville", "Listolier", weight=4) + G.add_edge("Blacheville", "Fameuil", weight=4) + G.add_edge("Favourite", "Tholomyes", weight=3) + G.add_edge("Favourite", "Listolier", weight=3) + G.add_edge("Favourite", "Fameuil", weight=3) + G.add_edge("Favourite", "Blacheville", weight=4) + G.add_edge("Dahlia", "Tholomyes", weight=3) + G.add_edge("Dahlia", "Listolier", weight=3) + G.add_edge("Dahlia", "Fameuil", weight=3) + G.add_edge("Dahlia", "Blacheville", weight=3) + G.add_edge("Dahlia", "Favourite", weight=5) + G.add_edge("Zephine", "Tholomyes", weight=3) + G.add_edge("Zephine", "Listolier", weight=3) + G.add_edge("Zephine", "Fameuil", weight=3) + G.add_edge("Zephine", "Blacheville", weight=3) + G.add_edge("Zephine", "Favourite", weight=4) + G.add_edge("Zephine", "Dahlia", weight=4) + G.add_edge("Fantine", "Tholomyes", weight=3) + G.add_edge("Fantine", "Listolier", weight=3) + G.add_edge("Fantine", "Fameuil", weight=3) + G.add_edge("Fantine", "Blacheville", weight=3) + G.add_edge("Fantine", "Favourite", weight=4) + G.add_edge("Fantine", "Dahlia", weight=4) + G.add_edge("Fantine", "Zephine", weight=4) + G.add_edge("Fantine", "Marguerite", weight=2) + G.add_edge("Fantine", "Valjean", weight=9) + G.add_edge("MmeThenardier", "Fantine", weight=2) + G.add_edge("MmeThenardier", "Valjean", weight=7) + G.add_edge("Thenardier", "MmeThenardier", weight=13) + G.add_edge("Thenardier", "Fantine", weight=1) + G.add_edge("Thenardier", "Valjean", weight=12) + G.add_edge("Cosette", "MmeThenardier", weight=4) + G.add_edge("Cosette", "Valjean", weight=31) + G.add_edge("Cosette", "Tholomyes", weight=1) + G.add_edge("Cosette", "Thenardier", weight=1) + G.add_edge("Javert", "Valjean", weight=17) + G.add_edge("Javert", "Fantine", weight=5) + G.add_edge("Javert", "Thenardier", weight=5) + G.add_edge("Javert", "MmeThenardier", weight=1) + G.add_edge("Javert", "Cosette", weight=1) + G.add_edge("Fauchelevent", "Valjean", weight=8) + G.add_edge("Fauchelevent", "Javert", weight=1) + G.add_edge("Bamatabois", "Fantine", weight=1) + G.add_edge("Bamatabois", "Javert", weight=1) + G.add_edge("Bamatabois", "Valjean", weight=2) + G.add_edge("Perpetue", "Fantine", weight=1) + G.add_edge("Simplice", "Perpetue", weight=2) + G.add_edge("Simplice", "Valjean", weight=3) + G.add_edge("Simplice", "Fantine", weight=2) + G.add_edge("Simplice", "Javert", weight=1) + G.add_edge("Scaufflaire", "Valjean", weight=1) + G.add_edge("Woman1", "Valjean", weight=2) + G.add_edge("Woman1", 
"Javert", weight=1) + G.add_edge("Judge", "Valjean", weight=3) + G.add_edge("Judge", "Bamatabois", weight=2) + G.add_edge("Champmathieu", "Valjean", weight=3) + G.add_edge("Champmathieu", "Judge", weight=3) + G.add_edge("Champmathieu", "Bamatabois", weight=2) + G.add_edge("Brevet", "Judge", weight=2) + G.add_edge("Brevet", "Champmathieu", weight=2) + G.add_edge("Brevet", "Valjean", weight=2) + G.add_edge("Brevet", "Bamatabois", weight=1) + G.add_edge("Chenildieu", "Judge", weight=2) + G.add_edge("Chenildieu", "Champmathieu", weight=2) + G.add_edge("Chenildieu", "Brevet", weight=2) + G.add_edge("Chenildieu", "Valjean", weight=2) + G.add_edge("Chenildieu", "Bamatabois", weight=1) + G.add_edge("Cochepaille", "Judge", weight=2) + G.add_edge("Cochepaille", "Champmathieu", weight=2) + G.add_edge("Cochepaille", "Brevet", weight=2) + G.add_edge("Cochepaille", "Chenildieu", weight=2) + G.add_edge("Cochepaille", "Valjean", weight=2) + G.add_edge("Cochepaille", "Bamatabois", weight=1) + G.add_edge("Pontmercy", "Thenardier", weight=1) + G.add_edge("Boulatruelle", "Thenardier", weight=1) + G.add_edge("Eponine", "MmeThenardier", weight=2) + G.add_edge("Eponine", "Thenardier", weight=3) + G.add_edge("Anzelma", "Eponine", weight=2) + G.add_edge("Anzelma", "Thenardier", weight=2) + G.add_edge("Anzelma", "MmeThenardier", weight=1) + G.add_edge("Woman2", "Valjean", weight=3) + G.add_edge("Woman2", "Cosette", weight=1) + G.add_edge("Woman2", "Javert", weight=1) + G.add_edge("MotherInnocent", "Fauchelevent", weight=3) + G.add_edge("MotherInnocent", "Valjean", weight=1) + G.add_edge("Gribier", "Fauchelevent", weight=2) + G.add_edge("MmeBurgon", "Jondrette", weight=1) + G.add_edge("Gavroche", "MmeBurgon", weight=2) + G.add_edge("Gavroche", "Thenardier", weight=1) + G.add_edge("Gavroche", "Javert", weight=1) + G.add_edge("Gavroche", "Valjean", weight=1) + G.add_edge("Gillenormand", "Cosette", weight=3) + G.add_edge("Gillenormand", "Valjean", weight=2) + G.add_edge("Magnon", "Gillenormand", weight=1) + G.add_edge("Magnon", "MmeThenardier", weight=1) + G.add_edge("MlleGillenormand", "Gillenormand", weight=9) + G.add_edge("MlleGillenormand", "Cosette", weight=2) + G.add_edge("MlleGillenormand", "Valjean", weight=2) + G.add_edge("MmePontmercy", "MlleGillenormand", weight=1) + G.add_edge("MmePontmercy", "Pontmercy", weight=1) + G.add_edge("MlleVaubois", "MlleGillenormand", weight=1) + G.add_edge("LtGillenormand", "MlleGillenormand", weight=2) + G.add_edge("LtGillenormand", "Gillenormand", weight=1) + G.add_edge("LtGillenormand", "Cosette", weight=1) + G.add_edge("Marius", "MlleGillenormand", weight=6) + G.add_edge("Marius", "Gillenormand", weight=12) + G.add_edge("Marius", "Pontmercy", weight=1) + G.add_edge("Marius", "LtGillenormand", weight=1) + G.add_edge("Marius", "Cosette", weight=21) + G.add_edge("Marius", "Valjean", weight=19) + G.add_edge("Marius", "Tholomyes", weight=1) + G.add_edge("Marius", "Thenardier", weight=2) + G.add_edge("Marius", "Eponine", weight=5) + G.add_edge("Marius", "Gavroche", weight=4) + G.add_edge("BaronessT", "Gillenormand", weight=1) + G.add_edge("BaronessT", "Marius", weight=1) + G.add_edge("Mabeuf", "Marius", weight=1) + G.add_edge("Mabeuf", "Eponine", weight=1) + G.add_edge("Mabeuf", "Gavroche", weight=1) + G.add_edge("Enjolras", "Marius", weight=7) + G.add_edge("Enjolras", "Gavroche", weight=7) + G.add_edge("Enjolras", "Javert", weight=6) + G.add_edge("Enjolras", "Mabeuf", weight=1) + G.add_edge("Enjolras", "Valjean", weight=4) + G.add_edge("Combeferre", "Enjolras", weight=15) + 
G.add_edge("Combeferre", "Marius", weight=5) + G.add_edge("Combeferre", "Gavroche", weight=6) + G.add_edge("Combeferre", "Mabeuf", weight=2) + G.add_edge("Prouvaire", "Gavroche", weight=1) + G.add_edge("Prouvaire", "Enjolras", weight=4) + G.add_edge("Prouvaire", "Combeferre", weight=2) + G.add_edge("Feuilly", "Gavroche", weight=2) + G.add_edge("Feuilly", "Enjolras", weight=6) + G.add_edge("Feuilly", "Prouvaire", weight=2) + G.add_edge("Feuilly", "Combeferre", weight=5) + G.add_edge("Feuilly", "Mabeuf", weight=1) + G.add_edge("Feuilly", "Marius", weight=1) + G.add_edge("Courfeyrac", "Marius", weight=9) + G.add_edge("Courfeyrac", "Enjolras", weight=17) + G.add_edge("Courfeyrac", "Combeferre", weight=13) + G.add_edge("Courfeyrac", "Gavroche", weight=7) + G.add_edge("Courfeyrac", "Mabeuf", weight=2) + G.add_edge("Courfeyrac", "Eponine", weight=1) + G.add_edge("Courfeyrac", "Feuilly", weight=6) + G.add_edge("Courfeyrac", "Prouvaire", weight=3) + G.add_edge("Bahorel", "Combeferre", weight=5) + G.add_edge("Bahorel", "Gavroche", weight=5) + G.add_edge("Bahorel", "Courfeyrac", weight=6) + G.add_edge("Bahorel", "Mabeuf", weight=2) + G.add_edge("Bahorel", "Enjolras", weight=4) + G.add_edge("Bahorel", "Feuilly", weight=3) + G.add_edge("Bahorel", "Prouvaire", weight=2) + G.add_edge("Bahorel", "Marius", weight=1) + G.add_edge("Bossuet", "Marius", weight=5) + G.add_edge("Bossuet", "Courfeyrac", weight=12) + G.add_edge("Bossuet", "Gavroche", weight=5) + G.add_edge("Bossuet", "Bahorel", weight=4) + G.add_edge("Bossuet", "Enjolras", weight=10) + G.add_edge("Bossuet", "Feuilly", weight=6) + G.add_edge("Bossuet", "Prouvaire", weight=2) + G.add_edge("Bossuet", "Combeferre", weight=9) + G.add_edge("Bossuet", "Mabeuf", weight=1) + G.add_edge("Bossuet", "Valjean", weight=1) + G.add_edge("Joly", "Bahorel", weight=5) + G.add_edge("Joly", "Bossuet", weight=7) + G.add_edge("Joly", "Gavroche", weight=3) + G.add_edge("Joly", "Courfeyrac", weight=5) + G.add_edge("Joly", "Enjolras", weight=5) + G.add_edge("Joly", "Feuilly", weight=5) + G.add_edge("Joly", "Prouvaire", weight=2) + G.add_edge("Joly", "Combeferre", weight=5) + G.add_edge("Joly", "Mabeuf", weight=1) + G.add_edge("Joly", "Marius", weight=2) + G.add_edge("Grantaire", "Bossuet", weight=3) + G.add_edge("Grantaire", "Enjolras", weight=3) + G.add_edge("Grantaire", "Combeferre", weight=1) + G.add_edge("Grantaire", "Courfeyrac", weight=2) + G.add_edge("Grantaire", "Joly", weight=2) + G.add_edge("Grantaire", "Gavroche", weight=1) + G.add_edge("Grantaire", "Bahorel", weight=1) + G.add_edge("Grantaire", "Feuilly", weight=1) + G.add_edge("Grantaire", "Prouvaire", weight=1) + G.add_edge("MotherPlutarch", "Mabeuf", weight=3) + G.add_edge("Gueulemer", "Thenardier", weight=5) + G.add_edge("Gueulemer", "Valjean", weight=1) + G.add_edge("Gueulemer", "MmeThenardier", weight=1) + G.add_edge("Gueulemer", "Javert", weight=1) + G.add_edge("Gueulemer", "Gavroche", weight=1) + G.add_edge("Gueulemer", "Eponine", weight=1) + G.add_edge("Babet", "Thenardier", weight=6) + G.add_edge("Babet", "Gueulemer", weight=6) + G.add_edge("Babet", "Valjean", weight=1) + G.add_edge("Babet", "MmeThenardier", weight=1) + G.add_edge("Babet", "Javert", weight=2) + G.add_edge("Babet", "Gavroche", weight=1) + G.add_edge("Babet", "Eponine", weight=1) + G.add_edge("Claquesous", "Thenardier", weight=4) + G.add_edge("Claquesous", "Babet", weight=4) + G.add_edge("Claquesous", "Gueulemer", weight=4) + G.add_edge("Claquesous", "Valjean", weight=1) + G.add_edge("Claquesous", "MmeThenardier", weight=1) + 
G.add_edge("Claquesous", "Javert", weight=1) + G.add_edge("Claquesous", "Eponine", weight=1) + G.add_edge("Claquesous", "Enjolras", weight=1) + G.add_edge("Montparnasse", "Javert", weight=1) + G.add_edge("Montparnasse", "Babet", weight=2) + G.add_edge("Montparnasse", "Gueulemer", weight=2) + G.add_edge("Montparnasse", "Claquesous", weight=2) + G.add_edge("Montparnasse", "Valjean", weight=1) + G.add_edge("Montparnasse", "Gavroche", weight=1) + G.add_edge("Montparnasse", "Eponine", weight=1) + G.add_edge("Montparnasse", "Thenardier", weight=1) + G.add_edge("Toussaint", "Cosette", weight=2) + G.add_edge("Toussaint", "Javert", weight=1) + G.add_edge("Toussaint", "Valjean", weight=1) + G.add_edge("Child1", "Gavroche", weight=2) + G.add_edge("Child2", "Gavroche", weight=2) + G.add_edge("Child2", "Child1", weight=3) + G.add_edge("Brujon", "Babet", weight=3) + G.add_edge("Brujon", "Gueulemer", weight=3) + G.add_edge("Brujon", "Thenardier", weight=3) + G.add_edge("Brujon", "Gavroche", weight=1) + G.add_edge("Brujon", "Eponine", weight=1) + G.add_edge("Brujon", "Claquesous", weight=1) + G.add_edge("Brujon", "Montparnasse", weight=1) + G.add_edge("MmeHucheloup", "Bossuet", weight=1) + G.add_edge("MmeHucheloup", "Joly", weight=1) + G.add_edge("MmeHucheloup", "Grantaire", weight=1) + G.add_edge("MmeHucheloup", "Bahorel", weight=1) + G.add_edge("MmeHucheloup", "Courfeyrac", weight=1) + G.add_edge("MmeHucheloup", "Gavroche", weight=1) + G.add_edge("MmeHucheloup", "Enjolras", weight=1) return G diff --git a/networkx/generators/spectral_graph_forge.py b/networkx/generators/spectral_graph_forge.py new file mode 100644 index 0000000..115e196 --- /dev/null +++ b/networkx/generators/spectral_graph_forge.py @@ -0,0 +1,179 @@ +"""Generates graphs with a given eigenvector structure""" + + +import networkx as nx +from networkx.utils import np_random_state + +__all__ = ["spectral_graph_forge"] + + +def _mat_spect_approx(A, level, sorteigs=True, reverse=False, absolute=True): + """ Returns the low-rank approximation of the given matrix A + + Parameters + ---------- + A : 2D numpy array + level : integer + It represents the fixed rank for the output approximation matrix + sorteigs : boolean + Whether eigenvectors should be sorted according to their associated + eigenvalues before removing the firsts of them + reverse : boolean + Whether eigenvectors list should be reversed before removing the firsts + of them + absolute : boolean + Whether eigenvectors should be sorted considering the absolute values + of the corresponding eigenvalues + + Returns + ------- + B : 2D numpy array + low-rank approximation of A + + Notes + ----- + Low-rank matrix approximation is about finding a fixed rank matrix close + enough to the input one with respect to a given norm (distance). + In the case of real symmetric input matrix and euclidean distance, the best + low-rank approximation is given by the sum of first eigenvector matrices. + + References + ---------- + .. [1] G. Eckart and G. Young, The approximation of one matrix by another + of lower rank + .. [2] L. 
Mirsky, Symmetric gauge functions and unitarily invariant norms + + """ + + import numpy as np + + d, V = np.linalg.eigh(A) + d = np.ravel(d) + n = len(d) + if sorteigs: + if absolute: + k = np.argsort(np.abs(d)) + else: + k = np.argsort(d) + # ordered from the lowest to the highest + else: + k = range(n) + if not reverse: + k = np.flipud(k) + + z = np.zeros(n) + for i in range(level, n): + V[:, k[i]] = z + + B = V @ np.diag(d) @ V.T + return B + + +@np_random_state(3) +def spectral_graph_forge(G, alpha, transformation="identity", seed=None): + """Returns a random simple graph with spectrum resembling that of `G` + + This algorithm, called Spectral Graph Forge (SGF), computes the + eigenvectors of a given graph adjacency matrix, filters them and + builds a random graph with a similar eigenstructure. + SGF has been proved to be particularly useful for synthesizing + realistic social networks and it can also be used to anonymize + graph sensitive data. + + Parameters + ---------- + G : Graph + alpha : float + Ratio representing the percentage of eigenvectors of G to consider, + values in [0,1]. + transformation : string, optional + Represents the intended matrix linear transformation, possible values + are 'identity' and 'modularity' + seed : integer, random_state, or None (default) + Indicator of numpy random number generation state. + See :ref:`Randomness`. + + Returns + ------- + H : Graph + A graph with a similar eigenvector structure of the input one. + + Raises + ------ + NetworkXError + If transformation has a value different from 'identity' or 'modularity' + + Notes + ----- + Spectral Graph Forge (SGF) generates a random simple graph resembling the + global properties of the given one. + It leverages the low-rank approximation of the associated adjacency matrix + driven by the *alpha* precision parameter. + SGF preserves the number of nodes of the input graph and their ordering. + This way, nodes of output graphs resemble the properties of the input one + and attributes can be directly mapped. + + It considers the graph adjacency matrices which can optionally be + transformed to other symmetric real matrices (currently transformation + options include *identity* and *modularity*). + The *modularity* transformation, in the sense of Newman's modularity matrix + allows the focusing on community structure related properties of the graph. + + SGF applies a low-rank approximation whose fixed rank is computed from the + ratio *alpha* of the input graph adjacency matrix dimension. + This step performs a filtering on the input eigenvectors similar to the low + pass filtering common in telecommunications. + + The filtered values (after truncation) are used as input to a Bernoulli + sampling for constructing a random adjacency matrix. + + References + ---------- + .. [1] L. Baldesi, C. T. Butts, A. Markopoulou, "Spectral Graph Forge: + Graph Generation Targeting Modularity", IEEE Infocom, '18. + https://arxiv.org/abs/1801.01715 + .. [2] M. Newman, "Networks: an introduction", Oxford university press, + 2010 + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> H = nx.spectral_graph_forge(G, 0.3) + >>> + """ + + import numpy as np + import scipy.stats as stats + + available_transformations = ["identity", "modularity"] + alpha = np.clip(alpha, 0, 1) + A = nx.to_numpy_array(G) + n = A.shape[1] + level = int(round(n * alpha)) + + if transformation not in available_transformations: + msg = f"'{transformation}' is not a valid transformation. 
" + msg += f"Transformations: {available_transformations}" + raise nx.NetworkXError(msg) + + K = np.ones((1, n)) @ A + + B = A + if transformation == "modularity": + B -= K.T @ K / K.sum() + + B = _mat_spect_approx(B, level, sorteigs=True, absolute=True) + + if transformation == "modularity": + B += K.T @ K / K.sum() + + B = np.clip(B, 0, 1) + np.fill_diagonal(B, 0) + + for i in range(n - 1): + B[i, i + 1 :] = stats.bernoulli.rvs(B[i, i + 1 :], random_state=seed) + B[i + 1 :, i] = np.transpose(B[i, i + 1 :]) + + H = nx.from_numpy_array(B) + + return H diff --git a/networkx/generators/stochastic.py b/networkx/generators/stochastic.py index d0a743a..3627658 100644 --- a/networkx/generators/stochastic.py +++ b/networkx/generators/stochastic.py @@ -1,25 +1,17 @@ -# Copyright (C) 2010-2013 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. """Functions for generating stochastic graphs from a given weighted directed graph. """ -from __future__ import division from networkx.classes import DiGraph from networkx.classes import MultiDiGraph from networkx.utils import not_implemented_for -__author__ = "Aric Hagberg " -__all__ = ['stochastic_graph'] +__all__ = ["stochastic_graph"] -@not_implemented_for('undirected') -def stochastic_graph(G, copy=True, weight='weight'): +@not_implemented_for("undirected") +def stochastic_graph(G, copy=True, weight="weight"): """Returns a right-stochastic representation of directed graph `G`. A right-stochastic graph is a weighted digraph in which for each diff --git a/networkx/generators/sudoku.py b/networkx/generators/sudoku.py new file mode 100644 index 0000000..08ffffb --- /dev/null +++ b/networkx/generators/sudoku.py @@ -0,0 +1,128 @@ +"""Generator for Sudoku graphs + +This module gives a generator for n-Sudoku graphs. It can be used to develop +algorithms for solving or generating Sudoku puzzles. + +A completed Sudoku grid is a 9x9 array of integers between 1 and 9, with no +number appearing twice in the same row, column, or 3x3 box. + + 8 6 4 | 3 7 1 | 2 5 9 + 3 2 5 | 8 4 9 | 7 6 1 + 9 7 1 | 2 6 5 | 8 4 3 + ------+-------+------ + 4 3 6 | 1 9 2 | 5 8 7 + 1 9 8 | 6 5 7 | 4 3 2 + 2 5 7 | 4 8 3 | 9 1 6 + ------+-------+------ + 6 8 9 | 7 3 4 | 1 2 5 + 7 1 3 | 5 2 8 | 6 9 4 + 5 4 2 | 9 1 6 | 3 7 8 + + +The Sudoku graph is an undirected graph with 81 vertices, corresponding to +the cells of a Sudoku grid. It is a regular graph of degree 20. Two distinct +vertices are adjacent if and only if the corresponding cells belong to the +same row, column, or box. A completed Sudoku grid corresponds to a vertex +coloring of the Sudoku graph with nine colors. + +More generally, the n-Sudoku graph is a graph with n^4 vertices, corresponding +to the cells of an n^2 by n^2 grid. Two distinct vertices are adjacent if and +only if they belong to the same row, column, or n by n box. + +References +---------- +.. [1] Herzberg, A. M., & Murty, M. R. (2007). Sudoku squares and chromatic + polynomials. Notices of the AMS, 54(6), 708-717. +.. [2] Sander, Torsten (2009), "Sudoku graphs are integral", + Electronic Journal of Combinatorics, 16 (1): Note 25, 7pp, MR 2529816 +.. [3] Wikipedia contributors. "Glossary of Sudoku." Wikipedia, The Free + Encyclopedia, 3 Dec. 2019. Web. 22 Dec. 2019. +""" + +import networkx as nx +from networkx.exception import NetworkXError + +__all__ = ["sudoku_graph"] + + +def sudoku_graph(n=3): + """Returns the n-Sudoku graph. The default value of n is 3. 
+ + The n-Sudoku graph is a graph with n^4 vertices, corresponding to the + cells of an n^2 by n^2 grid. Two distinct vertices are adjacent if and + only if they belong to the same row, column, or n-by-n box. + + Parameters + ---------- + n: integer + The order of the Sudoku graph, equal to the square root of the + number of rows. The default is 3. + + Returns + ------- + NetworkX graph + The n-Sudoku graph Sud(n). + + Examples + -------- + >>> G = nx.sudoku_graph() + >>> G.number_of_nodes() + 81 + >>> G.number_of_edges() + 810 + >>> sorted(G.neighbors(42)) + [6, 15, 24, 33, 34, 35, 36, 37, 38, 39, 40, 41, 43, 44, 51, 52, 53, 60, 69, 78] + >>> G = nx.sudoku_graph(2) + >>> G.number_of_nodes() + 16 + >>> G.number_of_edges() + 56 + + References + ---------- + .. [1] Herzberg, A. M., & Murty, M. R. (2007). Sudoku squares and chromatic + polynomials. Notices of the AMS, 54(6), 708-717. + .. [2] Sander, Torsten (2009), "Sudoku graphs are integral", + Electronic Journal of Combinatorics, 16 (1): Note 25, 7pp, MR 2529816 + .. [3] Wikipedia contributors. "Glossary of Sudoku." Wikipedia, The Free + Encyclopedia, 3 Dec. 2019. Web. 22 Dec. 2019. + """ + + if n < 0: + raise NetworkXError("The order must be greater than or equal to zero.") + + n2 = n * n + n3 = n2 * n + n4 = n3 * n + + # Construct an empty graph with n^4 nodes + G = nx.empty_graph(n4) + + # A Sudoku graph of order 0 or 1 has no edges + if n < 2: + return G + + # Add edges for cells in the same row + for row_no in range(0, n2): + row_start = row_no * n2 + for j in range(1, n2): + for i in range(j): + G.add_edge(row_start + i, row_start + j) + + # Add edges for cells in the same column + for col_no in range(0, n2): + for j in range(col_no, n4, n2): + for i in range(col_no, j, n2): + G.add_edge(i, j) + + # Add edges for cells in the same box + for band_no in range(n): + for stack_no in range(n): + box_start = n3 * band_no + n * stack_no + for j in range(1, n2): + for i in range(j): + u = box_start + (i % n) + n2 * (i // n) + v = box_start + (j % n) + n2 * (j // n) + G.add_edge(u, v) + + return G diff --git a/networkx/generators/tests/__init__.py b/networkx/generators/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/generators/tests/test_atlas.py b/networkx/generators/tests/test_atlas.py index 3095ac5..e9ce00b 100644 --- a/networkx/generators/tests/test_atlas.py +++ b/networkx/generators/tests/test_atlas.py @@ -1,27 +1,25 @@ from itertools import groupby -from nose.tools import assert_equal -from nose.tools import assert_less_equal -from nose.tools import raises +import pytest import networkx as nx -from networkx.testing import * +from networkx.testing import assert_edges_equal, assert_nodes_equal from networkx import graph_atlas from networkx import graph_atlas_g from networkx.generators.atlas import NUM_GRAPHS from networkx.utils import pairwise -class TestAtlasGraph(object): +class TestAtlasGraph: """Unit tests for the :func:`~networkx.graph_atlas` function.""" - @raises(ValueError) def test_index_too_small(self): - graph_atlas(-1) + with pytest.raises(ValueError): + graph_atlas(-1) - @raises(ValueError) def test_index_too_large(self): - graph_atlas(NUM_GRAPHS) + with pytest.raises(ValueError): + graph_atlas(NUM_GRAPHS) def test_graph(self): G = graph_atlas(6) @@ -29,36 +27,37 @@ def test_graph(self): assert_edges_equal(G.edges(), [(0, 1), (0, 2)]) -class TestAtlasGraphG(object): +class TestAtlasGraphG: """Unit tests for the :func:`~networkx.graph_atlas_g` function.""" - def setUp(self): - self.GAG = 
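# [Editorial aside -- a hedged check, not part of the upstream patch.] The
# sudoku_graph docstring above claims Sud(3) is 20-regular: each cell shares a
# unit with 8 row peers, 8 column peers, and 4 box peers not already counted.
# By the handshake lemma that gives 81 * 20 / 2 = 810 edges, matching the doctest:
import networkx as nx

G = nx.sudoku_graph(3)
assert {d for _, d in G.degree()} == {20}   # 20-regular
assert G.number_of_edges() == 81 * 20 // 2  # 810 edges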
graph_atlas_g() + @classmethod + def setup_class(cls): + cls.GAG = graph_atlas_g() def test_sizes(self): G = self.GAG[0] - assert_equal(G.number_of_nodes(), 0) - assert_equal(G.number_of_edges(), 0) + assert G.number_of_nodes() == 0 + assert G.number_of_edges() == 0 G = self.GAG[7] - assert_equal(G.number_of_nodes(), 3) - assert_equal(G.number_of_edges(), 3) + assert G.number_of_nodes() == 3 + assert G.number_of_edges() == 3 def test_names(self): for i, G in enumerate(self.GAG): - assert_equal(int(G.name[1:]), i) + assert int(G.name[1:]) == i def test_nondecreasing_nodes(self): # check for nondecreasing number of nodes for n1, n2 in pairwise(map(len, self.GAG)): - assert_less_equal(n2, n1 + 1) + assert n2 <= n1 + 1 def test_nondecreasing_edges(self): # check for nondecreasing number of edges (for fixed number of # nodes) for n, group in groupby(self.GAG, key=nx.number_of_nodes): for m1, m2 in pairwise(map(nx.number_of_edges, group)): - assert_less_equal(m2, m1 + 1) + assert m2 <= m1 + 1 def test_nondecreasing_degree_sequence(self): # Check for lexicographically nondecreasing degree sequences @@ -67,7 +66,7 @@ def test_nondecreasing_degree_sequence(self): # There are three exceptions to this rule in the order given in # the "Atlas of Graphs" book, so we need to manually exclude # those. - exceptions = [('G55', 'G56'), ('G1007', 'G1008'), ('G1012', 'G1013')] + exceptions = [("G55", "G56"), ("G1007", "G1008"), ("G1012", "G1013")] for n, group in groupby(self.GAG, key=nx.number_of_nodes): for m, group in groupby(group, key=nx.number_of_edges): for G1, G2 in pairwise(group): @@ -75,4 +74,4 @@ def test_nondecreasing_degree_sequence(self): continue d1 = sorted(d for v, d in G1.degree()) d2 = sorted(d for v, d in G2.degree()) - assert_less_equal(d1, d2) + assert d1 <= d2 diff --git a/networkx/generators/tests/test_classic.py b/networkx/generators/tests/test_classic.py index a16ba08..c98eb8f 100644 --- a/networkx/generators/tests/test_classic.py +++ b/networkx/generators/tests/test_classic.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ==================== Generators - Classic @@ -8,9 +7,8 @@ """ import itertools -from nose.tools import * +import pytest import networkx as nx -from networkx import * from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic from networkx.testing import assert_edges_equal from networkx.testing import assert_nodes_equal @@ -18,30 +16,30 @@ is_isomorphic = graph_could_be_isomorphic -class TestGeneratorClassic(): +class TestGeneratorClassic: def test_balanced_tree(self): # balanced_tree(r,h) is a tree with (r**(h+1)-1)/(r-1) edges for r, h in [(2, 2), (3, 3), (6, 2)]: - t = balanced_tree(r, h) + t = nx.balanced_tree(r, h) order = t.order() - assert_true(order == (r**(h + 1) - 1) / (r - 1)) - assert_true(is_connected(t)) - assert_true(t.size() == order - 1) - dh = degree_histogram(t) - assert_equal(dh[0], 0) # no nodes of 0 - assert_equal(dh[1], r**h) # nodes of degree 1 are leaves - assert_equal(dh[r], 1) # root is degree r - assert_equal(dh[r + 1], order - r**h - 1) # everyone else is degree r+1 - assert_equal(len(dh), r + 2) + assert order == (r ** (h + 1) - 1) / (r - 1) + assert nx.is_connected(t) + assert t.size() == order - 1 + dh = nx.degree_histogram(t) + assert dh[0] == 0 # no nodes of 0 + assert dh[1] == r ** h # nodes of degree 1 are leaves + assert dh[r] == 1 # root is degree r + assert dh[r + 1] == order - r ** h - 1 # everyone else is degree r+1 + assert len(dh) == r + 2 def test_balanced_tree_star(self): # balanced_tree(r,1) is the 
r-star - t = balanced_tree(r=2, h=1) - assert_true(is_isomorphic(t, star_graph(2))) - t = balanced_tree(r=5, h=1) - assert_true(is_isomorphic(t, star_graph(5))) - t = balanced_tree(r=10, h=1) - assert_true(is_isomorphic(t, star_graph(10))) + t = nx.balanced_tree(r=2, h=1) + assert is_isomorphic(t, nx.star_graph(2)) + t = nx.balanced_tree(r=5, h=1) + assert is_isomorphic(t, nx.star_graph(5)) + t = nx.balanced_tree(r=10, h=1) + assert is_isomorphic(t, nx.star_graph(10)) def test_balanced_tree_path(self): """Tests that the balanced tree with branching factor one is the @@ -49,337 +47,385 @@ def test_balanced_tree_path(self): """ # A tree of height four has five levels. - T = balanced_tree(1, 4) - P = path_graph(5) - assert_true(is_isomorphic(T, P)) + T = nx.balanced_tree(1, 4) + P = nx.path_graph(5) + assert is_isomorphic(T, P) def test_full_rary_tree(self): r = 2 n = 9 - t = full_rary_tree(r, n) - assert_equal(t.order(), n) - assert_true(is_connected(t)) - dh = degree_histogram(t) - assert_equal(dh[0], 0) # no nodes of 0 - assert_equal(dh[1], 5) # nodes of degree 1 are leaves - assert_equal(dh[r], 1) # root is degree r - assert_equal(dh[r + 1], 9 - 5 - 1) # everyone else is degree r+1 - assert_equal(len(dh), r + 2) + t = nx.full_rary_tree(r, n) + assert t.order() == n + assert nx.is_connected(t) + dh = nx.degree_histogram(t) + assert dh[0] == 0 # no nodes of 0 + assert dh[1] == 5 # nodes of degree 1 are leaves + assert dh[r] == 1 # root is degree r + assert dh[r + 1] == 9 - 5 - 1 # everyone else is degree r+1 + assert len(dh) == r + 2 def test_full_rary_tree_balanced(self): - t = full_rary_tree(2, 15) - th = balanced_tree(2, 3) - assert_true(is_isomorphic(t, th)) + t = nx.full_rary_tree(2, 15) + th = nx.balanced_tree(2, 3) + assert is_isomorphic(t, th) def test_full_rary_tree_path(self): - t = full_rary_tree(1, 10) - assert_true(is_isomorphic(t, path_graph(10))) + t = nx.full_rary_tree(1, 10) + assert is_isomorphic(t, nx.path_graph(10)) def test_full_rary_tree_empty(self): - t = full_rary_tree(0, 10) - assert_true(is_isomorphic(t, empty_graph(10))) - t = full_rary_tree(3, 0) - assert_true(is_isomorphic(t, empty_graph(0))) + t = nx.full_rary_tree(0, 10) + assert is_isomorphic(t, nx.empty_graph(10)) + t = nx.full_rary_tree(3, 0) + assert is_isomorphic(t, nx.empty_graph(0)) def test_full_rary_tree_3_20(self): - t = full_rary_tree(3, 20) - assert_equal(t.order(), 20) + t = nx.full_rary_tree(3, 20) + assert t.order() == 20 def test_barbell_graph(self): # number of nodes = 2*m1 + m2 (2 m1-complete graphs + m2-path + 2 edges) - # number of edges = 2*(number_of_edges(m1-complete graph) + m2 + 1 + # number of edges = 2*(nx.number_of_edges(m1-complete graph) + m2 + 1 m1 = 3 m2 = 5 - b = barbell_graph(m1, m2) - assert_true(number_of_nodes(b) == 2 * m1 + m2) - assert_true(number_of_edges(b) == m1 * (m1 - 1) + m2 + 1) + b = nx.barbell_graph(m1, m2) + assert nx.number_of_nodes(b) == 2 * m1 + m2 + assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1 m1 = 4 m2 = 10 - b = barbell_graph(m1, m2) - assert_true(number_of_nodes(b) == 2 * m1 + m2) - assert_true(number_of_edges(b) == m1 * (m1 - 1) + m2 + 1) + b = nx.barbell_graph(m1, m2) + assert nx.number_of_nodes(b) == 2 * m1 + m2 + assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1 m1 = 3 m2 = 20 - b = barbell_graph(m1, m2) - assert_true(number_of_nodes(b) == 2 * m1 + m2) - assert_true(number_of_edges(b) == m1 * (m1 - 1) + m2 + 1) + b = nx.barbell_graph(m1, m2) + assert nx.number_of_nodes(b) == 2 * m1 + m2 + assert nx.number_of_edges(b) == m1 * (m1 - 1) + 
m2 + 1 # Raise NetworkXError if m1<2 m1 = 1 m2 = 20 - assert_raises(networkx.exception.NetworkXError, barbell_graph, m1, m2) + pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2) # Raise NetworkXError if m2<0 m1 = 5 m2 = -2 - assert_raises(networkx.exception.NetworkXError, barbell_graph, m1, m2) + pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2) - # barbell_graph(2,m) = path_graph(m+4) + # nx.barbell_graph(2,m) = nx.path_graph(m+4) m1 = 2 m2 = 5 - b = barbell_graph(m1, m2) - assert_true(is_isomorphic(b, path_graph(m2 + 4))) + b = nx.barbell_graph(m1, m2) + assert is_isomorphic(b, nx.path_graph(m2 + 4)) m1 = 2 m2 = 10 - b = barbell_graph(m1, m2) - assert_true(is_isomorphic(b, path_graph(m2 + 4))) + b = nx.barbell_graph(m1, m2) + assert is_isomorphic(b, nx.path_graph(m2 + 4)) m1 = 2 m2 = 20 - b = barbell_graph(m1, m2) - assert_true(is_isomorphic(b, path_graph(m2 + 4))) + b = nx.barbell_graph(m1, m2) + assert is_isomorphic(b, nx.path_graph(m2 + 4)) - assert_raises(networkx.exception.NetworkXError, barbell_graph, m1, m2, - create_using=DiGraph()) + pytest.raises( + nx.NetworkXError, nx.barbell_graph, m1, m2, create_using=nx.DiGraph() + ) - mb = barbell_graph(m1, m2, create_using=MultiGraph()) + mb = nx.barbell_graph(m1, m2, create_using=nx.MultiGraph()) assert_edges_equal(mb.edges(), b.edges()) + def test_binomial_tree(self): + for n in range(0, 4): + b = nx.binomial_tree(n) + assert nx.number_of_nodes(b) == 2 ** n + assert nx.number_of_edges(b) == (2 ** n - 1) + def test_complete_graph(self): # complete_graph(m) is a connected graph with # m nodes and m*(m+1)/2 edges for m in [0, 1, 3, 5]: - g = complete_graph(m) - assert_true(number_of_nodes(g) == m) - assert_true(number_of_edges(g) == m * (m - 1) // 2) + g = nx.complete_graph(m) + assert nx.number_of_nodes(g) == m + assert nx.number_of_edges(g) == m * (m - 1) // 2 - mg = complete_graph(m, create_using=MultiGraph()) + mg = nx.complete_graph(m, create_using=nx.MultiGraph) assert_edges_equal(mg.edges(), g.edges()) - g = complete_graph("abc") - assert_nodes_equal(g.nodes(), ['a', 'b', 'c']) - assert_equal(g.size(), 3) + g = nx.complete_graph("abc") + assert_nodes_equal(g.nodes(), ["a", "b", "c"]) + assert g.size() == 3 def test_complete_digraph(self): # complete_graph(m) is a connected graph with # m nodes and m*(m+1)/2 edges for m in [0, 1, 3, 5]: - g = complete_graph(m, create_using=nx.DiGraph()) - assert_true(number_of_nodes(g) == m) - assert_true(number_of_edges(g) == m * (m - 1)) + g = nx.complete_graph(m, create_using=nx.DiGraph) + assert nx.number_of_nodes(g) == m + assert nx.number_of_edges(g) == m * (m - 1) - g = complete_graph("abc", create_using=nx.DiGraph()) - assert_equal(len(g), 3) - assert_equal(g.size(), 6) - assert_true(g.is_directed()) + g = nx.complete_graph("abc", create_using=nx.DiGraph) + assert len(g) == 3 + assert g.size() == 6 + assert g.is_directed() def test_circular_ladder_graph(self): - G = circular_ladder_graph(5) - assert_raises(networkx.exception.NetworkXError, circular_ladder_graph, - 5, create_using=DiGraph()) - mG = circular_ladder_graph(5, create_using=MultiGraph()) + G = nx.circular_ladder_graph(5) + pytest.raises( + nx.NetworkXError, nx.circular_ladder_graph, 5, create_using=nx.DiGraph + ) + mG = nx.circular_ladder_graph(5, create_using=nx.MultiGraph) assert_edges_equal(mG.edges(), G.edges()) def test_circulant_graph(self): # Ci_n(1) is the cycle graph for all n - Ci6_1 = circulant_graph(6, [1]) - C6 = cycle_graph(6) + Ci6_1 = nx.circulant_graph(6, [1]) + C6 = nx.cycle_graph(6) 
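# [Editorial aside -- a hedged sketch, not part of the upstream patch.] A
# circulant graph Ci_n(d_1, ..., d_k) joins each node i to (i +/- d_j) mod n,
# which is why Ci_6(1) must match the 6-cycle compared just below. Checked
# edge-by-edge after normalizing the endpoint order:
import networkx as nx

e1 = sorted(tuple(sorted(e)) for e in nx.circulant_graph(6, [1]).edges())
e2 = sorted(tuple(sorted(e)) for e in nx.cycle_graph(6).edges())
assert e1 == e2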
assert_edges_equal(Ci6_1.edges(), C6.edges()) # Ci_n(1, 2, ..., n div 2) is the complete graph for all n - Ci7 = circulant_graph(7, [1, 2, 3]) - K7 = complete_graph(7) + Ci7 = nx.circulant_graph(7, [1, 2, 3]) + K7 = nx.complete_graph(7) assert_edges_equal(Ci7.edges(), K7.edges()) # Ci_6(1, 3) is K_3,3 i.e. the utility graph - Ci6_1_3 = circulant_graph(6, [1, 3]) - K3_3 = complete_bipartite_graph(3, 3) - assert_true(is_isomorphic(Ci6_1_3, K3_3)) + Ci6_1_3 = nx.circulant_graph(6, [1, 3]) + K3_3 = nx.complete_bipartite_graph(3, 3) + assert is_isomorphic(Ci6_1_3, K3_3) def test_cycle_graph(self): - G = cycle_graph(4) + G = nx.cycle_graph(4) assert_edges_equal(G.edges(), [(0, 1), (0, 3), (1, 2), (2, 3)]) - mG = cycle_graph(4, create_using=MultiGraph()) + mG = nx.cycle_graph(4, create_using=nx.MultiGraph) assert_edges_equal(mG.edges(), [(0, 1), (0, 3), (1, 2), (2, 3)]) - G = cycle_graph(4, create_using=DiGraph()) - assert_false(G.has_edge(2, 1)) - assert_true(G.has_edge(1, 2)) - assert_true(G.is_directed()) - - G = cycle_graph("abc") - assert_equal(len(G), 3) - assert_equal(G.size(), 3) - g = cycle_graph("abc", nx.DiGraph()) - assert_equal(len(g), 3) - assert_equal(g.size(), 3) - assert_true(g.is_directed()) + G = nx.cycle_graph(4, create_using=nx.DiGraph) + assert not G.has_edge(2, 1) + assert G.has_edge(1, 2) + assert G.is_directed() + + G = nx.cycle_graph("abc") + assert len(G) == 3 + assert G.size() == 3 + g = nx.cycle_graph("abc", nx.DiGraph) + assert len(g) == 3 + assert g.size() == 3 + assert g.is_directed() def test_dorogovtsev_goltsev_mendes_graph(self): - G = dorogovtsev_goltsev_mendes_graph(0) + G = nx.dorogovtsev_goltsev_mendes_graph(0) assert_edges_equal(G.edges(), [(0, 1)]) assert_nodes_equal(list(G), [0, 1]) - G = dorogovtsev_goltsev_mendes_graph(1) + G = nx.dorogovtsev_goltsev_mendes_graph(1) assert_edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)]) - assert_equal(average_clustering(G), 1.0) - assert_equal(sorted(triangles(G).values()), [1, 1, 1]) - G = dorogovtsev_goltsev_mendes_graph(10) - assert_equal(number_of_nodes(G), 29526) - assert_equal(number_of_edges(G), 59049) - assert_equal(G.degree(0), 1024) - assert_equal(G.degree(1), 1024) - assert_equal(G.degree(2), 1024) - - assert_raises(networkx.exception.NetworkXError, - dorogovtsev_goltsev_mendes_graph, 7, - create_using=DiGraph()) - assert_raises(networkx.exception.NetworkXError, - dorogovtsev_goltsev_mendes_graph, 7, - create_using=MultiGraph()) + assert nx.average_clustering(G) == 1.0 + assert sorted(nx.triangles(G).values()) == [1, 1, 1] + G = nx.dorogovtsev_goltsev_mendes_graph(10) + assert nx.number_of_nodes(G) == 29526 + assert nx.number_of_edges(G) == 59049 + assert G.degree(0) == 1024 + assert G.degree(1) == 1024 + assert G.degree(2) == 1024 + + pytest.raises( + nx.NetworkXError, + nx.dorogovtsev_goltsev_mendes_graph, + 7, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + nx.dorogovtsev_goltsev_mendes_graph, + 7, + create_using=nx.MultiGraph, + ) + + def test_create_using(self): + G = nx.empty_graph() + assert isinstance(G, nx.Graph) + pytest.raises(TypeError, nx.empty_graph, create_using=0.0) + pytest.raises(TypeError, nx.empty_graph, create_using="Graph") + + G = nx.empty_graph(create_using=nx.MultiGraph) + assert isinstance(G, nx.MultiGraph) + G = nx.empty_graph(create_using=nx.DiGraph) + assert isinstance(G, nx.DiGraph) + + G = nx.empty_graph(create_using=nx.DiGraph, default=nx.MultiGraph) + assert isinstance(G, nx.DiGraph) + G = nx.empty_graph(create_using=None, default=nx.MultiGraph) + assert 
isinstance(G, nx.MultiGraph) + G = nx.empty_graph(default=nx.MultiGraph) + assert isinstance(G, nx.MultiGraph) + + G = nx.path_graph(5) + H = nx.empty_graph(create_using=G) + assert not H.is_multigraph() + assert not H.is_directed() + assert len(H) == 0 + assert G is H + + H = nx.empty_graph(create_using=nx.MultiGraph()) + assert H.is_multigraph() + assert not H.is_directed() + assert G is not H def test_empty_graph(self): - G = empty_graph() - assert_equal(number_of_nodes(G), 0) - G = empty_graph(42) - assert_equal(number_of_nodes(G), 42) - assert_equal(number_of_edges(G), 0) + G = nx.empty_graph() + assert nx.number_of_nodes(G) == 0 + G = nx.empty_graph(42) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 - G = empty_graph("abc") - assert_equal(len(G), 3) - assert_equal(G.size(), 0) + G = nx.empty_graph("abc") + assert len(G) == 3 + assert G.size() == 0 # create empty digraph - G = empty_graph(42, create_using=DiGraph(name="duh")) - assert_equal(number_of_nodes(G), 42) - assert_equal(number_of_edges(G), 0) - assert_true(isinstance(G, DiGraph)) + G = nx.empty_graph(42, create_using=nx.DiGraph(name="duh")) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + assert isinstance(G, nx.DiGraph) # create empty multigraph - G = empty_graph(42, create_using=MultiGraph(name="duh")) - assert_equal(number_of_nodes(G), 42) - assert_equal(number_of_edges(G), 0) - assert_true(isinstance(G, MultiGraph)) + G = nx.empty_graph(42, create_using=nx.MultiGraph(name="duh")) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + assert isinstance(G, nx.MultiGraph) # create empty graph from another - pete = petersen_graph() - G = empty_graph(42, create_using=pete) - assert_equal(number_of_nodes(G), 42) - assert_equal(number_of_edges(G), 0) - assert_true(isinstance(G, Graph)) + pete = nx.petersen_graph() + G = nx.empty_graph(42, create_using=pete) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + assert isinstance(G, nx.Graph) def test_ladder_graph(self): - for i, G in [(0, empty_graph(0)), (1, path_graph(2)), - (2, hypercube_graph(2)), (10, grid_graph([2, 10]))]: - assert_true(is_isomorphic(ladder_graph(i), G)) - - assert_raises(networkx.exception.NetworkXError, - ladder_graph, 2, create_using=DiGraph()) - - g = ladder_graph(2) - mg = ladder_graph(2, create_using=MultiGraph()) + for i, G in [ + (0, nx.empty_graph(0)), + (1, nx.path_graph(2)), + (2, nx.hypercube_graph(2)), + (10, nx.grid_graph([2, 10])), + ]: + assert is_isomorphic(nx.ladder_graph(i), G) + + pytest.raises(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph) + + g = nx.ladder_graph(2) + mg = nx.ladder_graph(2, create_using=nx.MultiGraph) assert_edges_equal(mg.edges(), g.edges()) def test_lollipop_graph(self): # number of nodes = m1 + m2 - # number of edges = number_of_edges(complete_graph(m1)) + m2 + # number of edges = nx.number_of_edges(nx.complete_graph(m1)) + m2 for m1, m2 in [(3, 5), (4, 10), (3, 20)]: - b = lollipop_graph(m1, m2) - assert_equal(number_of_nodes(b), m1 + m2) - assert_equal(number_of_edges(b), m1 * (m1 - 1) / 2 + m2) + b = nx.lollipop_graph(m1, m2) + assert nx.number_of_nodes(b) == m1 + m2 + assert nx.number_of_edges(b) == m1 * (m1 - 1) / 2 + m2 # Raise NetworkXError if m<2 - assert_raises(networkx.exception.NetworkXError, - lollipop_graph, 1, 20) + pytest.raises(nx.NetworkXError, nx.lollipop_graph, 1, 20) # Raise NetworkXError if n<0 - assert_raises(networkx.exception.NetworkXError, - lollipop_graph, 5, -2) + 
pytest.raises(nx.NetworkXError, nx.lollipop_graph, 5, -2) # lollipop_graph(2,m) = path_graph(m+2) for m1, m2 in [(2, 5), (2, 10), (2, 20)]: - b = lollipop_graph(m1, m2) - assert_true(is_isomorphic(b, path_graph(m2 + 2))) + b = nx.lollipop_graph(m1, m2) + assert is_isomorphic(b, nx.path_graph(m2 + 2)) - assert_raises(networkx.exception.NetworkXError, - lollipop_graph, m1, m2, create_using=DiGraph()) + pytest.raises( + nx.NetworkXError, nx.lollipop_graph, m1, m2, create_using=nx.DiGraph + ) - mb = lollipop_graph(m1, m2, create_using=MultiGraph()) + mb = nx.lollipop_graph(m1, m2, create_using=nx.MultiGraph) assert_edges_equal(mb.edges(), b.edges()) - g = lollipop_graph([1, 2, 3, 4], "abc") - assert_equal(len(g), 7) - assert_equal(g.size(), 9) + g = nx.lollipop_graph([1, 2, 3, 4], "abc") + assert len(g) == 7 + assert g.size() == 9 def test_null_graph(self): - assert_equal(number_of_nodes(null_graph()), 0) + assert nx.number_of_nodes(nx.null_graph()) == 0 def test_path_graph(self): - p = path_graph(0) - assert_true(is_isomorphic(p, null_graph())) + p = nx.path_graph(0) + assert is_isomorphic(p, nx.null_graph()) - p = path_graph(1) - assert_true(is_isomorphic(p, empty_graph(1))) + p = nx.path_graph(1) + assert is_isomorphic(p, nx.empty_graph(1)) - p = path_graph(10) - assert_true(is_connected(p)) - assert_equal(sorted(d for n, d in p.degree()), - [1, 1, 2, 2, 2, 2, 2, 2, 2, 2]) - assert_equal(p.order() - 1, p.size()) + p = nx.path_graph(10) + assert nx.is_connected(p) + assert sorted(d for n, d in p.degree()) == [1, 1, 2, 2, 2, 2, 2, 2, 2, 2] + assert p.order() - 1 == p.size() - dp = path_graph(3, create_using=DiGraph()) - assert_true(dp.has_edge(0, 1)) - assert_false(dp.has_edge(1, 0)) + dp = nx.path_graph(3, create_using=nx.DiGraph) + assert dp.has_edge(0, 1) + assert not dp.has_edge(1, 0) - mp = path_graph(10, create_using=MultiGraph()) + mp = nx.path_graph(10, create_using=nx.MultiGraph) assert_edges_equal(mp.edges(), p.edges()) - G = path_graph("abc") - assert_equal(len(G), 3) - assert_equal(G.size(), 2) - g = path_graph("abc", nx.DiGraph()) - assert_equal(len(g), 3) - assert_equal(g.size(), 2) - assert_true(g.is_directed()) + G = nx.path_graph("abc") + assert len(G) == 3 + assert G.size() == 2 + g = nx.path_graph("abc", nx.DiGraph) + assert len(g) == 3 + assert g.size() == 2 + assert g.is_directed() def test_star_graph(self): - assert_true(is_isomorphic(star_graph(0), empty_graph(1))) - assert_true(is_isomorphic(star_graph(1), path_graph(2))) - assert_true(is_isomorphic(star_graph(2), path_graph(3))) - assert_true(is_isomorphic(star_graph(5), nx.complete_bipartite_graph(1, 5))) + star_graph = nx.star_graph + assert is_isomorphic(star_graph(0), nx.empty_graph(1)) + assert is_isomorphic(star_graph(1), nx.path_graph(2)) + assert is_isomorphic(star_graph(2), nx.path_graph(3)) + assert is_isomorphic(star_graph(5), nx.complete_bipartite_graph(1, 5)) s = star_graph(10) - assert_equal(sorted(d for n, d in s.degree()), - [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10]) + assert sorted(d for n, d in s.degree()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10] - assert_raises(networkx.exception.NetworkXError, - star_graph, 10, create_using=DiGraph()) + pytest.raises(nx.NetworkXError, star_graph, 10, create_using=nx.DiGraph) - ms = star_graph(10, create_using=MultiGraph()) + ms = star_graph(10, create_using=nx.MultiGraph) assert_edges_equal(ms.edges(), s.edges()) G = star_graph("abcdefg") - assert_equal(len(G), 7) - assert_equal(G.size(), 6) + assert len(G) == 7 + assert G.size() == 6 def test_trivial_graph(self): - 
assert_equal(number_of_nodes(trivial_graph()), 1) + assert nx.number_of_nodes(nx.trivial_graph()) == 1 def test_turan_graph(self): - assert_equal(number_of_edges(turan_graph(13, 4)), 63) - assert_true(is_isomorphic(turan_graph(13, 4), complete_multipartite_graph(3, 4, 3, 3))) + assert nx.number_of_edges(nx.turan_graph(13, 4)) == 63 + assert is_isomorphic( + nx.turan_graph(13, 4), nx.complete_multipartite_graph(3, 4, 3, 3) + ) def test_wheel_graph(self): - for n, G in [(0, null_graph()), (1, empty_graph(1)), - (2, path_graph(2)), (3, complete_graph(3)), - (4, complete_graph(4))]: - g = wheel_graph(n) - assert_true(is_isomorphic(g, G)) - - g = wheel_graph(10) - assert_equal(sorted(d for n, d in g.degree()), - [3, 3, 3, 3, 3, 3, 3, 3, 3, 9]) - - assert_raises(networkx.exception.NetworkXError, - wheel_graph, 10, create_using=DiGraph()) - - mg = wheel_graph(10, create_using=MultiGraph()) + for n, G in [ + (0, nx.null_graph()), + (1, nx.empty_graph(1)), + (2, nx.path_graph(2)), + (3, nx.complete_graph(3)), + (4, nx.complete_graph(4)), + ]: + g = nx.wheel_graph(n) + assert is_isomorphic(g, G) + + g = nx.wheel_graph(10) + assert sorted(d for n, d in g.degree()) == [3, 3, 3, 3, 3, 3, 3, 3, 3, 9] + + pytest.raises(nx.NetworkXError, nx.wheel_graph, 10, create_using=nx.DiGraph) + + mg = nx.wheel_graph(10, create_using=nx.MultiGraph()) assert_edges_equal(mg.edges(), g.edges()) - G = wheel_graph("abc") - assert_equal(len(G), 3) - assert_equal(G.size(), 3) + G = nx.wheel_graph("abc") + assert len(G) == 3 + assert G.size() == 3 def test_complete_0_partite_graph(self): """Tests that the complete 0-partite graph is the null graph.""" @@ -412,10 +458,10 @@ def test_complete_multipartite_graph(self): # Within each block, no two vertices should be adjacent. for block in blocks: for u, v in itertools.combinations_with_replacement(block, 2): - assert_true(v not in G[u]) - assert_equal(G.nodes[u], G.nodes[v]) + assert v not in G[u] + assert G.nodes[u] == G.nodes[v] # Across blocks, all vertices should be adjacent. for (block1, block2) in itertools.combinations(blocks, 2): for u, v in itertools.product(block1, block2): - assert_true(v in G[u]) - assert_not_equal(G.nodes[u], G.nodes[v]) + assert v in G[u] + assert G.nodes[u] != G.nodes[v] diff --git a/networkx/generators/tests/test_cographs.py b/networkx/generators/tests/test_cographs.py new file mode 100644 index 0000000..d357af1 --- /dev/null +++ b/networkx/generators/tests/test_cographs.py @@ -0,0 +1,20 @@ +"""Unit tests for the :mod:`networkx.generators.cographs` module. 
+ +""" + +import networkx as nx + + +def test_random_cograph(): + n = 3 + G = nx.random_cograph(n) + + assert len(G) == 2 ** n + + # Every connected subgraph of G has diameter <= 2 + if nx.is_connected(G): + assert nx.diameter(G) <= 2 + else: + components = nx.connected_components(G) + for component in components: + assert nx.diameter(G.subgraph(component)) <= 2 diff --git a/networkx/generators/tests/test_community.py b/networkx/generators/tests/test_community.py index f51c866..faf476d 100644 --- a/networkx/generators/tests/test_community.py +++ b/networkx/generators/tests/test_community.py @@ -1,146 +1,267 @@ import networkx as nx -from nose.tools import * +import pytest def test_random_partition_graph(): - G = nx.random_partition_graph([3, 3, 3], 1, 0) - C = G.graph['partition'] - assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])]) - assert_equal(len(G), 9) - assert_equal(len(list(G.edges())), 9) + G = nx.random_partition_graph([3, 3, 3], 1, 0, seed=42) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 9 G = nx.random_partition_graph([3, 3, 3], 0, 1) - C = G.graph['partition'] - assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])]) - assert_equal(len(G), 9) - assert_equal(len(list(G.edges())), 27) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 27 G = nx.random_partition_graph([3, 3, 3], 1, 0, directed=True) - C = G.graph['partition'] - assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])]) - assert_equal(len(G), 9) - assert_equal(len(list(G.edges())), 18) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 18 G = nx.random_partition_graph([3, 3, 3], 0, 1, directed=True) - C = G.graph['partition'] - assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])]) - assert_equal(len(G), 9) - assert_equal(len(list(G.edges())), 54) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 54 G = nx.random_partition_graph([1, 2, 3, 4, 5], 0.5, 0.1) - C = G.graph['partition'] - assert_equal(C, [set([0]), set([1, 2]), set([3, 4, 5]), - set([6, 7, 8, 9]), set([10, 11, 12, 13, 14])]) - assert_equal(len(G), 15) + C = G.graph["partition"] + assert C == [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}, {10, 11, 12, 13, 14}] + assert len(G) == 15 - assert_raises(nx.NetworkXError, nx.random_partition_graph, [1, 2, 3], 1.1, 0.1) - assert_raises(nx.NetworkXError, nx.random_partition_graph, [1, 2, 3], -0.1, 0.1) - assert_raises(nx.NetworkXError, nx.random_partition_graph, [1, 2, 3], 0.1, 1.1) - assert_raises(nx.NetworkXError, nx.random_partition_graph, [1, 2, 3], 0.1, -0.1) + rpg = nx.random_partition_graph + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 1.1, 0.1) + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], -0.1, 0.1) + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 0.1, 1.1) + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 0.1, -0.1) def test_planted_partition_graph(): - G = nx.planted_partition_graph(4, 3, 1, 0) - C = G.graph['partition'] - assert_equal(len(C), 4) - assert_equal(len(G), 12) - assert_equal(len(list(G.edges())), 12) + G = nx.planted_partition_graph(4, 3, 1, 0, seed=42) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 12 G = nx.planted_partition_graph(4, 3, 0, 1) - C = G.graph['partition'] - 
assert_equal(len(C), 4) - assert_equal(len(G), 12) - assert_equal(len(list(G.edges())), 54) - - G = nx.planted_partition_graph(10, 4, .5, .1, seed=42) - C = G.graph['partition'] - assert_equal(len(C), 10) - assert_equal(len(G), 40) - # number of edges is random, so can't be tested for exact value? - # assert_equal(len(list(G.edges())),108) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 54 + + G = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42) + C = G.graph["partition"] + assert len(C) == 10 + assert len(G) == 40 G = nx.planted_partition_graph(4, 3, 1, 0, directed=True) - C = G.graph['partition'] - assert_equal(len(C), 4) - assert_equal(len(G), 12) - assert_equal(len(list(G.edges())), 24) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 24 G = nx.planted_partition_graph(4, 3, 0, 1, directed=True) - C = G.graph['partition'] - assert_equal(len(C), 4) - assert_equal(len(G), 12) - assert_equal(len(list(G.edges())), 108) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 108 - G = nx.planted_partition_graph(10, 4, .5, .1, seed=42, directed=True) - C = G.graph['partition'] - assert_equal(len(C), 10) - assert_equal(len(G), 40) - assert_equal(len(list(G.edges())), 218) + G = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42, directed=True) + C = G.graph["partition"] + assert len(C) == 10 + assert len(G) == 40 - assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 1.1, 0.1) - assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, -0.1, 0.1) - assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1, 1.1) - assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1, -0.1) + ppg = nx.planted_partition_graph + pytest.raises(nx.NetworkXError, ppg, 3, 3, 1.1, 0.1) + pytest.raises(nx.NetworkXError, ppg, 3, 3, -0.1, 0.1) + pytest.raises(nx.NetworkXError, ppg, 3, 3, 0.1, 1.1) + pytest.raises(nx.NetworkXError, ppg, 3, 3, 0.1, -0.1) def test_relaxed_caveman_graph(): G = nx.relaxed_caveman_graph(4, 3, 0) - assert_equal(len(G), 12) + assert len(G) == 12 G = nx.relaxed_caveman_graph(4, 3, 1) - assert_equal(len(G), 12) + assert len(G) == 12 G = nx.relaxed_caveman_graph(4, 3, 0.5) - assert_equal(len(G), 12) + assert len(G) == 12 + G = nx.relaxed_caveman_graph(4, 3, 0.5, seed=42) + assert len(G) == 12 def test_connected_caveman_graph(): G = nx.connected_caveman_graph(4, 3) - assert_equal(len(G), 12) + assert len(G) == 12 G = nx.connected_caveman_graph(1, 5) K5 = nx.complete_graph(5) K5.remove_edge(3, 4) - assert_true(nx.is_isomorphic(G, K5)) + assert nx.is_isomorphic(G, K5) + + # need at least 2 nodes in each clique + pytest.raises(nx.NetworkXError, nx.connected_caveman_graph, 4, 1) def test_caveman_graph(): G = nx.caveman_graph(4, 3) - assert_equal(len(G), 12) + assert len(G) == 12 G = nx.caveman_graph(1, 5) K5 = nx.complete_graph(5) - assert_true(nx.is_isomorphic(G, K5)) + assert nx.is_isomorphic(G, K5) def test_gaussian_random_partition_graph(): G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01) - assert_equal(len(G), 100) - assert_raises(nx.NetworkXError, - nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0) + assert len(G) == 100 + G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, directed=True) + assert len(G) == 100 + G = nx.gaussian_random_partition_graph( + 100, 10, 10, 0.3, 0.01, directed=False, seed=42 + ) + assert len(G) == 100 + assert not isinstance(G, 
nx.DiGraph) + G = nx.gaussian_random_partition_graph( + 100, 10, 10, 0.3, 0.01, directed=True, seed=42 + ) + assert len(G) == 100 + assert isinstance(G, nx.DiGraph) + pytest.raises( + nx.NetworkXError, nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0 + ) def test_ring_of_cliques(): - for i in range(2, 20): - for j in range(2, 20): + for i in range(2, 20, 3): + for j in range(2, 20, 3): G = nx.ring_of_cliques(i, j) - assert_equal(G.number_of_nodes(), i * j) + assert G.number_of_nodes() == i * j if i != 2 or j != 1: expected_num_edges = i * (((j * (j - 1)) // 2) + 1) else: # the edge that already exists cannot be duplicated expected_num_edges = i * (((j * (j - 1)) // 2) + 1) - 1 - assert_equal(G.number_of_edges(), expected_num_edges) - assert_raises(nx.NetworkXError, nx.ring_of_cliques, 1, 5) - assert_raises(nx.NetworkXError, nx.ring_of_cliques, 3, 0) + assert G.number_of_edges() == expected_num_edges + pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 1, 5) + pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 3, 0) def test_windmill_graph(): - for n in range(2, 20): - for k in range(2, 20): + for n in range(2, 20, 3): + for k in range(2, 20, 3): G = nx.windmill_graph(n, k) - assert_equal(G.number_of_nodes(), (k - 1) * n + 1) - assert_equal(G.number_of_edges(), n * k * (k - 1) / 2) - assert_equal(G.degree(0), G.number_of_nodes() - 1) + assert G.number_of_nodes() == (k - 1) * n + 1 + assert G.number_of_edges() == n * k * (k - 1) / 2 + assert G.degree(0) == G.number_of_nodes() - 1 for i in range(1, G.number_of_nodes()): - assert_equal(G.degree(i), k - 1) - assert_raises(nx.NetworkXError, nx.ring_of_cliques, 1, 3) - assert_raises(nx.NetworkXError, nx.ring_of_cliques, 15, 0) + assert G.degree(i) == k - 1 + pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 1, 3) + pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 15, 0) + + +def test_stochastic_block_model(): + sizes = [75, 75, 300] + probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + G = nx.stochastic_block_model(sizes, probs, seed=0) + C = G.graph["partition"] + assert len(C) == 3 + assert len(G) == 450 + assert G.size() == 22160 + + GG = nx.stochastic_block_model(sizes, probs, range(450), seed=0) + assert G.nodes == GG.nodes + + # Test Exceptions + sbm = nx.stochastic_block_model + badnodelist = list(range(400)) # not enough nodes to match sizes + badprobs1 = [[0.25, 0.05, 1.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + badprobs2 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]] + probs_rect1 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07]] + probs_rect2 = [[0.25, 0.05], [0.05, -0.35], [0.02, 0.07]] + asymprobs = [[0.25, 0.05, 0.01], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]] + pytest.raises(nx.NetworkXException, sbm, sizes, badprobs1) + pytest.raises(nx.NetworkXException, sbm, sizes, badprobs2) + pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect1, directed=True) + pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect2, directed=True) + pytest.raises(nx.NetworkXException, sbm, sizes, asymprobs, directed=False) + pytest.raises(nx.NetworkXException, sbm, sizes, probs, badnodelist) + nodelist = [0] + list(range(449)) # repeated node name in nodelist + pytest.raises(nx.NetworkXException, sbm, sizes, probs, nodelist) + + # Extra keyword arguments test + GG = nx.stochastic_block_model(sizes, probs, seed=0, selfloops=True) + assert G.nodes == GG.nodes + GG = nx.stochastic_block_model(sizes, probs, selfloops=True, directed=True) + assert G.nodes == GG.nodes + GG = nx.stochastic_block_model(sizes, 
probs, seed=0, sparse=False) + assert G.nodes == GG.nodes + + +def test_generator(): + n = 250 + tau1 = 3 + tau2 = 1.5 + mu = 0.1 + G = nx.LFR_benchmark_graph( + n, tau1, tau2, mu, average_degree=5, min_community=20, seed=10 + ) + assert len(G) == 250 + C = {frozenset(G.nodes[v]["community"]) for v in G} + assert nx.community.is_partition(G.nodes(), C) + + +def test_invalid_tau1(): + with pytest.raises(nx.NetworkXError): + n = 100 + tau1 = 2 + tau2 = 1 + mu = 0.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_invalid_tau2(): + with pytest.raises(nx.NetworkXError): + n = 100 + tau1 = 1 + tau2 = 2 + mu = 0.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_mu_too_large(): + with pytest.raises(nx.NetworkXError): + n = 100 + tau1 = 2 + tau2 = 2 + mu = 1.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_mu_too_small(): + with pytest.raises(nx.NetworkXError): + n = 100 + tau1 = 2 + tau2 = 2 + mu = -1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_both_degrees_none(): + with pytest.raises(nx.NetworkXError): + n = 100 + tau1 = 2 + tau2 = 2 + mu = -1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu) + + +def test_neither_degrees_none(): + with pytest.raises(nx.NetworkXError): + n = 100 + tau1 = 2 + tau2 = 2 + mu = -1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, average_degree=5) diff --git a/networkx/generators/tests/test_degree_seq.py b/networkx/generators/tests/test_degree_seq.py index bde2c79..70a63b6 100644 --- a/networkx/generators/tests/test_degree_seq.py +++ b/networkx/generators/tests/test_degree_seq.py @@ -1,12 +1,9 @@ -from nose.tools import assert_equal -from nose.tools import assert_raises -from nose.tools import assert_true -from nose.tools import raises +import pytest import networkx as nx -class TestConfigurationModel(object): +class TestConfigurationModel: """Unit tests for the :func:`~networkx.configuration_model` function. 
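# [Editorial aside -- a hedged sketch, not part of the upstream patch.] The
# hunks below exercise nx.configuration_model, which pairs degree "stubs"
# uniformly at random: the result is a multigraph that may contain self-loops
# and parallel edges, but its degree sequence always equals the input exactly:
import networkx as nx

deg_seq = [3, 3, 2, 2, 1, 1]                 # even sum, as required
G = nx.configuration_model(deg_seq, seed=42)
assert G.is_multigraph()
assert sorted(d for _, d in G.degree()) == sorted(deg_seq)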
@@ -15,7 +12,7 @@ class TestConfigurationModel(object): def test_empty_degree_sequence(self): """Tests that an empty degree sequence yields the null graph.""" G = nx.configuration_model([]) - assert_equal(len(G), 0) + assert len(G) == 0 def test_degree_zero(self): """Tests that a degree sequence of all zeros yields the empty @@ -23,8 +20,8 @@ def test_degree_zero(self): """ G = nx.configuration_model([0, 0, 0]) - assert_equal(len(G), 3) - assert_equal(G.number_of_edges(), 0) + assert len(G) == 3 + assert G.number_of_edges() == 0 def test_degree_sequence(self): """Tests that the degree sequence of the generated graph matches @@ -33,11 +30,32 @@ def test_degree_sequence(self): """ deg_seq = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] G = nx.configuration_model(deg_seq, seed=12345678) - assert_equal(sorted((d for n, d in G.degree()), reverse=True), - [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]) - assert_equal(sorted((d for n, d in G.degree(range(len(deg_seq)))), - reverse=True), - [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]) + assert sorted((d for n, d in G.degree()), reverse=True) == [ + 5, + 3, + 3, + 3, + 3, + 2, + 2, + 2, + 1, + 1, + 1, + ] + assert sorted((d for n, d in G.degree(range(len(deg_seq)))), reverse=True) == [ + 5, + 3, + 3, + 3, + 3, + 2, + 2, + 2, + 1, + 1, + 1, + ] def test_random_seed(self): """Tests that each call with the same random seed generates the @@ -47,98 +65,102 @@ def test_random_seed(self): deg_seq = [3] * 12 G1 = nx.configuration_model(deg_seq, seed=1000) G2 = nx.configuration_model(deg_seq, seed=1000) - assert_true(nx.is_isomorphic(G1, G2)) + assert nx.is_isomorphic(G1, G2) G1 = nx.configuration_model(deg_seq, seed=10) G2 = nx.configuration_model(deg_seq, seed=10) - assert_true(nx.is_isomorphic(G1, G2)) + assert nx.is_isomorphic(G1, G2) - @raises(nx.NetworkXNotImplemented) def test_directed_disallowed(self): """Tests that attempting to create a configuration model graph using a directed graph yields an exception. """ - nx.configuration_model([], create_using=nx.DiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + nx.configuration_model([], create_using=nx.DiGraph()) - @raises(nx.NetworkXError) def test_odd_degree_sum(self): """Tests that a degree sequence whose sum is odd yields an exception. 
""" - nx.configuration_model([1, 2]) + with pytest.raises(nx.NetworkXError): + nx.configuration_model([1, 2]) -@raises(nx.NetworkXError) def test_directed_configuation_raise_unequal(): - zin = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1] - zout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 2] - nx.directed_configuration_model(zin, zout) + with pytest.raises(nx.NetworkXError): + zin = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1] + zout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 2] + nx.directed_configuration_model(zin, zout) -def test_directed_configuation_mode(): +def test_directed_configuation_model(): G = nx.directed_configuration_model([], [], seed=0) - assert_equal(len(G), 0) + assert len(G) == 0 + + +def test_simple_directed_configuation_model(): + G = nx.directed_configuration_model([1, 1], [1, 1], seed=0) + assert len(G) == 2 def test_expected_degree_graph_empty(): # empty graph has empty degree sequence deg_seq = [] G = nx.expected_degree_graph(deg_seq) - assert_equal(dict(G.degree()), {}) + assert dict(G.degree()) == {} def test_expected_degree_graph(): # test that fixed seed delivers the same graph deg_seq = [3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3] G1 = nx.expected_degree_graph(deg_seq, seed=1000) - assert_equal(len(G1), 12) + assert len(G1) == 12 G2 = nx.expected_degree_graph(deg_seq, seed=1000) - assert_true(nx.is_isomorphic(G1, G2)) + assert nx.is_isomorphic(G1, G2) G1 = nx.expected_degree_graph(deg_seq, seed=10) G2 = nx.expected_degree_graph(deg_seq, seed=10) - assert_true(nx.is_isomorphic(G1, G2)) + assert nx.is_isomorphic(G1, G2) def test_expected_degree_graph_selfloops(): deg_seq = [3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3] G1 = nx.expected_degree_graph(deg_seq, seed=1000, selfloops=False) G2 = nx.expected_degree_graph(deg_seq, seed=1000, selfloops=False) - assert_true(nx.is_isomorphic(G1, G2)) - assert_equal(len(G1), 12) + assert nx.is_isomorphic(G1, G2) + assert len(G1) == 12 def test_expected_degree_graph_skew(): deg_seq = [10, 2, 2, 2, 2] G1 = nx.expected_degree_graph(deg_seq, seed=1000) G2 = nx.expected_degree_graph(deg_seq, seed=1000) - assert_true(nx.is_isomorphic(G1, G2)) - assert_equal(len(G1), 5) + assert nx.is_isomorphic(G1, G2) + assert len(G1) == 5 def test_havel_hakimi_construction(): G = nx.havel_hakimi_graph([]) - assert_equal(len(G), 0) + assert len(G) == 0 z = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert_raises(nx.NetworkXError, nx.havel_hakimi_graph, z) + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z) z = ["A", 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert_raises(nx.NetworkXError, nx.havel_hakimi_graph, z) + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z) z = [5, 4, 3, 3, 3, 2, 2, 2] G = nx.havel_hakimi_graph(z) G = nx.configuration_model(z) z = [6, 5, 4, 4, 2, 1, 1, 1] - assert_raises(nx.NetworkXError, nx.havel_hakimi_graph, z) + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z) z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2] G = nx.havel_hakimi_graph(z) - assert_raises(nx.NetworkXError, nx.havel_hakimi_graph, z, - create_using=nx.DiGraph()) + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z, create_using=nx.DiGraph()) def test_directed_havel_hakimi(): @@ -152,59 +174,57 @@ def test_directed_havel_hakimi(): G2 = nx.directed_havel_hakimi_graph(din1, dout1) din2 = list(d for n, d in G2.in_degree()) dout2 = list(d for n, d in G2.out_degree()) - assert_equal(sorted(din1), sorted(din2)) - assert_equal(sorted(dout1), sorted(dout2)) + assert sorted(din1) == sorted(din2) + assert sorted(dout1) == sorted(dout2) # Test non-graphical sequence dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] din = [103, 102, 102, 
102, 102, 102, 102, 102, 102, 102] - assert_raises(nx.exception.NetworkXError, - nx.directed_havel_hakimi_graph, din, dout) + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) # Test valid sequences dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] din = [2, 2, 2, 2, 2, 2, 2, 2, 0, 2] G2 = nx.directed_havel_hakimi_graph(din, dout) dout2 = (d for n, d in G2.out_degree()) din2 = (d for n, d in G2.in_degree()) - assert_equal(sorted(dout), sorted(dout2)) - assert_equal(sorted(din), sorted(din2)) + assert sorted(dout) == sorted(dout2) + assert sorted(din) == sorted(din2) # Test unequal sums din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2] - assert_raises(nx.exception.NetworkXError, - nx.directed_havel_hakimi_graph, din, dout) + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) # Test for negative values din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, -2] - assert_raises(nx.exception.NetworkXError, - nx.directed_havel_hakimi_graph, din, dout) + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) def test_degree_sequence_tree(): z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] G = nx.degree_sequence_tree(z) - assert_equal(len(G), len(z)) - assert_true(len(list(G.edges())) == sum(z) / 2) + assert len(G) == len(z) + assert len(list(G.edges())) == sum(z) / 2 - assert_raises(nx.NetworkXError, nx.degree_sequence_tree, z, - create_using=nx.DiGraph()) + pytest.raises( + nx.NetworkXError, nx.degree_sequence_tree, z, create_using=nx.DiGraph() + ) z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - assert_raises(nx.NetworkXError, nx.degree_sequence_tree, z) + pytest.raises(nx.NetworkXError, nx.degree_sequence_tree, z) def test_random_degree_sequence_graph(): d = [1, 2, 2, 3] - G = nx.random_degree_sequence_graph(d) - assert_equal(d, sorted(d for n, d in G.degree())) + G = nx.random_degree_sequence_graph(d, seed=42) + assert d == sorted(d for n, d in G.degree()) def test_random_degree_sequence_graph_raise(): z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - assert_raises(nx.NetworkXUnfeasible, nx.random_degree_sequence_graph, z) + pytest.raises(nx.NetworkXUnfeasible, nx.random_degree_sequence_graph, z) def test_random_degree_sequence_large(): - G1 = nx.fast_gnp_random_graph(100, 0.1) + G1 = nx.fast_gnp_random_graph(100, 0.1, seed=42) d1 = (d for n, d in G1.degree()) - G2 = nx.random_degree_sequence_graph(d1, seed=0) + G2 = nx.random_degree_sequence_graph(d1, seed=42) d2 = (d for n, d in G2.degree()) - assert_equal(sorted(d1), sorted(d2)) + assert sorted(d1) == sorted(d2) diff --git a/networkx/generators/tests/test_directed.py b/networkx/generators/tests/test_directed.py index 68f6446..6d6a40f 100644 --- a/networkx/generators/tests/test_directed.py +++ b/networkx/generators/tests/test_directed.py @@ -1,10 +1,7 @@ """Generators - Directed Graphs ---------------------------- """ -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_raises -from nose.tools import assert_true +import pytest import networkx as nx from networkx.classes import Graph @@ -17,34 +14,50 @@ from networkx.generators.directed import scale_free_graph -class TestGeneratorsDirected(object): +class TestGeneratorsDirected: def test_smoke_test_random_graphs(self): gn_graph(100) gnr_graph(100, 0.5) gnc_graph(100) scale_free_graph(100) + gn_graph(100, seed=42) + gnr_graph(100, 0.5, seed=42) + gnc_graph(100, seed=42) + scale_free_graph(100, seed=42) + def test_create_using_keyword_arguments(self): - assert_raises(nx.NetworkXError, - gn_graph, 100, 
create_using=Graph()) - assert_raises(nx.NetworkXError, - gnr_graph, 100, 0.5, create_using=Graph()) - assert_raises(nx.NetworkXError, - gnc_graph, 100, create_using=Graph()) - assert_raises(nx.NetworkXError, - scale_free_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, gn_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, gnr_graph, 100, 0.5, create_using=Graph()) + pytest.raises(nx.NetworkXError, gnc_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, scale_free_graph, 100, create_using=Graph()) G = gn_graph(100, seed=1) MG = gn_graph(100, create_using=MultiDiGraph(), seed=1) - assert_equal(sorted(G.edges()), sorted(MG.edges())) + assert sorted(G.edges()) == sorted(MG.edges()) G = gnr_graph(100, 0.5, seed=1) MG = gnr_graph(100, 0.5, create_using=MultiDiGraph(), seed=1) - assert_equal(sorted(G.edges()), sorted(MG.edges())) + assert sorted(G.edges()) == sorted(MG.edges()) G = gnc_graph(100, seed=1) MG = gnc_graph(100, create_using=MultiDiGraph(), seed=1) - assert_equal(sorted(G.edges()), sorted(MG.edges())) - - -class TestRandomKOutGraph(object): + assert sorted(G.edges()) == sorted(MG.edges()) + + G = scale_free_graph( + 100, + alpha=0.3, + beta=0.4, + gamma=0.3, + delta_in=0.3, + delta_out=0.1, + create_using=MultiDiGraph, + seed=1, + ) + pytest.raises(ValueError, scale_free_graph, 100, 0.5, 0.4, 0.3) + pytest.raises(ValueError, scale_free_graph, 100, alpha=-0.3) + pytest.raises(ValueError, scale_free_graph, 100, beta=-0.3) + pytest.raises(ValueError, scale_free_graph, 100, gamma=-0.3) + + +class TestRandomKOutGraph: """Unit tests for the :func:`~networkx.generators.directed.random_k_out_graph` function. @@ -56,7 +69,9 @@ def test_regularity(self): k = 3 alpha = 1 G = random_k_out_graph(n, k, alpha) - assert_true(all(d == k for v, d in G.out_degree())) + assert all(d == k for v, d in G.out_degree()) + G = random_k_out_graph(n, k, alpha, seed=42) + assert all(d == k for v, d in G.out_degree()) def test_no_self_loops(self): """Tests for forbidding self-loops.""" @@ -64,10 +79,10 @@ def test_no_self_loops(self): k = 3 alpha = 1 G = random_k_out_graph(n, k, alpha, self_loops=False) - assert_equal(nx.number_of_selfloops(G), 0) + assert nx.number_of_selfloops(G) == 0 -class TestUniformRandomKOutGraph(object): +class TestUniformRandomKOutGraph: """Unit tests for the :func:`~networkx.generators.directed.random_uniform_k_out_graph` function. 
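# [Editorial aside -- a hedged note, not part of the upstream patch.] The
# ValueError checks added above reflect that scale_free_graph requires
# alpha + beta + gamma == 1 (the three attachment probabilities partition the
# event space) and each to be non-negative; 0.5 + 0.4 + 0.3 = 1.2 is rejected.
# The library defaults already sum to 1:
import networkx as nx

G = nx.scale_free_graph(100, alpha=0.41, beta=0.54, gamma=0.05, seed=1)
assert G.is_directed() and len(G) == 100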
@@ -79,26 +94,28 @@ def test_regularity(self): n = 10 k = 3 G = random_uniform_k_out_graph(n, k) - assert_true(all(d == k for v, d in G.out_degree())) + assert all(d == k for v, d in G.out_degree()) + G = random_uniform_k_out_graph(n, k, seed=42) + assert all(d == k for v, d in G.out_degree()) def test_no_self_loops(self): """Tests for forbidding self-loops.""" n = 10 k = 3 G = random_uniform_k_out_graph(n, k, self_loops=False) - assert_equal(nx.number_of_selfloops(G), 0) - assert_true(all(d == k for v, d in G.out_degree())) + assert nx.number_of_selfloops(G) == 0 + assert all(d == k for v, d in G.out_degree()) def test_with_replacement(self): n = 10 k = 3 G = random_uniform_k_out_graph(n, k, with_replacement=True) - assert_true(G.is_multigraph()) - assert_true(all(d == k for v, d in G.out_degree())) + assert G.is_multigraph() + assert all(d == k for v, d in G.out_degree()) def test_without_replacement(self): n = 10 k = 3 G = random_uniform_k_out_graph(n, k, with_replacement=False) - assert_false(G.is_multigraph()) - assert_true(all(d == k for v, d in G.out_degree())) + assert not G.is_multigraph() + assert all(d == k for v, d in G.out_degree()) diff --git a/networkx/generators/tests/test_duplication.py b/networkx/generators/tests/test_duplication.py index 6496c1d..06c49d2 100644 --- a/networkx/generators/tests/test_duplication.py +++ b/networkx/generators/tests/test_duplication.py @@ -1,25 +1,14 @@ -# -*- encoding: utf-8 -*- -# test_duplication.py - unit tests for the generators.duplication module -# -# Copyright 2010-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.generators.duplication` module. """ -from nose.tools import assert_equal -from nose.tools import assert_raises -from nose.tools import raises +import pytest from networkx.exception import NetworkXError from networkx.generators.duplication import duplication_divergence_graph from networkx.generators.duplication import partial_duplication_graph -class TestDuplicationDivergenceGraph(object): +class TestDuplicationDivergenceGraph: """Unit tests for the :func:`networkx.generators.duplication.duplication_divergence_graph` function. @@ -28,18 +17,20 @@ class TestDuplicationDivergenceGraph(object): def test_final_size(self): G = duplication_divergence_graph(3, 1) - assert_equal(len(G), 3) + assert len(G) == 3 + G = duplication_divergence_graph(3, 1, seed=42) + assert len(G) == 3 - @raises(NetworkXError) def test_probability_too_large(self): - duplication_divergence_graph(3, 2) + with pytest.raises(NetworkXError): + duplication_divergence_graph(3, 2) - @raises(NetworkXError) def test_probability_too_small(self): - duplication_divergence_graph(3, -1) + with pytest.raises(NetworkXError): + duplication_divergence_graph(3, -1) -class TestPartialDuplicationGraph(object): +class TestPartialDuplicationGraph: """Unit tests for the :func:`networkx.generators.duplication.partial_duplication_graph` function. 
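# [Editorial aside -- a hedged sketch, not part of the upstream patch.] In the
# duplication-divergence model tested above, the graph grows by (roughly)
# duplicating a random node and retaining each inherited edge independently
# with probability p, so p outside [0, 1] raises NetworkXError, as the tests
# check with p = 2 and p = -1. A minimal well-formed call:
import networkx as nx

G = nx.duplication_divergence_graph(10, 0.45, seed=3)
assert len(G) == 10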
@@ -52,7 +43,9 @@ def test_final_size(self): p = 0.5 q = 0.5 G = partial_duplication_graph(N, n, p, q) - assert_equal(len(G), N) + assert len(G) == N + G = partial_duplication_graph(N, n, p, q, seed=42) + assert len(G) == N def test_initial_clique_size(self): N = 10 @@ -60,20 +53,19 @@ def test_initial_clique_size(self): p = 0.5 q = 0.5 G = partial_duplication_graph(N, n, p, q) - assert_equal(len(G), n) + assert len(G) == n - @raises(NetworkXError) def test_invalid_initial_size(self): - N = 5 - n = 10 - p = 0.5 - q = 0.5 - G = partial_duplication_graph(N, n, p, q) - assert_equal(len(G), n) + with pytest.raises(NetworkXError): + N = 5 + n = 10 + p = 0.5 + q = 0.5 + G = partial_duplication_graph(N, n, p, q) def test_invalid_probabilities(self): N = 1 n = 1 for p, q in [(0.5, 2), (0.5, -1), (2, 0.5), (-1, 0.5)]: args = (N, n, p, q) - assert_raises(NetworkXError, partial_duplication_graph, *args) + pytest.raises(NetworkXError, partial_duplication_graph, *args) diff --git a/networkx/generators/tests/test_ego.py b/networkx/generators/tests/test_ego.py index 97e5b19..3b16b0f 100644 --- a/networkx/generators/tests/test_ego.py +++ b/networkx/generators/tests/test_ego.py @@ -1,24 +1,22 @@ -#!/usr/bin/env python """ ego graph --------- """ -from nose.tools import assert_true import networkx as nx -from networkx.testing.utils import * +from networkx.testing.utils import assert_edges_equal, assert_nodes_equal -class TestGeneratorEgo(): +class TestGeneratorEgo: def test_ego(self): G = nx.star_graph(3) H = nx.ego_graph(G, 0) - assert_true(nx.is_isomorphic(G, H)) + assert nx.is_isomorphic(G, H) G.add_edge(1, 11) G.add_edge(2, 22) G.add_edge(3, 33) H = nx.ego_graph(G, 0) - assert_true(nx.is_isomorphic(nx.star_graph(3), H)) + assert nx.is_isomorphic(nx.star_graph(3), H) G = nx.path_graph(3) H = nx.ego_graph(G, 0) assert_edges_equal(H.edges(), [(0, 1)]) @@ -33,9 +31,9 @@ def test_ego_distance(self): G.add_edge(1, 2, weight=2, distance=2) G.add_edge(2, 3, weight=2, distance=1) assert_nodes_equal(nx.ego_graph(G, 0, radius=3).nodes(), [0, 1, 2, 3]) - eg = nx.ego_graph(G, 0, radius=3, distance='weight') + eg = nx.ego_graph(G, 0, radius=3, distance="weight") assert_nodes_equal(eg.nodes(), [0, 1]) - eg = nx.ego_graph(G, 0, radius=3, distance='weight', undirected=True) + eg = nx.ego_graph(G, 0, radius=3, distance="weight", undirected=True) assert_nodes_equal(eg.nodes(), [0, 1]) - eg = nx.ego_graph(G, 0, radius=3, distance='distance') + eg = nx.ego_graph(G, 0, radius=3, distance="distance") assert_nodes_equal(eg.nodes(), [0, 1, 2]) diff --git a/networkx/generators/tests/test_expanders.py b/networkx/generators/tests/test_expanders.py index a71a1a7..a822e04 100644 --- a/networkx/generators/tests/test_expanders.py +++ b/networkx/generators/tests/test_expanders.py @@ -1,62 +1,43 @@ -# Copyright 2014 "cheebee7i". -# Copyright 2014 "alexbrc". -# Copyright 2014 Jeffrey Finkelstein . """Unit tests for the :mod:`networkx.generators.expanders` module. 
""" -try: - import scipy - is_scipy_available = True -except: - is_scipy_available = False import networkx as nx from networkx import adjacency_matrix from networkx import number_of_nodes from networkx.generators.expanders import chordal_cycle_graph from networkx.generators.expanders import margulis_gabber_galil_graph +from networkx.generators.expanders import paley_graph -from nose import SkipTest -from nose.tools import assert_equal -from nose.tools import assert_less -from nose.tools import assert_raises -from nose.tools import assert_true +import pytest def test_margulis_gabber_galil_graph(): - try: - # Scipy is required for conversion to an adjacency matrix. - # We also use scipy for computing the eigenvalues, - # but this second use could be done using only numpy. - import numpy as np - import scipy.linalg - has_scipy = True - except ImportError as e: - has_scipy = False for n in 2, 3, 5, 6, 10: g = margulis_gabber_galil_graph(n) - assert_equal(number_of_nodes(g), n * n) + assert number_of_nodes(g) == n * n for node in g: - assert_equal(g.degree(node), 8) - assert_equal(len(node), 2) + assert g.degree(node) == 8 + assert len(node) == 2 for i in node: - assert_equal(int(i), i) - assert_true(0 <= i < n) - if has_scipy: - # Eigenvalues are already sorted using the scipy eigvalsh, - # but the implementation in numpy does not guarantee order. - w = sorted(scipy.linalg.eigvalsh(adjacency_matrix(g).A)) - assert_less(w[-2], 5 * np.sqrt(2)) + assert int(i) == i + assert 0 <= i < n + + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") + scipy.linalg = pytest.importorskip("scipy.linalg") + # Eigenvalues are already sorted using the scipy eigvalsh, + # but the implementation in numpy does not guarantee order. + w = sorted(scipy.linalg.eigvalsh(adjacency_matrix(g).A)) + assert w[-2] < 5 * np.sqrt(2) def test_chordal_cycle_graph(): """Test for the :func:`networkx.chordal_cycle_graph` function.""" - if not is_scipy_available: - raise SkipTest('SciPy is not available') primes = [3, 5, 7, 11] for p in primes: G = chordal_cycle_graph(p) - assert_equal(len(G), p) + assert len(G) == p # TODO The second largest eigenvalue should be smaller than a constant, # independent of the number of nodes in the graph: # @@ -65,8 +46,26 @@ def test_chordal_cycle_graph(): # +def test_paley_graph(): + """Test for the :func:`networkx.paley_graph` function.""" + primes = [3, 5, 7, 11, 13] + for p in primes: + G = paley_graph(p) + # G has p nodes + assert len(G) == p + # G is (p-1)/2-regular + in_degrees = {G.in_degree(node) for node in G.nodes} + out_degrees = {G.out_degree(node) for node in G.nodes} + assert len(in_degrees) == 1 and in_degrees.pop() == (p - 1) // 2 + assert len(out_degrees) == 1 and out_degrees.pop() == (p - 1) // 2 + + # If p = 1 mod 4, -1 is a square mod 4 and therefore the + # edge in the Paley graph are symmetric. 
+ if p % 4 == 1: + for (u, v) in G.edges: + assert (v, u) in G.edges + + def test_margulis_gabber_galil_graph_badinput(): - assert_raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, - nx.DiGraph()) - assert_raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, - nx.Graph()) + pytest.raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, nx.DiGraph()) + pytest.raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, nx.Graph()) diff --git a/networkx/generators/tests/test_geometric.py b/networkx/generators/tests/test_geometric.py index 8dd2a0a..91151ec 100644 --- a/networkx/generators/tests/test_geometric.py +++ b/networkx/generators/tests/test_geometric.py @@ -2,9 +2,6 @@ from math import sqrt import random -from nose.tools import assert_equal -from nose.tools import assert_false -from nose.tools import assert_true import networkx as nx from networkx.generators.geometric import euclidean @@ -14,17 +11,17 @@ def l1dist(x, y): return sum(abs(a - b) for a, b in zip(x, y)) -class TestRandomGeometricGraph(object): +class TestRandomGeometricGraph: """Unit tests for the :func:`~networkx.random_geometric_graph` function. """ def test_number_of_nodes(self): - G = nx.random_geometric_graph(50, 0.25) - assert_equal(len(G), 50) - G = nx.random_geometric_graph(range(50), 0.25) - assert_equal(len(G), 50) + G = nx.random_geometric_graph(50, 0.25, seed=42) + assert len(G) == 50 + G = nx.random_geometric_graph(range(50), 0.25, seed=42) + assert len(G) == 50 def test_distances(self): """Tests that pairs of vertices adjacent if and only if they are @@ -38,10 +35,10 @@ def test_distances(self): for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 # Nonadjacent vertices must be at greater distance. else: - assert_false(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert not dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p(self): """Tests for providing an alternate distance metric to the @@ -54,41 +51,42 @@ def test_p(self): for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 # Nonadjacent vertices must be at greater distance. else: - assert_false(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert not dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_node_names(self): """Tests using values other than sequential numbers as node IDs. """ import string + nodes = list(string.ascii_lowercase) G = nx.random_geometric_graph(nodes, 0.25) - assert_equal(len(G), len(nodes)) + assert len(G) == len(nodes) dist = euclidean for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 # Nonadjacent vertices must be at greater distance. else: - assert_false(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert not dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 -class TestSoftRandomGeometricGraph(object): +class TestSoftRandomGeometricGraph: """Unit tests for the :func:`~networkx.soft_random_geometric_graph` function. 
""" def test_number_of_nodes(self): - G = nx.soft_random_geometric_graph(50, 0.25) - assert_equal(len(G), 50) - G = nx.soft_random_geometric_graph(range(50), 0.25) - assert_equal(len(G), 50) + G = nx.soft_random_geometric_graph(50, 0.25, seed=42) + assert len(G) == 50 + G = nx.soft_random_geometric_graph(range(50), 0.25, seed=42) + assert len(G) == 50 def test_distances(self): """Tests that pairs of vertices adjacent if and only if they are @@ -97,12 +95,14 @@ def test_distances(self): """ # Use the Euclidean metric, the default according to the # documentation. - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + G = nx.soft_random_geometric_graph(50, 0.25) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p(self): """Tests for providing an alternate distance metric to the @@ -110,27 +110,32 @@ def test_p(self): """ # Use the L1 metric. - def dist(x, y): return sum(abs(a - b) for a, b in zip(x, y)) + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + G = nx.soft_random_geometric_graph(50, 0.25, p=1) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_node_names(self): """Tests using values other than sequential numbers as node IDs. """ import string + nodes = list(string.ascii_lowercase) G = nx.soft_random_geometric_graph(nodes, 0.25) - assert_equal(len(G), len(nodes)) + assert len(G) == len(nodes) + + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p_dist_default(self): """Tests default p_dict = 0.5 returns graph with edge count <= RGG with @@ -142,17 +147,18 @@ def test_p_dist_default(self): pos = {v: [random.random() for i in range(dim)] for v in range(nodes)} RGG = nx.random_geometric_graph(50, 0.25, pos=pos) SRGG = nx.soft_random_geometric_graph(50, 0.25, pos=pos) - assert_true(len(SRGG.edges()) <= len(RGG.edges())) + assert len(SRGG.edges()) <= len(RGG.edges()) def test_p_dist_zero(self): """Tests if p_dict = 0 returns disconencted graph with 0 edges """ + def p_dist(dist): return 0 G = nx.soft_random_geometric_graph(50, 0.25, p_dist=p_dist) - assert_true(len(G.edges) == 0) + assert len(G.edges) == 0 def join(G, u, v, theta, alpha, metric): @@ -168,22 +174,22 @@ def join(G, u, v, theta, alpha, metric): """ du, dv = G.nodes[u], G.nodes[v] - u_pos, v_pos = du['pos'], dv['pos'] - u_weight, v_weight = du['weight'], dv['weight'] + u_pos, v_pos = du["pos"], dv["pos"] + u_weight, v_weight = du["weight"], dv["weight"] return (u_weight + v_weight) * metric(u_pos, v_pos) ** alpha >= theta -class TestGeographicalThresholdGraph(object): +class TestGeographicalThresholdGraph: """Unit tests for the :func:`~networkx.geographical_threshold_graph` function. 
""" def test_number_of_nodes(self): - G = nx.geographical_threshold_graph(50, 100) - assert_equal(len(G), 50) - G = nx.geographical_threshold_graph(range(50), 100) - assert_equal(len(G), 50) + G = nx.geographical_threshold_graph(50, 100, seed=42) + assert len(G) == 50 + G = nx.geographical_threshold_graph(range(50), 100, seed=42) + assert len(G) == 50 def test_distances(self): """Tests that pairs of vertices adjacent if and only if their @@ -197,10 +203,10 @@ def test_distances(self): for u, v in combinations(G, 2): # Adjacent vertices must exceed the threshold. if v in G[u]: - assert_true(join(G, u, v, 10, -2, dist)) + assert join(G, u, v, 10, -2, dist) # Nonadjacent vertices must not exceed the threshold. else: - assert_false(join(G, u, v, 10, -2, dist)) + assert not join(G, u, v, 10, -2, dist) def test_metric(self): """Tests for providing an alternate distance metric to the @@ -213,36 +219,37 @@ def test_metric(self): for u, v in combinations(G, 2): # Adjacent vertices must exceed the threshold. if v in G[u]: - assert_true(join(G, u, v, 10, -2, dist)) + assert join(G, u, v, 10, -2, dist) # Nonadjacent vertices must not exceed the threshold. else: - assert_false(join(G, u, v, 10, -2, dist)) + assert not join(G, u, v, 10, -2, dist) def test_p_dist_zero(self): """Tests if p_dict = 0 returns disconencted graph with 0 edges """ + def p_dist(dist): return 0 G = nx.geographical_threshold_graph(50, 1, p_dist=p_dist) - assert_true(len(G.edges) == 0) + assert len(G.edges) == 0 -class TestWaxmanGraph(object): +class TestWaxmanGraph: """Unit tests for the :func:`~networkx.waxman_graph` function.""" def test_number_of_nodes_1(self): - G = nx.waxman_graph(50, 0.5, 0.1) - assert_equal(len(G), 50) - G = nx.waxman_graph(range(50), 0.5, 0.1) - assert_equal(len(G), 50) + G = nx.waxman_graph(50, 0.5, 0.1, seed=42) + assert len(G) == 50 + G = nx.waxman_graph(range(50), 0.5, 0.1, seed=42) + assert len(G) == 50 def test_number_of_nodes_2(self): G = nx.waxman_graph(50, 0.5, 0.1, L=1) - assert_equal(len(G), 50) + assert len(G) == 50 G = nx.waxman_graph(range(50), 0.5, 0.1, L=1) - assert_equal(len(G), 50) + assert len(G) == 50 def test_metric(self): """Tests for providing an alternate distance metric to the @@ -252,36 +259,35 @@ def test_metric(self): # Use the L1 metric. dist = l1dist G = nx.waxman_graph(50, 0.5, 0.1, metric=dist) - assert_equal(len(G), 50) + assert len(G) == 50 -class TestNavigableSmallWorldGraph(object): - +class TestNavigableSmallWorldGraph: def test_navigable_small_world(self): - G = nx.navigable_small_world_graph(5, p=1, q=0) + G = nx.navigable_small_world_graph(5, p=1, q=0, seed=42) gg = nx.grid_2d_graph(5, 5).to_directed() - assert_true(nx.is_isomorphic(G, gg)) + assert nx.is_isomorphic(G, gg) G = nx.navigable_small_world_graph(5, p=1, q=0, dim=3) gg = nx.grid_graph([5, 5, 5]).to_directed() - assert_true(nx.is_isomorphic(G, gg)) + assert nx.is_isomorphic(G, gg) G = nx.navigable_small_world_graph(5, p=1, q=0, dim=1) gg = nx.grid_graph([5]).to_directed() - assert_true(nx.is_isomorphic(G, gg)) + assert nx.is_isomorphic(G, gg) -class TestThresholdedRandomGeometricGraph(object): +class TestThresholdedRandomGeometricGraph: """Unit tests for the :func:`~networkx.thresholded_random_geometric_graph` function. 
""" def test_number_of_nodes(self): - G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1) - assert_equal(len(G), 50) + G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1, seed=42) + assert len(G) == 50 G = nx.thresholded_random_geometric_graph(range(50), 0.2, 0.1) - assert_equal(len(G), 50) + assert len(G) == 50 def test_distances(self): """Tests that pairs of vertices adjacent if and only if they are @@ -290,12 +296,14 @@ def test_distances(self): """ # Use the Euclidean metric, the default according to the # documentation. - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p(self): """Tests for providing an alternate distance metric to the @@ -303,27 +311,32 @@ def test_p(self): """ # Use the L1 metric. - def dist(x, y): return sum(abs(a - b) for a, b in zip(x, y)) - G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1) + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_node_names(self): """Tests using values other than sequential numbers as node IDs. """ import string + nodes = list(string.ascii_lowercase) G = nx.thresholded_random_geometric_graph(nodes, 0.25, 0.1) - assert_equal(len(G), len(nodes)) + assert len(G) == len(nodes) + + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25) + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_theta(self): """Tests that pairs of vertices adjacent if and only if their sum @@ -334,4 +347,4 @@ def test_theta(self): for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert_true((G.nodes[u]['weight'] + G.nodes[v]['weight']) >= 0.1) + assert (G.nodes[u]["weight"] + G.nodes[v]["weight"]) >= 0.1 diff --git a/networkx/generators/tests/test_harary_graph.py b/networkx/generators/tests/test_harary_graph.py new file mode 100644 index 0000000..f1709cc --- /dev/null +++ b/networkx/generators/tests/test_harary_graph.py @@ -0,0 +1,135 @@ +"""Unit tests for the :mod:`networkx.generators.harary_graph` module. 
+""" + +import pytest + +import networkx as nx +from networkx.generators.harary_graph import hnm_harary_graph +from networkx.generators.harary_graph import hkn_harary_graph +from networkx.algorithms.isomorphism.isomorph import is_isomorphic + + +class TestHararyGraph: + """ + Suppose n nodes, m >= n-1 edges, d = 2m // n, r = 2m % n + """ + + def test_hnm_harary_graph(self): + # When d is even and r = 0, the hnm_harary_graph(n,m) is + # the circulant_graph(n, list(range(1,d/2+1))) + for (n, m) in [(5, 5), (6, 12), (7, 14)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1))) + assert is_isomorphic(G1, G2) + + # When d is even and r > 0, the hnm_harary_graph(n,m) is + # the circulant_graph(n, list(range(1,d/2+1))) + # with r edges added arbitrarily + for (n, m) in [(5, 7), (6, 13), (7, 16)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1))) + assert set(G2.edges) < set(G1.edges) + assert G1.number_of_edges() == m + + # When d is odd and n is even and r = 0, the hnm_harary_graph(n,m) + # is the circulant_graph(n, list(range(1,(d+1)/2) plus [n//2]) + for (n, m) in [(6, 9), (8, 12), (10, 15)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + L.append(n // 2) + G2 = nx.circulant_graph(n, L) + assert is_isomorphic(G1, G2) + + # When d is odd and n is even and r > 0, the hnm_harary_graph(n,m) + # is the circulant_graph(n, list(range(1,(d+1)/2) plus [n//2]) + # with r edges added arbitrarily + for (n, m) in [(6, 10), (8, 13), (10, 17)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + L.append(n // 2) + G2 = nx.circulant_graph(n, L) + assert set(G2.edges) < set(G1.edges) + assert G1.number_of_edges() == m + + # When d is odd and n is odd, the hnm_harary_graph(n,m) is + # the circulant_graph(n, list(range(1,(d+1)/2)) + # with m - n*(d-1)/2 edges added arbitrarily + for (n, m) in [(5, 4), (7, 12), (9, 14)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + G2 = nx.circulant_graph(n, L) + assert set(G2.edges) < set(G1.edges) + assert G1.number_of_edges() == m + + # Raise NetworkXError if n<1 + n = 0 + m = 0 + pytest.raises(nx.NetworkXError, hnm_harary_graph, n, m) + + # Raise NetworkXError if m < n-1 + n = 6 + m = 4 + pytest.raises(nx.NetworkXError, hnm_harary_graph, n, m) + + # Raise NetworkXError if m > n(n-1)/2 + n = 6 + m = 16 + pytest.raises(nx.NetworkXError, hnm_harary_graph, n, m) + + """ + Suppose connectivity k, number of nodes n + """ + + def test_hkn_harary_graph(self): + # When k == 1, the hkn_harary_graph(k,n) is + # the path_graph(n) + for (k, n) in [(1, 6), (1, 7)]: + G1 = hkn_harary_graph(k, n) + G2 = nx.path_graph(n) + assert is_isomorphic(G1, G2) + + # When k is even, the hkn_harary_graph(k,n) is + # the circulant_graph(n, list(range(1,k/2+1))) + for (k, n) in [(2, 6), (2, 7), (4, 6), (4, 7)]: + G1 = hkn_harary_graph(k, n) + G2 = nx.circulant_graph(n, list(range(1, k // 2 + 1))) + assert is_isomorphic(G1, G2) + + # When k is odd and n is even, the hkn_harary_graph(k,n) is + # the circulant_graph(n, list(range(1,(k+1)/2)) plus [n/2]) + for (k, n) in [(3, 6), (5, 8), (7, 10)]: + G1 = hkn_harary_graph(k, n) + L = list(range(1, (k + 1) // 2)) + L.append(n // 2) + G2 = nx.circulant_graph(n, L) + assert is_isomorphic(G1, G2) + + # When k is odd and n is odd, the hkn_harary_graph(k,n) is + # the circulant_graph(n, list(range(1,(k+1)/2))) with + # n//2+1 edges added between node i and 
node i+n//2+1 + for (k, n) in [(3, 5), (5, 9), (7, 11)]: + G1 = hkn_harary_graph(k, n) + G2 = nx.circulant_graph(n, list(range(1, (k + 1) // 2))) + eSet1 = set(G1.edges) + eSet2 = set(G2.edges) + eSet3 = set() + half = n // 2 + for i in range(0, half + 1): + # add half+1 edges between i and i+half + eSet3.add((i, (i + half) % n)) + assert eSet1 == eSet2 | eSet3 + + # Raise NetworkXError if k<1 + k = 0 + n = 0 + pytest.raises(nx.NetworkXError, hkn_harary_graph, k, n) + + # Raise NetworkXError if ndegree_count[1]*degree_count[4] - joint_degrees_3 = {1: {4: 2}, - 2: {2: 2, 3: 2, 4: 2}, - 3: {2: 2, 4: 1}, - 4: {1: 2, 2: 2, 3: 1}} - assert_false(is_valid_joint_degree(joint_degrees_3)) + joint_degrees_3 = { + 1: {4: 2}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 2, 2: 2, 3: 1}, + } + assert not is_valid_joint_degree(joint_degrees_3) # test condition 5 # joint_degrees_5[1][1] not even joint_degrees_5 = {1: {1: 9}} - assert_false(is_valid_joint_degree(joint_degrees_5)) + assert not is_valid_joint_degree(joint_degrees_5) -def test_joint_degree_graph(ntimes=100): +def test_joint_degree_graph(ntimes=10): for _ in range(ntimes): - seed = time.time() + seed = int(time.time()) n, m, p = 20, 10, 1 # generate random graph with model powerlaw_cluster and calculate @@ -63,4 +75,52 @@ def test_joint_degree_graph(ntimes=100): # assert that the given joint degree is equal to the generated # graph's joint degree - assert_true(joint_degrees_g == joint_degrees_G) + assert joint_degrees_g == joint_degrees_G + + +def test_is_valid_directed_joint_degree(): + + in_degrees = [0, 1, 1, 2] + out_degrees = [1, 1, 1, 1] + nkk = {1: {1: 2, 2: 2}} + assert is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, values are not integers. + nkk = {1: {1: 1.5, 2: 2.5}} + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, number of edges between 1-2 are insufficient. + nkk = {1: {1: 2, 2: 1}} + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, in/out degree sequences have different number of nodes. + out_degrees = [1, 1, 1] + nkk = {1: {1: 2, 2: 2}} + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, degree seqeunces have fewer than required nodes. + in_degrees = [0, 1, 2] + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + +def test_directed_joint_degree_graph(n=15, m=100, ntimes=1000): + for _ in range(ntimes): + + # generate gnm random graph and calculate its joint degree. + g = gnm_random_graph(n, m, None, directed=True) + + # in-degree seqeunce of g as a list of integers. + in_degrees = list(dict(g.in_degree()).values()) + # out-degree sequence of g as a list of integers. + out_degrees = list(dict(g.out_degree()).values()) + nkk = degree_mixing_dict(g) + + # generate simple directed graph with given degree sequence and joint + # degree matrix. + G = directed_joint_degree_graph(in_degrees, out_degrees, nkk) + + # assert degree sequence correctness. + assert in_degrees == list(dict(G.in_degree()).values()) + assert out_degrees == list(dict(G.out_degree()).values()) + # assert joint degree matrix correctness. 
+ assert nkk == degree_mixing_dict(G) diff --git a/networkx/generators/tests/test_lattice.py b/networkx/generators/tests/test_lattice.py index ab8b826..3f72d09 100644 --- a/networkx/generators/tests/test_lattice.py +++ b/networkx/generators/tests/test_lattice.py @@ -1,11 +1,10 @@ """Unit tests for the :mod:`networkx.generators.lattice` module.""" -from nose.tools import assert_equal -from nose.tools import assert_true -from nose.tools import assert_raises +import pytest import networkx as nx from networkx.testing import assert_edges_equal +from itertools import product class TestGrid2DGraph: @@ -14,57 +13,67 @@ class TestGrid2DGraph: def test_number_of_vertices(self): m, n = 5, 6 G = nx.grid_2d_graph(m, n) - assert_equal(len(G), m * n) + assert len(G) == m * n def test_degree_distribution(self): m, n = 5, 6 G = nx.grid_2d_graph(m, n) expected_histogram = [0, 0, 4, 2 * (m + n) - 8, (m - 2) * (n - 2)] - assert_equal(nx.degree_histogram(G), expected_histogram) + assert nx.degree_histogram(G) == expected_histogram def test_directed(self): m, n = 5, 6 G = nx.grid_2d_graph(m, n) H = nx.grid_2d_graph(m, n, create_using=nx.DiGraph()) - assert_equal(H.succ, G.adj) - assert_equal(H.pred, G.adj) + assert H.succ == G.adj + assert H.pred == G.adj def test_multigraph(self): m, n = 5, 6 G = nx.grid_2d_graph(m, n) H = nx.grid_2d_graph(m, n, create_using=nx.MultiGraph()) - assert_equal(list(H.edges()), list(G.edges())) + assert list(H.edges()) == list(G.edges()) def test_periodic(self): G = nx.grid_2d_graph(0, 0, periodic=True) - assert_equal(dict(G.degree()), {}) - - for m, n, H in [(2, 2, nx.cycle_graph(4)), (1, 7, nx.cycle_graph(7)), - (7, 1, nx.cycle_graph(7)), - (2, 5, nx.circular_ladder_graph(5)), - (5, 2, nx.circular_ladder_graph(5)), - (2, 4, nx.cubical_graph()), - (4, 2, nx.cubical_graph())]: + assert dict(G.degree()) == {} + + for m, n, H in [ + (2, 2, nx.cycle_graph(4)), + (1, 7, nx.cycle_graph(7)), + (7, 1, nx.cycle_graph(7)), + (2, 5, nx.circular_ladder_graph(5)), + (5, 2, nx.circular_ladder_graph(5)), + (2, 4, nx.cubical_graph()), + (4, 2, nx.cubical_graph()), + ]: G = nx.grid_2d_graph(m, n, periodic=True) - assert_true(nx.could_be_isomorphic(G, H)) + assert nx.could_be_isomorphic(G, H) + + def test_periodic_iterable(self): + m, n = 3, 7 + for a, b in product([0, 1], [0, 1]): + G = nx.grid_2d_graph(m, n, periodic=(a, b)) + assert G.number_of_nodes() == m * n + assert G.number_of_edges() == (m + a - 1) * n + (n + b - 1) * m def test_periodic_directed(self): G = nx.grid_2d_graph(4, 2, periodic=True) H = nx.grid_2d_graph(4, 2, periodic=True, create_using=nx.DiGraph()) - assert_equal(H.succ, G.adj) - assert_equal(H.pred, G.adj) + assert H.succ == G.adj + assert H.pred == G.adj def test_periodic_multigraph(self): G = nx.grid_2d_graph(4, 2, periodic=True) H = nx.grid_2d_graph(4, 2, periodic=True, create_using=nx.MultiGraph()) - assert_equal(list(G.edges()), list(H.edges())) + assert list(G.edges()) == list(H.edges()) def test_node_input(self): G = nx.grid_2d_graph(4, 2, periodic=True) H = nx.grid_2d_graph(range(4), range(2), periodic=True) - assert_true(nx.is_isomorphic(H, G)) + assert nx.is_isomorphic(H, G) H = nx.grid_2d_graph("abcd", "ef", periodic=True) - assert_true(nx.is_isomorphic(H, G)) + assert nx.is_isomorphic(H, G) G = nx.grid_2d_graph(5, 6) H = nx.grid_2d_graph(range(5), range(6)) assert_edges_equal(H, G) @@ -82,39 +91,56 @@ def test_grid_graph(self): for n, m in [(3, 5), (5, 3), (4, 5), (5, 4)]: dim = [n, m] g = nx.grid_graph(dim) - assert_equal(len(g), n * m) - 
assert_equal(nx.degree_histogram(g), [0, 0, 4, 2 * (n + m) - 8, - (n - 2) * (m - 2)]) + assert len(g) == n * m + assert nx.degree_histogram(g) == [ + 0, + 0, + 4, + 2 * (n + m) - 8, + (n - 2) * (m - 2), + ] for n, m in [(1, 5), (5, 1)]: dim = [n, m] g = nx.grid_graph(dim) - assert_equal(len(g), n * m) - assert_true(nx.is_isomorphic(g, nx.path_graph(5))) + assert len(g) == n * m + assert nx.is_isomorphic(g, nx.path_graph(5)) -# mg = nx.grid_graph([n,m], create_using=MultiGraph()) -# assert_equal(mg.edges(), g.edges()) + # mg = nx.grid_graph([n,m], create_using=MultiGraph()) + # assert_equal(mg.edges(), g.edges()) def test_node_input(self): G = nx.grid_graph([range(7, 9), range(3, 6)]) - assert_equal(len(G), 2 * 3) - assert_true(nx.is_isomorphic(G, nx.grid_graph([2, 3]))) + assert len(G) == 2 * 3 + assert nx.is_isomorphic(G, nx.grid_graph([2, 3])) + + def test_periodic_iterable(self): + m, n, k = 3, 7, 5 + for a, b, c in product([0, 1], [0, 1], [0, 1]): + G = nx.grid_graph([m, n, k], periodic=(a, b, c)) + num_e = (m + a - 1) * n * k + (n + b - 1) * m * k + (k + c - 1) * m * n + assert G.number_of_nodes() == m * n * k + assert G.number_of_edges() == num_e class TestHypercubeGraph: """Unit tests for :func:`networkx.generators.lattice.hypercube_graph`""" def test_special_cases(self): - for n, H in [(0, nx.null_graph()), (1, nx.path_graph(2)), - (2, nx.cycle_graph(4)), (3, nx.cubical_graph())]: + for n, H in [ + (0, nx.null_graph()), + (1, nx.path_graph(2)), + (2, nx.cycle_graph(4)), + (3, nx.cubical_graph()), + ]: G = nx.hypercube_graph(n) - assert_true(nx.could_be_isomorphic(G, H)) + assert nx.could_be_isomorphic(G, H) def test_degree_distribution(self): for n in range(1, 10): G = nx.hypercube_graph(n) expected_histogram = [0] * n + [2 ** n] - assert_equal(nx.degree_histogram(G), expected_histogram) + assert nx.degree_histogram(G) == expected_histogram class TestTriangularLatticeGraph: @@ -125,43 +151,43 @@ def test_lattice_points(self): for m, n in [(2, 3), (2, 2), (2, 1), (3, 3), (3, 2), (3, 4)]: G = nx.triangular_lattice_graph(m, n) N = (n + 1) // 2 - assert_equal(len(G), (m + 1) * (1 + N) - (n % 2) * ((m + 1) // 2)) + assert len(G) == (m + 1) * (1 + N) - (n % 2) * ((m + 1) // 2) for (i, j) in G.nodes(): nbrs = G[(i, j)] if i < N: - assert_true((i + 1, j) in nbrs) + assert (i + 1, j) in nbrs if j < m: - assert_true((i, j + 1) in nbrs) + assert (i, j + 1) in nbrs if j < m and (i > 0 or j % 2) and (i < N or (j + 1) % 2): - assert_true((i + 1, j + 1) in nbrs or (i - 1, j + 1) in nbrs) + assert (i + 1, j + 1) in nbrs or (i - 1, j + 1) in nbrs def test_directed(self): """Tests for creating a directed triangular lattice.""" G = nx.triangular_lattice_graph(3, 4, create_using=nx.Graph()) H = nx.triangular_lattice_graph(3, 4, create_using=nx.DiGraph()) - assert_true(H.is_directed()) + assert H.is_directed() for u, v in H.edges(): - assert_true(v[1] >= u[1]) + assert v[1] >= u[1] if v[1] == u[1]: - assert_true(v[0] > u[0]) + assert v[0] > u[0] def test_multigraph(self): """Tests for creating a triangular lattice multigraph.""" G = nx.triangular_lattice_graph(3, 4, create_using=nx.Graph()) H = nx.triangular_lattice_graph(3, 4, create_using=nx.MultiGraph()) - assert_equal(list(H.edges()), list(G.edges())) + assert list(H.edges()) == list(G.edges()) def test_periodic(self): G = nx.triangular_lattice_graph(4, 6, periodic=True) - assert_equal(len(G), 12) - assert_equal(G.size(), 36) + assert len(G) == 12 + assert G.size() == 36 # all degrees are 6 - assert_equal(len([n for n, d in G.degree() if d != 6]), 
0) + assert len([n for n, d in G.degree() if d != 6]) == 0 G = nx.triangular_lattice_graph(5, 7, periodic=True) TLG = nx.triangular_lattice_graph - assert_raises(nx.NetworkXError, TLG, 2, 4, periodic=True) - assert_raises(nx.NetworkXError, TLG, 4, 4, periodic=True) - assert_raises(nx.NetworkXError, TLG, 2, 6, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 2, 4, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 4, 4, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 2, 6, periodic=True) class TestHexagonalLatticeGraph: @@ -171,7 +197,7 @@ def test_lattice_points(self): """Tests that the graph is really a hexagonal lattice.""" for m, n in [(4, 5), (4, 4), (4, 3), (3, 2), (3, 3), (3, 5)]: G = nx.hexagonal_lattice_graph(m, n) - assert_equal(len(G), 2 * (m + 1) * (n + 1) - 2) + assert len(G) == 2 * (m + 1) * (n + 1) - 2 C_6 = nx.cycle_graph(6) hexagons = [ [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)], @@ -181,33 +207,33 @@ def test_lattice_points(self): [(2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (3, 4)], ] for hexagon in hexagons: - assert_true(nx.is_isomorphic(G.subgraph(hexagon), C_6)) + assert nx.is_isomorphic(G.subgraph(hexagon), C_6) def test_directed(self): """Tests for creating a directed hexagonal lattice.""" G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.DiGraph()) - assert_true(H.is_directed()) - pos = nx.get_node_attributes(H, 'pos') + assert H.is_directed() + pos = nx.get_node_attributes(H, "pos") for u, v in H.edges(): - assert_true(pos[v][1] >= pos[u][1]) + assert pos[v][1] >= pos[u][1] if pos[v][1] == pos[u][1]: - assert_true(pos[v][0] > pos[u][0]) + assert pos[v][0] > pos[u][0] def test_multigraph(self): """Tests for creating a hexagonal lattice multigraph.""" G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.MultiGraph()) - assert_equal(list(H.edges()), list(G.edges())) + assert list(H.edges()) == list(G.edges()) def test_periodic(self): G = nx.hexagonal_lattice_graph(4, 6, periodic=True) - assert_equal(len(G), 48) - assert_equal(G.size(), 72) + assert len(G) == 48 + assert G.size() == 72 # all degrees are 3 - assert_equal(len([n for n, d in G.degree() if d != 3]), 0) + assert len([n for n, d in G.degree() if d != 3]) == 0 G = nx.hexagonal_lattice_graph(5, 8, periodic=True) HLG = nx.hexagonal_lattice_graph - assert_raises(nx.NetworkXError, HLG, 2, 7, periodic=True) - assert_raises(nx.NetworkXError, HLG, 1, 4, periodic=True) - assert_raises(nx.NetworkXError, HLG, 2, 1, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 2, 7, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 1, 4, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 2, 1, periodic=True) diff --git a/networkx/generators/tests/test_line.py b/networkx/generators/tests/test_line.py index 2b25dde..fec8893 100644 --- a/networkx/generators/tests/test_line.py +++ b/networkx/generators/tests/test_line.py @@ -1,8 +1,8 @@ import networkx as nx -from nose.tools import * +import pytest import networkx.generators.line as line -from networkx.testing.utils import * +from networkx.testing.utils import assert_edges_equal def test_node_func(): @@ -10,16 +10,16 @@ def test_node_func(): G = nx.Graph() G.add_edge(1, 2) nf = line._node_func(G) - assert_equal(nf(1, 2), (1, 2)) - assert_equal(nf(2, 1), (1, 2)) + assert nf(1, 2) == (1, 2) + assert nf(2, 1) == (1, 2) # multigraph G = nx.MultiGraph() G.add_edge(1, 2) G.add_edge(1, 2) nf = line._node_func(G) - assert_equal(nf(1, 2, 0), 
(1, 2, 0)) - assert_equal(nf(2, 1, 0), (1, 2, 0)) + assert nf(1, 2, 0) == (1, 2, 0) + assert nf(2, 1, 0) == (1, 2, 0) def test_edge_func(): @@ -39,36 +39,36 @@ def test_edge_func(): ef = line._edge_func(G) expected = [(1, 2, 0), (2, 3, 0), (2, 3, 1)] result = sorted(ef()) - assert_equal(expected, result) + assert expected == result def test_sorted_edge(): - assert_equal((1, 2), line._sorted_edge(1, 2)) - assert_equal((1, 2), line._sorted_edge(2, 1)) + assert (1, 2) == line._sorted_edge(1, 2) + assert (1, 2) == line._sorted_edge(2, 1) -class TestGeneratorLine(): +class TestGeneratorLine: def test_star(self): G = nx.star_graph(5) L = nx.line_graph(G) - assert_true(nx.is_isomorphic(L, nx.complete_graph(5))) + assert nx.is_isomorphic(L, nx.complete_graph(5)) def test_path(self): G = nx.path_graph(5) L = nx.line_graph(G) - assert_true(nx.is_isomorphic(L, nx.path_graph(4))) + assert nx.is_isomorphic(L, nx.path_graph(4)) def test_cycle(self): G = nx.cycle_graph(5) L = nx.line_graph(G) - assert_true(nx.is_isomorphic(L, G)) + assert nx.is_isomorphic(L, G) def test_digraph1(self): G = nx.DiGraph() G.add_edges_from([(0, 1), (0, 2), (0, 3)]) L = nx.line_graph(G) # no edge graph, but with nodes - assert_equal(L.adj, {(0, 1): {}, (0, 2): {}, (0, 3): {}}) + assert L.adj == {(0, 1): {}, (0, 2): {}, (0, 3): {}} def test_digraph2(self): G = nx.DiGraph() @@ -89,42 +89,61 @@ def test_create2(self): assert_edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) -class TestGeneratorInverseLine(): +class TestGeneratorInverseLine: def test_example(self): G = nx.Graph() - G_edges = [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 5], [2, 6], - [2, 7], [3, 4], [3, 5], [6, 7], [6, 8], [7, 8]] + G_edges = [ + [1, 2], + [1, 3], + [1, 4], + [1, 5], + [2, 3], + [2, 5], + [2, 6], + [2, 7], + [3, 4], + [3, 5], + [6, 7], + [6, 8], + [7, 8], + ] G.add_edges_from(G_edges) H = nx.inverse_line_graph(G) solution = nx.Graph() - solution_edges = [('a', 'b'), ('a', 'c'), ('a', 'd'), ('a', 'e'), - ('c', 'd'), ('e', 'f'), ('e', 'g'), ('f', 'g')] + solution_edges = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("c", "d"), + ("e", "f"), + ("e", "g"), + ("f", "g"), + ] solution.add_edges_from(solution_edges) - assert_true(nx.is_isomorphic(H, solution)) + assert nx.is_isomorphic(H, solution) def test_example_2(self): G = nx.Graph() - G_edges = [[1, 2], [1, 3], [2, 3], - [3, 4], [3, 5], [4, 5]] + G_edges = [[1, 2], [1, 3], [2, 3], [3, 4], [3, 5], [4, 5]] G.add_edges_from(G_edges) H = nx.inverse_line_graph(G) solution = nx.Graph() - solution_edges = [('a', 'c'), ('b', 'c'), ('c', 'd'), - ('d', 'e'), ('d', 'f')] + solution_edges = [("a", "c"), ("b", "c"), ("c", "d"), ("d", "e"), ("d", "f")] solution.add_edges_from(solution_edges) - assert_true(nx.is_isomorphic(H, solution)) + assert nx.is_isomorphic(H, solution) def test_pair(self): G = nx.path_graph(2) H = nx.inverse_line_graph(G) solution = nx.path_graph(3) - assert_true(nx.is_isomorphic(H, solution)) + assert nx.is_isomorphic(H, solution) def test_line(self): G = nx.path_graph(5) solution = nx.path_graph(6) H = nx.inverse_line_graph(G) - assert_true(nx.is_isomorphic(H, solution)) + assert nx.is_isomorphic(H, solution) def test_triangle_graph(self): G = nx.complete_graph(3) @@ -133,88 +152,123 @@ def test_triangle_graph(self): alternative_solution.add_edges_from([[0, 1], [0, 2], [0, 3]]) # there are two alternative inverse line graphs for this case # so long as we get one of them the test should pass - assert_true(nx.is_isomorphic(H, G) or - nx.is_isomorphic(H, 
alternative_solution)) + assert nx.is_isomorphic(H, G) or nx.is_isomorphic(H, alternative_solution) def test_cycle(self): G = nx.cycle_graph(5) H = nx.inverse_line_graph(G) - assert_true(nx.is_isomorphic(H, G)) + assert nx.is_isomorphic(H, G) def test_empty(self): G = nx.Graph() - assert_raises(nx.NetworkXError, nx.inverse_line_graph, G) + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, nx.complete_graph(1)) + + def test_K1(self): + G = nx.complete_graph(1) + H = nx.inverse_line_graph(G) + solution = nx.path_graph(2) + assert nx.is_isomorphic(H, solution) def test_claw(self): # This is the simplest non-line graph G = nx.Graph() G_edges = [[0, 1], [0, 2], [0, 3]] G.add_edges_from(G_edges) - assert_raises(nx.NetworkXError, nx.inverse_line_graph, G) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) def test_non_line_graph(self): # These are other non-line graphs + + # wheel graph with 6 nodes G = nx.Graph() - G_edges = [[0, 1], [0, 2], [0, 3], [0, 4], [0, 5], [1, 2], - [2, 3], [3, 4], [4, 5], [5, 1]] + G_edges = [ + [0, 1], + [0, 2], + [0, 3], + [0, 4], + [0, 5], + [1, 2], + [2, 3], + [3, 4], + [4, 5], + [5, 1], + ] G.add_edges_from(G_edges) - assert_raises(nx.NetworkXError, nx.inverse_line_graph, G) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + # 3---4---5 + # / \ / \ / + # 0---1---2 G = nx.Graph() - G_edges = [[0, 1], [1, 2], [3, 4], [4, 5], [0, 3], [1, 3], - [1, 4], [2, 4], [2, 5]] + G_edges = [ + [0, 1], + [1, 2], + [3, 4], + [4, 5], + [0, 3], + [1, 3], + [1, 4], + [2, 4], + [2, 5], + ] G.add_edges_from(G_edges) - assert_raises(nx.NetworkXError, nx.inverse_line_graph, G) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # K_5 minus an edge + K5me = nx.complete_graph(5) + K5me.remove_edge(0, 1) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, K5me) def test_wrong_graph_type(self): G = nx.DiGraph() G_edges = [[0, 1], [0, 2], [0, 3]] G.add_edges_from(G_edges) - assert_raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) + pytest.raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) G = nx.MultiGraph() G_edges = [[0, 1], [0, 2], [0, 3]] G.add_edges_from(G_edges) - assert_raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) + pytest.raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) def test_line_inverse_line_complete(self): G = nx.complete_graph(10) H = nx.line_graph(G) J = nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) def test_line_inverse_line_path(self): G = nx.path_graph(10) H = nx.line_graph(G) J = nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) def test_line_inverse_line_hypercube(self): G = nx.hypercube_graph(5) H = nx.line_graph(G) J = nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) def test_line_inverse_line_cycle(self): G = nx.cycle_graph(10) H = nx.line_graph(G) J = nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) def test_line_inverse_line_star(self): G = nx.star_graph(20) H = nx.line_graph(G) J = nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) def test_line_inverse_line_multipartite(self): G = nx.complete_multipartite_graph(3, 4, 5) H = nx.line_graph(G) J = nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) def test_line_inverse_line_dgm(self): G = nx.dorogovtsev_goltsev_mendes_graph(4) H = nx.line_graph(G) J = 
nx.inverse_line_graph(H) - assert_true(nx.is_isomorphic(G, J)) + assert nx.is_isomorphic(G, J) diff --git a/networkx/generators/tests/test_mycielski.py b/networkx/generators/tests/test_mycielski.py index e3786ca..d71260e 100644 --- a/networkx/generators/tests/test_mycielski.py +++ b/networkx/generators/tests/test_mycielski.py @@ -1,38 +1,26 @@ -# test_mycielski.py - unit tests for the mycielski module -# -# Copyright 2010, 2011, 2012, 2013, 2014, 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. - """Unit tests for the :mod:`networkx.generators.mycielski` module.""" -from nose.tools import assert_true, assert_equal, raises import networkx as nx -from networkx import * - -class TestMycielski(object): +class TestMycielski: def test_construction(self): G = nx.path_graph(2) - M = mycielskian(G) - assert_true(is_isomorphic(M, cycle_graph(5))) + M = nx.mycielskian(G) + assert nx.is_isomorphic(M, nx.cycle_graph(5)) def test_size(self): G = nx.path_graph(2) - M = mycielskian(G, 2) - assert_equal(len(M), 11) - assert_equal(M.size(), 20) + M = nx.mycielskian(G, 2) + assert len(M) == 11 + assert M.size() == 20 def test_mycielski_graph_generator(self): - G = mycielski_graph(1) - assert_true(is_isomorphic(G, nx.empty_graph(1))) - G = mycielski_graph(2) - assert_true(is_isomorphic(G, nx.path_graph(2))) - G = mycielski_graph(3) - assert_true(is_isomorphic(G, cycle_graph(5))) - G = mycielski_graph(4) - assert_true(is_isomorphic(G, mycielskian(cycle_graph(5)))) + G = nx.mycielski_graph(1) + assert nx.is_isomorphic(G, nx.empty_graph(1)) + G = nx.mycielski_graph(2) + assert nx.is_isomorphic(G, nx.path_graph(2)) + G = nx.mycielski_graph(3) + assert nx.is_isomorphic(G, nx.cycle_graph(5)) + G = nx.mycielski_graph(4) + assert nx.is_isomorphic(G, nx.mycielskian(nx.cycle_graph(5))) diff --git a/networkx/generators/tests/test_nonisomorphic_trees.py b/networkx/generators/tests/test_nonisomorphic_trees.py index 0d97b29..cb6eea8 100644 --- a/networkx/generators/tests/test_nonisomorphic_trees.py +++ b/networkx/generators/tests/test_nonisomorphic_trees.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ ==================== Generators - Non Isomorphic Trees @@ -6,54 +5,60 @@ Unit tests for WROM algorithm generator in generators/nonisomorphic_trees.py """ -from nose.tools import * -from networkx import * -from networkx.testing import * +import networkx as nx +from networkx.testing import assert_edges_equal -class TestGeneratorNonIsomorphicTrees(): - +class TestGeneratorNonIsomorphicTrees: def test_tree_structure(self): # test for tree structure for nx.nonisomorphic_trees() - def f(x): return list(nx.nonisomorphic_trees(x)) + def f(x): + return list(nx.nonisomorphic_trees(x)) + for i in f(6): - assert_true(nx.is_tree(i)) + assert nx.is_tree(i) for i in f(8): - assert_true(nx.is_tree(i)) + assert nx.is_tree(i) def test_nonisomorphism(self): # test for nonisomorphism of trees for nx.nonisomorphic_trees() - def f(x): return list(nx.nonisomorphic_trees(x)) + def f(x): + return list(nx.nonisomorphic_trees(x)) + trees = f(6) for i in range(len(trees)): for j in range(i + 1, len(trees)): - assert_false(nx.is_isomorphic(trees[i], trees[j])) + assert not nx.is_isomorphic(trees[i], trees[j]) trees = f(8) for i in range(len(trees)): for j in range(i + 1, len(trees)): - assert_false(nx.is_isomorphic(trees[i], trees[j])) + assert not nx.is_isomorphic(trees[i], trees[j]) def test_number_of_nonisomorphic_trees(self): # 
http://oeis.org/A000055 - assert_equal(nx.number_of_nonisomorphic_trees(2), 1) - assert_equal(nx.number_of_nonisomorphic_trees(3), 1) - assert_equal(nx.number_of_nonisomorphic_trees(4), 2) - assert_equal(nx.number_of_nonisomorphic_trees(5), 3) - assert_equal(nx.number_of_nonisomorphic_trees(6), 6) - assert_equal(nx.number_of_nonisomorphic_trees(7), 11) - assert_equal(nx.number_of_nonisomorphic_trees(8), 23) + assert nx.number_of_nonisomorphic_trees(2) == 1 + assert nx.number_of_nonisomorphic_trees(3) == 1 + assert nx.number_of_nonisomorphic_trees(4) == 2 + assert nx.number_of_nonisomorphic_trees(5) == 3 + assert nx.number_of_nonisomorphic_trees(6) == 6 + assert nx.number_of_nonisomorphic_trees(7) == 11 + assert nx.number_of_nonisomorphic_trees(8) == 23 def test_nonisomorphic_trees(self): - def f(x): return list(nx.nonisomorphic_trees(x)) + def f(x): + return list(nx.nonisomorphic_trees(x)) + assert_edges_equal(f(3)[0].edges(), [(0, 1), (0, 2)]) assert_edges_equal(f(4)[0].edges(), [(0, 1), (0, 3), (1, 2)]) assert_edges_equal(f(4)[1].edges(), [(0, 1), (0, 2), (0, 3)]) def test_nonisomorphic_trees_matrix(self): trees_2 = [[[0, 1], [1, 0]]] - assert_equal(list(nx.nonisomorphic_trees(2, create="matrix")), trees_2) + assert list(nx.nonisomorphic_trees(2, create="matrix")) == trees_2 trees_3 = [[[0, 1, 1], [1, 0, 0], [1, 0, 0]]] - assert_equal(list(nx.nonisomorphic_trees(3, create="matrix")), trees_3) - trees_4 = [[[0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 0], [1, 0, 0, 0]], - [[0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]]] - assert_equal(list(nx.nonisomorphic_trees(4, create="matrix")), trees_4) + assert list(nx.nonisomorphic_trees(3, create="matrix")) == trees_3 + trees_4 = [ + [[0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 0], [1, 0, 0, 0]], + [[0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]], + ] + assert list(nx.nonisomorphic_trees(4, create="matrix")) == trees_4 diff --git a/networkx/generators/tests/test_random_clustered.py b/networkx/generators/tests/test_random_clustered.py index e34076a..319c1d7 100644 --- a/networkx/generators/tests/test_random_clustered.py +++ b/networkx/generators/tests/test_random_clustered.py @@ -1,28 +1,33 @@ -#!/usr/bin/env python -from nose.tools import * +import pytest import networkx class TestRandomClusteredGraph: - def test_valid(self): node = [1, 1, 1, 2, 1, 2, 0, 0] tri = [0, 0, 0, 0, 0, 1, 1, 1] joint_degree_sequence = zip(node, tri) G = networkx.random_clustered_graph(joint_degree_sequence) - assert_equal(G.number_of_nodes(), 8) - assert_equal(G.number_of_edges(), 7) + assert G.number_of_nodes() == 8 + assert G.number_of_edges() == 7 def test_valid2(self): G = networkx.random_clustered_graph( - [(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)]) - assert_equal(G.number_of_nodes(), 6) - assert_equal(G.number_of_edges(), 10) + [(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)] + ) + assert G.number_of_nodes() == 6 + assert G.number_of_edges() == 10 def test_invalid1(self): - assert_raises((TypeError, networkx.NetworkXError), - networkx.random_clustered_graph, [[1, 1], [2, 1], [0, 1]]) + pytest.raises( + (TypeError, networkx.NetworkXError), + networkx.random_clustered_graph, + [[1, 1], [2, 1], [0, 1]], + ) def test_invalid2(self): - assert_raises((TypeError, networkx.NetworkXError), - networkx.random_clustered_graph, [[1, 1], [1, 2], [0, 1]]) + pytest.raises( + (TypeError, networkx.NetworkXError), + networkx.random_clustered_graph, + [[1, 1], [1, 2], [0, 1]], + ) diff --git a/networkx/generators/tests/test_random_graphs.py 
b/networkx/generators/tests/test_random_graphs.py index a20ca93..6835e88 100644 --- a/networkx/generators/tests/test_random_graphs.py +++ b/networkx/generators/tests/test_random_graphs.py @@ -1,24 +1,11 @@ -# -*- encoding: utf-8 -*- -# test_random_graphs.py - unit tests for random graph generators -# -# Copyright 2010-2018 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.generators.random_graphs` module. """ -from nose.tools import assert_almost_equal -from nose.tools import assert_greater -from nose.tools import assert_less -from nose.tools import assert_equal -from nose.tools import assert_raises -from nose.tools import assert_true +import pytest from networkx.exception import NetworkXError from networkx.generators.random_graphs import barabasi_albert_graph +from networkx.generators.random_graphs import dual_barabasi_albert_graph from networkx.generators.random_graphs import extended_barabasi_albert_graph from networkx.generators.random_graphs import binomial_graph from networkx.generators.random_graphs import connected_watts_strogatz_graph @@ -31,72 +18,147 @@ from networkx.generators.random_graphs import powerlaw_cluster_graph from networkx.generators.random_graphs import random_kernel_graph from networkx.generators.random_graphs import random_lobster +from networkx.generators.random_graphs import random_powerlaw_tree +from networkx.generators.random_graphs import random_powerlaw_tree_sequence from networkx.generators.random_graphs import random_regular_graph from networkx.generators.random_graphs import random_shell_graph from networkx.generators.random_graphs import watts_strogatz_graph -class TestGeneratorsRandom(object): - - def smoke_test_random_graph(self): +class TestGeneratorsRandom: + def test_random_graph(self): seed = 42 G = gnp_random_graph(100, 0.25, seed) + G = gnp_random_graph(100, 0.25, seed, directed=True) G = binomial_graph(100, 0.25, seed) G = erdos_renyi_graph(100, 0.25, seed) G = fast_gnp_random_graph(100, 0.25, seed) + G = fast_gnp_random_graph(100, 0.25, seed, directed=True) G = gnm_random_graph(100, 20, seed) + G = gnm_random_graph(100, 20, seed, directed=True) G = dense_gnm_random_graph(100, 20, seed) G = watts_strogatz_graph(10, 2, 0.25, seed) - assert_equal(len(G), 10) - assert_equal(G.number_of_edges(), 10) + assert len(G) == 10 + assert G.number_of_edges() == 10 - G = connected_watts_strogatz_graph(10, 2, 0.1, seed) - assert_equal(len(G), 10) - assert_equal(G.number_of_edges(), 10) + G = connected_watts_strogatz_graph(10, 2, 0.1, tries=10, seed=seed) + assert len(G) == 10 + assert G.number_of_edges() == 10 + pytest.raises( + NetworkXError, connected_watts_strogatz_graph, 10, 2, 0.1, tries=0 + ) G = watts_strogatz_graph(10, 4, 0.25, seed) - assert_equal(len(G), 10) - assert_equal(G.number_of_edges(), 20) + assert len(G) == 10 + assert G.number_of_edges() == 20 G = newman_watts_strogatz_graph(10, 2, 0.0, seed) - assert_equal(len(G), 10) - assert_equal(G.number_of_edges(), 10) + assert len(G) == 10 + assert G.number_of_edges() == 10 G = newman_watts_strogatz_graph(10, 4, 0.25, seed) - assert_equal(len(G), 10) - assert_true(G.number_of_edges() >= 20) + assert len(G) == 10 + assert G.number_of_edges() >= 20 G = barabasi_albert_graph(100, 1, seed) G = barabasi_albert_graph(100, 3, seed) - assert_equal(G.number_of_edges(), (97 * 3)) + assert G.number_of_edges() == (97 * 3) G = extended_barabasi_albert_graph(100, 1, 0, 0, seed) - 
assert_equal(G.number_of_edges(), 99) + assert G.number_of_edges() == 99 G = extended_barabasi_albert_graph(100, 3, 0, 0, seed) - assert_equal(G.number_of_edges(), 97 * 3) + assert G.number_of_edges() == 97 * 3 G = extended_barabasi_albert_graph(100, 1, 0, 0.5, seed) - assert_equal(G.number_of_edges(), 99) + assert G.number_of_edges() == 99 G = extended_barabasi_albert_graph(100, 2, 0.5, 0, seed) - assert_greater(G.number_of_edges(), 100 * 3) - assert_less(G.number_of_edges(), 100 * 4) + assert G.number_of_edges() > 100 * 3 + assert G.number_of_edges() < 100 * 4 G = extended_barabasi_albert_graph(100, 2, 0.3, 0.3, seed) - assert_greater(G.number_of_edges(), 100 * 2) - assert_less(G.number_of_edges(), 100 * 4) + assert G.number_of_edges() > 100 * 2 + assert G.number_of_edges() < 100 * 4 G = powerlaw_cluster_graph(100, 1, 1.0, seed) G = powerlaw_cluster_graph(100, 3, 0.0, seed) - assert_equal(G.number_of_edges(), (97 * 3)) + assert G.number_of_edges() == (97 * 3) G = random_regular_graph(10, 20, seed) - assert_raises(NetworkXError, random_regular_graph, 3, 21) + pytest.raises(NetworkXError, random_regular_graph, 3, 21) + pytest.raises(NetworkXError, random_regular_graph, 33, 21) constructor = [(10, 20, 0.8), (20, 40, 0.8)] G = random_shell_graph(constructor, seed) + def is_caterpillar(g): + """ + A tree is a caterpillar iff all nodes of degree >=3 are surrounded + by at most two nodes of degree two or greater. + ref: http://mathworld.wolfram.com/CaterpillarGraph.html + """ + deg_over_3 = [n for n in g if g.degree(n) >= 3] + for n in deg_over_3: + nbh_deg_over_2 = [nbh for nbh in g.neighbors(n) if g.degree(nbh) >= 2] + if not len(nbh_deg_over_2) <= 2: + return False + return True + + def is_lobster(g): + """ + A tree is a lobster if it has the property that the removal of leaf + nodes leaves a caterpillar graph (Gallian 2007) + ref: http://mathworld.wolfram.com/LobsterGraph.html + """ + non_leafs = [n for n in g if g.degree(n) > 1] + return is_caterpillar(g.subgraph(non_leafs)) + G = random_lobster(10, 0.1, 0.5, seed) + assert max([G.degree(n) for n in G.nodes()]) > 3 + assert is_lobster(G) + pytest.raises(NetworkXError, random_lobster, 10, 0.1, 1, seed) + pytest.raises(NetworkXError, random_lobster, 10, 1, 1, seed) + pytest.raises(NetworkXError, random_lobster, 10, 1, 0.5, seed) + + # docstring says this should be a caterpillar + G = random_lobster(10, 0.1, 0.0, seed) + assert is_caterpillar(G) + + # difficult to find seed that requires few tries + seq = random_powerlaw_tree_sequence(10, 3, seed=14, tries=1) + G = random_powerlaw_tree(10, 3, seed=14, tries=1) + + def test_dual_barabasi_albert(self, m1=1, m2=4, p=0.5): + """ + Tests that the dual BA random graph generated behaves consistently. + + Tests the exceptions are raised as expected. 
+ + The graphs generation are repeated several times to prevent lucky shots + + """ + seed = 42 + repeats = 2 + + while repeats: + repeats -= 1 + + # This should be BA with m = m1 + BA1 = barabasi_albert_graph(100, m1, seed) + DBA1 = dual_barabasi_albert_graph(100, m1, m2, 1, seed) + assert BA1.size() == DBA1.size() + + # This should be BA with m = m2 + BA2 = barabasi_albert_graph(100, m2, seed) + DBA2 = dual_barabasi_albert_graph(100, m1, m2, 0, seed) + assert BA2.size() == DBA2.size() + + # Testing exceptions + dbag = dual_barabasi_albert_graph + pytest.raises(NetworkXError, dbag, m1, m1, m2, 0) + pytest.raises(NetworkXError, dbag, m2, m1, m2, 0) + pytest.raises(NetworkXError, dbag, 100, m1, m2, -0.5) + pytest.raises(NetworkXError, dbag, 100, m1, m2, 1.5) def test_extended_barabasi_albert(self, m=2): """ @@ -117,26 +179,26 @@ def test_extended_barabasi_albert(self, m=2): # This behaves just like BA, the number of edges must be the same G1 = extended_barabasi_albert_graph(100, m, 0, 0, seed) - assert_equal(G1.size(), BA_model_edges) + assert G1.size() == BA_model_edges # More than twice more edges should have been added G1 = extended_barabasi_albert_graph(100, m, 0.8, 0, seed) - assert_greater(G1.size(), BA_model_edges * 2) + assert G1.size() > BA_model_edges * 2 # Only edge rewiring, so the number of edges less than original G2 = extended_barabasi_albert_graph(100, m, 0, 0.8, seed) - assert_equal(G2.size(), BA_model_edges) + assert G2.size() == BA_model_edges # Mixed scenario: less edges than G1 and more edges than G2 G3 = extended_barabasi_albert_graph(100, m, 0.3, 0.3, seed) - assert_greater(G3.size(), G2.size()) - assert_less(G3.size(), G1.size()) + assert G3.size() > G2.size() + assert G3.size() < G1.size() # Testing exceptions ebag = extended_barabasi_albert_graph - assert_raises(NetworkXError, ebag, m, m, 0, 0) - assert_raises(NetworkXError, ebag, 1, 0.5, 0, 0) - assert_raises(NetworkXError, ebag, 100, 2, 0.5, 0.5) + pytest.raises(NetworkXError, ebag, m, m, 0, 0) + pytest.raises(NetworkXError, ebag, 1, 0.5, 0, 0) + pytest.raises(NetworkXError, ebag, 100, 2, 0.5, 0.5) def test_random_zero_regular_graph(self): """Tests that a 0-regular graph has the correct number of nodes and @@ -144,83 +206,95 @@ def test_random_zero_regular_graph(self): """ seed = 42 - G = random_regular_graph(0, 10) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 0) + G = random_regular_graph(0, 10, seed) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 0 def test_gnp(self): - for generator in [gnp_random_graph, binomial_graph, erdos_renyi_graph, - fast_gnp_random_graph]: + for generator in [ + gnp_random_graph, + binomial_graph, + erdos_renyi_graph, + fast_gnp_random_graph, + ]: G = generator(10, -1.1) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 0) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 0 G = generator(10, 0.1) - assert_equal(len(G), 10) + assert len(G) == 10 G = generator(10, 0.1, seed=42) - assert_equal(len(G), 10) + assert len(G) == 10 G = generator(10, 1.1) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 45) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 45 G = generator(10, -1.1, directed=True) - assert_true(G.is_directed()) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 0) + assert G.is_directed() + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 0 G = generator(10, 0.1, directed=True) - assert_true(G.is_directed()) - assert_equal(len(G), 10) + assert 
G.is_directed() + assert len(G) == 10 G = generator(10, 1.1, directed=True) - assert_true(G.is_directed()) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 90) + assert G.is_directed() + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 90 # assert that random graphs generate all edges for p close to 1 edges = 0 runs = 100 for i in range(runs): edges += sum(1 for _ in generator(10, 0.99999, directed=True).edges()) - assert_almost_equal(edges / float(runs), 90, delta=runs * 2.0 / 100) + assert abs(edges / float(runs) - 90) <= runs * 2.0 / 100 def test_gnm(self): G = gnm_random_graph(10, 3) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 3) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 3 G = gnm_random_graph(10, 3, seed=42) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 3) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 3 G = gnm_random_graph(10, 100) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 45) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 45 G = gnm_random_graph(10, 100, directed=True) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 90) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 90 G = gnm_random_graph(10, -1.1) - assert_equal(len(G), 10) - assert_equal(sum(1 for _ in G.edges()), 0) + assert len(G) == 10 + assert sum(1 for _ in G.edges()) == 0 def test_watts_strogatz_big_k(self): - assert_raises(NetworkXError, watts_strogatz_graph, 10, 10, 0.25) - assert_raises(NetworkXError, newman_watts_strogatz_graph, 10, 10, 0.25) + # Test that an error is raised when k > n + pytest.raises(NetworkXError, watts_strogatz_graph, 10, 11, 0.25) + pytest.raises(NetworkXError, newman_watts_strogatz_graph, 10, 11, 0.25) + # could create an infinite loop, now doesn't # infinite loop used to occur when a node has degree n-1 and needs to rewire watts_strogatz_graph(10, 9, 0.25, seed=0) newman_watts_strogatz_graph(10, 9, 0.5, seed=0) + # Test k==n scenario + watts_strogatz_graph(10, 10, 0.25, seed=0) + newman_watts_strogatz_graph(10, 10, 0.25, seed=0) + def test_random_kernel_graph(self): def integral(u, w, z): return c * (z - w) def root(u, w, r): return r / c + w + c = 1 graph = random_kernel_graph(1000, integral, root) - assert_equal(len(graph), 1000) + graph = random_kernel_graph(1000, integral, root, seed=42) + assert len(graph) == 1000 diff --git a/networkx/generators/tests/test_small.py b/networkx/generators/tests/test_small.py index a1cc876..882ecef 100644 --- a/networkx/generators/tests/test_small.py +++ b/networkx/generators/tests/test_small.py @@ -1,8 +1,7 @@ -#!/usr/bin/env python - -from nose.tools import * -from networkx import * +import pytest +import networkx as nx from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic + is_isomorphic = graph_could_be_isomorphic """Generators - Small @@ -11,178 +10,179 @@ Some small graphs """ -null = null_graph() +null = nx.null_graph() -class TestGeneratorsSmall(): +class TestGeneratorsSmall: def test_make_small_graph(self): d = ["adjacencylist", "Bull Graph", 5, [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]]] - G = make_small_graph(d) - assert_true(is_isomorphic(G, bull_graph())) + G = nx.make_small_graph(d) + assert is_isomorphic(G, nx.bull_graph()) + + # Test small graph creation error with wrong ltype + d[0] = "erroneouslist" + pytest.raises(nx.NetworkXError, nx.make_small_graph, graph_description=d) def test__LCF_graph(self): # If n<=0, then return the null_graph - G = 
LCF_graph(-10, [1, 2], 100) - assert_true(is_isomorphic(G, null)) - G = LCF_graph(0, [1, 2], 3) - assert_true(is_isomorphic(G, null)) - G = LCF_graph(0, [1, 2], 10) - assert_true(is_isomorphic(G, null)) + G = nx.LCF_graph(-10, [1, 2], 100) + assert is_isomorphic(G, null) + G = nx.LCF_graph(0, [1, 2], 3) + assert is_isomorphic(G, null) + G = nx.LCF_graph(0, [1, 2], 10) + assert is_isomorphic(G, null) # Test that LCF(n,[],0) == cycle_graph(n) for a, b, c in [(5, [], 0), (10, [], 0), (5, [], 1), (10, [], 10)]: - G = LCF_graph(a, b, c) - assert_true(is_isomorphic(G, cycle_graph(a))) + G = nx.LCF_graph(a, b, c) + assert is_isomorphic(G, nx.cycle_graph(a)) # Generate the utility graph K_{3,3} - G = LCF_graph(6, [3, -3], 3) - utility_graph = complete_bipartite_graph(3, 3) - assert_true(is_isomorphic(G, utility_graph)) + G = nx.LCF_graph(6, [3, -3], 3) + utility_graph = nx.complete_bipartite_graph(3, 3) + assert is_isomorphic(G, utility_graph) def test_properties_named_small_graphs(self): - G = bull_graph() - assert_equal(G.number_of_nodes(), 5) - assert_equal(G.number_of_edges(), 5) - assert_equal(sorted(d for n, d in G.degree()), [1, 1, 2, 3, 3]) - assert_equal(diameter(G), 3) - assert_equal(radius(G), 2) - - G = chvatal_graph() - assert_equal(G.number_of_nodes(), 12) - assert_equal(G.number_of_edges(), 24) - assert_equal(list(d for n, d in G.degree()), 12 * [4]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 2) - - G = cubical_graph() - assert_equal(G.number_of_nodes(), 8) - assert_equal(G.number_of_edges(), 12) - assert_equal(list(d for n, d in G.degree()), 8 * [3]) - assert_equal(diameter(G), 3) - assert_equal(radius(G), 3) - - G = desargues_graph() - assert_equal(G.number_of_nodes(), 20) - assert_equal(G.number_of_edges(), 30) - assert_equal(list(d for n, d in G.degree()), 20 * [3]) - - G = diamond_graph() - assert_equal(G.number_of_nodes(), 4) - assert_equal(sorted(d for n, d in G.degree()), [2, 2, 3, 3]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 1) - - G = dodecahedral_graph() - assert_equal(G.number_of_nodes(), 20) - assert_equal(G.number_of_edges(), 30) - assert_equal(list(d for n, d in G.degree()), 20 * [3]) - assert_equal(diameter(G), 5) - assert_equal(radius(G), 5) - - G = frucht_graph() - assert_equal(G.number_of_nodes(), 12) - assert_equal(G.number_of_edges(), 18) - assert_equal(list(d for n, d in G.degree()), 12 * [3]) - assert_equal(diameter(G), 4) - assert_equal(radius(G), 3) - - G = heawood_graph() - assert_equal(G.number_of_nodes(), 14) - assert_equal(G.number_of_edges(), 21) - assert_equal(list(d for n, d in G.degree()), 14 * [3]) - assert_equal(diameter(G), 3) - assert_equal(radius(G), 3) - - G = hoffman_singleton_graph() - assert_equal(G.number_of_nodes(), 50) - assert_equal(G.number_of_edges(), 175) - assert_equal(list(d for n, d in G.degree()), 50 * [7]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 2) - - G = house_graph() - assert_equal(G.number_of_nodes(), 5) - assert_equal(G.number_of_edges(), 6) - assert_equal(sorted(d for n, d in G.degree()), [2, 2, 2, 3, 3]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 2) - - G = house_x_graph() - assert_equal(G.number_of_nodes(), 5) - assert_equal(G.number_of_edges(), 8) - assert_equal(sorted(d for n, d in G.degree()), [2, 3, 3, 4, 4]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 1) - - G = icosahedral_graph() - assert_equal(G.number_of_nodes(), 12) - assert_equal(G.number_of_edges(), 30) - assert_equal(list(d for n, d in G.degree()), - [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 
5]) - assert_equal(diameter(G), 3) - assert_equal(radius(G), 3) - - G = krackhardt_kite_graph() - assert_equal(G.number_of_nodes(), 10) - assert_equal(G.number_of_edges(), 18) - assert_equal(sorted(d for n, d in G.degree()), - [1, 2, 3, 3, 3, 4, 4, 5, 5, 6]) - - G = moebius_kantor_graph() - assert_equal(G.number_of_nodes(), 16) - assert_equal(G.number_of_edges(), 24) - assert_equal(list(d for n, d in G.degree()), 16 * [3]) - assert_equal(diameter(G), 4) - - G = octahedral_graph() - assert_equal(G.number_of_nodes(), 6) - assert_equal(G.number_of_edges(), 12) - assert_equal(list(d for n, d in G.degree()), 6 * [4]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 2) - - G = pappus_graph() - assert_equal(G.number_of_nodes(), 18) - assert_equal(G.number_of_edges(), 27) - assert_equal(list(d for n, d in G.degree()), 18 * [3]) - assert_equal(diameter(G), 4) - - G = petersen_graph() - assert_equal(G.number_of_nodes(), 10) - assert_equal(G.number_of_edges(), 15) - assert_equal(list(d for n, d in G.degree()), 10 * [3]) - assert_equal(diameter(G), 2) - assert_equal(radius(G), 2) - - G = sedgewick_maze_graph() - assert_equal(G.number_of_nodes(), 8) - assert_equal(G.number_of_edges(), 10) - assert_equal(sorted(d for n, d in G.degree()), [1, 2, 2, 2, 3, 3, 3, 4]) - - G = tetrahedral_graph() - assert_equal(G.number_of_nodes(), 4) - assert_equal(G.number_of_edges(), 6) - assert_equal(list(d for n, d in G.degree()), [3, 3, 3, 3]) - assert_equal(diameter(G), 1) - assert_equal(radius(G), 1) - - G = truncated_cube_graph() - assert_equal(G.number_of_nodes(), 24) - assert_equal(G.number_of_edges(), 36) - assert_equal(list(d for n, d in G.degree()), 24 * [3]) - - G = truncated_tetrahedron_graph() - assert_equal(G.number_of_nodes(), 12) - assert_equal(G.number_of_edges(), 18) - assert_equal(list(d for n, d in G.degree()), 12 * [3]) - - G = tutte_graph() - assert_equal(G.number_of_nodes(), 46) - assert_equal(G.number_of_edges(), 69) - assert_equal(list(d for n, d in G.degree()), 46 * [3]) + G = nx.bull_graph() + assert G.number_of_nodes() == 5 + assert G.number_of_edges() == 5 + assert sorted(d for n, d in G.degree()) == [1, 1, 2, 3, 3] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 2 + + G = nx.chvatal_graph() + assert G.number_of_nodes() == 12 + assert G.number_of_edges() == 24 + assert list(d for n, d in G.degree()) == 12 * [4] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.cubical_graph() + assert G.number_of_nodes() == 8 + assert G.number_of_edges() == 12 + assert list(d for n, d in G.degree()) == 8 * [3] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 3 + + G = nx.desargues_graph() + assert G.number_of_nodes() == 20 + assert G.number_of_edges() == 30 + assert list(d for n, d in G.degree()) == 20 * [3] + + G = nx.diamond_graph() + assert G.number_of_nodes() == 4 + assert sorted(d for n, d in G.degree()) == [2, 2, 3, 3] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 1 + + G = nx.dodecahedral_graph() + assert G.number_of_nodes() == 20 + assert G.number_of_edges() == 30 + assert list(d for n, d in G.degree()) == 20 * [3] + assert nx.diameter(G) == 5 + assert nx.radius(G) == 5 + + G = nx.frucht_graph() + assert G.number_of_nodes() == 12 + assert G.number_of_edges() == 18 + assert list(d for n, d in G.degree()) == 12 * [3] + assert nx.diameter(G) == 4 + assert nx.radius(G) == 3 + + G = nx.heawood_graph() + assert G.number_of_nodes() == 14 + assert G.number_of_edges() == 21 + assert list(d for n, d in G.degree()) == 14 * [3] + assert nx.diameter(G) == 3 + assert 
nx.radius(G) == 3 + + G = nx.hoffman_singleton_graph() + assert G.number_of_nodes() == 50 + assert G.number_of_edges() == 175 + assert list(d for n, d in G.degree()) == 50 * [7] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.house_graph() + assert G.number_of_nodes() == 5 + assert G.number_of_edges() == 6 + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 3, 3] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.house_x_graph() + assert G.number_of_nodes() == 5 + assert G.number_of_edges() == 8 + assert sorted(d for n, d in G.degree()) == [2, 3, 3, 4, 4] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 1 + + G = nx.icosahedral_graph() + assert G.number_of_nodes() == 12 + assert G.number_of_edges() == 30 + assert list(d for n, d in G.degree()) == [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 3 + + G = nx.krackhardt_kite_graph() + assert G.number_of_nodes() == 10 + assert G.number_of_edges() == 18 + assert sorted(d for n, d in G.degree()) == [1, 2, 3, 3, 3, 4, 4, 5, 5, 6] + + G = nx.moebius_kantor_graph() + assert G.number_of_nodes() == 16 + assert G.number_of_edges() == 24 + assert list(d for n, d in G.degree()) == 16 * [3] + assert nx.diameter(G) == 4 + + G = nx.octahedral_graph() + assert G.number_of_nodes() == 6 + assert G.number_of_edges() == 12 + assert list(d for n, d in G.degree()) == 6 * [4] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.pappus_graph() + assert G.number_of_nodes() == 18 + assert G.number_of_edges() == 27 + assert list(d for n, d in G.degree()) == 18 * [3] + assert nx.diameter(G) == 4 + + G = nx.petersen_graph() + assert G.number_of_nodes() == 10 + assert G.number_of_edges() == 15 + assert list(d for n, d in G.degree()) == 10 * [3] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.sedgewick_maze_graph() + assert G.number_of_nodes() == 8 + assert G.number_of_edges() == 10 + assert sorted(d for n, d in G.degree()) == [1, 2, 2, 2, 3, 3, 3, 4] + + G = nx.tetrahedral_graph() + assert G.number_of_nodes() == 4 + assert G.number_of_edges() == 6 + assert list(d for n, d in G.degree()) == [3, 3, 3, 3] + assert nx.diameter(G) == 1 + assert nx.radius(G) == 1 + + G = nx.truncated_cube_graph() + assert G.number_of_nodes() == 24 + assert G.number_of_edges() == 36 + assert list(d for n, d in G.degree()) == 24 * [3] + + G = nx.truncated_tetrahedron_graph() + assert G.number_of_nodes() == 12 + assert G.number_of_edges() == 18 + assert list(d for n, d in G.degree()) == 12 * [3] + + G = nx.tutte_graph() + assert G.number_of_nodes() == 46 + assert G.number_of_edges() == 69 + assert list(d for n, d in G.degree()) == 46 * [3] # Test create_using with directed or multigraphs on small graphs - assert_raises(networkx.exception.NetworkXError, tutte_graph, - create_using=DiGraph()) - MG = tutte_graph(create_using=MultiGraph()) - assert_equal(sorted(MG.edges()), sorted(G.edges())) + pytest.raises(nx.NetworkXError, nx.tutte_graph, create_using=nx.DiGraph) + MG = nx.tutte_graph(create_using=nx.MultiGraph) + assert sorted(MG.edges()) == sorted(G.edges()) diff --git a/networkx/generators/tests/test_spectral_graph_forge.py b/networkx/generators/tests/test_spectral_graph_forge.py new file mode 100644 index 0000000..3f27a17 --- /dev/null +++ b/networkx/generators/tests/test_spectral_graph_forge.py @@ -0,0 +1,48 @@ +import pytest + +from networkx import is_isomorphic +from networkx.exception import NetworkXError +from networkx.testing import assert_nodes_equal +from 
networkx.generators.spectral_graph_forge import spectral_graph_forge +from networkx.generators import karate_club_graph + + +def test_spectral_graph_forge(): + numpy = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") + + G = karate_club_graph() + + seed = 54321 + + # common cases: check that the node set is preserved and that the + # identity and modularity transformations give different results + H = spectral_graph_forge(G, 0.1, transformation="identity", seed=seed) + assert_nodes_equal(G, H) + + I = spectral_graph_forge(G, 0.1, transformation="identity", seed=seed) + assert_nodes_equal(G, I) + assert is_isomorphic(I, H) + + I = spectral_graph_forge(G, 0.1, transformation="modularity", seed=seed) + assert_nodes_equal(G, I) + + assert not is_isomorphic(I, H) + + # with all the eigenvectors, output graph is identical to the input one + H = spectral_graph_forge(G, 1, transformation="modularity", seed=seed) + assert_nodes_equal(G, H) + assert is_isomorphic(G, H) + + # invalid alpha input values are silently clamped to [0, 1] + H = spectral_graph_forge(G, -1, transformation="identity", seed=seed) + assert_nodes_equal(G, H) + + H = spectral_graph_forge(G, 10, transformation="identity", seed=seed) + assert_nodes_equal(G, H) + assert is_isomorphic(G, H) + + # an invalid transformation mode must raise an error + pytest.raises( + NetworkXError, spectral_graph_forge, G, 0.1, transformation="unknown", seed=seed + ) diff --git a/networkx/generators/tests/test_stochastic.py b/networkx/generators/tests/test_stochastic.py index 1c62915..1a2a96f 100644 --- a/networkx/generators/tests/test_stochastic.py +++ b/networkx/generators/tests/test_stochastic.py @@ -1,17 +1,9 @@ -# test_stochastic.py - unit tests for the stochastic module -# -# Copyright 2010, 2011, 2012, 2013, 2014, 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Unit tests for the :mod:`networkx.generators.stochastic` module.""" -from nose.tools import assert_true, assert_equal, raises +import pytest import networkx as nx -class TestStochasticGraph(object): +class TestStochasticGraph: """Unit tests for the :func:`~networkx.stochastic_graph` function. """ @@ -21,9 +13,11 @@ def test_default_weights(self): G.add_edge(0, 1) G.add_edge(0, 2) S = nx.stochastic_graph(G) - assert_true(nx.is_isomorphic(G, S)) - assert_equal(sorted(S.edges(data=True)), - [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + assert nx.is_isomorphic(G, S) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] def test_in_place(self): """Tests for an in-place reweighting of the edges of the graph. 
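As a minimal sketch of the behaviour these stochastic tests exercise (illustrative only, not part of the imported source): nx.stochastic_graph divides each out-edge weight by the total out-weight of its source node, so the weights leaving every node sum to 1. The toy graph and weights below are invented for the example.

import networkx as nx

G = nx.DiGraph()
G.add_edge(0, 1, weight=3)
G.add_edge(0, 2, weight=1)
S = nx.stochastic_graph(G)
# node 0 has total out-weight 4, so its edges are rescaled to 3/4 and 1/4
assert S[0][1]["weight"] == 0.75
assert S[0][2]["weight"] == 0.25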
@@ -33,29 +27,37 @@ def test_in_place(self): G.add_edge(0, 1, weight=1) G.add_edge(0, 2, weight=1) nx.stochastic_graph(G, copy=False) - assert_equal(sorted(G.edges(data=True)), - [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + assert sorted(G.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] def test_arbitrary_weights(self): G = nx.DiGraph() G.add_edge(0, 1, weight=1) G.add_edge(0, 2, weight=1) S = nx.stochastic_graph(G) - assert_equal(sorted(S.edges(data=True)), - [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] def test_multidigraph(self): G = nx.MultiDiGraph() G.add_edges_from([(0, 1), (0, 1), (0, 2), (0, 2)]) S = nx.stochastic_graph(G) d = dict(weight=0.25) - assert_equal(sorted(S.edges(data=True)), - [(0, 1, d), (0, 1, d), (0, 2, d), (0, 2, d)]) + assert sorted(S.edges(data=True)) == [ + (0, 1, d), + (0, 1, d), + (0, 2, d), + (0, 2, d), + ] - @raises(nx.NetworkXNotImplemented) def test_graph_disallowed(self): - nx.stochastic_graph(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + nx.stochastic_graph(nx.Graph()) - @raises(nx.NetworkXNotImplemented) def test_multigraph_disallowed(self): - nx.stochastic_graph(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + nx.stochastic_graph(nx.MultiGraph()) diff --git a/networkx/generators/tests/test_sudoku.py b/networkx/generators/tests/test_sudoku.py new file mode 100644 index 0000000..366701d --- /dev/null +++ b/networkx/generators/tests/test_sudoku.py @@ -0,0 +1,91 @@ +"""Unit tests for the :mod:`networkx.generators.sudoku_graph` module.""" + +import pytest +import networkx as nx + + +def test_sudoku_negative(): + """Raise an error when generating a Sudoku graph of order -1.""" + pytest.raises(nx.NetworkXError, nx.sudoku_graph, n=-1) + + +@pytest.mark.parametrize("n", [0, 1, 2, 3, 4]) +def test_sudoku_generator(n): + """Generate Sudoku graphs of various sizes and verify their properties.""" + G = nx.sudoku_graph(n) + expected_nodes = n ** 4 + expected_degree = (n - 1) * (3 * n + 1) + expected_edges = expected_nodes * expected_degree // 2 + assert not G.is_directed() + assert not G.is_multigraph() + assert G.number_of_nodes() == expected_nodes + assert G.number_of_edges() == expected_edges + assert all(d == expected_degree for _, d in G.degree) + + if n == 2: + assert sorted(G.neighbors(6)) == [2, 3, 4, 5, 7, 10, 14] + elif n == 3: + assert sorted(G.neighbors(42)) == [ + 6, + 15, + 24, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 43, + 44, + 51, + 52, + 53, + 60, + 69, + 78, + ] + elif n == 4: + assert sorted(G.neighbors(0)) == [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 32, + 33, + 34, + 35, + 48, + 49, + 50, + 51, + 64, + 80, + 96, + 112, + 128, + 144, + 160, + 176, + 192, + 208, + 224, + 240, + ] diff --git a/networkx/generators/tests/test_trees.py b/networkx/generators/tests/test_trees.py index fbb74bc..d7a76d9 100644 --- a/networkx/generators/tests/test_trees.py +++ b/networkx/generators/tests/test_trees.py @@ -1,20 +1,19 @@ -from nose.tools import assert_equal, assert_true - import networkx as nx from networkx.generators.trees import NIL from networkx.utils import arbitrary_element -class TestPrefixTree(object): +class TestPrefixTree: """Unit tests for the prefix tree generator function.""" def test_basic(self): # This example is from the Wikipedia article "Trie" # <https://en.wikipedia.org/wiki/Trie>. 
- strings = ['a', 'to', 'tea', 'ted', 'ten', 'i', 'in', 'inn'] + strings = ["a", "to", "tea", "ted", "ten", "i", "in", "inn"] T, root = nx.prefix_tree(strings) - def source_label(v): return T.node[v]['source'] + def source_label(v): + return T.nodes[v]["source"] # First, we check that the tree has the expected # structure. Recall that each node that corresponds to one of @@ -23,53 +22,53 @@ def source_label(v): return T.node[v]['source'] # Consider the three children at level 1 in the trie. a, i, t = sorted(T[root], key=source_label) # Check the 'a' branch. - assert_equal(len(T[a]), 1) + assert len(T[a]) == 1 nil = arbitrary_element(T[a]) - assert_equal(len(T[nil]), 0) + assert len(T[nil]) == 0 # Check the 'i' branch. - assert_equal(len(T[i]), 2) + assert len(T[i]) == 2 nil, in_ = sorted(T[i], key=source_label) - assert_equal(len(T[nil]), 0) - assert_equal(len(T[in_]), 2) + assert len(T[nil]) == 0 + assert len(T[in_]) == 2 nil, inn = sorted(T[in_], key=source_label) - assert_equal(len(T[nil]), 0) - assert_equal(len(T[inn]), 1) + assert len(T[nil]) == 0 + assert len(T[inn]) == 1 nil = arbitrary_element(T[inn]) - assert_equal(len(T[nil]), 0) + assert len(T[nil]) == 0 # Check the 't' branch. te, to = sorted(T[t], key=source_label) - assert_equal(len(T[to]), 1) + assert len(T[to]) == 1 nil = arbitrary_element(T[to]) - assert_equal(len(T[nil]), 0) + assert len(T[nil]) == 0 tea, ted, ten = sorted(T[te], key=source_label) - assert_equal(len(T[tea]), 1) - assert_equal(len(T[ted]), 1) - assert_equal(len(T[ten]), 1) + assert len(T[tea]) == 1 + assert len(T[ted]) == 1 + assert len(T[ten]) == 1 nil = arbitrary_element(T[tea]) - assert_equal(len(T[nil]), 0) + assert len(T[nil]) == 0 nil = arbitrary_element(T[ted]) - assert_equal(len(T[nil]), 0) + assert len(T[nil]) == 0 nil = arbitrary_element(T[ten]) - assert_equal(len(T[nil]), 0) + assert len(T[nil]) == 0 # Next, we check that the "sources" of each of the nodes is the # rightmost letter in the string corresponding to the path to # that node. - assert_equal(source_label(root), None) - assert_equal(source_label(a), 'a') - assert_equal(source_label(i), 'i') - assert_equal(source_label(t), 't') - assert_equal(source_label(in_), 'n') - assert_equal(source_label(inn), 'n') - assert_equal(source_label(to), 'o') - assert_equal(source_label(te), 'e') - assert_equal(source_label(tea), 'a') - assert_equal(source_label(ted), 'd') - assert_equal(source_label(ten), 'n') - assert_equal(source_label(NIL), NIL) + assert source_label(root) is None + assert source_label(a) == "a" + assert source_label(i) == "i" + assert source_label(t) == "t" + assert source_label(in_) == "n" + assert source_label(inn) == "n" + assert source_label(to) == "o" + assert source_label(te) == "e" + assert source_label(tea) == "a" + assert source_label(ted) == "d" + assert source_label(ten) == "n" + assert source_label(NIL) == NIL def test_random_tree(): """Tests that a random tree is in fact a tree.""" T = nx.random_tree(10, seed=1234) - assert_true(nx.is_tree(T)) + assert nx.is_tree(T) diff --git a/networkx/generators/tests/test_triads.py b/networkx/generators/tests/test_triads.py index c763577..6fc51ae 100644 --- a/networkx/generators/tests/test_triads.py +++ b/networkx/generators/tests/test_triads.py @@ -1,23 +1,14 @@ -# test_triads.py - unit tests for the triads module -# -# Copyright 2015 NetworkX developers. -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. 
"""Unit tests for the :mod:`networkx.generators.triads` module.""" -from nose.tools import assert_equal -from nose.tools import raises +import pytest from networkx import triad_graph def test_triad_graph(): - G = triad_graph('030T') - assert_equal([tuple(e) for e in ('ab', 'ac', 'cb')], sorted(G.edges())) + G = triad_graph("030T") + assert [tuple(e) for e in ("ab", "ac", "cb")] == sorted(G.edges()) -@raises(ValueError) def test_invalid_name(): - triad_graph('bogus') + with pytest.raises(ValueError): + triad_graph("bogus") diff --git a/networkx/generators/trees.py b/networkx/generators/trees.py index 689ed16..beb1087 100644 --- a/networkx/generators/trees.py +++ b/networkx/generators/trees.py @@ -1,25 +1,17 @@ -# -*- encoding: utf-8 -*- -# Copyright (C) 2015-2018 by -# Jeffrey Finkelstein -# NetworkX developers -# All rights reserved. -# BSD license. -# -# Authors: Jeffrey Finkelstein """Functions for generating trees.""" -import random from collections import defaultdict import networkx as nx from networkx.utils import generate_unique_node +from networkx.utils import py_random_state -__all__ = ['prefix_tree', 'random_tree'] +__all__ = ["prefix_tree", "random_tree"] #: The nil node, the only leaf node in a prefix tree. #: #: Each predecessor of the nil node corresponds to the end of a path #: used to generate the prefix tree. -NIL = 'NIL' +NIL = "NIL" def prefix_tree(paths): @@ -37,7 +29,7 @@ def prefix_tree(paths): Returns ------- - DiGraph + T: DiGraph A directed graph representing an arborescence consisting of the prefix tree generated by `paths`. Nodes are directed "downward", from parent to child. A special "synthetic" root node is added @@ -59,9 +51,13 @@ def prefix_tree(paths): attribute; for example:: >>> from networkx.generators.trees import NIL - >>> paths = ['ab', 'abs', 'ad'] - >>> T = nx.prefix_tree(paths) - >>> T.predecessors(NIL) # doctest: +SKIP + >>> paths = ["ab", "abs", "ad"] + >>> T, root = nx.prefix_tree(paths) + >>> T.predecessors(NIL) + + + root : string + The randomly generated uuid of the root node. Notes ----- @@ -72,7 +68,7 @@ def prefix_tree(paths): Create a prefix tree from a list of strings with some common prefixes:: - >>> strings = ['ab', 'abs', 'ad'] + >>> strings = ["ab", "abs", "ad"] >>> T, root = nx.prefix_tree(strings) Continuing the above example, to recover the original paths that @@ -81,14 +77,14 @@ def prefix_tree(paths): >>> from networkx.generators.trees import NIL >>> - >>> strings = ['ab', 'abs', 'ad'] + >>> strings = ["ab", "abs", "ad"] >>> T, root = nx.prefix_tree(strings) >>> recovered = [] >>> for v in T.predecessors(NIL): - ... s = '' + ... s = "" ... while v != root: ... # Prepend the character `v` to the accumulator `s`. - ... s = str(T.node[v]['source']) + s + ... s = str(T.nodes[v]["source"]) + s ... # Each non-nil, non-root node has exactly one parent. ... v = next(T.predecessors(v)) ... recovered.append(s) @@ -96,6 +92,7 @@ def prefix_tree(paths): ['ab', 'abs', 'ad'] """ + def _helper(paths, root, B): """Recursively create a trie from the given list of paths. @@ -121,8 +118,7 @@ def _helper(paths, root, B): if not path: B.add_edge(root, NIL) continue - # TODO In Python 3, this should be `child, *rest = path`. - child, rest = path[0], path[1:] + child, *rest = path # `child` may exist as the head of more than one path in `paths`. 
children[child].append(rest) # Add a node for each child found above and add edges from the @@ -156,6 +152,7 @@ def _helper(paths, root, B): # > converting them into the corresponding trees is a straightforward # > method of generating uniformly distributed random labelled trees. # +@py_random_state(1) def random_tree(n, seed=None): """Returns a uniformly random tree on `n` nodes. @@ -163,9 +160,9 @@ def random_tree(n, seed=None): ---------- n : int A positive integer representing the number of nodes in the tree. - - seed : int - A seed for the random number generator. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. Returns ------- @@ -189,10 +186,9 @@ def random_tree(n, seed=None): """ if n == 0: - raise nx.NetworkXPointlessConcept('the null graph is not a tree') + raise nx.NetworkXPointlessConcept("the null graph is not a tree") # Cannot create a Prüfer sequence unless `n` is at least two. if n == 1: return nx.empty_graph(1) - random.seed(seed) - sequence = [random.choice(range(n)) for i in range(n - 2)] + sequence = [seed.choice(range(n)) for i in range(n - 2)] return nx.from_prufer_sequence(sequence) diff --git a/networkx/generators/triads.py b/networkx/generators/triads.py index f5e83c2..7557d42 100644 --- a/networkx/generators/triads.py +++ b/networkx/generators/triads.py @@ -1,41 +1,35 @@ -# triads.py - generators for triad graphs -# -# Copyright 2015 NetworkX developers. +# See https://github.com/networkx/networkx/pull/1474 # Copyright 2011 Reya Group # Copyright 2011 Alex Levenson # Copyright 2011 Diederik van Liere -# -# This file is part of NetworkX. -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. """Functions that generate the triad graphs, that is, the possible digraphs on three nodes. """ from networkx.classes import DiGraph -__all__ = ['triad_graph'] +__all__ = ["triad_graph"] #: Dictionary mapping triad name to list of directed edges in the #: digraph representation of that triad (with nodes 'a', 'b', and 'c'). -TRIAD_EDGES = {'003': [], - '012': ['ab'], - '102': ['ab', 'ba'], - '021D': ['ba', 'bc'], - '021U': ['ab', 'cb'], - '021C': ['ab', 'bc'], - '111D': ['ac', 'ca', 'bc'], - '111U': ['ac', 'ca', 'cb'], - '030T': ['ab', 'cb', 'ac'], - '030C': ['ba', 'cb', 'ac'], - '201': ['ab', 'ba', 'ac', 'ca'], - '120D': ['bc', 'ba', 'ac', 'ca'], - '120U': ['ab', 'cb', 'ac', 'ca'], - '120C': ['ab', 'bc', 'ac', 'ca'], - '210': ['ab', 'bc', 'cb', 'ac', 'ca'], - '300': ['ab', 'ba', 'bc', 'cb', 'ac', 'ca'] - } +TRIAD_EDGES = { + "003": [], + "012": ["ab"], + "102": ["ab", "ba"], + "021D": ["ba", "bc"], + "021U": ["ab", "cb"], + "021C": ["ab", "bc"], + "111D": ["ac", "ca", "bc"], + "111U": ["ac", "ca", "cb"], + "030T": ["ab", "cb", "ac"], + "030C": ["ba", "cb", "ac"], + "201": ["ab", "ba", "ac", "ca"], + "120D": ["bc", "ba", "ac", "ca"], + "120U": ["ab", "cb", "ac", "ca"], + "120C": ["ab", "bc", "ac", "ca"], + "210": ["ab", "bc", "cb", "ac", "ca"], + "300": ["ab", "ba", "bc", "cb", "ac", "ca"], +} def triad_graph(triad_name): @@ -62,7 +56,7 @@ def triad_graph(triad_name): Raises ------ - :exc:`ValueError` + ValueError If `triad_name` is not the name of a triad. 
See also @@ -71,9 +65,11 @@ def triad_graph(triad_name): """ if triad_name not in TRIAD_EDGES: - raise ValueError('unknown triad name "{}"; use one of the triad names' - ' in the TRIAD_NAMES constant'.format(triad_name)) + raise ValueError( + f'unknown triad name "{triad_name}"; use one of the triad names' + " in the TRIAD_NAMES constant" + ) G = DiGraph() - G.add_nodes_from('abc') + G.add_nodes_from("abc") G.add_edges_from(TRIAD_EDGES[triad_name]) return G diff --git a/networkx/linalg/__init__.py b/networkx/linalg/__init__.py index 175d0eb..f09b402 100644 --- a/networkx/linalg/__init__.py +++ b/networkx/linalg/__init__.py @@ -9,3 +9,5 @@ from networkx.linalg.algebraicconnectivity import * from networkx.linalg.modularitymatrix import * import networkx.linalg.modularitymatrix +from networkx.linalg.bethehessianmatrix import * +import networkx.linalg.bethehessianmatrix diff --git a/networkx/linalg/algebraicconnectivity.py b/networkx/linalg/algebraicconnectivity.py index 216d878..e4b3aab 100644 --- a/networkx/linalg/algebraicconnectivity.py +++ b/networkx/linalg/algebraicconnectivity.py @@ -1,9 +1,3 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. -# -# Author: ysitu """ Algebraic connectivity and Fiedler vectors of undirected graphs. """ @@ -11,15 +5,16 @@ import networkx as nx from networkx.utils import not_implemented_for from networkx.utils import reverse_cuthill_mckee_ordering +from networkx.utils import random_state try: - from numpy import array, asmatrix, asarray, dot, ndarray, ones, sqrt, zeros + from numpy import array, asarray, dot, ndarray, ones, sqrt, zeros, atleast_2d from numpy.linalg import norm, qr - from numpy.random import normal from scipy.linalg import eigh, inv from scipy.sparse import csc_matrix, spdiags from scipy.sparse.linalg import eigsh, lobpcg - __all__ = ['algebraic_connectivity', 'fiedler_vector', 'spectral_ordering'] + + __all__ = ["algebraic_connectivity", "fiedler_vector", "spectral_ordering"] except ImportError: __all__ = [] @@ -37,7 +32,7 @@ def daxpy(x, y, a): return y -class _PCGSolver(object): +class _PCGSolver: """Preconditioned conjugate gradient method. To solve Ax = b: @@ -59,7 +54,7 @@ def __init__(self, A, M): def solve(self, B, tol): B = asarray(B) - X = ndarray(B.shape, order='F') + X = ndarray(B.shape, order="F") for j in range(B.shape[1]): X[:, j] = self._solve(B[:, j], tol) return X @@ -88,7 +83,7 @@ def _solve(self, b, tol): p = daxpy(p, z, a=beta) -class _CholeskySolver(object): +class _CholeskySolver: """Cholesky factorization. To solve Ax = b: @@ -101,7 +96,7 @@ class _CholeskySolver(object): def __init__(self, A): if not self._cholesky: - raise nx.NetworkXError('Cholesky solver unavailable.') + raise nx.NetworkXError("Cholesky solver unavailable.") self._chol = self._cholesky(A) def solve(self, B, tol=None): @@ -109,12 +104,13 @@ def solve(self, B, tol=None): try: from scikits.sparse.cholmod import cholesky + _cholesky = cholesky except ImportError: _cholesky = None -class _LUSolver(object): +class _LUSolver: """LU factorization. 
To solve Ax = b: @@ -127,20 +123,25 @@ class _LUSolver(object): def __init__(self, A): if not self._splu: - raise nx.NetworkXError('LU solver unavailable.') + raise nx.NetworkXError("LU solver unavailable.") self._LU = self._splu(A) def solve(self, B, tol=None): B = asarray(B) - X = ndarray(B.shape, order='F') + X = ndarray(B.shape, order="F") for j in range(B.shape[1]): X[:, j] = self._LU.solve(B[:, j]) return X try: from scipy.sparse.linalg import splu - _splu = partial(splu, permc_spec='MMD_AT_PLUS_A', diag_pivot_thresh=0., - options={'Equil': True, 'SymmetricMode': True}) + + _splu = partial( + splu, + permc_spec="MMD_AT_PLUS_A", + diag_pivot_thresh=0.0, + options={"Equil": True, "SymmetricMode": True}, + ) except ImportError: _splu = None @@ -151,16 +152,21 @@ def _preprocess_graph(G, weight): if G.is_directed(): H = nx.MultiGraph() H.add_nodes_from(G) - H.add_weighted_edges_from(((u, v, e.get(weight, 1.)) - for u, v, e in G.edges(data=True) - if u != v), weight=weight) + H.add_weighted_edges_from( + ((u, v, e.get(weight, 1.0)) for u, v, e in G.edges(data=True) if u != v), + weight=weight, + ) G = H if not G.is_multigraph(): - edges = ((u, v, abs(e.get(weight, 1.))) - for u, v, e in G.edges(data=True) if u != v) + edges = ( + (u, v, abs(e.get(weight, 1.0))) for u, v, e in G.edges(data=True) if u != v + ) else: - edges = ((u, v, sum(abs(e.get(weight, 1.)) for e in G[u][v].values())) - for u, v in G.edges() if u != v) + edges = ( + (u, v, sum(abs(e.get(weight, 1.0)) for e in G[u][v].values())) + for u, v in G.edges() + if u != v + ) H = nx.Graph() H.add_nodes_from(G) H.add_weighted_edges_from((u, v, e) for u, v, e in edges if e != 0) @@ -177,7 +183,7 @@ def _rcm_estimate(G, nodelist): x = ndarray(n, dtype=float) for i, u in enumerate(order): x[index[u]] = i - x -= (n - 1) / 2. + x -= (n - 1) / 2.0 return x @@ -221,18 +227,21 @@ def _tracemin_fiedler(L, X, normalized, tol, method): # Form the normalized Laplacian matrix and determine the eigenvector of # its nullspace. e = sqrt(L.diagonal()) - D = spdiags(1. / e, [0], n, n, format='csr') + D = spdiags(1.0 / e, [0], n, n, format="csr") L = D * L * D - e *= 1. / norm(e, 2) + e *= 1.0 / norm(e, 2) if normalized: + def project(X): """Make X orthogonal to the nullspace of L. """ X = asarray(X) for j in range(X.shape[1]): X[:, j] -= dot(X[:, j], e) * e + else: + def project(X): """Make X orthogonal to the nullspace of L. """ @@ -240,10 +249,10 @@ def project(X): for j in range(X.shape[1]): X[:, j] -= X[:, j].sum() / n - if method == 'tracemin_pcg': + if method == "tracemin_pcg": D = L.diagonal().astype(float) solver = _PCGSolver(lambda x: L * x, lambda x: D * x) - elif method == 'tracemin_chol' or method == 'tracemin_lu': + elif method == "tracemin_chol" or method == "tracemin_lu": # Convert A to CSC to suppress SparseEfficiencyWarning. A = csc_matrix(L, dtype=float, copy=True) # Force A to be nonsingular. Since A is the Laplacian matrix of a @@ -251,86 +260,92 @@ def project(X): # element needs to be modified. Changing to infinity forces a zero in the # corresponding element in the solution. i = (A.indptr[1:] - A.indptr[:-1]).argmax() - A[i, i] = float('inf') - if method == 'tracemin_chol': + A[i, i] = float("inf") + if method == "tracemin_chol": solver = _CholeskySolver(A) else: solver = _LUSolver(A) else: - raise nx.NetworkXError('Unknown linear system solver: ' + method) + raise nx.NetworkXError("Unknown linear system solver: " + method) # Initialize. 
Lnorm = abs(L).sum(axis=1).flatten().max() project(X) - W = asmatrix(ndarray(X.shape, order='F')) + W = ndarray(X.shape, order="F") while True: # Orthonormalize X. X = qr(X)[0] # Compute iteration matrix H. - W[:, :] = L * X - H = X.T * W + W[:, :] = L @ X + H = X.T @ W sigma, Y = eigh(H, overwrite_a=True) # Compute the Ritz vectors. - X *= Y + X = X @ Y # Test for convergence exploiting the fact that L * X == W * Y. - res = dasum(W * asmatrix(Y)[:, 0] - sigma[0] * X[:, 0]) / Lnorm + res = dasum(W @ Y[:, 0] - sigma[0] * X[:, 0]) / Lnorm if res < tol: break # Compute X = L \ X / (X' * (L \ X)). # L \ X can have an arbitrary projection on the nullspace of L, # which will be eliminated. W[:, :] = solver.solve(X, tol) - X = (inv(W.T * X) * W.T).T # Preserves Fortran storage order. + X = (inv(W.T @ X) @ W.T).T # Preserves Fortran storage order. project(X) return sigma, asarray(X) def _get_fiedler_func(method): - """Return a function that solves the Fiedler eigenvalue problem. + """Returns a function that solves the Fiedler eigenvalue problem. """ if method == "tracemin": # old style keyword <v2.1 + Returns ------- algebraic_connectivity : float @@ -392,25 +411,27 @@ def algebraic_connectivity(G, weight='weight', normalized=False, tol=1e-8, laplacian_matrix """ if len(G) < 2: - raise nx.NetworkXError('graph has less than two nodes.') + raise nx.NetworkXError("graph has less than two nodes.") G = _preprocess_graph(G, weight) if not nx.is_connected(G): - return 0. + return 0.0 L = nx.laplacian_matrix(G) if L.shape[0] == 2: - return 2. * L[0, 0] if not normalized else 2. + return 2.0 * L[0, 0] if not normalized else 2.0 find_fiedler = _get_fiedler_func(method) - x = None if method != 'lobpcg' else _rcm_estimate(G, G) - sigma, fiedler = find_fiedler(L, x, normalized, tol) + x = None if method != "lobpcg" else _rcm_estimate(G, G) + sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) return sigma -@not_implemented_for('directed') -def fiedler_vector(G, weight='weight', normalized=False, tol=1e-8, - method='tracemin_pcg'): - """Return the Fiedler vector of a connected undirected graph. +@random_state(5) +@not_implemented_for("directed") +def fiedler_vector( + G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None +): + """Returns the Fiedler vector of a connected undirected graph. The Fiedler vector of a connected undirected graph is the eigenvector corresponding to the second smallest eigenvalue of the Laplacian matrix of @@ -447,6 +468,10 @@ def fiedler_vector(G, weight='weight', normalized=False, tol=1e-8, 'tracemin_lu' LU factorization =============== ======================================== + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- fiedler_vector : NumPy array of floats. 
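For intuition, here is a minimal dense-NumPy sketch (illustrative only, separate from the patch) of the quantity fiedler_vector computes with the TraceMIN/LOBPCG machinery above: the eigenvector belonging to the second-smallest eigenvalue of the graph Laplacian. The path graph is a toy example.

import numpy as np
import networkx as nx

G = nx.path_graph(4)
L = nx.laplacian_matrix(G).toarray()
w, V = np.linalg.eigh(L)  # eigenvalues in ascending order
fiedler = V[:, 1]  # eigenvector of the second-smallest eigenvalue
# its sign pattern bisects the path into {0, 1} and {2, 3}
assert np.sign(fiedler[0]) == np.sign(fiedler[1])
assert np.sign(fiedler[1]) != np.sign(fiedler[2])
# w[1] is the algebraic connectivity; for this path it equals 2 - sqrt(2)
assert abs(w[1] - (2 - np.sqrt(2))) < 1e-12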
@@ -473,23 +498,25 @@ def fiedler_vector(G, weight='weight', normalized=False, tol=1e-8, laplacian_matrix """ if len(G) < 2: - raise nx.NetworkXError('graph has less than two nodes.') + raise nx.NetworkXError("graph has less than two nodes.") G = _preprocess_graph(G, weight) if not nx.is_connected(G): - raise nx.NetworkXError('graph is not connected.') + raise nx.NetworkXError("graph is not connected.") if len(G) == 2: - return array([1., -1.]) + return array([1.0, -1.0]) find_fiedler = _get_fiedler_func(method) L = nx.laplacian_matrix(G) - x = None if method != 'lobpcg' else _rcm_estimate(G, G) - sigma, fiedler = find_fiedler(L, x, normalized, tol) + x = None if method != "lobpcg" else _rcm_estimate(G, G) + sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) return fiedler -def spectral_ordering(G, weight='weight', normalized=False, tol=1e-8, - method='tracemin_pcg'): +@random_state(5) +def spectral_ordering( + G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None +): """Compute the spectral_ordering of a graph. The spectral ordering of a graph is an ordering of its nodes where nodes @@ -527,6 +554,10 @@ def spectral_ordering(G, weight='weight', normalized=False, tol=1e-8, 'tracemin_lu' LU factorization =============== ======================================== + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Returns ------- spectral_ordering : NumPy array of floats. @@ -550,7 +581,7 @@ def spectral_ordering(G, weight='weight', normalized=False, tol=1e-8, laplacian_matrix """ if len(G) == 0: - raise nx.NetworkXError('graph is empty.') + raise nx.NetworkXError("graph is empty.") G = _preprocess_graph(G, weight) find_fiedler = _get_fiedler_func(method) @@ -559,21 +590,11 @@ def spectral_ordering(G, weight='weight', normalized=False, tol=1e-8, size = len(component) if size > 2: L = nx.laplacian_matrix(G, component) - x = None if method != 'lobpcg' else _rcm_estimate(G, component) - sigma, fiedler = find_fiedler(L, x, normalized, tol) + x = None if method != "lobpcg" else _rcm_estimate(G, component) + sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) sort_info = zip(fiedler, range(size), component) order.extend(u for x, c, u in sorted(sort_info)) else: order.extend(component) return order - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - import scipy.sparse - except ImportError: - raise SkipTest('SciPy not available.') diff --git a/networkx/linalg/attrmatrix.py b/networkx/linalg/attrmatrix.py index 4344a02..1d68fdc 100644 --- a/networkx/linalg/attrmatrix.py +++ b/networkx/linalg/attrmatrix.py @@ -2,9 +2,7 @@ Functions for constructing matrix-like objects from graph attributes. """ -__all__ = ['attr_matrix', 'attr_sparse_matrix'] - -import networkx as nx +__all__ = ["attr_matrix", "attr_sparse_matrix"] def _node_value(G, node_attr): @@ -32,10 +30,15 @@ def _node_value(G, node_attr): """ if node_attr is None: - def value(u): return u - elif not hasattr(node_attr, '__call__'): + + def value(u): + return u + + elif not hasattr(node_attr, "__call__"): # assume it is a key for the node attribute dictionary - def value(u): return G.nodes[u][node_attr] + def value(u): + return G.nodes[u][node_attr] + else: # Advanced: Allow users to specify something else. 
# @@ -82,25 +85,41 @@ def _edge_value(G, edge_attr): # topological count of edges if G.is_multigraph(): - def value(u, v): return len(G[u][v]) + + def value(u, v): + return len(G[u][v]) + else: - def value(u, v): return 1 - elif not hasattr(edge_attr, '__call__'): + def value(u, v): + return 1 + + elif not hasattr(edge_attr, "__call__"): # assume it is a key for the edge attribute dictionary - if edge_attr == 'weight': + if edge_attr == "weight": # provide a default value if G.is_multigraph(): - def value(u, v): return sum([d.get(edge_attr, 1) for d in G[u][v].values()]) + + def value(u, v): + return sum([d.get(edge_attr, 1) for d in G[u][v].values()]) + else: - def value(u, v): return G[u][v].get(edge_attr, 1) + + def value(u, v): + return G[u][v].get(edge_attr, 1) + else: # otherwise, the edge attribute MUST exist for each edge if G.is_multigraph(): - def value(u, v): return sum([d[edge_attr] for d in G[u][v].values()]) + + def value(u, v): + return sum([d[edge_attr] for d in G[u][v].values()]) + else: - def value(u, v): return G[u][v][edge_attr] + + def value(u, v): + return G[u][v][edge_attr] else: # Advanced: Allow users to specify something else. @@ -122,8 +141,15 @@ def value(u, v): return G[u][v][edge_attr] return value -def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, - rc_order=None, dtype=None, order=None): +def attr_matrix( + G, + edge_attr=None, + node_attr=None, + normalized=False, + rc_order=None, + dtype=None, + order=None, +): """Returns a NumPy matrix using attributes from G. If only `G` is passed in, then the adjacency matrix is constructed. @@ -190,39 +216,34 @@ def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, -------- Construct an adjacency matrix: - >>> try: - ... import numpy as np - ... np.set_printoptions(legacy="1.13") - ... except TypeError: - ... pass >>> G = nx.Graph() >>> G.add_edge(0, 1, thickness=1, weight=3) >>> G.add_edge(0, 2, thickness=2) >>> G.add_edge(1, 2, thickness=3) >>> nx.attr_matrix(G, rc_order=[0, 1, 2]) - matrix([[ 0., 1., 1.], - [ 1., 0., 1.], - [ 1., 1., 0.]]) + matrix([[0., 1., 1.], + [1., 0., 1.], + [1., 1., 0.]]) Alternatively, we can obtain the matrix describing edge thickness. - >>> nx.attr_matrix(G, edge_attr='thickness', rc_order=[0, 1, 2]) - matrix([[ 0., 1., 2.], - [ 1., 0., 3.], - [ 2., 3., 0.]]) + >>> nx.attr_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2]) + matrix([[0., 1., 2.], + [1., 0., 3.], + [2., 3., 0.]]) We can also color the nodes and ask for the probability distribution over all edges (u,v) describing: Pr(v has color Y | u has color X) - >>> G.nodes[0]['color'] = 'red' - >>> G.nodes[1]['color'] = 'red' - >>> G.nodes[2]['color'] = 'blue' - >>> rc = ['red', 'blue'] - >>> nx.attr_matrix(G, node_attr='color', normalized=True, rc_order=rc) - matrix([[ 0.33333333, 0.66666667], - [ 1. , 0. ]]) + >>> G.nodes[0]["color"] = "red" + >>> G.nodes[1]["color"] = "red" + >>> G.nodes[2]["color"] = "blue" + >>> rc = ["red", "blue"] + >>> nx.attr_matrix(G, node_attr="color", normalized=True, rc_order=rc) + matrix([[0.33333333, 0.66666667], + [1. , 0. ]]) For example, the above tells us that for all edges (u,v): @@ -234,9 +255,9 @@ def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, Finally, we can obtain the total weights listed by the node colors. 
- >>> nx.attr_matrix(G, edge_attr='weight', node_attr='color', rc_order=rc) - matrix([[ 3., 2.], - [ 2., 0.]]) + >>> nx.attr_matrix(G, edge_attr="weight", node_attr="color", rc_order=rc) + matrix([[3., 2.], + [2., 0.]]) Thus, the total weight over all edges (u,v) with u and v having colors: @@ -248,15 +269,14 @@ def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, """ try: import numpy as np - except ImportError: - raise ImportError( - "attr_matrix() requires numpy: http://scipy.org/ ") + except ImportError as e: + raise ImportError("attr_matrix() requires numpy: http://scipy.org/ ") from e edge_value = _edge_value(G, edge_attr) node_value = _node_value(G, node_attr) if rc_order is None: - ordering = list(set([node_value(n) for n in G])) + ordering = list({node_value(n) for n in G}) else: ordering = rc_order @@ -265,7 +285,7 @@ def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, index = dict(zip(ordering, range(N))) M = np.zeros((N, N), dtype=dtype, order=order) - seen = set([]) + seen = set() for u, nbrdict in G.adjacency(): for v in nbrdict: # Obtain the node attribute values. @@ -289,8 +309,9 @@ def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, return M -def attr_sparse_matrix(G, edge_attr=None, node_attr=None, - normalized=False, rc_order=None, dtype=None): +def attr_sparse_matrix( + G, edge_attr=None, node_attr=None, normalized=False, rc_order=None, dtype=None +): """Returns a SciPy sparse matrix using attributes from G. If only `G` is passed in, then the adjacency matrix is constructed. @@ -353,37 +374,36 @@ def attr_sparse_matrix(G, edge_attr=None, node_attr=None, Construct an adjacency matrix: >>> G = nx.Graph() - >>> G.add_edge(0,1,thickness=1,weight=3) - >>> G.add_edge(0,2,thickness=2) - >>> G.add_edge(1,2,thickness=3) - >>> M = nx.attr_sparse_matrix(G, rc_order=[0,1,2]) + >>> G.add_edge(0, 1, thickness=1, weight=3) + >>> G.add_edge(0, 2, thickness=2) + >>> G.add_edge(1, 2, thickness=3) + >>> M = nx.attr_sparse_matrix(G, rc_order=[0, 1, 2]) >>> M.todense() - matrix([[ 0., 1., 1.], - [ 1., 0., 1.], - [ 1., 1., 0.]]) + matrix([[0., 1., 1.], + [1., 0., 1.], + [1., 1., 0.]]) Alternatively, we can obtain the matrix describing edge thickness. - >>> M = nx.attr_sparse_matrix(G, edge_attr='thickness', rc_order=[0,1,2]) + >>> M = nx.attr_sparse_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2]) >>> M.todense() - matrix([[ 0., 1., 2.], - [ 1., 0., 3.], - [ 2., 3., 0.]]) + matrix([[0., 1., 2.], + [1., 0., 3.], + [2., 3., 0.]]) We can also color the nodes and ask for the probability distribution over all edges (u,v) describing: Pr(v has color Y | u has color X) - >>> G.nodes[0]['color'] = 'red' - >>> G.nodes[1]['color'] = 'red' - >>> G.nodes[2]['color'] = 'blue' - >>> rc = ['red', 'blue'] - >>> M = nx.attr_sparse_matrix(G, node_attr='color', \ - normalized=True, rc_order=rc) + >>> G.nodes[0]["color"] = "red" + >>> G.nodes[1]["color"] = "red" + >>> G.nodes[2]["color"] = "blue" + >>> rc = ["red", "blue"] + >>> M = nx.attr_sparse_matrix(G, node_attr="color", normalized=True, rc_order=rc) >>> M.todense() - matrix([[ 0.33333333, 0.66666667], - [ 1. , 0. ]]) + matrix([[0.33333333, 0.66666667], + [1. , 0. ]]) For example, the above tells us that for all edges (u,v): @@ -395,11 +415,10 @@ def attr_sparse_matrix(G, edge_attr=None, node_attr=None, Finally, we can obtain the total weights listed by the node colors. 
- >>> M = nx.attr_sparse_matrix(G, edge_attr='weight',\ - node_attr='color', rc_order=rc) + >>> M = nx.attr_sparse_matrix(G, edge_attr="weight", node_attr="color", rc_order=rc) >>> M.todense() - matrix([[ 3., 2.], - [ 2., 0.]]) + matrix([[3., 2.], + [2., 0.]]) Thus, the total weight over all edges (u,v) with u and v having colors: @@ -412,15 +431,16 @@ def attr_sparse_matrix(G, edge_attr=None, node_attr=None, try: import numpy as np from scipy import sparse - except ImportError: + except ImportError as e: raise ImportError( - "attr_sparse_matrix() requires scipy: http://scipy.org/ ") + "attr_sparse_matrix() requires scipy: " "http://scipy.org/ " + ) from e edge_value = _edge_value(G, edge_attr) node_value = _node_value(G, node_attr) if rc_order is None: - ordering = list(set([node_value(n) for n in G])) + ordering = list({node_value(n) for n in G}) else: ordering = rc_order @@ -429,7 +449,7 @@ def attr_sparse_matrix(G, edge_attr=None, node_attr=None, index = dict(zip(ordering, range(N))) M = sparse.lil_matrix((N, N), dtype=dtype) - seen = set([]) + seen = set() for u, nbrdict in G.adjacency(): for v in nbrdict: # Obtain the node attribute values. @@ -451,16 +471,3 @@ def attr_sparse_matrix(G, edge_attr=None, node_attr=None, return M, ordering else: return M - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import numpy - except: - raise SkipTest("NumPy not available") - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/linalg/bethehessianmatrix.py b/networkx/linalg/bethehessianmatrix.py new file mode 100644 index 0000000..5f9dc76 --- /dev/null +++ b/networkx/linalg/bethehessianmatrix.py @@ -0,0 +1,78 @@ +"""Bethe Hessian or deformed Laplacian matrix of graphs.""" +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["bethe_hessian_matrix"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def bethe_hessian_matrix(G, r=None, nodelist=None): + r"""Returns the Bethe Hessian matrix of G. + + The Bethe Hessian is a family of matrices parametrized by r, defined as + H(r) = (r^2 - 1) I - r A + D where A is the adjacency matrix, D is the + diagonal matrix of node degrees, and I is the identity matrix. It is equal + to the graph Laplacian when the regularizer r = 1. + + The default choice of regularizer should be the ratio [2]_ + + .. math:: + r_m = \left(\sum k_i \right)^{-1}\left(\sum k_i^2 \right) - 1 + + Parameters + ---------- + G : Graph + A NetworkX graph + + r : float + Regularizer parameter + + nodelist : list, optional + The rows and columns are ordered according to the nodes in nodelist. + If nodelist is None, then the ordering is produced by G.nodes(). + + + Returns + ------- + H : Numpy matrix + The Bethe Hessian matrix of G, with parameter r. + + Examples + -------- + >>> k = [3, 2, 2, 1, 0] + >>> G = nx.havel_hakimi_graph(k) + >>> H = nx.bethe_hessian_matrix(G) + + + See Also + -------- + bethe_hessian_spectrum + to_numpy_array + adjacency_matrix + laplacian_matrix + + References + ---------- + .. [1] A. Saade, F. Krzakala and L. Zdeborová + "Spectral clustering of graphs with the bethe hessian", + Advances in Neural Information Processing Systems. 2014. + .. [2] C. M. Lee, E. Levina + "Estimating the number of communities in networks by spectral methods" + arXiv:1507.00827, 2015. 
+ """ + import scipy.sparse + + if nodelist is None: + nodelist = list(G) + if r is None: + r = ( + sum([d ** 2 for v, d in nx.degree(G)]) / sum([d for v, d in nx.degree(G)]) + - 1 + ) + A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, format="csr") + n, m = A.shape + diags = A.sum(axis=1) + D = scipy.sparse.spdiags(diags.flatten(), [0], m, n, format="csr") + I = scipy.sparse.eye(m, n, format="csr") + return (r ** 2 - 1) * I - r * A + D diff --git a/networkx/linalg/graphmatrix.py b/networkx/linalg/graphmatrix.py index 368aa96..a2fb48d 100644 --- a/networkx/linalg/graphmatrix.py +++ b/networkx/linalg/graphmatrix.py @@ -1,32 +1,20 @@ """ Adjacency matrix and incidence matrix of graphs. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx -__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult(dschult@colgate.edu)']) -__all__ = ['incidence_matrix', - 'adj_matrix', 'adjacency_matrix', - ] +__all__ = ["incidence_matrix", "adj_matrix", "adjacency_matrix"] -def incidence_matrix(G, nodelist=None, edgelist=None, - oriented=False, weight=None): - """Return incidence matrix of G. +def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=None): + """Returns incidence matrix of G. The incidence matrix assigns each row to a node and each column to an edge. For a standard incidence matrix a 1 appears wherever a row's node is incident on the column's edge. For an oriented incidence matrix each edge is assigned an orientation (arbitrarily for undirected and aligning to - direction for directed). A -1 appears for the tail of an edge and 1 - for the head of the edge. The elements are zero otherwise. + direction for directed). A -1 appears for the source (tail) of an edge and + 1 for the destination (head) of the edge. The elements are zero otherwise. Parameters ---------- @@ -69,6 +57,7 @@ def incidence_matrix(G, nodelist=None, edgelist=None, http://academicearth.org/lectures/network-applications-incidence-matrix """ import scipy.sparse + if nodelist is None: nodelist = list(G) if edgelist is None: @@ -77,7 +66,7 @@ def incidence_matrix(G, nodelist=None, edgelist=None, else: edgelist = list(G.edges()) A = scipy.sparse.lil_matrix((len(nodelist), len(edgelist))) - node_index = dict((node, i) for i, node in enumerate(nodelist)) + node_index = {node: i for i, node in enumerate(nodelist)} for ei, e in enumerate(edgelist): (u, v) = e[:2] if u == v: @@ -85,9 +74,10 @@ def incidence_matrix(G, nodelist=None, edgelist=None, try: ui = node_index[u] vi = node_index[v] - except KeyError: - raise nx.NetworkXError('node %s or %s in edgelist ' - 'but not in nodelist' % (u, v)) + except KeyError as e: + raise nx.NetworkXError( + f"node {u} or {v} in edgelist " f"but not in nodelist" + ) from e if weight is None: wt = 1 else: @@ -102,11 +92,11 @@ def incidence_matrix(G, nodelist=None, edgelist=None, else: A[ui, ei] = wt A[vi, ei] = wt - return A.asformat('csc') + return A.asformat("csc") -def adjacency_matrix(G, nodelist=None, weight='weight'): - """Return adjacency matrix of G. +def adjacency_matrix(G, nodelist=None, weight="weight"): + """Returns adjacency matrix of G. Parameters ---------- @@ -136,7 +126,7 @@ def adjacency_matrix(G, nodelist=None, weight='weight'): sparse matrix. For MultiGraph/MultiDiGraph with parallel edges the weights are summed. - See to_numpy_matrix for other options. + See `to_numpy_array` for other options. 
The convention used for self-loop edges in graphs is to assign the diagonal matrix entry value to the edge weight attribute @@ -145,31 +135,22 @@ def adjacency_matrix(G, nodelist=None, weight='weight'): resulting Scipy sparse matrix can be modified as follows: >>> import scipy as sp - >>> G = nx.Graph([(1,1)]) + >>> G = nx.Graph([(1, 1)]) >>> A = nx.adjacency_matrix(G) >>> print(A.todense()) [[1]] - >>> A.setdiag(A.diagonal()*2) + >>> A.setdiag(A.diagonal() * 2) >>> print(A.todense()) [[2]] See Also -------- - to_numpy_matrix + to_numpy_array to_scipy_sparse_matrix to_dict_of_dicts + adjacency_spectrum """ return nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight) adj_matrix = adjacency_matrix - -# fixture for nose tests - - -def setup_module(module): - from nose import SkipTest - try: - import scipy - except: - raise SkipTest("SciPy not available") diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py index 0bd681e..ea5e029 100644 --- a/networkx/linalg/laplacianmatrix.py +++ b/networkx/linalg/laplacianmatrix.py @@ -1,25 +1,19 @@ """Laplacian matrix of graphs. """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx from networkx.utils import not_implemented_for -__author__ = "\n".join(['Aric Hagberg ', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult (dschult@colgate.edu)', - 'Alejandro Weinstein ']) -__all__ = ['laplacian_matrix', - 'normalized_laplacian_matrix', - 'directed_laplacian_matrix'] +__all__ = [ + "laplacian_matrix", + "normalized_laplacian_matrix", + "directed_laplacian_matrix", + "directed_combinatorial_laplacian_matrix", +] -@not_implemented_for('directed') -def laplacian_matrix(G, nodelist=None, weight='weight'): - """Return the Laplacian matrix of G. + +@not_implemented_for("directed") +def laplacian_matrix(G, nodelist=None, weight="weight"): + """Returns the Laplacian matrix of G. The graph Laplacian is the matrix L = D - A, where A is the adjacency matrix and D is the diagonal matrix of node degrees. @@ -48,23 +42,24 @@ def laplacian_matrix(G, nodelist=None, weight='weight'): See Also -------- - to_numpy_matrix + to_numpy_array normalized_laplacian_matrix + laplacian_spectrum """ import scipy.sparse + if nodelist is None: nodelist = list(G) - A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, - format='csr') + A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format="csr") n, m = A.shape diags = A.sum(axis=1) - D = scipy.sparse.spdiags(diags.flatten(), [0], m, n, format='csr') + D = scipy.sparse.spdiags(diags.flatten(), [0], m, n, format="csr") return D - A -@not_implemented_for('directed') -def normalized_laplacian_matrix(G, nodelist=None, weight='weight'): - r"""Return the normalized Laplacian matrix of G. +@not_implemented_for("directed") +def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): + r"""Returns the normalized Laplacian matrix of G. The normalized graph Laplacian is the matrix @@ -90,13 +85,13 @@ def normalized_laplacian_matrix(G, nodelist=None, weight='weight'): Returns ------- - N : NumPy matrix + N : Scipy sparse matrix The normalized Laplacian matrix of G. Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. - See to_numpy_matrix for other options. + See to_numpy_array for other options. If the Graph contains selfloops, D is defined as diag(sum(A,1)), where A is the adjacency matrix [2]_. 
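+
+    Written out, this amounts to :math:`N = I - D^{-1/2} A D^{-1/2}` on
+    the nodes of nonzero degree; rows and columns for isolated nodes are
+    zero, matching the handling of infinite entries in ``diags_sqrt``
+    in the code below.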
@@ -104,6 +99,7 @@ def normalized_laplacian_matrix(G, nodelist=None, weight='weight'):
     See Also
     --------
     laplacian_matrix
+    normalized_laplacian_spectrum
 
     References
     ----------
@@ -113,32 +109,35 @@ def normalized_laplacian_matrix(G, nodelist=None, weight='weight'):
        Laplacian, Electronic Journal of Linear Algebra, Volume 16, pp. 90-98,
        March 2007.
     """
+    import numpy as np
     import scipy
     import scipy.sparse
+
     if nodelist is None:
         nodelist = list(G)
-    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight,
-                                  format='csr')
+    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format="csr")
     n, m = A.shape
     diags = A.sum(axis=1).flatten()
-    D = scipy.sparse.spdiags(diags, [0], m, n, format='csr')
+    D = scipy.sparse.spdiags(diags, [0], m, n, format="csr")
     L = D - A
-    with scipy.errstate(divide='ignore'):
-        diags_sqrt = 1.0 / scipy.sqrt(diags)
-        diags_sqrt[scipy.isinf(diags_sqrt)] = 0
-        DH = scipy.sparse.spdiags(diags_sqrt, [0], m, n, format='csr')
+    with scipy.errstate(divide="ignore"):
+        diags_sqrt = 1.0 / np.sqrt(diags)
+        diags_sqrt[np.isinf(diags_sqrt)] = 0
+        DH = scipy.sparse.spdiags(diags_sqrt, [0], m, n, format="csr")
     return DH.dot(L.dot(DH))
 
+
 ###############################################################################
 # Code based on
 # https://bitbucket.org/bedwards/networkx-community/src/370bd69fc02f/networkx/algorithms/community/
 
 
-@not_implemented_for('undirected')
-@not_implemented_for('multigraph')
-def directed_laplacian_matrix(G, nodelist=None, weight='weight',
-                              walk_type=None, alpha=0.95):
-    r"""Return the directed Laplacian matrix of G.
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+def directed_laplacian_matrix(
+    G, nodelist=None, weight="weight", walk_type=None, alpha=0.95
+):
+    r"""Returns the directed Laplacian matrix of G.
 
     The graph directed Laplacian is the matrix
 
@@ -176,16 +175,86 @@ def directed_laplacian_matrix(G, nodelist=None, weight='weight',
 
     Returns
     -------
-    L : NumPy array
+    L : NumPy matrix
       Normalized Laplacian of G.
 
-    Raises
-    ------
-    NetworkXError
-        If NumPy cannot be imported
+    Notes
+    -----
+    Only implemented for DiGraphs.
+
+    See Also
+    --------
+    laplacian_matrix
+
+    References
+    ----------
+    .. [1] Fan Chung (2005).
+       Laplacians and the Cheeger inequality for directed graphs.
+       Annals of Combinatorics, 9(1), 2005
+    """
+    import numpy as np
+    from scipy.sparse import spdiags, linalg
+
+    P = _transition_matrix(
+        G, nodelist=nodelist, weight=weight, walk_type=walk_type, alpha=alpha
+    )
+
+    n, m = P.shape
+
+    evals, evecs = linalg.eigs(P.T, k=1)
+    v = evecs.flatten().real
+    p = v / v.sum()
+    sqrtp = np.sqrt(p)
+    Q = spdiags(sqrtp, [0], n, n) * P * spdiags(1.0 / sqrtp, [0], n, n)
+    I = np.identity(len(G))
+
+    return I - (Q + Q.T) / 2.0
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+def directed_combinatorial_laplacian_matrix(
+    G, nodelist=None, weight="weight", walk_type=None, alpha=0.95
+):
+    r"""Returns the directed combinatorial Laplacian matrix of G.
+
+    The graph directed combinatorial Laplacian is the matrix
+
+    .. math::
+
+        L = \Phi - (\Phi P + P^T \Phi) / 2
+
+    where `P` is the transition matrix of the graph and `\Phi` is a matrix
+    with the Perron vector of `P` on the diagonal and zeros elsewhere.
+
+    Depending on the value of walk_type, `P` can be the transition matrix
+    induced by a random walk, a lazy random walk, or a random walk with
+    teleportation (PageRank).
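+
+    Here the Perron vector is the stationary distribution of the walk:
+    the left eigenvector :math:`\phi` of `P` with eigenvalue 1,
+    normalized so that :math:`\sum_i \phi_i = 1`, and
+    :math:`\Phi = \operatorname{diag}(\phi)`.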
+
+    Parameters
+    ----------
+    G : DiGraph
+       A NetworkX graph
+
+    nodelist : list, optional
+       The rows and columns are ordered according to the nodes in nodelist.
+       If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : string or None, optional (default='weight')
+       The edge data key used to compute each value in the matrix.
+       If None, then each edge has weight 1.
+
+    walk_type : string or None, optional (default=None)
+       If None, `P` is selected depending on the properties of the
+       graph. Otherwise it must be one of 'random', 'lazy', or 'pagerank'
 
-    NetworkXNotImplemnted
-        If G is not a DiGraph
+    alpha : real
+       (1 - alpha) is the teleportation probability used with pagerank
+
+    Returns
+    -------
+    L : NumPy matrix
+       Combinatorial Laplacian of G.
 
     Notes
     -----
@@ -201,8 +270,65 @@ def directed_laplacian_matrix(G, nodelist=None, weight='weight',
        Laplacians and the Cheeger inequality for directed graphs.
        Annals of Combinatorics, 9(1), 2005
     """
-    import scipy as sp
-    from scipy.sparse import identity, spdiags, linalg
+    from scipy.sparse import spdiags, linalg
+
+    P = _transition_matrix(
+        G, nodelist=nodelist, weight=weight, walk_type=walk_type, alpha=alpha
+    )
+
+    n, m = P.shape
+
+    evals, evecs = linalg.eigs(P.T, k=1)
+    v = evecs.flatten().real
+    p = v / v.sum()
+    Phi = spdiags(p, [0], n, n)
+
+    Phi = Phi.todense()
+
+    return Phi - (Phi * P + P.T * Phi) / 2.0
+
+
+def _transition_matrix(G, nodelist=None, weight="weight", walk_type=None, alpha=0.95):
+    """Returns the transition matrix of G.
+
+    This is a row stochastic matrix giving the transition probabilities while
+    performing a random walk on the graph. Depending on the value of walk_type,
+    P can be the transition matrix induced by a random walk, a lazy random walk,
+    or a random walk with teleportation (PageRank).
+
+    Parameters
+    ----------
+    G : DiGraph
+       A NetworkX graph
+
+    nodelist : list, optional
+       The rows and columns are ordered according to the nodes in nodelist.
+       If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : string or None, optional (default='weight')
+       The edge data key used to compute each value in the matrix.
+       If None, then each edge has weight 1.
+
+    walk_type : string or None, optional (default=None)
+       If None, `P` is selected depending on the properties of the
+       graph. Otherwise it must be one of 'random', 'lazy', or 'pagerank'
+
+    alpha : real
+       (1 - alpha) is the teleportation probability used with pagerank
+
+    Returns
+    -------
+    P : NumPy matrix
+       Transition matrix of G.
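+       In matrix form these are the standard constructions:
+       :math:`D^{-1} A` for 'random', :math:`(I + D^{-1} A) / 2` for 'lazy',
+       and :math:`\alpha \tilde{P} + (1 - \alpha) J / n` for 'pagerank',
+       where :math:`\tilde{P}` is the row-normalized adjacency matrix with
+       dangling rows replaced by :math:`1/n` and `J` is the all-ones matrix.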
+
+    Raises
+    ------
+    NetworkXError
+        If walk_type not specified or alpha not in valid range
+    """
+    import numpy as np
+    from scipy.sparse import identity, spdiags
+
     if walk_type is None:
         if nx.is_strongly_connected(G):
             if nx.is_aperiodic(G):
@@ -212,11 +338,10 @@ def directed_laplacian_matrix(G, nodelist=None, weight='weight',
         else:
             walk_type = "pagerank"
 
-    M = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight,
-                                  dtype=float)
+    M = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, dtype=float)
     n, m = M.shape
     if walk_type in ["random", "lazy"]:
-        DI = spdiags(1.0 / sp.array(M.sum(axis=1).flat), [0], n, n)
+        DI = spdiags(1.0 / np.array(M.sum(axis=1).flat), [0], n, n)
         if walk_type == "random":
             P = DI * M
         else:
@@ -225,11 +350,11 @@ def directed_laplacian_matrix(G, nodelist=None, weight='weight',
 
     elif walk_type == "pagerank":
         if not (0 < alpha < 1):
-            raise nx.NetworkXError('alpha must be between 0 and 1')
+            raise nx.NetworkXError("alpha must be between 0 and 1")
         # this is using a dense representation
         M = M.todense()
         # add constant to dangling nodes' row
-        dangling = sp.where(M.sum(axis=1) == 0)
+        dangling = np.where(M.sum(axis=1) == 0)
         for d in dangling[0]:
             M[d] = 1.0 / n
         # normalize
@@ -238,21 +363,4 @@ def directed_laplacian_matrix(G, nodelist=None, weight='weight',
     else:
         raise nx.NetworkXError("walk_type must be random, lazy, or pagerank")
 
-    evals, evecs = linalg.eigs(P.T, k=1)
-    v = evecs.flatten().real
-    p = v / v.sum()
-    sqrtp = sp.sqrt(p)
-    Q = spdiags(sqrtp, [0], n, n) * P * spdiags(1.0 / sqrtp, [0], n, n)
-    I = sp.identity(len(G))
-
-    return I - (Q + Q.T) / 2.0
-
-# fixture for nose tests
-
-
-def setup_module(module):
-    from nose import SkipTest
-    try:
-        import numpy
-    except:
-        raise SkipTest("NumPy not available")
+    return P
diff --git a/networkx/linalg/modularitymatrix.py b/networkx/linalg/modularitymatrix.py
index 47d4931..13c33f1 100644
--- a/networkx/linalg/modularitymatrix.py
+++ b/networkx/linalg/modularitymatrix.py
@@ -1,33 +1,26 @@
 """Modularity matrix of graphs.
 """
-# Copyright (C) 2004-2018 by
-# Aric Hagberg
-# Dan Schult
-# Pieter Swart
-# All rights reserved.
-# BSD license.
-from __future__ import division
 import networkx as nx
 from networkx.utils import not_implemented_for
 
-__author__ = "\n".join(['Aric Hagberg ',
-                        'Pieter Swart (swart@lanl.gov)',
-                        'Dan Schult (dschult@colgate.edu)',
-                        'Jean-Gabriel Young (Jean.gabriel.young@gmail.com)'])
-__all__ = ['modularity_matrix', 'directed_modularity_matrix']
+__all__ = ["modularity_matrix", "directed_modularity_matrix"]
 
-@not_implemented_for('directed')
-@not_implemented_for('multigraph')
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
 def modularity_matrix(G, nodelist=None, weight=None):
-    """Return the modularity matrix of G.
+    r"""Returns the modularity matrix of G.
 
     The modularity matrix is the matrix B = A - <A>, where A is the adjacency
     matrix and <A> is the average adjacency matrix, assuming that the graph
     is described by the configuration model.
 
     More specifically, the element B_ij of B is defined as
-        A_ij - k_i k_j / 2 * m
-    where k_i(in) is the degree of node i, and were m is the number of edges
+
+    .. math::
+        A_{ij} - {k_i k_j \over 2 m}
+
+    where k_i is the degree of node i, and where m is the number of edges
     in the graph. When weight is set to a name of an attribute edge, Aij, k_i,
     k_j and m are computed using its value.
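+
+    Each row and column of B sums to zero; summing the expression above
+    over j gives
+
+    .. math::
+        \sum_j \left( A_{ij} - {k_i k_j \over 2 m} \right)
+            = k_i - {k_i \cdot 2m \over 2m} = 0.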
@@ -51,28 +44,26 @@ def modularity_matrix(G, nodelist=None, weight=None):
 
     Examples
     --------
-    >>> import networkx as nx
-    >>> k =[3, 2, 2, 1, 0]
+    >>> k = [3, 2, 2, 1, 0]
     >>> G = nx.havel_hakimi_graph(k)
     >>> B = nx.modularity_matrix(G)
 
 
     See Also
    --------
-    to_numpy_matrix
+    to_numpy_array
+    modularity_spectrum
     adjacency_matrix
-    laplacian_matrix
     directed_modularity_matrix
 
     References
     ----------
     .. [1] M. E. J. Newman, "Modularity and community structure in networks",
-       Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006.
+           Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006.
     """
     if nodelist is None:
         nodelist = list(G)
-    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight,
-                                  format='csr')
+    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format="csr")
     k = A.sum(axis=1)
     m = k.sum() * 0.5
     # Expected adjacency matrix
@@ -80,18 +71,21 @@ def modularity_matrix(G, nodelist=None, weight=None):
     return A - X
 
 
-@not_implemented_for('undirected')
-@not_implemented_for('multigraph')
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
 def directed_modularity_matrix(G, nodelist=None, weight=None):
-    """Return the directed modularity matrix of G.
+    """Returns the directed modularity matrix of G.
 
     The modularity matrix is the matrix B = A - <A>, where A is the adjacency
     matrix and <A> is the expected adjacency matrix, assuming that the graph
     is described by the configuration model.
 
     More specifically, the element B_ij of B is defined as
-        B_ij = A_ij - k_i(out) k_j(in) / m
-    where k_i(in) is the in degree of node i, and k_j(out) is the out degree
+
+    .. math::
+        B_{ij} = A_{ij} - k_i^{out} k_j^{in} / m
+
+    where :math:`k_i^{out}` is the out degree of node i, and :math:`k_j^{in}` is the in degree
     of node j, with m the number of edges in the graph. When weight is set
     to a name of an attribute edge, Aij, k_i, k_j and m are
     computed using its value.
@@ -116,10 +110,21 @@ def directed_modularity_matrix(G, nodelist=None, weight=None):
 
     Examples
     --------
-    >>> import networkx as nx
     >>> G = nx.DiGraph()
-    >>> G.add_edges_from(((1,2), (1,3), (3,1), (3,2), (3,5), (4,5), (4,6),
-    ...                   (5,4), (5,6), (6,4)))
+    >>> G.add_edges_from(
+    ...     (
+    ...         (1, 2),
+    ...         (1, 3),
+    ...         (3, 1),
+    ...         (3, 2),
+    ...         (3, 5),
+    ...         (4, 5),
+    ...         (4, 6),
+    ...         (5, 4),
+    ...         (5, 6),
+    ...         (6, 4),
+    ...     )
+    ... )
     >>> B = nx.directed_modularity_matrix(G)
 
 
@@ -131,34 +136,23 @@ def directed_modularity_matrix(G, nodelist=None, weight=None):
 
     See Also
     --------
-    to_numpy_matrix
+    to_numpy_array
+    modularity_spectrum
     adjacency_matrix
-    laplacian_matrix
     modularity_matrix
 
     References
     ----------
     .. [1] E. A. Leicht, M. E. J. Newman,
-        "Community structure in directed networks",
+       "Community structure in directed networks",
        Phys. Rev Lett., vol. 100, no. 11, p. 118703, 2008.
     """
     if nodelist is None:
         nodelist = list(G)
-    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight,
-                                  format='csr')
+    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format="csr")
     k_in = A.sum(axis=0)
     k_out = A.sum(axis=1)
     m = k_in.sum()
     # Expected adjacency matrix
     X = k_out * k_in / m
 
     return A - X
-
-
-# fixture for nose tests
-def setup_module(module):
-    from nose import SkipTest
-    try:
-        import numpy
-        import scipy
-    except:
-        raise SkipTest("NumPy not available")
diff --git a/networkx/linalg/spectrum.py b/networkx/linalg/spectrum.py
index 9f24e1a..2855b04 100644
--- a/networkx/linalg/spectrum.py
+++ b/networkx/linalg/spectrum.py
@@ -1,23 +1,19 @@
 """
 Eigenvalue spectrum of graphs.
""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx -__author__ = "\n".join(['Aric Hagberg ', - 'Pieter Swart (swart@lanl.gov)', - 'Dan Schult(dschult@colgate.edu)', - 'Jean-Gabriel Young (jean.gabriel.young@gmail.com)']) -__all__ = ['laplacian_spectrum', 'adjacency_spectrum', 'modularity_spectrum'] +__all__ = [ + "laplacian_spectrum", + "adjacency_spectrum", + "modularity_spectrum", + "normalized_laplacian_spectrum", + "bethe_hessian_spectrum", +] -def laplacian_spectrum(G, weight='weight'): - """Return eigenvalues of the Laplacian of G +def laplacian_spectrum(G, weight="weight"): + """Returns eigenvalues of the Laplacian of G Parameters ---------- @@ -36,18 +32,50 @@ def laplacian_spectrum(G, weight='weight'): Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. - See to_numpy_matrix for other options. + See to_numpy_array for other options. See Also -------- laplacian_matrix """ from scipy.linalg import eigvalsh + return eigvalsh(nx.laplacian_matrix(G, weight=weight).todense()) -def adjacency_spectrum(G, weight='weight'): - """Return eigenvalues of the adjacency matrix of G. +def normalized_laplacian_spectrum(G, weight="weight"): + """Return eigenvalues of the normalized Laplacian of G + + Parameters + ---------- + G : graph + A NetworkX graph + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + evals : NumPy array + Eigenvalues + + Notes + ----- + For MultiGraph/MultiDiGraph, the edges weights are summed. + See to_numpy_array for other options. + + See Also + -------- + normalized_laplacian_matrix + """ + from scipy.linalg import eigvalsh + + return eigvalsh(nx.normalized_laplacian_matrix(G, weight=weight).todense()) + + +def adjacency_spectrum(G, weight="weight"): + """Returns eigenvalues of the adjacency matrix of G. Parameters ---------- @@ -66,18 +94,19 @@ def adjacency_spectrum(G, weight='weight'): Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. - See to_numpy_matrix for other options. + See to_numpy_array for other options. See Also -------- adjacency_matrix """ from scipy.linalg import eigvals + return eigvals(nx.adjacency_matrix(G, weight=weight).todense()) def modularity_spectrum(G): - """Return eigenvalues of the modularity matrix of G. + """Returns eigenvalues of the modularity matrix of G. Parameters ---------- @@ -99,17 +128,39 @@ def modularity_spectrum(G): Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006. """ from scipy.linalg import eigvals + if G.is_directed(): return eigvals(nx.directed_modularity_matrix(G)) else: return eigvals(nx.modularity_matrix(G)) -# fixture for nose tests +def bethe_hessian_spectrum(G, r=None): + """Returns eigenvalues of the Bethe Hessian matrix of G. + + Parameters + ---------- + G : Graph + A NetworkX Graph or DiGraph + + r : float + Regularizer parameter + + Returns + ------- + evals : NumPy array + Eigenvalues + + See Also + -------- + bethe_hessian_matrix + + References + ---------- + .. [1] A. Saade, F. Krzakala and L. Zdeborová + "Spectral clustering of graphs with the bethe hessian", + Advances in Neural Information Processing Systems. 2014. 
+ """ + from scipy.linalg import eigvalsh -def setup_module(module): - from nose import SkipTest - try: - import scipy.linalg - except: - raise SkipTest("scipy.linalg not available") + return eigvalsh(nx.bethe_hessian_matrix(G, r).todense()) diff --git a/networkx/linalg/tests/__init__.py b/networkx/linalg/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/linalg/tests/test_algebraic_connectivity.py b/networkx/linalg/tests/test_algebraic_connectivity.py index 8e031e1..f9197e8 100644 --- a/networkx/linalg/tests/test_algebraic_connectivity.py +++ b/networkx/linalg/tests/test_algebraic_connectivity.py @@ -1,277 +1,365 @@ from math import sqrt + +import pytest + +numpy = pytest.importorskip("numpy") +numpy.linalg = pytest.importorskip("numpy.linalg") +scipy = pytest.importorskip("scipy") +scipy.sparse = pytest.importorskip("scipy.sparse") + + import networkx as nx -from nose import SkipTest -from nose.tools import * +from networkx.testing import almost_equal try: from scikits.sparse.cholmod import cholesky + _cholesky = cholesky except ImportError: _cholesky = None if _cholesky is None: - methods = ('tracemin_pcg', 'tracemin_lu', 'lanczos', 'lobpcg') + methods = ("tracemin_pcg", "tracemin_lu", "lanczos", "lobpcg") else: - methods = ('tracemin_pcg', 'tracemin_chol', 'tracemin_lu', 'lanczos', 'lobpcg') + methods = ("tracemin_pcg", "tracemin_chol", "tracemin_lu", "lanczos", "lobpcg") def check_eigenvector(A, l, x): nx = numpy.linalg.norm(x) # Check zeroness. - assert_not_almost_equal(nx, 0) + assert not almost_equal(nx, 0) y = A * x ny = numpy.linalg.norm(y) # Check collinearity. - assert_almost_equal(numpy.dot(x, y), nx * ny) + assert almost_equal(numpy.dot(x, y), nx * ny) # Check eigenvalue. - assert_almost_equal(ny, l * nx) + assert almost_equal(ny, l * nx) -class TestAlgebraicConnectivity(object): - - numpy = 1 - - @classmethod - def setupClass(cls): - global numpy - try: - import numpy.linalg - import scipy.sparse - except ImportError: - raise SkipTest('SciPy not available.') - - def test_directed(self): +class TestAlgebraicConnectivity: + @pytest.mark.parametrize("method", methods) + def test_directed(self, method): G = nx.DiGraph() - for method in self._methods: - assert_raises(nx.NetworkXNotImplemented, nx.algebraic_connectivity, - G, method=method) - assert_raises(nx.NetworkXNotImplemented, nx.fiedler_vector, G, - method=method) + pytest.raises( + nx.NetworkXNotImplemented, nx.algebraic_connectivity, G, method=method + ) + pytest.raises(nx.NetworkXNotImplemented, nx.fiedler_vector, G, method=method) - def test_null_and_singleton(self): + @pytest.mark.parametrize("method", methods) + def test_null_and_singleton(self, method): G = nx.Graph() - for method in self._methods: - assert_raises(nx.NetworkXError, nx.algebraic_connectivity, G, - method=method) - assert_raises(nx.NetworkXError, nx.fiedler_vector, G, - method=method) + pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method) + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) G.add_edge(0, 0) - for method in self._methods: - assert_raises(nx.NetworkXError, nx.algebraic_connectivity, G, - method=method) - assert_raises(nx.NetworkXError, nx.fiedler_vector, G, - method=method) + pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method) + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) - def test_disconnected(self): + @pytest.mark.parametrize("method", methods) + def test_disconnected(self, method): G = nx.Graph() 
G.add_nodes_from(range(2)) - for method in self._methods: - assert_equal(nx.algebraic_connectivity(G), 0) - assert_raises(nx.NetworkXError, nx.fiedler_vector, G, - method=method) + assert nx.algebraic_connectivity(G) == 0 + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) G.add_edge(0, 1, weight=0) - for method in self._methods: - assert_equal(nx.algebraic_connectivity(G), 0) - assert_raises(nx.NetworkXError, nx.fiedler_vector, G, - method=method) + assert nx.algebraic_connectivity(G) == 0 + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) def test_unrecognized_method(self): G = nx.path_graph(4) - assert_raises(nx.NetworkXError, nx.algebraic_connectivity, G, - method='unknown') - assert_raises(nx.NetworkXError, nx.fiedler_vector, G, method='unknown') + pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method="unknown") + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method="unknown") - def test_two_nodes(self): + @pytest.mark.parametrize("method", methods) + def test_two_nodes(self, method): G = nx.Graph() G.add_edge(0, 1, weight=1) A = nx.laplacian_matrix(G) - for method in self._methods: - assert_almost_equal(nx.algebraic_connectivity( - G, tol=1e-12, method=method), 2) - x = nx.fiedler_vector(G, tol=1e-12, method=method) - check_eigenvector(A, 2, x) + assert almost_equal(nx.algebraic_connectivity(G, tol=1e-12, method=method), 2) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, 2, x) + + @pytest.mark.parametrize("method", methods) + def test_two_nodes_multigraph(self, method): G = nx.MultiGraph() G.add_edge(0, 0, spam=1e8) G.add_edge(0, 1, spam=1) G.add_edge(0, 1, spam=-2) - A = -3 * nx.laplacian_matrix(G, weight='spam') - for method in self._methods: - assert_almost_equal(nx.algebraic_connectivity( - G, weight='spam', tol=1e-12, method=method), 6) - x = nx.fiedler_vector(G, weight='spam', tol=1e-12, method=method) - check_eigenvector(A, 6, x) + A = -3 * nx.laplacian_matrix(G, weight="spam") + assert almost_equal( + nx.algebraic_connectivity(G, weight="spam", tol=1e-12, method=method), 6 + ) + x = nx.fiedler_vector(G, weight="spam", tol=1e-12, method=method) + check_eigenvector(A, 6, x) def test_abbreviation_of_method(self): G = nx.path_graph(8) A = nx.laplacian_matrix(G) sigma = 2 - sqrt(2 + sqrt(2)) - ac = nx.algebraic_connectivity(G, tol=1e-12, method='tracemin') - assert_almost_equal(ac, sigma) - x = nx.fiedler_vector(G, tol=1e-12, method='tracemin') + ac = nx.algebraic_connectivity(G, tol=1e-12, method="tracemin") + assert almost_equal(ac, sigma) + x = nx.fiedler_vector(G, tol=1e-12, method="tracemin") check_eigenvector(A, sigma, x) - def test_path(self): + @pytest.mark.parametrize("method", methods) + def test_path(self, method): G = nx.path_graph(8) A = nx.laplacian_matrix(G) sigma = 2 - sqrt(2 + sqrt(2)) - for method in self._methods: - ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) - assert_almost_equal(ac, sigma) - x = nx.fiedler_vector(G, tol=1e-12, method=method) - check_eigenvector(A, sigma, x) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) + assert almost_equal(ac, sigma) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) - def test_problematic_graph_issue_2381(self): + @pytest.mark.parametrize("method", methods) + def test_problematic_graph_issue_2381(self, method): G = nx.path_graph(4) G.add_edges_from([(4, 2), (5, 1)]) A = nx.laplacian_matrix(G) sigma = 0.438447187191 - for method in self._methods: - ac = 
nx.algebraic_connectivity(G, tol=1e-12, method=method) - assert_almost_equal(ac, sigma) - x = nx.fiedler_vector(G, tol=1e-12, method=method) - check_eigenvector(A, sigma, x) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) + assert almost_equal(ac, sigma) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) - def test_cycle(self): + @pytest.mark.parametrize("method", methods) + def test_cycle(self, method): G = nx.cycle_graph(8) A = nx.laplacian_matrix(G) sigma = 2 - sqrt(2) - for method in self._methods: - ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) - assert_almost_equal(ac, sigma) - x = nx.fiedler_vector(G, tol=1e-12, method=method) - check_eigenvector(A, sigma, x) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) + assert almost_equal(ac, sigma) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) - def test_buckminsterfullerene(self): + @pytest.mark.parametrize("method", methods) + def test_seed_argument(self, method): + G = nx.cycle_graph(8) + A = nx.laplacian_matrix(G) + sigma = 2 - sqrt(2) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method, seed=1) + assert almost_equal(ac, sigma) + x = nx.fiedler_vector(G, tol=1e-12, method=method, seed=1) + check_eigenvector(A, sigma, x) + + @pytest.mark.parametrize( + ("normalized", "sigma", "laplacian_fn"), + ( + (False, 0.2434017461399311, nx.laplacian_matrix), + (True, 0.08113391537997749, nx.normalized_laplacian_matrix), + ), + ) + @pytest.mark.parametrize("method", methods) + def test_buckminsterfullerene(self, normalized, sigma, laplacian_fn, method): G = nx.Graph( - [(1, 10), (1, 41), (1, 59), (2, 12), (2, 42), (2, 60), (3, 6), - (3, 43), (3, 57), (4, 8), (4, 44), (4, 58), (5, 13), (5, 56), - (5, 57), (6, 10), (6, 31), (7, 14), (7, 56), (7, 58), (8, 12), - (8, 32), (9, 23), (9, 53), (9, 59), (10, 15), (11, 24), (11, 53), - (11, 60), (12, 16), (13, 14), (13, 25), (14, 26), (15, 27), - (15, 49), (16, 28), (16, 50), (17, 18), (17, 19), (17, 54), - (18, 20), (18, 55), (19, 23), (19, 41), (20, 24), (20, 42), - (21, 31), (21, 33), (21, 57), (22, 32), (22, 34), (22, 58), - (23, 24), (25, 35), (25, 43), (26, 36), (26, 44), (27, 51), - (27, 59), (28, 52), (28, 60), (29, 33), (29, 34), (29, 56), - (30, 51), (30, 52), (30, 53), (31, 47), (32, 48), (33, 45), - (34, 46), (35, 36), (35, 37), (36, 38), (37, 39), (37, 49), - (38, 40), (38, 50), (39, 40), (39, 51), (40, 52), (41, 47), - (42, 48), (43, 49), (44, 50), (45, 46), (45, 54), (46, 55), - (47, 54), (48, 55)]) - for normalized in (False, True): - if not normalized: - A = nx.laplacian_matrix(G) - sigma = 0.2434017461399311 - else: - A = nx.normalized_laplacian_matrix(G) - sigma = 0.08113391537997749 - for method in methods: - try: - assert_almost_equal(nx.algebraic_connectivity( - G, normalized=normalized, tol=1e-12, method=method), - sigma) - x = nx.fiedler_vector(G, normalized=normalized, tol=1e-12, - method=method) - check_eigenvector(A, sigma, x) - except nx.NetworkXError as e: - if e.args not in (('Cholesky solver unavailable.',), - ('LU solver unavailable.',)): - raise - - _methods = methods - - -class TestSpectralOrdering(object): - - numpy = 1 - - @classmethod - def setupClass(cls): - global numpy + [ + (1, 10), + (1, 41), + (1, 59), + (2, 12), + (2, 42), + (2, 60), + (3, 6), + (3, 43), + (3, 57), + (4, 8), + (4, 44), + (4, 58), + (5, 13), + (5, 56), + (5, 57), + (6, 10), + (6, 31), + (7, 14), + (7, 56), + (7, 58), + (8, 12), + (8, 32), + (9, 23), + (9, 53), + (9, 59), + (10, 
15), + (11, 24), + (11, 53), + (11, 60), + (12, 16), + (13, 14), + (13, 25), + (14, 26), + (15, 27), + (15, 49), + (16, 28), + (16, 50), + (17, 18), + (17, 19), + (17, 54), + (18, 20), + (18, 55), + (19, 23), + (19, 41), + (20, 24), + (20, 42), + (21, 31), + (21, 33), + (21, 57), + (22, 32), + (22, 34), + (22, 58), + (23, 24), + (25, 35), + (25, 43), + (26, 36), + (26, 44), + (27, 51), + (27, 59), + (28, 52), + (28, 60), + (29, 33), + (29, 34), + (29, 56), + (30, 51), + (30, 52), + (30, 53), + (31, 47), + (32, 48), + (33, 45), + (34, 46), + (35, 36), + (35, 37), + (36, 38), + (37, 39), + (37, 49), + (38, 40), + (38, 50), + (39, 40), + (39, 51), + (40, 52), + (41, 47), + (42, 48), + (43, 49), + (44, 50), + (45, 46), + (45, 54), + (46, 55), + (47, 54), + (48, 55), + ] + ) + A = laplacian_fn(G) try: - import numpy.linalg - import scipy.sparse - except ImportError: - raise SkipTest('SciPy not available.') - - def test_nullgraph(self): - for graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph): - G = graph() - assert_raises(nx.NetworkXError, nx.spectral_ordering, G) - - def test_singleton(self): - for graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph): - G = graph() - G.add_node('x') - assert_equal(nx.spectral_ordering(G), ['x']) - G.add_edge('x', 'x', weight=33) - G.add_edge('x', 'x', weight=33) - assert_equal(nx.spectral_ordering(G), ['x']) + assert almost_equal( + nx.algebraic_connectivity( + G, normalized=normalized, tol=1e-12, method=method + ), + sigma, + ) + x = nx.fiedler_vector(G, normalized=normalized, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) + except nx.NetworkXError as e: + if e.args not in ( + ("Cholesky solver unavailable.",), + ("LU solver unavailable.",), + ): + raise + + +class TestSpectralOrdering: + _graphs = (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) + + @pytest.mark.parametrize("graph", _graphs) + def test_nullgraph(self, graph): + G = graph() + pytest.raises(nx.NetworkXError, nx.spectral_ordering, G) + + @pytest.mark.parametrize("graph", _graphs) + def test_singleton(self, graph): + G = graph() + G.add_node("x") + assert nx.spectral_ordering(G) == ["x"] + G.add_edge("x", "x", weight=33) + G.add_edge("x", "x", weight=33) + assert nx.spectral_ordering(G) == ["x"] def test_unrecognized_method(self): G = nx.path_graph(4) - assert_raises(nx.NetworkXError, nx.spectral_ordering, G, - method='unknown') + pytest.raises(nx.NetworkXError, nx.spectral_ordering, G, method="unknown") - def test_three_nodes(self): + @pytest.mark.parametrize("method", methods) + def test_three_nodes(self, method): G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], - weight='spam') - for method in self._methods: - order = nx.spectral_ordering(G, weight='spam', method=method) - assert_equal(set(order), set(G)) - ok_(set([1, 3]) in (set(order[:-1]), set(order[1:]))) + G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], weight="spam") + order = nx.spectral_ordering(G, weight="spam", method=method) + assert set(order) == set(G) + assert {1, 3} in (set(order[:-1]), set(order[1:])) + + @pytest.mark.parametrize("method", methods) + def test_three_nodes_multigraph(self, method): G = nx.MultiDiGraph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)]) - for method in self._methods: - order = nx.spectral_ordering(G, method=method) - assert_equal(set(order), set(G)) - ok_(set([2, 3]) in (set(order[:-1]), set(order[1:]))) + order = nx.spectral_ordering(G, method=method) + assert set(order) == set(G) + assert 
{2, 3} in (set(order[:-1]), set(order[1:])) + + @pytest.mark.parametrize("method", methods) + def test_path(self, method): + # based on setup_class numpy is installed if we get here + from numpy.random import shuffle - def test_path(self): - # based on setupClass numpy is installed if we get here + path = list(range(10)) + shuffle(path) + G = nx.Graph() + nx.add_path(G, path) + order = nx.spectral_ordering(G, method=method) + assert order in [path, list(reversed(path))] + + @pytest.mark.parametrize("method", methods) + def test_seed_argument(self, method): + # based on setup_class numpy is installed if we get here from numpy.random import shuffle + path = list(range(10)) shuffle(path) G = nx.Graph() nx.add_path(G, path) - for method in self._methods: - order = nx.spectral_ordering(G, method=method) - ok_(order in [path, list(reversed(path))]) + order = nx.spectral_ordering(G, method=method, seed=1) + assert order in [path, list(reversed(path))] - def test_disconnected(self): + @pytest.mark.parametrize("method", methods) + def test_disconnected(self, method): G = nx.Graph() nx.add_path(G, range(0, 10, 2)) nx.add_path(G, range(1, 10, 2)) - for method in self._methods: - order = nx.spectral_ordering(G, method=method) - assert_equal(set(order), set(G)) - seqs = [list(range(0, 10, 2)), list(range(8, -1, -2)), - list(range(1, 10, 2)), list(range(9, -1, -2))] - ok_(order[:5] in seqs) - ok_(order[5:] in seqs) - - def test_cycle(self): + order = nx.spectral_ordering(G, method=method) + assert set(order) == set(G) + seqs = [ + list(range(0, 10, 2)), + list(range(8, -1, -2)), + list(range(1, 10, 2)), + list(range(9, -1, -2)), + ] + assert order[:5] in seqs + assert order[5:] in seqs + + @pytest.mark.parametrize( + ("normalized", "expected_order"), + ( + (False, [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8], [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]]), + (True, [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8], [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]]), + ), + ) + @pytest.mark.parametrize("method", methods) + def test_cycle(self, normalized, expected_order, method): path = list(range(10)) G = nx.Graph() nx.add_path(G, path, weight=5) G.add_edge(path[-1], path[0], weight=1) A = nx.laplacian_matrix(G).todense() - for normalized in (False, True): - for method in methods: - try: - order = nx.spectral_ordering(G, normalized=normalized, - method=method) - except nx.NetworkXError as e: - if e.args not in (('Cholesky solver unavailable.',), - ('LU solver unavailable.',)): - raise - else: - if not normalized: - ok_(order in [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8], - [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]]) - else: - ok_(order in [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8], - [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]]) - - _methods = methods + try: + order = nx.spectral_ordering(G, normalized=normalized, method=method) + except nx.NetworkXError as e: + if e.args not in ( + ("Cholesky solver unavailable.",), + ("LU solver unavailable.",), + ): + raise + else: + assert order in expected_order diff --git a/networkx/linalg/tests/test_attrmatrix.py b/networkx/linalg/tests/test_attrmatrix.py new file mode 100644 index 0000000..fb1ea47 --- /dev/null +++ b/networkx/linalg/tests/test_attrmatrix.py @@ -0,0 +1,108 @@ +import pytest + +np = pytest.importorskip("numpy") +import numpy.testing as npt + +import networkx as nx + + +def test_attr_matrix(): + G = nx.Graph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + + def node_attr(u): + return G.nodes[u].get("size", 0.5) * 3 + + def edge_attr(u, v): + return 
G[u][v].get("thickness", 0.5) + + M = nx.attr_matrix(G, edge_attr=edge_attr, node_attr=node_attr) + npt.assert_equal(M[0], np.array([[6.0]])) + assert M[1] == [1.5] + + +def test_attr_matrix_directed(): + G = nx.DiGraph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_matrix(G, rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 1., 1.], + [0., 0., 1.], + [0., 0., 0.]] + ) + # fmt: on + npt.assert_equal(M, np.array(data)) + + +def test_attr_matrix_multigraph(): + G = nx.MultiGraph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_matrix(G, rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 3., 1.], + [3., 0., 1.], + [1., 1., 0.]] + ) + # fmt: on + npt.assert_equal(M, np.array(data)) + M = nx.attr_matrix(G, edge_attr="weight", rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 9., 1.], + [9., 0., 1.], + [1., 1., 0.]] + ) + # fmt: on + npt.assert_equal(M, np.array(data)) + M = nx.attr_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 3., 2.], + [3., 0., 3.], + [2., 3., 0.]] + ) + # fmt: on + npt.assert_equal(M, np.array(data)) + + +def test_attr_sparse_matrix(): + pytest.importorskip("scipy") + G = nx.Graph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_sparse_matrix(G) + mtx = M[0] + data = np.ones((3, 3), float) + np.fill_diagonal(data, 0) + npt.assert_equal(mtx.todense(), np.array(data)) + assert M[1] == [0, 1, 2] + + +def test_attr_sparse_matrix_directed(): + G = nx.DiGraph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_sparse_matrix(G, rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 1., 1.], + [0., 0., 1.], + [0., 0., 0.]] + ) + # fmt: on + npt.assert_equal(M.todense(), np.array(data)) diff --git a/networkx/linalg/tests/test_bethehessian.py b/networkx/linalg/tests/test_bethehessian.py new file mode 100644 index 0000000..64644ba --- /dev/null +++ b/networkx/linalg/tests/test_bethehessian.py @@ -0,0 +1,42 @@ +import pytest + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +sp = pytest.importorskip("scipy") + +import networkx as nx +from networkx.generators.degree_seq import havel_hakimi_graph + + +class TestBetheHessian: + @classmethod + def setup_class(cls): + deg = [3, 2, 2, 1, 0] + cls.G = havel_hakimi_graph(deg) + cls.P = nx.path_graph(3) + + def test_bethe_hessian(self): + "Bethe Hessian matrix" + # fmt: off + H = np.array([[4, -2, 0], + [-2, 5, -2], + [0, -2, 4]]) + # fmt: on + permutation = [2, 0, 1] + # Bethe Hessian gives expected form + npt.assert_equal(nx.bethe_hessian_matrix(self.P, r=2).todense(), H) + # nodelist is correctly implemented + npt.assert_equal( + nx.bethe_hessian_matrix(self.P, r=2, nodelist=permutation).todense(), + H[np.ix_(permutation, permutation)], + ) + # Equal to Laplacian matrix when r=1 + npt.assert_equal( + nx.bethe_hessian_matrix(self.G, r=1).todense(), + nx.laplacian_matrix(self.G).todense(), + ) + # Correct default for the regularizer r + npt.assert_equal( + nx.bethe_hessian_matrix(self.G).todense(), + nx.bethe_hessian_matrix(self.G, r=1.25).todense(), + ) diff --git 
a/networkx/linalg/tests/test_graphmatrix.py b/networkx/linalg/tests/test_graphmatrix.py index d131df7..fdf3c64 100644 --- a/networkx/linalg/tests/test_graphmatrix.py +++ b/networkx/linalg/tests/test_graphmatrix.py @@ -1,172 +1,293 @@ -from nose import SkipTest +import pytest + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.generators.degree_seq import havel_hakimi_graph +from networkx.exception import NetworkXError + + +def test_incidence_matrix_simple(): + deg = [3, 2, 2, 1, 0] + G = havel_hakimi_graph(deg) + deg = [(1, 0), (1, 0), (1, 0), (2, 0), (1, 0), (2, 1), (0, 1), (0, 1)] + MG = nx.random_clustered_graph(deg, seed=42) + + I = nx.incidence_matrix(G).todense().astype(int) + # fmt: off + expected = np.array( + [[1, 1, 1, 0], + [0, 1, 0, 1], + [1, 0, 0, 1], + [0, 0, 1, 0], + [0, 0, 0, 0]] + ) + # fmt: on + npt.assert_equal(I, expected) + + I = nx.incidence_matrix(MG).todense().astype(int) + # fmt: off + expected = np.array( + [[1, 0, 0, 0, 0, 0, 0], + [1, 0, 0, 0, 0, 0, 0], + [0, 1, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0], + [0, 1, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0], + [0, 0, 0, 0, 0, 1, 1], + [0, 0, 0, 0, 1, 0, 1]] + ) + # fmt: on + npt.assert_equal(I, expected) + with pytest.raises(NetworkXError): + nx.incidence_matrix(G, nodelist=[0, 1]) -class TestGraphMatrix(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +class TestGraphMatrix: @classmethod - def setupClass(cls): - global numpy - global assert_equal - global assert_almost_equal - try: - import numpy - import scipy - from numpy.testing import assert_equal, assert_almost_equal - except ImportError: - raise SkipTest('SciPy not available.') - - def setUp(self): + def setup_class(cls): deg = [3, 2, 2, 1, 0] - self.G = havel_hakimi_graph(deg) - self.OI = numpy.array([[-1, -1, -1, 0], - [1, 0, 0, -1], - [0, 1, 0, 1], - [0, 0, 1, 0], - [0, 0, 0, 0]]) - self.A = numpy.array([[0, 1, 1, 1, 0], - [1, 0, 1, 0, 0], - [1, 1, 0, 0, 0], - [1, 0, 0, 0, 0], - [0, 0, 0, 0, 0]]) - self.WG = havel_hakimi_graph(deg) - self.WG.add_edges_from((u, v, {'weight': 0.5, 'other': 0.3}) - for (u, v) in self.G.edges()) - self.WA = numpy.array([[0, 0.5, 0.5, 0.5, 0], - [0.5, 0, 0.5, 0, 0], - [0.5, 0.5, 0, 0, 0], - [0.5, 0, 0, 0, 0], - [0, 0, 0, 0, 0]]) - self.MG = nx.MultiGraph(self.G) - self.MG2 = self.MG.copy() - self.MG2.add_edge(0, 1) - self.MG2A = numpy.array([[0, 2, 1, 1, 0], - [2, 0, 1, 0, 0], - [1, 1, 0, 0, 0], - [1, 0, 0, 0, 0], - [0, 0, 0, 0, 0]]) - self.MGOI = numpy.array([[-1, -1, -1, -1, 0], - [1, 1, 0, 0, -1], - [0, 0, 1, 0, 1], - [0, 0, 0, 1, 0], - [0, 0, 0, 0, 0]]) - self.no_edges_G = nx.Graph([(1, 2), (3, 2, {'weight': 8})]) - self.no_edges_A = numpy.array([[0, 0], [0, 0]]) + cls.G = havel_hakimi_graph(deg) + # fmt: off + cls.OI = np.array( + [[-1, -1, -1, 0], + [1, 0, 0, -1], + [0, 1, 0, 1], + [0, 0, 1, 0], + [0, 0, 0, 0]] + ) + cls.A = np.array( + [[0, 1, 1, 1, 0], + [1, 0, 1, 0, 0], + [1, 1, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 0, 0, 0, 0]] + ) + # fmt: on + cls.WG = havel_hakimi_graph(deg) + cls.WG.add_edges_from( + (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges() + ) + # fmt: off + cls.WA = np.array( + [[0, 0.5, 0.5, 0.5, 0], + [0.5, 0, 0.5, 0, 0], + [0.5, 0.5, 0, 0, 0], + [0.5, 0, 0, 0, 0], + [0, 0, 0, 0, 0]] + ) + # fmt: on + cls.MG = nx.MultiGraph(cls.G) + cls.MG2 = cls.MG.copy() + cls.MG2.add_edge(0, 1) + # fmt: off + cls.MG2A = np.array( + [[0, 2, 1, 1, 0], + [2, 0, 1, 0, 0], 
+ [1, 1, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 0, 0, 0, 0]] + ) + cls.MGOI = np.array( + [[-1, -1, -1, -1, 0], + [1, 1, 0, 0, -1], + [0, 0, 1, 0, 1], + [0, 0, 0, 1, 0], + [0, 0, 0, 0, 0]] + ) + # fmt: on + cls.no_edges_G = nx.Graph([(1, 2), (3, 2, {"weight": 8})]) + cls.no_edges_A = np.array([[0, 0], [0, 0]]) def test_incidence_matrix(self): "Conversion to incidence matrix" - I = nx.incidence_matrix(self.G, - nodelist=sorted(self.G), - edgelist=sorted(self.G.edges()), - oriented=True).todense().astype(int) - assert_equal(I, self.OI) - I = nx.incidence_matrix(self.G, - nodelist=sorted(self.G), - edgelist=sorted(self.G.edges()), - oriented=False).todense().astype(int) - assert_equal(I, numpy.abs(self.OI)) - - I = nx.incidence_matrix(self.MG, - nodelist=sorted(self.MG), - edgelist=sorted(self.MG.edges()), - oriented=True).todense().astype(int) - assert_equal(I, self.OI) - I = nx.incidence_matrix(self.MG, - nodelist=sorted(self.MG), - edgelist=sorted(self.MG.edges()), - oriented=False).todense().astype(int) - assert_equal(I, numpy.abs(self.OI)) - - I = nx.incidence_matrix(self.MG2, - nodelist=sorted(self.MG2), - edgelist=sorted(self.MG2.edges()), - oriented=True).todense().astype(int) - assert_equal(I, self.MGOI) - I = nx.incidence_matrix(self.MG2, - nodelist=sorted(self.MG), - edgelist=sorted(self.MG2.edges()), - oriented=False).todense().astype(int) - assert_equal(I, numpy.abs(self.MGOI)) + I = ( + nx.incidence_matrix( + self.G, + nodelist=sorted(self.G), + edgelist=sorted(self.G.edges()), + oriented=True, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, self.OI) + + I = ( + nx.incidence_matrix( + self.G, + nodelist=sorted(self.G), + edgelist=sorted(self.G.edges()), + oriented=False, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, np.abs(self.OI)) + + I = ( + nx.incidence_matrix( + self.MG, + nodelist=sorted(self.MG), + edgelist=sorted(self.MG.edges()), + oriented=True, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, self.OI) + + I = ( + nx.incidence_matrix( + self.MG, + nodelist=sorted(self.MG), + edgelist=sorted(self.MG.edges()), + oriented=False, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, np.abs(self.OI)) + + I = ( + nx.incidence_matrix( + self.MG2, + nodelist=sorted(self.MG2), + edgelist=sorted(self.MG2.edges()), + oriented=True, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, self.MGOI) + + I = ( + nx.incidence_matrix( + self.MG2, + nodelist=sorted(self.MG), + edgelist=sorted(self.MG2.edges()), + oriented=False, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, np.abs(self.MGOI)) def test_weighted_incidence_matrix(self): - I = nx.incidence_matrix(self.WG, - nodelist=sorted(self.WG), - edgelist=sorted(self.WG.edges()), - oriented=True).todense().astype(int) - assert_equal(I, self.OI) - I = nx.incidence_matrix(self.WG, - nodelist=sorted(self.WG), - edgelist=sorted(self.WG.edges()), - oriented=False).todense().astype(int) - assert_equal(I, numpy.abs(self.OI)) - - # assert_equal(nx.incidence_matrix(self.WG,oriented=True, + I = ( + nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=True, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, self.OI) + + I = ( + nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=False, + ) + .todense() + .astype(int) + ) + npt.assert_equal(I, np.abs(self.OI)) + + # npt.assert_equal(nx.incidence_matrix(self.WG,oriented=True, # weight='weight').todense(),0.5*self.OI) - # 
assert_equal(nx.incidence_matrix(self.WG,weight='weight').todense(), - # numpy.abs(0.5*self.OI)) - # assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other').todense(), + # npt.assert_equal(nx.incidence_matrix(self.WG,weight='weight').todense(), + # np.abs(0.5*self.OI)) + # npt.assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other').todense(), # 0.3*self.OI) - I = nx.incidence_matrix(self.WG, - nodelist=sorted(self.WG), - edgelist=sorted(self.WG.edges()), - oriented=True, - weight='weight').todense() - assert_equal(I, 0.5 * self.OI) - I = nx.incidence_matrix(self.WG, - nodelist=sorted(self.WG), - edgelist=sorted(self.WG.edges()), - oriented=False, - weight='weight').todense() - assert_equal(I, numpy.abs(0.5 * self.OI)) - I = nx.incidence_matrix(self.WG, - nodelist=sorted(self.WG), - edgelist=sorted(self.WG.edges()), - oriented=True, - weight='other').todense() - assert_equal(I, 0.3 * self.OI) + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=True, + weight="weight", + ).todense() + npt.assert_equal(I, 0.5 * self.OI) + + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=False, + weight="weight", + ).todense() + npt.assert_equal(I, np.abs(0.5 * self.OI)) + + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=True, + weight="other", + ).todense() + npt.assert_equal(I, 0.3 * self.OI) # WMG=nx.MultiGraph(self.WG) # WMG.add_edge(0,1,weight=0.5,other=0.3) - # assert_equal(nx.incidence_matrix(WMG,weight='weight').todense(), - # numpy.abs(0.5*self.MGOI)) - # assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True).todense(), + # npt.assert_equal(nx.incidence_matrix(WMG,weight='weight').todense(), + # np.abs(0.5*self.MGOI)) + # npt.assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True).todense(), # 0.5*self.MGOI) - # assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True).todense(), + # npt.assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True).todense(), # 0.3*self.MGOI) WMG = nx.MultiGraph(self.WG) WMG.add_edge(0, 1, weight=0.5, other=0.3) - I = nx.incidence_matrix(WMG, - nodelist=sorted(WMG), - edgelist=sorted(WMG.edges(keys=True)), - oriented=True, - weight='weight').todense() - assert_equal(I, 0.5 * self.MGOI) - I = nx.incidence_matrix(WMG, - nodelist=sorted(WMG), - edgelist=sorted(WMG.edges(keys=True)), - oriented=False, - weight='weight').todense() - assert_equal(I, numpy.abs(0.5 * self.MGOI)) - I = nx.incidence_matrix(WMG, - nodelist=sorted(WMG), - edgelist=sorted(WMG.edges(keys=True)), - oriented=True, - weight='other').todense() - assert_equal(I, 0.3 * self.MGOI) + + I = nx.incidence_matrix( + WMG, + nodelist=sorted(WMG), + edgelist=sorted(WMG.edges(keys=True)), + oriented=True, + weight="weight", + ).todense() + npt.assert_equal(I, 0.5 * self.MGOI) + + I = nx.incidence_matrix( + WMG, + nodelist=sorted(WMG), + edgelist=sorted(WMG.edges(keys=True)), + oriented=False, + weight="weight", + ).todense() + npt.assert_equal(I, np.abs(0.5 * self.MGOI)) + + I = nx.incidence_matrix( + WMG, + nodelist=sorted(WMG), + edgelist=sorted(WMG.edges(keys=True)), + oriented=True, + weight="other", + ).todense() + npt.assert_equal(I, 0.3 * self.MGOI) def test_adjacency_matrix(self): "Conversion to adjacency matrix" - assert_equal(nx.adj_matrix(self.G).todense(), self.A) - assert_equal(nx.adj_matrix(self.MG).todense(), self.A) - 
assert_equal(nx.adj_matrix(self.MG2).todense(), self.MG2A)
-        assert_equal(nx.adj_matrix(self.G, nodelist=[0, 1]).todense(), self.A[:2, :2])
-        assert_equal(nx.adj_matrix(self.WG).todense(), self.WA)
-        assert_equal(nx.adj_matrix(self.WG, weight=None).todense(), self.A)
-        assert_equal(nx.adj_matrix(self.MG2, weight=None).todense(), self.MG2A)
-        assert_equal(nx.adj_matrix(self.WG, weight='other').todense(), 0.6 * self.WA)
-        assert_equal(nx.adj_matrix(self.no_edges_G, nodelist=[1, 3]).todense(), self.no_edges_A)
+        npt.assert_equal(nx.adj_matrix(self.G).todense(), self.A)
+        npt.assert_equal(nx.adj_matrix(self.MG).todense(), self.A)
+        npt.assert_equal(nx.adj_matrix(self.MG2).todense(), self.MG2A)
+        npt.assert_equal(
+            nx.adj_matrix(self.G, nodelist=[0, 1]).todense(), self.A[:2, :2]
+        )
+        npt.assert_equal(nx.adj_matrix(self.WG).todense(), self.WA)
+        npt.assert_equal(nx.adj_matrix(self.WG, weight=None).todense(), self.A)
+        npt.assert_equal(nx.adj_matrix(self.MG2, weight=None).todense(), self.MG2A)
+        npt.assert_equal(
+            nx.adj_matrix(self.WG, weight="other").todense(), 0.6 * self.WA
+        )
+        npt.assert_equal(
+            nx.adj_matrix(self.no_edges_G, nodelist=[1, 3]).todense(), self.no_edges_A
+        )
diff --git a/networkx/linalg/tests/test_laplacian.py b/networkx/linalg/tests/test_laplacian.py
index 65bd6b9..d7638b8 100644
--- a/networkx/linalg/tests/test_laplacian.py
+++ b/networkx/linalg/tests/test_laplacian.py
@@ -1,78 +1,93 @@
-from nose import SkipTest
+import pytest
+
+np = pytest.importorskip("numpy")
+npt = pytest.importorskip("numpy.testing")
+pytest.importorskip("scipy")
 
 import networkx as nx
 from networkx.generators.degree_seq import havel_hakimi_graph
+from networkx.generators.expanders import margulis_gabber_galil_graph
 
 
-class TestLaplacian(object):
-    numpy = 1  # nosetests attribute, use nosetests -a 'not numpy' to skip test
-
+class TestLaplacian:
     @classmethod
-    def setupClass(cls):
-        global numpy
-        global scipy
-        global assert_equal
-        global assert_almost_equal
-        try:
-            import numpy
-            import scipy
-            from numpy.testing import assert_equal, assert_almost_equal
-        except ImportError:
-            raise SkipTest('SciPy not available.')
-
-    def setUp(self):
+    def setup_class(cls):
         deg = [3, 2, 2, 1, 0]
-        self.G = havel_hakimi_graph(deg)
-        self.WG = nx.Graph((u, v, {'weight': 0.5, 'other': 0.3})
-                           for (u, v) in self.G.edges())
-        self.WG.add_node(4)
-        self.MG = nx.MultiGraph(self.G)
+        cls.G = havel_hakimi_graph(deg)
+        cls.WG = nx.Graph(
+            (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges()
+        )
+        cls.WG.add_node(4)
+        cls.MG = nx.MultiGraph(cls.G)
 
-        # Graph with selfloops
-        self.Gsl = self.G.copy()
-        for node in self.Gsl.nodes():
-            self.Gsl.add_edge(node, node)
+        # Graph with selfloops
+        cls.Gsl = cls.G.copy()
+        for node in cls.Gsl.nodes():
+            cls.Gsl.add_edge(node, node)
 
     def test_laplacian(self):
         "Graph Laplacian"
-        NL = numpy.array([[3, -1, -1, -1, 0],
-                          [-1, 2, -1, 0, 0],
-                          [-1, -1, 2, 0, 0],
-                          [-1, 0, 0, 1, 0],
-                          [0, 0, 0, 0, 0]])
+        # fmt: off
+        NL = np.array([[3, -1, -1, -1, 0],
+                       [-1, 2, -1, 0, 0],
+                       [-1, -1, 2, 0, 0],
+                       [-1, 0, 0, 1, 0],
+                       [0, 0, 0, 0, 0]])
+        # fmt: on
         WL = 0.5 * NL
         OL = 0.3 * NL
-        assert_equal(nx.laplacian_matrix(self.G).todense(), NL)
-        assert_equal(nx.laplacian_matrix(self.MG).todense(), NL)
-        assert_equal(nx.laplacian_matrix(self.G, nodelist=[0, 1]).todense(),
-                     numpy.array([[1, -1], [-1, 1]]))
-        assert_equal(nx.laplacian_matrix(self.WG).todense(), WL)
-        assert_equal(nx.laplacian_matrix(self.WG, weight=None).todense(), NL)
-        assert_equal(nx.laplacian_matrix(self.WG,
weight='other').todense(), OL) + npt.assert_equal(nx.laplacian_matrix(self.G).todense(), NL) + npt.assert_equal(nx.laplacian_matrix(self.MG).todense(), NL) + npt.assert_equal( + nx.laplacian_matrix(self.G, nodelist=[0, 1]).todense(), + np.array([[1, -1], [-1, 1]]), + ) + npt.assert_equal(nx.laplacian_matrix(self.WG).todense(), WL) + npt.assert_equal(nx.laplacian_matrix(self.WG, weight=None).todense(), NL) + npt.assert_equal(nx.laplacian_matrix(self.WG, weight="other").todense(), OL) def test_normalized_laplacian(self): "Generalized Graph Laplacian" - GL = numpy.array([[1.00, -0.408, -0.408, -0.577, 0.00], - [-0.408, 1.00, -0.50, 0.00, 0.00], - [-0.408, -0.50, 1.00, 0.00, 0.00], - [-0.577, 0.00, 0.00, 1.00, 0.00], - [0.00, 0.00, 0.00, 0.00, 0.00]]) - Lsl = numpy.array([[0.75, -0.2887, -0.2887, -0.3536, 0.], - [-0.2887, 0.6667, -0.3333, 0., 0.], - [-0.2887, -0.3333, 0.6667, 0., 0.], - [-0.3536, 0., 0., 0.5, 0.], - [0., 0., 0., 0., 0.]]) - - assert_almost_equal(nx.normalized_laplacian_matrix(self.G).todense(), - GL, decimal=3) - assert_almost_equal(nx.normalized_laplacian_matrix(self.MG).todense(), - GL, decimal=3) - assert_almost_equal(nx.normalized_laplacian_matrix(self.WG).todense(), - GL, decimal=3) - assert_almost_equal(nx.normalized_laplacian_matrix(self.WG, weight='other').todense(), - GL, decimal=3) - assert_almost_equal(nx.normalized_laplacian_matrix(self.Gsl).todense(), - Lsl, decimal=3) + # fmt: off + G = np.array([[ 1. , -0.408, -0.408, -0.577, 0.], + [-0.408, 1. , -0.5 , 0. , 0.], + [-0.408, -0.5 , 1. , 0. , 0.], + [-0.577, 0. , 0. , 1. , 0.], + [ 0. , 0. , 0. , 0. , 0.]]) + GL = np.array([[1.00, -0.408, -0.408, -0.577, 0.00], + [-0.408, 1.00, -0.50, 0.00, 0.00], + [-0.408, -0.50, 1.00, 0.00, 0.00], + [-0.577, 0.00, 0.00, 1.00, 0.00], + [0.00, 0.00, 0.00, 0.00, 0.00]]) + Lsl = np.array([[0.75, -0.2887, -0.2887, -0.3536, 0.], + [-0.2887, 0.6667, -0.3333, 0., 0.], + [-0.2887, -0.3333, 0.6667, 0., 0.], + [-0.3536, 0., 0., 0.5, 0.], + [0., 0., 0., 0., 0.]]) + # fmt: on + + npt.assert_almost_equal( + nx.normalized_laplacian_matrix(self.G, nodelist=range(5)).todense(), + G, + decimal=3, + ) + npt.assert_almost_equal( + nx.normalized_laplacian_matrix(self.G).todense(), GL, decimal=3 + ) + npt.assert_almost_equal( + nx.normalized_laplacian_matrix(self.MG).todense(), GL, decimal=3 + ) + npt.assert_almost_equal( + nx.normalized_laplacian_matrix(self.WG).todense(), GL, decimal=3 + ) + npt.assert_almost_equal( + nx.normalized_laplacian_matrix(self.WG, weight="other").todense(), + GL, + decimal=3, + ) + npt.assert_almost_equal( + nx.normalized_laplacian_matrix(self.Gsl).todense(), Lsl, decimal=3 + ) def test_directed_laplacian(self): "Directed Laplacian" @@ -80,33 +95,137 @@ def test_directed_laplacian(self): # "Google's PageRank and Beyond". 
The graph contains dangling nodes, so # the pagerank random walk is selected by directed_laplacian G = nx.DiGraph() - G.add_edges_from(((1, 2), (1, 3), (3, 1), (3, 2), (3, 5), (4, 5), (4, 6), - (5, 4), (5, 6), (6, 4))) - GL = numpy.array([[0.9833, -0.2941, -0.3882, -0.0291, -0.0231, -0.0261], - [-0.2941, 0.8333, -0.2339, -0.0536, -0.0589, -0.0554], - [-0.3882, -0.2339, 0.9833, -0.0278, -0.0896, -0.0251], - [-0.0291, -0.0536, -0.0278, 0.9833, -0.4878, -0.6675], - [-0.0231, -0.0589, -0.0896, -0.4878, 0.9833, -0.2078], - [-0.0261, -0.0554, -0.0251, -0.6675, -0.2078, 0.9833]]) + G.add_edges_from( + ( + (1, 2), + (1, 3), + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ) + ) + # fmt: off + GL = np.array([[0.9833, -0.2941, -0.3882, -0.0291, -0.0231, -0.0261], + [-0.2941, 0.8333, -0.2339, -0.0536, -0.0589, -0.0554], + [-0.3882, -0.2339, 0.9833, -0.0278, -0.0896, -0.0251], + [-0.0291, -0.0536, -0.0278, 0.9833, -0.4878, -0.6675], + [-0.0231, -0.0589, -0.0896, -0.4878, 0.9833, -0.2078], + [-0.0261, -0.0554, -0.0251, -0.6675, -0.2078, 0.9833]]) + # fmt: on L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G)) - assert_almost_equal(L, GL, decimal=3) + npt.assert_almost_equal(L, GL, decimal=3) + + # Make the graph strongly connected, so we can use a random and lazy walk + G.add_edges_from(((2, 5), (6, 1))) + # fmt: off + GL = np.array([[1., -0.3062, -0.4714, 0., 0., -0.3227], + [-0.3062, 1., -0.1443, 0., -0.3162, 0.], + [-0.4714, -0.1443, 1., 0., -0.0913, 0.], + [0., 0., 0., 1., -0.5, -0.5], + [0., -0.3162, -0.0913, -0.5, 1., -0.25], + [-0.3227, 0., 0., -0.5, -0.25, 1.]]) + # fmt: on + L = nx.directed_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="random" + ) + npt.assert_almost_equal(L, GL, decimal=3) + + # fmt: off + GL = np.array([[0.5, -0.1531, -0.2357, 0., 0., -0.1614], + [-0.1531, 0.5, -0.0722, 0., -0.1581, 0.], + [-0.2357, -0.0722, 0.5, 0., -0.0456, 0.], + [0., 0., 0., 0.5, -0.25, -0.25], + [0., -0.1581, -0.0456, -0.25, 0.5, -0.125], + [-0.1614, 0., 0., -0.25, -0.125, 0.5]]) + # fmt: on + L = nx.directed_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="lazy" + ) + npt.assert_almost_equal(L, GL, decimal=3) + + def test_directed_combinatorial_laplacian(self): + "Directed combinatorial Laplacian" + # Graph used as an example in Sec. 4.1 of Langville and Meyer, + # "Google's PageRank and Beyond". 
The graph contains dangling nodes, so + # the pagerank random walk is selected by directed_laplacian + G = nx.DiGraph() + G.add_edges_from( + ( + (1, 2), + (1, 3), + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ) + ) + # fmt: off + GL = np.array([[0.0366, -0.0132, -0.0153, -0.0034, -0.0020, -0.0027], + [-0.0132, 0.0450, -0.0111, -0.0076, -0.0062, -0.0069], + [-0.0153, -0.0111, 0.0408, -0.0035, -0.0083, -0.0027], + [-0.0034, -0.0076, -0.0035, 0.3688, -0.1356, -0.2187], + [-0.0020, -0.0062, -0.0083, -0.1356, 0.2026, -0.0505], + [-0.0027, -0.0069, -0.0027, -0.2187, -0.0505, 0.2815]]) + # fmt: on + + L = nx.directed_combinatorial_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G)) + npt.assert_almost_equal(L, GL, decimal=3) # Make the graph strongly connected, so we can use a random and lazy walk - G.add_edges_from((((2, 5), (6, 1)))) - GL = numpy.array([[1., -0.3062, -0.4714, 0., 0., -0.3227], - [-0.3062, 1., -0.1443, 0., -0.3162, 0.], - [-0.4714, -0.1443, 1., 0., -0.0913, 0.], - [0., 0., 0., 1., -0.5, -0.5], - [0., -0.3162, -0.0913, -0.5, 1., -0.25], - [-0.3227, 0., 0., -0.5, -0.25, 1.]]) - L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G), walk_type='random') - assert_almost_equal(L, GL, decimal=3) - - GL = numpy.array([[0.5, -0.1531, -0.2357, 0., 0., -0.1614], - [-0.1531, 0.5, -0.0722, 0., -0.1581, 0.], - [-0.2357, -0.0722, 0.5, 0., -0.0456, 0.], - [0., 0., 0., 0.5, -0.25, -0.25], - [0., -0.1581, -0.0456, -0.25, 0.5, -0.125], - [-0.1614, 0., 0., -0.25, -0.125, 0.5]]) - L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G), walk_type='lazy') - assert_almost_equal(L, GL, decimal=3) + G.add_edges_from(((2, 5), (6, 1))) + + # fmt: off + GL = np.array([[0.1395, -0.0349, -0.0465, 0, 0, -0.0581], + [-0.0349, 0.0930, -0.0116, 0, -0.0465, 0], + [-0.0465, -0.0116, 0.0698, 0, -0.0116, 0], + [0, 0, 0, 0.2326, -0.1163, -0.1163], + [0, -0.0465, -0.0116, -0.1163, 0.2326, -0.0581], + [-0.0581, 0, 0, -0.1163, -0.0581, 0.2326]]) + # fmt: on + + L = nx.directed_combinatorial_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="random" + ) + npt.assert_almost_equal(L, GL, decimal=3) + + # fmt: off + GL = np.array([[0.0698, -0.0174, -0.0233, 0, 0, -0.0291], + [-0.0174, 0.0465, -0.0058, 0, -0.0233, 0], + [-0.0233, -0.0058, 0.0349, 0, -0.0058, 0], + [0, 0, 0, 0.1163, -0.0581, -0.0581], + [0, -0.0233, -0.0058, -0.0581, 0.1163, -0.0291], + [-0.0291, 0, 0, -0.0581, -0.0291, 0.1163]]) + # fmt: on + + L = nx.directed_combinatorial_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="lazy" + ) + npt.assert_almost_equal(L, GL, decimal=3) + + E = nx.DiGraph(margulis_gabber_galil_graph(2)) + L = nx.directed_combinatorial_laplacian_matrix(E) + # fmt: off + expected = np.array( + [[ 0.16666667, -0.08333333, -0.08333333, 0. ], + [-0.08333333, 0.16666667, 0. , -0.08333333], + [-0.08333333, 0. , 0.16666667, -0.08333333], + [ 0. 
, -0.08333333, -0.08333333, 0.16666667]] + ) + # fmt: on + npt.assert_almost_equal(L, expected, decimal=6) + + with pytest.raises(nx.NetworkXError): + nx.directed_combinatorial_laplacian_matrix( + G, walk_type="pagerank", alpha=100 + ) + with pytest.raises(nx.NetworkXError): + nx.directed_combinatorial_laplacian_matrix(G, walk_type="silly") diff --git a/networkx/linalg/tests/test_modularity.py b/networkx/linalg/tests/test_modularity.py index 1ba6c13..ccf730f 100644 --- a/networkx/linalg/tests/test_modularity.py +++ b/networkx/linalg/tests/test_modularity.py @@ -1,75 +1,86 @@ -from nose import SkipTest +import pytest + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.generators.degree_seq import havel_hakimi_graph -class TestModularity(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - +class TestModularity: @classmethod - def setupClass(cls): - global numpy - global scipy - global assert_equal - global assert_almost_equal - try: - import numpy - import scipy - from numpy.testing import assert_equal, assert_almost_equal - except ImportError: - raise SkipTest('SciPy not available.') - - def setUp(self): + def setup_class(cls): deg = [3, 2, 2, 1, 0] - self.G = havel_hakimi_graph(deg) + cls.G = havel_hakimi_graph(deg) # Graph used as an example in Sec. 4.1 of Langville and Meyer, # "Google's PageRank and Beyond". (Used for test_directed_laplacian) - self.DG = nx.DiGraph() - self.DG.add_edges_from(((1, 2), (1, 3), (3, 1), (3, 2), (3, 5), (4, 5), (4, 6), - (5, 4), (5, 6), (6, 4))) + cls.DG = nx.DiGraph() + cls.DG.add_edges_from( + ( + (1, 2), + (1, 3), + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ) + ) def test_modularity(self): "Modularity matrix" - B = numpy.matrix([[-1.125, 0.25, 0.25, 0.625, 0.], - [0.25, -0.5, 0.5, -0.25, 0.], - [0.25, 0.5, -0.5, -0.25, 0.], - [0.625, -0.25, -0.25, -0.125, 0.], - [0., 0., 0., 0., 0.]]) + # fmt: off + B = np.array([[-1.125, 0.25, 0.25, 0.625, 0.], + [0.25, -0.5, 0.5, -0.25, 0.], + [0.25, 0.5, -0.5, -0.25, 0.], + [0.625, -0.25, -0.25, -0.125, 0.], + [0., 0., 0., 0., 0.]]) + # fmt: on permutation = [4, 0, 1, 2, 3] - assert_equal(nx.modularity_matrix(self.G), B) - assert_equal(nx.modularity_matrix(self.G, nodelist=permutation), - B[numpy.ix_(permutation, permutation)]) + npt.assert_equal(nx.modularity_matrix(self.G), B) + npt.assert_equal( + nx.modularity_matrix(self.G, nodelist=permutation), + B[np.ix_(permutation, permutation)], + ) def test_modularity_weight(self): "Modularity matrix with weights" - B = numpy.matrix([[-1.125, 0.25, 0.25, 0.625, 0.], - [0.25, -0.5, 0.5, -0.25, 0.], - [0.25, 0.5, -0.5, -0.25, 0.], - [0.625, -0.25, -0.25, -0.125, 0.], - [0., 0., 0., 0., 0.]]) + # fmt: off + B = np.array([[-1.125, 0.25, 0.25, 0.625, 0.], + [0.25, -0.5, 0.5, -0.25, 0.], + [0.25, 0.5, -0.5, -0.25, 0.], + [0.625, -0.25, -0.25, -0.125, 0.], + [0., 0., 0., 0., 0.]]) + # fmt: on G_weighted = self.G.copy() for n1, n2 in G_weighted.edges(): G_weighted.edges[n1, n2]["weight"] = 0.5 # The following test would fail in networkx 1.1 - assert_equal(nx.modularity_matrix(G_weighted), B) + npt.assert_equal(nx.modularity_matrix(G_weighted), B) # The following test that the modularity matrix get rescaled accordingly - assert_equal(nx.modularity_matrix(G_weighted, weight="weight"), 0.5 * B) + npt.assert_equal(nx.modularity_matrix(G_weighted, weight="weight"), 0.5 * B) def test_directed_modularity(self): 
"Directed Modularity matrix" - B = numpy.matrix([[-0.2, 0.6, 0.8, -0.4, -0.4, -0.4], - [0., 0., 0., 0., 0., 0.], - [0.7, 0.4, -0.3, -0.6, 0.4, -0.6], - [-0.2, -0.4, -0.2, -0.4, 0.6, 0.6], - [-0.2, -0.4, -0.2, 0.6, -0.4, 0.6], - [-0.1, -0.2, -0.1, 0.8, -0.2, -0.2]]) + # fmt: off + B = np.array([[-0.2, 0.6, 0.8, -0.4, -0.4, -0.4], + [0., 0., 0., 0., 0., 0.], + [0.7, 0.4, -0.3, -0.6, 0.4, -0.6], + [-0.2, -0.4, -0.2, -0.4, 0.6, 0.6], + [-0.2, -0.4, -0.2, 0.6, -0.4, 0.6], + [-0.1, -0.2, -0.1, 0.8, -0.2, -0.2]]) + # fmt: on node_permutation = [5, 1, 2, 3, 4, 6] idx_permutation = [4, 0, 1, 2, 3, 5] - mm = nx.directed_modularity_matrix(self.DG, nodelist=sorted(self.DG)) - assert_equal(mm, B) - assert_equal(nx.directed_modularity_matrix(self.DG, - nodelist=node_permutation), - B[numpy.ix_(idx_permutation, idx_permutation)]) + mm = nx.directed_modularity_matrix(self.DG, nodelist=sorted(self.DG)) + npt.assert_equal(mm, B) + npt.assert_equal( + nx.directed_modularity_matrix(self.DG, nodelist=node_permutation), + B[np.ix_(idx_permutation, idx_permutation)], + ) diff --git a/networkx/linalg/tests/test_spectrum.py b/networkx/linalg/tests/test_spectrum.py index c4e455f..63e12d3 100644 --- a/networkx/linalg/tests/test_spectrum.py +++ b/networkx/linalg/tests/test_spectrum.py @@ -1,58 +1,72 @@ -from nose import SkipTest +import pytest + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.generators.degree_seq import havel_hakimi_graph -class TestSpectrum(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - +class TestSpectrum: @classmethod - def setupClass(cls): - global numpy - global assert_equal - global assert_almost_equal - try: - import numpy - import scipy - from numpy.testing import assert_equal, assert_almost_equal - except ImportError: - raise SkipTest('SciPy not available.') - - def setUp(self): + def setup_class(cls): deg = [3, 2, 2, 1, 0] - self.G = havel_hakimi_graph(deg) - self.P = nx.path_graph(3) - self.WG = nx.Graph((u, v, {'weight': 0.5, 'other': 0.3}) - for (u, v) in self.G.edges()) - self.WG.add_node(4) - self.DG = nx.DiGraph() - nx.add_path(self.DG, [0, 1, 2]) + cls.G = havel_hakimi_graph(deg) + cls.P = nx.path_graph(3) + cls.WG = nx.Graph( + (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges() + ) + cls.WG.add_node(4) + cls.DG = nx.DiGraph() + nx.add_path(cls.DG, [0, 1, 2]) def test_laplacian_spectrum(self): "Laplacian eigenvalues" - evals = numpy.array([0, 0, 1, 3, 4]) + evals = np.array([0, 0, 1, 3, 4]) e = sorted(nx.laplacian_spectrum(self.G)) - assert_almost_equal(e, evals) + npt.assert_almost_equal(e, evals) e = sorted(nx.laplacian_spectrum(self.WG, weight=None)) - assert_almost_equal(e, evals) + npt.assert_almost_equal(e, evals) e = sorted(nx.laplacian_spectrum(self.WG)) - assert_almost_equal(e, 0.5 * evals) - e = sorted(nx.laplacian_spectrum(self.WG, weight='other')) - assert_almost_equal(e, 0.3 * evals) + npt.assert_almost_equal(e, 0.5 * evals) + e = sorted(nx.laplacian_spectrum(self.WG, weight="other")) + npt.assert_almost_equal(e, 0.3 * evals) + + def test_normalized_laplacian_spectrum(self): + "Normalized Laplacian eigenvalues" + evals = np.array([0, 0, 0.7712864461218, 1.5, 1.7287135538781]) + e = sorted(nx.normalized_laplacian_spectrum(self.G)) + npt.assert_almost_equal(e, evals) + e = sorted(nx.normalized_laplacian_spectrum(self.WG, weight=None)) + npt.assert_almost_equal(e, evals) + e = 
sorted(nx.normalized_laplacian_spectrum(self.WG)) + npt.assert_almost_equal(e, evals) + e = sorted(nx.normalized_laplacian_spectrum(self.WG, weight="other")) + npt.assert_almost_equal(e, evals) def test_adjacency_spectrum(self): "Adjacency eigenvalues" - evals = numpy.array([-numpy.sqrt(2), 0, numpy.sqrt(2)]) + evals = np.array([-np.sqrt(2), 0, np.sqrt(2)]) e = sorted(nx.adjacency_spectrum(self.P)) - assert_almost_equal(e, evals) + npt.assert_almost_equal(e, evals) def test_modularity_spectrum(self): "Modularity eigenvalues" - evals = numpy.array([-1.5, 0., 0.]) + evals = np.array([-1.5, 0.0, 0.0]) e = sorted(nx.modularity_spectrum(self.P)) - assert_almost_equal(e, evals) + npt.assert_almost_equal(e, evals) # Directed modularity eigenvalues - evals = numpy.array([-0.5, 0., 0.]) + evals = np.array([-0.5, 0.0, 0.0]) e = sorted(nx.modularity_spectrum(self.DG)) - assert_almost_equal(e, evals) + npt.assert_almost_equal(e, evals) + + def test_bethe_hessian_spectrum(self): + "Bethe Hessian eigenvalues" + evals = np.array([0.5 * (9 - np.sqrt(33)), 4, 0.5 * (9 + np.sqrt(33))]) + e = sorted(nx.bethe_hessian_spectrum(self.P, r=2)) + npt.assert_almost_equal(e, evals) + # Collapses back to Laplacian: + e1 = sorted(nx.bethe_hessian_spectrum(self.P, r=1)) + e2 = sorted(nx.laplacian_spectrum(self.P)) + npt.assert_almost_equal(e1, e2) diff --git a/networkx/readwrite/adjlist.py b/networkx/readwrite/adjlist.py index 72854cb..2de048f 100644 --- a/networkx/readwrite/adjlist.py +++ b/networkx/readwrite/adjlist.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ ************** Adjacency List @@ -22,26 +21,14 @@ a b c # source target target d e """ -__author__ = '\n'.join(['Aric Hagberg ', - 'Dan Schult ', - 'Loïc Séguin-C. ']) -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - -__all__ = ['generate_adjlist', - 'write_adjlist', - 'parse_adjlist', - 'read_adjlist'] - -from networkx.utils import make_str, open_file + +__all__ = ["generate_adjlist", "write_adjlist", "parse_adjlist", "read_adjlist"] + +from networkx.utils import open_file import networkx as nx -def generate_adjlist(G, delimiter=' '): +def generate_adjlist(G, delimiter=" "): """Generate a single line of the graph G in adjacency list format. Parameters @@ -77,22 +64,22 @@ def generate_adjlist(G, delimiter=' '): directed = G.is_directed() seen = set() for s, nbrs in G.adjacency(): - line = make_str(s) + delimiter + line = str(s) + delimiter for t, data in nbrs.items(): if not directed and t in seen: continue if G.is_multigraph(): for d in data.values(): - line += make_str(t) + delimiter + line += str(t) + delimiter else: - line += make_str(t) + delimiter + line += str(t) + delimiter if not directed: seen.add(s) - yield line[:-len(delimiter)] + yield line[: -len(delimiter)] -@open_file(1, mode='wb') -def write_adjlist(G, path, comments="#", delimiter=' ', encoding='utf-8'): +@open_file(1, mode="wb") +def write_adjlist(G, path, comments="#", delimiter=" ", encoding="utf-8"): """Write graph G in single-line adjacency-list format to path. @@ -115,13 +102,13 @@ def write_adjlist(G, path, comments="#", delimiter=' ', encoding='utf-8'): Examples -------- - >>> G=nx.path_graph(4) - >>> nx.write_adjlist(G,"test.adjlist") + >>> G = nx.path_graph(4) + >>> nx.write_adjlist(G, "test.adjlist") The path can be a filehandle or a string with the name of the file. If a filehandle is provided, it has to be opened in 'wb' mode. 
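[Editor's illustration, not part of the patch: write_adjlist stores exactly the lines produced by generate_adjlist above, one line per node, with already-seen neighbors suppressed for undirected graphs. A minimal sketch of the round trip, assuming only the path_graph, generate_adjlist, write_adjlist and read_adjlist APIs shown in this file; the filename "roundtrip.adjlist" is hypothetical.]
    >>> import networkx as nx
    >>> G = nx.path_graph(3)
    >>> list(nx.generate_adjlist(G))  # the lines write_adjlist puts on disk
    ['0 1', '1 2', '2']
    >>> nx.write_adjlist(G, "roundtrip.adjlist")  # hypothetical filename
    >>> H = nx.read_adjlist("roundtrip.adjlist", nodetype=int)
    >>> sorted(H.edges()) == sorted(G.edges())
    True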
- >>> fh=open("test.adjlist",'wb') + >>> fh = open("test.adjlist", "wb") >>> nx.write_adjlist(G, fh) Notes @@ -134,19 +121,25 @@ def write_adjlist(G, path, comments="#", delimiter=' ', encoding='utf-8'): """ import sys import time - pargs = comments + " ".join(sys.argv) + '\n' - header = (pargs - + comments + " GMT {}\n".format(time.asctime(time.gmtime())) - + comments + " {}\n".format(G.name)) + + pargs = comments + " ".join(sys.argv) + "\n" + header = ( + pargs + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) path.write(header.encode(encoding)) for line in generate_adjlist(G, delimiter): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -def parse_adjlist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None): +def parse_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None +): """Parse lines of a graph adjacency list representation. Parameters @@ -154,8 +147,8 @@ def parse_adjlist(lines, comments='#', delimiter=None, lines : list or iterator of strings Input data in adjlist format - create_using: NetworkX graph container - Use given NetworkX graph for holding nodes or edges. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. nodetype : Python type, optional Convert nodes to this type. @@ -173,11 +166,7 @@ def parse_adjlist(lines, comments='#', delimiter=None, Examples -------- - >>> lines = ['1 2 5', - ... '2 3 4', - ... '3 5', - ... '4', - ... '5'] + >>> lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] >>> G = nx.parse_adjlist(lines, nodetype=int) >>> nodes = [1, 2, 3, 4, 5] >>> all(node in G for node in nodes) @@ -191,15 +180,7 @@ def parse_adjlist(lines, comments='#', delimiter=None, read_adjlist """ - if create_using is None: - G = nx.Graph() - else: - try: - G = create_using - G.clear() - except: - raise TypeError("Input graph is not a NetworkX graph type") - + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) if p >= 0: @@ -212,23 +193,32 @@ def parse_adjlist(lines, comments='#', delimiter=None, if nodetype is not None: try: u = nodetype(u) - except: - raise TypeError("Failed to convert node ({}) to type {}" - .format(u, nodetype)) + except BaseException as e: + raise TypeError( + f"Failed to convert node ({u}) to type " f"{nodetype}" + ) from e G.add_node(u) if nodetype is not None: try: - vlist = map(nodetype, vlist) - except: - raise TypeError("Failed to convert nodes ({}) to type {}" - .format(','.join(vlist), nodetype)) + vlist = list(map(nodetype, vlist)) + except BaseException as e: + raise TypeError( + f"Failed to convert nodes ({','.join(vlist)}) " + f"to type {nodetype}" + ) from e G.add_edges_from([(u, v) for v in vlist]) return G -@open_file(0, mode='rb') -def read_adjlist(path, comments="#", delimiter=None, create_using=None, - nodetype=None, encoding='utf-8'): +@open_file(0, mode="rb") +def read_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): """Read graph in adjacency list format from path. Parameters @@ -237,8 +227,8 @@ def read_adjlist(path, comments="#", delimiter=None, create_using=None, Filename or file handle to read. Filenames ending in .gz or .bz2 will be uncompressed. - create_using: NetworkX graph container - Use given NetworkX graph for holding nodes or edges. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. 
If graph instance, then cleared before populated. nodetype : Python type, optional Convert nodes to this type. @@ -256,37 +246,37 @@ def read_adjlist(path, comments="#", delimiter=None, create_using=None, Examples -------- - >>> G=nx.path_graph(4) + >>> G = nx.path_graph(4) >>> nx.write_adjlist(G, "test.adjlist") - >>> G=nx.read_adjlist("test.adjlist") + >>> G = nx.read_adjlist("test.adjlist") The path can be a filehandle or a string with the name of the file. If a filehandle is provided, it has to be opened in 'rb' mode. - >>> fh=open("test.adjlist", 'rb') - >>> G=nx.read_adjlist(fh) + >>> fh = open("test.adjlist", "rb") + >>> G = nx.read_adjlist(fh) Filenames ending in .gz or .bz2 will be compressed. - >>> nx.write_adjlist(G,"test.adjlist.gz") - >>> G=nx.read_adjlist("test.adjlist.gz") + >>> nx.write_adjlist(G, "test.adjlist.gz") + >>> G = nx.read_adjlist("test.adjlist.gz") The optional nodetype is a function to convert node strings to nodetype. For example - >>> G=nx.read_adjlist("test.adjlist", nodetype=int) + >>> G = nx.read_adjlist("test.adjlist", nodetype=int) will attempt to convert all nodes to integer type. Since nodes must be hashable, the function nodetype must return hashable types (e.g. int, float, str, frozenset - or tuples of those, etc.) - The optional create_using parameter is a NetworkX graph container. - The default is Graph(), an undirected graph. To read the data as - a directed graph use + The optional create_using parameter indicates the type of NetworkX graph + created. The default is `nx.Graph`, an undirected graph. + To read the data as a directed graph use - >>> G=nx.read_adjlist("test.adjlist", create_using=nx.DiGraph()) + >>> G = nx.read_adjlist("test.adjlist", create_using=nx.DiGraph) Notes ----- @@ -297,17 +287,10 @@ def read_adjlist(path, comments="#", delimiter=None, create_using=None, write_adjlist """ lines = (line.decode(encoding) for line in path) - return parse_adjlist(lines, - comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype) - -# fixture for nose tests - - -def teardown_module(module): - import os - for fname in ['test.adjlist', 'test.adjlist.gz']: - if os.path.isfile(fname): - os.unlink(fname) + return parse_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + ) diff --git a/networkx/readwrite/edgelist.py b/networkx/readwrite/edgelist.py index aa04ce1..239efdb 100644 --- a/networkx/readwrite/edgelist.py +++ b/networkx/readwrite/edgelist.py @@ -26,26 +26,21 @@ 1 2 7 green """ -__author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - -__all__ = ['generate_edgelist', - 'write_edgelist', - 'parse_edgelist', - 'read_edgelist', - 'read_weighted_edgelist', - 'write_weighted_edgelist'] - -from networkx.utils import open_file, make_str + +__all__ = [ + "generate_edgelist", + "write_edgelist", + "parse_edgelist", + "read_edgelist", + "read_weighted_edgelist", + "write_weighted_edgelist", +] + +from networkx.utils import open_file import networkx as nx -def generate_edgelist(G, delimiter=' ', data=True): +def generate_edgelist(G, delimiter=" ", data=True): """Generate a single line of the graph G in edge list format. 
Parameters @@ -68,8 +63,8 @@ def generate_edgelist(G, delimiter=' ', data=True): Examples -------- >>> G = nx.lollipop_graph(4, 3) - >>> G[1][2]['weight'] = 3 - >>> G[3][4]['capacity'] = 12 + >>> G[1][2]["weight"] = 3 + >>> G[3][4]["capacity"] = 12 >>> for line in nx.generate_edgelist(G, data=False): ... print(line) 0 1 @@ -94,7 +89,7 @@ def generate_edgelist(G, delimiter=' ', data=True): 4 5 {} 5 6 {} - >>> for line in nx.generate_edgelist(G,data=['weight']): + >>> for line in nx.generate_edgelist(G, data=["weight"]): ... print(line) 0 1 0 2 @@ -113,11 +108,11 @@ def generate_edgelist(G, delimiter=' ', data=True): if data is True: for u, v, d in G.edges(data=True): e = u, v, dict(d) - yield delimiter.join(map(make_str, e)) + yield delimiter.join(map(str, e)) elif data is False: for u, v in G.edges(data=False): e = u, v - yield delimiter.join(map(make_str, e)) + yield delimiter.join(map(str, e)) else: for u, v, d in G.edges(data=True): e = [u, v] @@ -125,12 +120,11 @@ def generate_edgelist(G, delimiter=' ', data=True): e.extend(d[k] for k in data) except KeyError: pass # missing data for this edge, should warn? - yield delimiter.join(map(make_str, e)) + yield delimiter.join(map(str, e)) -@open_file(1, mode='wb') -def write_edgelist(G, path, comments="#", delimiter=' ', data=True, - encoding='utf-8'): +@open_file(1, mode="wb") +def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"): """Write graph as a list of edges. Parameters @@ -154,33 +148,34 @@ def write_edgelist(G, path, comments="#", delimiter=' ', data=True, Examples -------- - >>> G=nx.path_graph(4) + >>> G = nx.path_graph(4) >>> nx.write_edgelist(G, "test.edgelist") - >>> G=nx.path_graph(4) - >>> fh=open("test.edgelist",'wb') + >>> G = nx.path_graph(4) + >>> fh = open("test.edgelist", "wb") >>> nx.write_edgelist(G, fh) >>> nx.write_edgelist(G, "test.edgelist.gz") >>> nx.write_edgelist(G, "test.edgelist.gz", data=False) - >>> G=nx.Graph() - >>> G.add_edge(1,2,weight=7,color='red') - >>> nx.write_edgelist(G,'test.edgelist',data=False) - >>> nx.write_edgelist(G,'test.edgelist',data=['color']) - >>> nx.write_edgelist(G,'test.edgelist',data=['color','weight']) + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=7, color="red") + >>> nx.write_edgelist(G, "test.edgelist", data=False) + >>> nx.write_edgelist(G, "test.edgelist", data=["color"]) + >>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"]) See Also -------- - write_edgelist() - write_weighted_edgelist() + read_edgelist + write_weighted_edgelist """ for line in generate_edgelist(G, delimiter, data): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -def parse_edgelist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None, data=True): +def parse_edgelist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True +): """Parse lines of an edge list representation of a graph. Parameters @@ -188,15 +183,16 @@ def parse_edgelist(lines, comments='#', delimiter=None, lines : list or iterator of strings Input data in edgelist format comments : string, optional - Marker for comment lines + Marker for comment lines. Default is `'#'` delimiter : string, optional - Separator for node labels - create_using: NetworkX graph container, optional - Use given NetworkX graph for holding nodes or edges. + Separator for node labels. Default is `None`, meaning any whitespace. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. 
If graph instance, then cleared before populated. nodetype : Python type, optional - Convert nodes to this type. + Convert nodes to this type. Default is `None`, meaning no conversion is + performed. data : bool or list of (label,type) tuples - If False generate no edge data or if True use a dictionary + If `False` generate no edge data or if `True` use a dictionary representation of edge data or a list tuples specifying dictionary key names and types for edge data. @@ -209,10 +205,8 @@ def parse_edgelist(lines, comments='#', delimiter=None, -------- Edgelist with no data: - >>> lines = ["1 2", - ... "2 3", - ... "3 4"] - >>> G = nx.parse_edgelist(lines, nodetype = int) + >>> lines = ["1 2", "2 3", "3 4"] + >>> G = nx.parse_edgelist(lines, nodetype=int) >>> list(G) [1, 2, 3, 4] >>> list(G.edges()) @@ -220,10 +214,8 @@ def parse_edgelist(lines, comments='#', delimiter=None, Edgelist with data in Python dictionary representation: - >>> lines = ["1 2 {'weight':3}", - ... "2 3 {'weight':27}", - ... "3 4 {'weight':3.0}"] - >>> G = nx.parse_edgelist(lines, nodetype = int) + >>> lines = ["1 2 {'weight': 3}", "2 3 {'weight': 27}", "3 4 {'weight': 3.0}"] + >>> G = nx.parse_edgelist(lines, nodetype=int) >>> list(G) [1, 2, 3, 4] >>> list(G.edges(data=True)) @@ -231,10 +223,8 @@ def parse_edgelist(lines, comments='#', delimiter=None, Edgelist with data in a list: - >>> lines = ["1 2 3", - ... "2 3 27", - ... "3 4 3.0"] - >>> G = nx.parse_edgelist(lines, nodetype = int, data=(('weight',float),)) + >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"] + >>> G = nx.parse_edgelist(lines, nodetype=int, data=(("weight", float),)) >>> list(G) [1, 2, 3, 4] >>> list(G.edges(data=True)) @@ -243,23 +233,15 @@ def parse_edgelist(lines, comments='#', delimiter=None, See Also -------- read_weighted_edgelist - """ from ast import literal_eval - if create_using is None: - G = nx.Graph() - else: - try: - G = create_using - G.clear() - except: - raise TypeError("create_using input is not a NetworkX graph type") + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) if p >= 0: line = line[:p] - if not len(line): + if not line: continue # split line, should have 2 or more s = line.strip().split(delimiter) @@ -272,9 +254,10 @@ def parse_edgelist(lines, comments='#', delimiter=None, try: u = nodetype(u) v = nodetype(v) - except: - raise TypeError("Failed to convert nodes %s,%s to type %s." - % (u, v, nodetype)) + except Exception as e: + raise TypeError( + f"Failed to convert nodes {u},{v} to type {nodetype}." + ) from e if len(d) == 0 or data is False: # no data or data type specified @@ -282,32 +265,46 @@ def parse_edgelist(lines, comments='#', delimiter=None, elif data is True: # no edge types specified try: # try to evaluate as dictionary - edgedata = dict(literal_eval(' '.join(d))) - except: + if delimiter == ",": + edgedata_str = ",".join(d) + else: + edgedata_str = " ".join(d) + edgedata = dict(literal_eval(edgedata_str.strip())) + except Exception as e: raise TypeError( - "Failed to convert edge data (%s) to dictionary." % (d)) + f"Failed to convert edge data ({d}) to dictionary." 
+ ) from e else: # convert edge data to dictionary with specified keys and type if len(d) != len(data): raise IndexError( - "Edge data %s and data_keys %s are not the same length" % - (d, data)) + f"Edge data {d} and data_keys {data} are not the same length" + ) edgedata = {} for (edge_key, edge_type), edge_value in zip(data, d): try: edge_value = edge_type(edge_value) - except: + except Exception as e: raise TypeError( - "Failed to convert %s data %s to type %s." - % (edge_key, edge_value, edge_type)) + f"Failed to convert {edge_key} data {edge_value} " + f"to type {edge_type}." + ) from e edgedata.update({edge_key: edge_value}) G.add_edge(u, v, **edgedata) return G -@open_file(0, mode='rb') -def read_edgelist(path, comments="#", delimiter=None, create_using=None, - nodetype=None, data=True, edgetype=None, encoding='utf-8'): +@open_file(0, mode="rb") +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): """Read a graph from a list of edges. Parameters @@ -320,9 +317,8 @@ def read_edgelist(path, comments="#", delimiter=None, create_using=None, The character used to indicate the start of a comment. delimiter : string, optional The string used to separate values. The default is whitespace. - create_using : Graph container, optional, - Use specified container to build graph. The default is networkx.Graph, - an undirected graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. nodetype : int, float, str, Python type, optional Convert node data from strings to specified type data : bool or list of (label,type) tuples @@ -340,22 +336,22 @@ def read_edgelist(path, comments="#", delimiter=None, create_using=None, Examples -------- >>> nx.write_edgelist(nx.path_graph(4), "test.edgelist") - >>> G=nx.read_edgelist("test.edgelist") + >>> G = nx.read_edgelist("test.edgelist") - >>> fh=open("test.edgelist", 'rb') - >>> G=nx.read_edgelist(fh) + >>> fh = open("test.edgelist", "rb") + >>> G = nx.read_edgelist(fh) >>> fh.close() - >>> G=nx.read_edgelist("test.edgelist", nodetype=int) - >>> G=nx.read_edgelist("test.edgelist",create_using=nx.DiGraph()) + >>> G = nx.read_edgelist("test.edgelist", nodetype=int) + >>> G = nx.read_edgelist("test.edgelist", create_using=nx.DiGraph) Edgelist with data in a list: - >>> textline = '1 2 3' - >>> fh = open('test.edgelist','w') + >>> textline = "1 2 3" + >>> fh = open("test.edgelist", "w") >>> d = fh.write(textline) >>> fh.close() - >>> G = nx.read_edgelist('test.edgelist', nodetype=int, data=(('weight',float),)) + >>> G = nx.read_edgelist("test.edgelist", nodetype=int, data=(("weight", float),)) >>> list(G) [1, 2] >>> list(G.edges(data=True)) @@ -366,20 +362,25 @@ def read_edgelist(path, comments="#", delimiter=None, create_using=None, See Also -------- parse_edgelist + write_edgelist Notes ----- Since nodes must be hashable, the function nodetype must return hashable types (e.g. int, float, str, frozenset - or tuples of those, etc.) 
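[Editor's illustration, not part of the patch: the new delimiter == "," branch added to parse_edgelist above rejoins the split dictionary text with commas before literal_eval, so dict-style edge data now survives a comma delimiter. A hedged sketch of that behavior, using only the parse_edgelist API shown here:]
    >>> import networkx as nx
    >>> lines = ["1,2,{'weight': 3, 'color': 'red'}"]
    >>> G = nx.parse_edgelist(lines, delimiter=",", nodetype=int)
    >>> G[1][2]["color"]
    'red'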
""" - lines = (line.decode(encoding) for line in path) - return parse_edgelist(lines, comments=comments, delimiter=delimiter, - create_using=create_using, nodetype=nodetype, - data=data) - - -def write_weighted_edgelist(G, path, comments="#", - delimiter=' ', encoding='utf-8'): + lines = (line if isinstance(line, str) else line.decode(encoding) for line in path) + return parse_edgelist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=data, + ) + + +def write_weighted_edgelist(G, path, comments="#", delimiter=" ", encoding="utf-8"): """Write graph G as a list of edges with numeric weights. Parameters @@ -399,23 +400,34 @@ def write_weighted_edgelist(G, path, comments="#", Examples -------- - >>> G=nx.Graph() - >>> G.add_edge(1,2,weight=7) - >>> nx.write_weighted_edgelist(G, 'test.weighted.edgelist') + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=7) + >>> nx.write_weighted_edgelist(G, "test.weighted.edgelist") See Also -------- - read_edgelist() - write_edgelist() - write_weighted_edgelist() - + read_edgelist + write_edgelist + read_weighted_edgelist """ - write_edgelist(G, path, comments=comments, delimiter=delimiter, - data=('weight',), encoding=encoding) - - -def read_weighted_edgelist(path, comments="#", delimiter=None, - create_using=None, nodetype=None, encoding='utf-8'): + write_edgelist( + G, + path, + comments=comments, + delimiter=delimiter, + data=("weight",), + encoding=encoding, + ) + + +def read_weighted_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): """Read a graph as list of edges with numeric weights. Parameters @@ -428,9 +440,8 @@ def read_weighted_edgelist(path, comments="#", delimiter=None, The character used to indicate the start of a comment. delimiter : string, optional The string used to separate values. The default is whitespace. - create_using : Graph container, optional, - Use specified container to build graph. The default is networkx.Graph, - an undirected graph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. nodetype : int, float, str, Python type, optional Convert node data from strings to specified type encoding: string, optional @@ -456,21 +467,17 @@ def read_weighted_edgelist(path, comments="#", delimiter=None, a b 1 a c 3.14159 d e 42 + + See Also + -------- + write_weighted_edgelist """ - return read_edgelist(path, - comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype, - data=(('weight', float),), - encoding=encoding - ) - - -# fixture for nose tests -def teardown_module(module): - import os - for fname in ['test.edgelist', 'test.edgelist.gz', - 'test.weighted.edgelist']: - if os.path.isfile(fname): - os.unlink(fname) + return read_edgelist( + path, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=(("weight", float),), + encoding=encoding, + ) diff --git a/networkx/readwrite/gexf.py b/networkx/readwrite/gexf.py index 2f7d7bc..e3fe5a5 100644 --- a/networkx/readwrite/gexf.py +++ b/networkx/readwrite/gexf.py @@ -1,11 +1,3 @@ -# Copyright (C) 2013-2018 by -# -# Authors: Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# Based on GraphML NetworkX GraphML reader """Read and write graphs in GEXF format. 
GEXF (Graph Exchange XML Format) is a language for describing complex @@ -23,20 +15,21 @@ import time import networkx as nx -from networkx.utils import open_file, make_str -try: - from xml.etree.cElementTree import Element, ElementTree, SubElement, tostring -except ImportError: - try: - from xml.etree.ElementTree import Element, ElementTree, SubElement, tostring - except ImportError: - pass +from networkx.utils import open_file + +from xml.etree.ElementTree import ( + Element, + ElementTree, + SubElement, + tostring, + register_namespace, +) -__all__ = ['write_gexf', 'read_gexf', 'relabel_gexf_graph', 'generate_gexf'] +__all__ = ["write_gexf", "read_gexf", "relabel_gexf_graph", "generate_gexf"] -@open_file(1, mode='wb') -def write_gexf(G, path, encoding='utf-8', prettyprint=True, version='1.2draft'): +@open_file(1, mode="wb") +def write_gexf(G, path, encoding="utf-8", prettyprint=True, version="1.2draft"): """Write G in GEXF format to path. "GEXF (Graph Exchange XML Format) is a language for describing @@ -64,9 +57,9 @@ def write_gexf(G, path, encoding='utf-8', prettyprint=True, version='1.2draft'): >>> nx.write_gexf(G, "test.gexf") # visualization data - >>> G.nodes[0]['viz'] = {'size': 54} - >>> G.nodes[0]['viz']['position'] = {'x' : 0, 'y' : 1} - >>> G.nodes[0]['viz']['color'] = {'r' : 0, 'g' : 0, 'b' : 256} + >>> G.nodes[0]["viz"] = {"size": 54} + >>> G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1} + >>> G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256} Notes @@ -83,13 +76,12 @@ def write_gexf(G, path, encoding='utf-8', prettyprint=True, version='1.2draft'): .. [1] GEXF File Format, https://gephi.org/gexf/format/ .. [2] GEXF viz schema 1.1, https://gephi.org/gexf/1.1draft/viz """ - writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, - version=version) + writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, version=version) writer.add_graph(G) writer.write(path) -def generate_gexf(G, encoding='utf-8', prettyprint=True, version='1.2draft'): +def generate_gexf(G, encoding="utf-8", prettyprint=True, version="1.2draft"): """Generate lines of GEXF format representation of G. "GEXF (Graph Exchange XML Format) is a language for describing @@ -98,23 +90,23 @@ def generate_gexf(G, encoding='utf-8', prettyprint=True, version='1.2draft'): Parameters ---------- G : graph - A NetworkX graph + A NetworkX graph encoding : string (optional, default: 'utf-8') - Encoding for text data. + Encoding for text data. prettyprint : bool (optional, default: True) - If True use line breaks and indenting in output XML. + If True use line breaks and indenting in output XML. version : string (default: 1.2draft) - Version of GEFX File Format (see https://gephi.org/gexf/format/schema.html). - Supported values: "1.1draft", "1.2draft" + Version of GEXF File Format (see https://gephi.org/gexf/format/schema.html) + Supported values: "1.1draft", "1.2draft" Examples -------- >>> G = nx.path_graph(4) - >>> linefeed = chr(10) # linefeed=\n + >>> linefeed = chr(10) # linefeed=\n >>> s = linefeed.join(nx.generate_gexf(G)) # doctest: +SKIP >>> for line in nx.generate_gexf(G): # doctest: +SKIP - ... print line + ... print(line) Notes ----- @@ -129,15 +121,13 @@ def generate_gexf(G, encoding='utf-8', prettyprint=True, version='1.2draft'): ---------- .. 
[1] GEXF File Format, https://gephi.org/gexf/format/ """ - writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, - version=version) + writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, version=version) writer.add_graph(G) - for line in str(writer).splitlines(): - yield line + yield from str(writer).splitlines() -@open_file(0, mode='rb') -def read_gexf(path, node_type=None, relabel=False, version='1.2draft'): +@open_file(0, mode="rb") +def read_gexf(path, node_type=None, relabel=False, version="1.2draft"): """Read graph in GEXF format from path. "GEXF (Graph Exchange XML Format) is a language for describing @@ -146,15 +136,15 @@ def read_gexf(path, node_type=None, relabel=False, version='1.2draft'): Parameters ---------- path : file or string - File or file name to write. - File names ending in .gz or .bz2 will be compressed. + File or file name to read. + File names ending in .gz or .bz2 will be decompressed. node_type: Python type (default: None) Convert node ids to this type if not None. relabel : bool (default: False) If True relabel the nodes to use the GEXF node "label" attribute instead of the node "id" attribute as the NetworkX node label. version : string (default: 1.2draft) - Version of GEFX File Format (see https://gephi.org/gexf/format/schema.html). + Version of GEXF File Format (see https://gephi.org/gexf/format/schema.html) Supported values: "1.1draft", "1.2draft" Returns @@ -180,102 +170,136 @@ return G -class GEXF(object): +class GEXF: versions = {} - d = {'NS_GEXF': "http://www.gexf.net/1.1draft", - 'NS_VIZ': "http://www.gexf.net/1.1draft/viz", - 'NS_XSI': "http://www.w3.org/2001/XMLSchema-instance", - 'SCHEMALOCATION': ' '.join(['http://www.gexf.net/1.1draft', - 'http://www.gexf.net/1.1draft/gexf.xsd']), - 'VERSION': '1.1'} - versions['1.1draft'] = d - d = {'NS_GEXF': "http://www.gexf.net/1.2draft", - 'NS_VIZ': "http://www.gexf.net/1.2draft/viz", - 'NS_XSI': "http://www.w3.org/2001/XMLSchema-instance", - 'SCHEMALOCATION': ' '.join(['http://www.gexf.net/1.2draft', - 'http://www.gexf.net/1.2draft/gexf.xsd']), - 'VERSION': '1.2'} - versions['1.2draft'] = d - - types = [(int, "integer"), - (float, "float"), - (float, "double"), - (bool, "boolean"), - (list, "string"), - (dict, "string")] - - try: # Python 3.x - blurb = chr(1245) # just to trigger the exception - types.extend([ - (int, "long"), - (str, "liststring"), - (str, "anyURI"), - (str, "string")]) - except ValueError: # Python 2.6+ - types.extend([ - (long, "long"), - (str, "liststring"), - (str, "anyURI"), - (str, "string"), - (unicode, "liststring"), - (unicode, "anyURI"), - (unicode, "string")]) + d = { + "NS_GEXF": "http://www.gexf.net/1.1draft", + "NS_VIZ": "http://www.gexf.net/1.1draft/viz", + "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance", + "SCHEMALOCATION": " ".join( + ["http://www.gexf.net/1.1draft", "http://www.gexf.net/1.1draft/gexf.xsd"] + ), + "VERSION": "1.1", + } + versions["1.1draft"] = d + d = { + "NS_GEXF": "http://www.gexf.net/1.2draft", + "NS_VIZ": "http://www.gexf.net/1.2draft/viz", + "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance", + "SCHEMALOCATION": " ".join( + ["http://www.gexf.net/1.2draft", "http://www.gexf.net/1.2draft/gexf.xsd"] + ), + "VERSION": "1.2", + } + versions["1.2draft"] = d + + types = [ + (int, "integer"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + (list, "string"), + (dict, "string"), + (int, "long"), + (str, "liststring"), + (str, "anyURI"), + (str, 
"string"), + ] + + # These additions to types allow writing numpy types + try: + import numpy as np + except ImportError: + pass + else: + # prepend so that python types are created upon read (last entry wins) + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.float_, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types xml_type = dict(types) python_type = dict(reversed(a) for a in types) # http://www.w3.org/TR/xmlschema-2/#boolean convert_bool = { - 'true': True, 'false': False, - 'True': True, 'False': False, - '0': False, 0: False, - '1': True, 1: True + "true": True, + "false": False, + "True": True, + "False": False, + "0": False, + 0: False, + "1": True, + 1: True, } def set_version(self, version): d = self.versions.get(version) if d is None: - raise nx.NetworkXError('Unknown GEXF version %s.' % version) - self.NS_GEXF = d['NS_GEXF'] - self.NS_VIZ = d['NS_VIZ'] - self.NS_XSI = d['NS_XSI'] - self.SCHEMALOCATION = d['NS_XSI'] - self.VERSION = d['VERSION'] + raise nx.NetworkXError(f"Unknown GEXF version {version}.") + self.NS_GEXF = d["NS_GEXF"] + self.NS_VIZ = d["NS_VIZ"] + self.NS_XSI = d["NS_XSI"] + self.SCHEMALOCATION = d["SCHEMALOCATION"] + self.VERSION = d["VERSION"] self.version = version class GEXFWriter(GEXF): # class for writing GEXF format files # use write_gexf() function - def __init__(self, graph=None, encoding='utf-8', prettyprint=True, - version='1.2draft'): - try: - import xml.etree.ElementTree as ET - except ImportError: - raise ImportError('GEXF writer requires ' - 'xml.elementtree.ElementTree') + def __init__( + self, graph=None, encoding="utf-8", prettyprint=True, version="1.2draft" + ): self.prettyprint = prettyprint self.encoding = encoding self.set_version(version) - self.xml = Element('gexf', - {'xmlns': self.NS_GEXF, - 'xmlns:xsi': self.NS_XSI, - 'xsi:schemaLocation': self.SCHEMALOCATION, - 'version': self.VERSION}) - - ET.register_namespace('viz', self.NS_VIZ) + self.xml = Element( + "gexf", + { + "xmlns": self.NS_GEXF, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + "version": self.VERSION, + }, + ) + + # Make meta element a non-graph element + # Also add lastmodifieddate as attribute, not tag + meta_element = Element("meta") + subelement_text = f"NetworkX {nx.__version__}" + SubElement(meta_element, "creator").text = subelement_text + meta_element.set("lastmodifieddate", time.strftime("%Y-%m-%d")) + self.xml.append(meta_element) + + register_namespace("viz", self.NS_VIZ) # counters for edge and attribute identifiers self.edge_id = itertools.count() self.attr_id = itertools.count() + self.all_edge_ids = set() # default attributes are stored in dictionaries self.attr = {} - self.attr['node'] = {} - self.attr['edge'] = {} - self.attr['node']['dynamic'] = {} - self.attr['node']['static'] = {} - self.attr['edge']['dynamic'] = {} - self.attr['edge']['static'] = {} + self.attr["node"] = {} + self.attr["edge"] = {} + self.attr["node"]["dynamic"] = {} + self.attr["node"]["static"] = {} + self.attr["edge"]["dynamic"] = {} + self.attr["edge"]["static"] = {} if graph is not None: self.add_graph(graph) @@ -287,69 +311,64 @@ def __str__(self): return s def add_graph(self, G): + # first pass through G collecting edge ids + for u, v, dd in G.edges(data=True): + eid = dd.get("id") + if eid is 
not None: + self.all_edge_ids.add(str(eid)) # set graph attributes - if G.graph.get('mode') == 'dynamic': - mode = 'dynamic' + if G.graph.get("mode") == "dynamic": + mode = "dynamic" else: - mode = 'static' + mode = "static" # Add a graph element to the XML if G.is_directed(): - default = 'directed' + default = "directed" else: - default = 'undirected' - name = G.graph.get('name', '') - graph_element = Element('graph', defaultedgetype=default, mode=mode, - name=name) + default = "undirected" + name = G.graph.get("name", "") + graph_element = Element("graph", defaultedgetype=default, mode=mode, name=name) self.graph_element = graph_element - self.add_meta(G, graph_element) self.add_nodes(G, graph_element) self.add_edges(G, graph_element) self.xml.append(graph_element) - def add_meta(self, G, graph_element): - # add meta element with creator and date - meta_element = Element('meta') - SubElement(meta_element, 'creator').text = 'NetworkX {}'.format(nx.__version__) - SubElement(meta_element, 'lastmodified').text = time.strftime('%d/%m/%Y') - graph_element.append(meta_element) - def add_nodes(self, G, graph_element): - nodes_element = Element('nodes') + nodes_element = Element("nodes") for node, data in G.nodes(data=True): node_data = data.copy() - node_id = make_str(node_data.pop('id', node)) - kw = {'id': node_id} - label = make_str(node_data.pop('label', node)) - kw['label'] = label + node_id = str(node_data.pop("id", node)) + kw = {"id": node_id} + label = str(node_data.pop("label", node)) + kw["label"] = label try: - pid = node_data.pop('pid') - kw['pid'] = make_str(pid) + pid = node_data.pop("pid") + kw["pid"] = str(pid) except KeyError: pass try: - start = node_data.pop('start') - kw['start'] = make_str(start) + start = node_data.pop("start") + kw["start"] = str(start) self.alter_graph_mode_timeformat(start) except KeyError: pass try: - end = node_data.pop('end') - kw['end'] = make_str(end) + end = node_data.pop("end") + kw["end"] = str(end) self.alter_graph_mode_timeformat(end) except KeyError: pass # add node element with attributes - node_element = Element('node', **kw) + node_element = Element("node", **kw) # add node element and attr subelements - default = G.graph.get('node_default', {}) + default = G.graph.get("node_default", {}) node_data = self.add_parents(node_element, node_data) - if self.version == '1.1': + if self.VERSION == "1.1": node_data = self.add_slices(node_element, node_data) else: node_data = self.add_spells(node_element, node_data) node_data = self.add_viz(node_element, node_data) - node_data = self.add_attributes('node', node_element, - node_data, default) + node_data = self.add_attributes("node", node_element, node_data, default) nodes_element.append(node_element) graph_element.append(nodes_element) @@ -360,99 +379,129 @@ def edge_key_data(G): for u, v, key, data in G.edges(data=True, keys=True): edge_data = data.copy() edge_data.update(key=key) - edge_id = edge_data.pop('id', None) + edge_id = edge_data.pop("id", None) if edge_id is None: edge_id = next(self.edge_id) + while str(edge_id) in self.all_edge_ids: + edge_id = next(self.edge_id) + self.all_edge_ids.add(str(edge_id)) yield u, v, edge_id, edge_data else: for u, v, data in G.edges(data=True): edge_data = data.copy() - edge_id = edge_data.pop('id', None) + edge_id = edge_data.pop("id", None) if edge_id is None: edge_id = next(self.edge_id) + while str(edge_id) in self.all_edge_ids: + edge_id = next(self.edge_id) + self.all_edge_ids.add(str(edge_id)) yield u, v, edge_id, edge_data - edges_element = 
Element('edges') + + edges_element = Element("edges") for u, v, key, edge_data in edge_key_data(G): - kw = {'id': make_str(key)} + kw = {"id": str(key)} try: - edge_weight = edge_data.pop('weight') - kw['weight'] = make_str(edge_weight) + edge_label = edge_data.pop("label") + kw["label"] = str(edge_label) except KeyError: pass try: - edge_type = edge_data.pop('type') - kw['type'] = make_str(edge_type) + edge_weight = edge_data.pop("weight") + kw["weight"] = str(edge_weight) except KeyError: pass try: - start = edge_data.pop('start') - kw['start'] = make_str(start) + edge_type = edge_data.pop("type") + kw["type"] = str(edge_type) + except KeyError: + pass + try: + start = edge_data.pop("start") + kw["start"] = str(start) self.alter_graph_mode_timeformat(start) except KeyError: pass try: - end = edge_data.pop('end') - kw['end'] = make_str(end) + end = edge_data.pop("end") + kw["end"] = str(end) self.alter_graph_mode_timeformat(end) except KeyError: pass - source_id = make_str(G.nodes[u].get('id', u)) - target_id = make_str(G.nodes[v].get('id', v)) - edge_element = Element('edge', - source=source_id, target=target_id, **kw) - default = G.graph.get('edge_default', {}) - if self.version == '1.1': + source_id = str(G.nodes[u].get("id", u)) + target_id = str(G.nodes[v].get("id", v)) + edge_element = Element("edge", source=source_id, target=target_id, **kw) + default = G.graph.get("edge_default", {}) + if self.VERSION == "1.1": edge_data = self.add_slices(edge_element, edge_data) else: edge_data = self.add_spells(edge_element, edge_data) edge_data = self.add_viz(edge_element, edge_data) - edge_data = self.add_attributes('edge', edge_element, - edge_data, default) + edge_data = self.add_attributes("edge", edge_element, edge_data, default) edges_element.append(edge_element) graph_element.append(edges_element) def add_attributes(self, node_or_edge, xml_obj, data, default): # Add attrvalues to node or edge - attvalues = Element('attvalues') + attvalues = Element("attvalues") if len(data) == 0: return data - mode = 'static' + mode = "static" for k, v in data.items(): # rename generic multigraph key to avoid any name conflict - if k == 'key': - k = 'networkx_key' + if k == "key": + k = "networkx_key" val_type = type(v) + if val_type not in self.xml_type: + raise TypeError(f"attribute value type is not allowed: {val_type}") if isinstance(v, list): # dynamic data for val, start, end in v: val_type = type(val) if start is not None or end is not None: - mode = 'dynamic' + mode = "dynamic" self.alter_graph_mode_timeformat(start) self.alter_graph_mode_timeformat(end) break - attr_id = self.get_attr_id(make_str(k), self.xml_type[val_type], - node_or_edge, default, mode) + attr_id = self.get_attr_id( + str(k), self.xml_type[val_type], node_or_edge, default, mode + ) for val, start, end in v: - e = Element('attvalue') - e.attrib['for'] = attr_id - e.attrib['value'] = make_str(val) + e = Element("attvalue") + e.attrib["for"] = attr_id + e.attrib["value"] = str(val) + # Handle nan, inf, -inf differently + if val_type == float: + if e.attrib["value"] == "inf": + e.attrib["value"] = "INF" + elif e.attrib["value"] == "nan": + e.attrib["value"] = "NaN" + elif e.attrib["value"] == "-inf": + e.attrib["value"] = "-INF" if start is not None: - e.attrib['start'] = make_str(start) + e.attrib["start"] = str(start) if end is not None: - e.attrib['end'] = make_str(end) + e.attrib["end"] = str(end) attvalues.append(e) else: # static data - mode = 'static' - attr_id = self.get_attr_id(make_str(k), self.xml_type[val_type], - 
node_or_edge, default, mode) - e = Element('attvalue') - e.attrib['for'] = attr_id + mode = "static" + attr_id = self.get_attr_id( + str(k), self.xml_type[val_type], node_or_edge, default, mode + ) + e = Element("attvalue") + e.attrib["for"] = attr_id if isinstance(v, bool): - e.attrib['value'] = make_str(v).lower() + e.attrib["value"] = str(v).lower() else: - e.attrib['value'] = make_str(v) + e.attrib["value"] = str(v) + # Handle float nan, inf, -inf differently + if val_type == float: + if e.attrib["value"] == "inf": + e.attrib["value"] = "INF" + elif e.attrib["value"] == "nan": + e.attrib["value"] = "NaN" + elif e.attrib["value"] == "-inf": + e.attrib["value"] = "-INF" attvalues.append(e) xml_obj.append(attvalues) return data @@ -465,128 +514,137 @@ def get_attr_id(self, title, attr_type, edge_or_node, default, mode): # generate new id new_id = str(next(self.attr_id)) self.attr[edge_or_node][mode][title] = new_id - attr_kwargs = {'id': new_id, 'title': title, 'type': attr_type} - attribute = Element('attribute', **attr_kwargs) + attr_kwargs = {"id": new_id, "title": title, "type": attr_type} + attribute = Element("attribute", **attr_kwargs) # add subelement for data default value if present default_title = default.get(title) if default_title is not None: - default_element = Element('default') - default_element.text = make_str(default_title) + default_element = Element("default") + default_element.text = str(default_title) attribute.append(default_element) # new insert it into the XML attributes_element = None - for a in self.graph_element.findall('attributes'): + for a in self.graph_element.findall("attributes"): # find existing attributes element by class and mode - a_class = a.get('class') - a_mode = a.get('mode', 'static') + a_class = a.get("class") + a_mode = a.get("mode", "static") if a_class == edge_or_node and a_mode == mode: attributes_element = a if attributes_element is None: # create new attributes element - attr_kwargs = {'mode': mode, 'class': edge_or_node} - attributes_element = Element('attributes', **attr_kwargs) + attr_kwargs = {"mode": mode, "class": edge_or_node} + attributes_element = Element("attributes", **attr_kwargs) self.graph_element.insert(0, attributes_element) attributes_element.append(attribute) return new_id def add_viz(self, element, node_data): - viz = node_data.pop('viz', False) + viz = node_data.pop("viz", False) if viz: - color = viz.get('color') + color = viz.get("color") if color is not None: - if self.VERSION == '1.1': - e = Element('{%s}color' % self.NS_VIZ, - r=str(color.get('r')), - g=str(color.get('g')), - b=str(color.get('b'))) + if self.VERSION == "1.1": + e = Element( + f"{{{self.NS_VIZ}}}color", + r=str(color.get("r")), + g=str(color.get("g")), + b=str(color.get("b")), + ) else: - e = Element('{%s}color' % self.NS_VIZ, - r=str(color.get('r')), - g=str(color.get('g')), - b=str(color.get('b')), - a=str(color.get('a'))) + e = Element( + f"{{{self.NS_VIZ}}}color", + r=str(color.get("r")), + g=str(color.get("g")), + b=str(color.get("b")), + a=str(color.get("a")), + ) element.append(e) - size = viz.get('size') + size = viz.get("size") if size is not None: - e = Element('{%s}size' % self.NS_VIZ, value=str(size)) + e = Element(f"{{{self.NS_VIZ}}}size", value=str(size)) element.append(e) - thickness = viz.get('thickness') + thickness = viz.get("thickness") if thickness is not None: - e = Element('{%s}thickness' % self.NS_VIZ, value=str(thickness)) + e = Element(f"{{{self.NS_VIZ}}}thickness", value=str(thickness)) element.append(e) - shape = 
viz.get('shape') + shape = viz.get("shape") if shape is not None: - if shape.startswith('http'): - e = Element('{%s}shape' % self.NS_VIZ, - value='image', uri=str(shape)) + if shape.startswith("http"): + e = Element( + f"{{{self.NS_VIZ}}}shape", value="image", uri=str(shape) + ) else: - e = Element('{%s}shape' % self.NS_VIZ, value=str(shape)) + e = Element(f"{{{self.NS_VIZ}}}shape", value=str(shape)) element.append(e) - position = viz.get('position') + position = viz.get("position") if position is not None: - e = Element('{%s}position' % self.NS_VIZ, - x=str(position.get('x')), - y=str(position.get('y')), - z=str(position.get('z'))) + e = Element( + f"{{{self.NS_VIZ}}}position", + x=str(position.get("x")), + y=str(position.get("y")), + z=str(position.get("z")), + ) element.append(e) return node_data def add_parents(self, node_element, node_data): - parents = node_data.pop('parents', False) + parents = node_data.pop("parents", False) if parents: - parents_element = Element('parents') + parents_element = Element("parents") for p in parents: - e = Element('parent') - e.attrib['for'] = str(p) + e = Element("parent") + e.attrib["for"] = str(p) parents_element.append(e) node_element.append(parents_element) return node_data def add_slices(self, node_or_edge_element, node_or_edge_data): - slices = node_or_edge_data.pop('slices', False) + slices = node_or_edge_data.pop("slices", False) if slices: - slices_element = Element('slices') + slices_element = Element("slices") for start, end in slices: - e = Element('slice', start=str(start), end=str(end)) + e = Element("slice", start=str(start), end=str(end)) slices_element.append(e) node_or_edge_element.append(slices_element) return node_or_edge_data def add_spells(self, node_or_edge_element, node_or_edge_data): - spells = node_or_edge_data.pop('spells', False) + spells = node_or_edge_data.pop("spells", False) if spells: - spells_element = Element('spells') + spells_element = Element("spells") for start, end in spells: - e = Element('spell') + e = Element("spell") if start is not None: - e.attrib['start'] = make_str(start) + e.attrib["start"] = str(start) self.alter_graph_mode_timeformat(start) if end is not None: - e.attrib['end'] = make_str(end) + e.attrib["end"] = str(end) self.alter_graph_mode_timeformat(end) spells_element.append(e) node_or_edge_element.append(spells_element) return node_or_edge_data def alter_graph_mode_timeformat(self, start_or_end): - # if 'start' or 'end' appears, alter Graph mode to dynamic and set timeformat - if self.graph_element.get('mode') == 'static': + # If 'start' or 'end' appears, alter Graph mode to dynamic and + # set timeformat + if self.graph_element.get("mode") == "static": if start_or_end is not None: if isinstance(start_or_end, str): - timeformat = 'date' + timeformat = "date" elif isinstance(start_or_end, float): - timeformat = 'double' + timeformat = "double" elif isinstance(start_or_end, int): - timeformat = 'long' + timeformat = "long" else: raise nx.NetworkXError( - 'timeformat should be of the type int, float or str') - self.graph_element.set('timeformat', timeformat) - self.graph_element.set('mode', 'dynamic') + "timeformat should be of the type int, float or str" + ) + self.graph_element.set("timeformat", timeformat) + self.graph_element.set("mode", "dynamic") def write(self, fh): # Serialize graph G in GEXF to the open fh @@ -615,12 +673,7 @@ def indent(self, elem, level=0): class GEXFReader(GEXF): # Class to read GEXF format files # use read_gexf() function - def __init__(self, node_type=None, 
version='1.2draft'): - try: - import xml.etree.ElementTree - except ImportError: - raise ImportError('GEXF reader requires ' - 'xml.elementtree.ElementTree.') + def __init__(self, node_type=None, version="1.2draft"): self.node_type = node_type # assume simple graph and test for multigraph on read self.simple_graph = True @@ -628,86 +681,86 @@ def __init__(self, node_type=None, version='1.2draft'): def __call__(self, stream): self.xml = ElementTree(file=stream) - g = self.xml.find('{%s}graph' % self.NS_GEXF) + g = self.xml.find(f"{{{self.NS_GEXF}}}graph") if g is not None: return self.make_graph(g) # try all the versions for version in self.versions: self.set_version(version) - g = self.xml.find('{%s}graph' % self.NS_GEXF) + g = self.xml.find(f"{{{self.NS_GEXF}}}graph") if g is not None: return self.make_graph(g) - raise nx.NetworkXError('No element in GEXF file.') + raise nx.NetworkXError("No element in GEXF file.") def make_graph(self, graph_xml): # start with empty DiGraph or MultiDiGraph - edgedefault = graph_xml.get('defaultedgetype', None) - if edgedefault == 'directed': + edgedefault = graph_xml.get("defaultedgetype", None) + if edgedefault == "directed": G = nx.MultiDiGraph() else: G = nx.MultiGraph() # graph attributes - graph_name = graph_xml.get('name', '') - if graph_name != '': - G.graph['name'] = graph_name - graph_start = graph_xml.get('start') + graph_name = graph_xml.get("name", "") + if graph_name != "": + G.graph["name"] = graph_name + graph_start = graph_xml.get("start") if graph_start is not None: - G.graph['start'] = graph_start - graph_end = graph_xml.get('end') + G.graph["start"] = graph_start + graph_end = graph_xml.get("end") if graph_end is not None: - G.graph['end'] = graph_end - graph_mode = graph_xml.get('mode', '') - if graph_mode == 'dynamic': - G.graph['mode'] = 'dynamic' + G.graph["end"] = graph_end + graph_mode = graph_xml.get("mode", "") + if graph_mode == "dynamic": + G.graph["mode"] = "dynamic" else: - G.graph['mode'] = 'static' + G.graph["mode"] = "static" # timeformat - self.timeformat = graph_xml.get('timeformat') - if self.timeformat == 'date': - self.timeformat = 'string' + self.timeformat = graph_xml.get("timeformat") + if self.timeformat == "date": + self.timeformat = "string" # node and edge attributes - attributes_elements = graph_xml.findall('{%s}attributes' % self.NS_GEXF) + attributes_elements = graph_xml.findall(f"{{{self.NS_GEXF}}}attributes") # dictionaries to hold attributes and attribute defaults node_attr = {} node_default = {} edge_attr = {} edge_default = {} for a in attributes_elements: - attr_class = a.get('class') - if attr_class == 'node': + attr_class = a.get("class") + if attr_class == "node": na, nd = self.find_gexf_attributes(a) node_attr.update(na) node_default.update(nd) - G.graph['node_default'] = node_default - elif attr_class == 'edge': + G.graph["node_default"] = node_default + elif attr_class == "edge": ea, ed = self.find_gexf_attributes(a) edge_attr.update(ea) edge_default.update(ed) - G.graph['edge_default'] = edge_default + G.graph["edge_default"] = edge_default else: raise # unknown attribute class # Hack to handle Gephi0.7beta bug # add weight attribute - ea = {'weight': {'type': 'double', 'mode': 'static', 'title': 'weight'}} + ea = {"weight": {"type": "double", "mode": "static", "title": "weight"}} ed = {} edge_attr.update(ea) edge_default.update(ed) - G.graph['edge_default'] = edge_default + G.graph["edge_default"] = edge_default # add nodes - nodes_element = graph_xml.find('{%s}nodes' % self.NS_GEXF) + 
nodes_element = graph_xml.find(f"{{{self.NS_GEXF}}}nodes") if nodes_element is not None: - for node_xml in nodes_element.findall('{%s}node' % self.NS_GEXF): + for node_xml in nodes_element.findall(f"{{{self.NS_GEXF}}}node"): self.add_node(G, node_xml, node_attr) # add edges - edges_element = graph_xml.find('{%s}edges' % self.NS_GEXF) + edges_element = graph_xml.find(f"{{{self.NS_GEXF}}}edges") if edges_element is not None: - for edge_xml in edges_element.findall('{%s}edge' % self.NS_GEXF): + for edge_xml in edges_element.findall(f"{{{self.NS_GEXF}}}edge"): self.add_edge(G, edge_xml, edge_attr) # switch to Graph or DiGraph if no parallel edges were found. @@ -724,7 +777,7 @@ def add_node(self, G, node_xml, node_attr, node_pid=None): # get attributes and subattributues for node data = self.decode_attr_elements(node_attr, node_xml) data = self.add_parents(data, node_xml) # add any parents - if self.version == '1.1': + if self.VERSION == "1.1": data = self.add_slices(data, node_xml) # add slices else: data = self.add_spells(data, node_xml) # add spells @@ -732,23 +785,23 @@ def add_node(self, G, node_xml, node_attr, node_pid=None): data = self.add_start_end(data, node_xml) # add start/end # find the node id and cast it to the appropriate type - node_id = node_xml.get('id') + node_id = node_xml.get("id") if self.node_type is not None: node_id = self.node_type(node_id) # every node should have a label - node_label = node_xml.get('label') - data['label'] = node_label + node_label = node_xml.get("label") + data["label"] = node_label # parent node id - node_pid = node_xml.get('pid', node_pid) + node_pid = node_xml.get("pid", node_pid) if node_pid is not None: - data['pid'] = node_pid + data["pid"] = node_pid # check for subnodes, recursive - subnodes = node_xml.find('{%s}nodes' % self.NS_GEXF) + subnodes = node_xml.find(f"{{{self.NS_GEXF}}}nodes") if subnodes is not None: - for node_xml in subnodes.findall('{%s}node' % self.NS_GEXF): + for node_xml in subnodes.findall(f"{{{self.NS_GEXF}}}node"): self.add_node(G, node_xml, node_attr, node_pid=node_id) G.add_node(node_id, **data) @@ -756,98 +809,102 @@ def add_node(self, G, node_xml, node_attr, node_pid=None): def add_start_end(self, data, xml): # start and end times ttype = self.timeformat - node_start = xml.get('start') + node_start = xml.get("start") if node_start is not None: - data['start'] = self.python_type[ttype](node_start) - node_end = xml.get('end') + data["start"] = self.python_type[ttype](node_start) + node_end = xml.get("end") if node_end is not None: - data['end'] = self.python_type[ttype](node_end) + data["end"] = self.python_type[ttype](node_end) return data def add_viz(self, data, node_xml): # add viz element for node viz = {} - color = node_xml.find('{%s}color' % self.NS_VIZ) + color = node_xml.find(f"{{{self.NS_VIZ}}}color") if color is not None: - if self.VERSION == '1.1': - viz['color'] = {'r': int(color.get('r')), - 'g': int(color.get('g')), - 'b': int(color.get('b'))} + if self.VERSION == "1.1": + viz["color"] = { + "r": int(color.get("r")), + "g": int(color.get("g")), + "b": int(color.get("b")), + } else: - viz['color'] = {'r': int(color.get('r')), - 'g': int(color.get('g')), - 'b': int(color.get('b')), - 'a': float(color.get('a', 1))} - - size = node_xml.find('{%s}size' % self.NS_VIZ) + viz["color"] = { + "r": int(color.get("r")), + "g": int(color.get("g")), + "b": int(color.get("b")), + "a": float(color.get("a", 1)), + } + + size = node_xml.find(f"{{{self.NS_VIZ}}}size") if size is not None: - viz['size'] = 
float(size.get('value')) + viz["size"] = float(size.get("value")) - thickness = node_xml.find('{%s}thickness' % self.NS_VIZ) + thickness = node_xml.find(f"{{{self.NS_VIZ}}}thickness") if thickness is not None: - viz['thickness'] = float(thickness.get('value')) + viz["thickness"] = float(thickness.get("value")) - shape = node_xml.find('{%s}shape' % self.NS_VIZ) + shape = node_xml.find(f"{{{self.NS_VIZ}}}shape") if shape is not None: - viz['shape'] = shape.get('shape') - if viz['shape'] == 'image': - viz['shape'] = shape.get('uri') + viz["shape"] = shape.get("shape") + if viz["shape"] == "image": + viz["shape"] = shape.get("uri") - position = node_xml.find('{%s}position' % self.NS_VIZ) + position = node_xml.find(f"{{{self.NS_VIZ}}}position") if position is not None: - viz['position'] = {'x': float(position.get('x', 0)), - 'y': float(position.get('y', 0)), - 'z': float(position.get('z', 0))} + viz["position"] = { + "x": float(position.get("x", 0)), + "y": float(position.get("y", 0)), + "z": float(position.get("z", 0)), + } if len(viz) > 0: - data['viz'] = viz + data["viz"] = viz return data def add_parents(self, data, node_xml): - parents_element = node_xml.find('{%s}parents' % self.NS_GEXF) + parents_element = node_xml.find(f"{{{self.NS_GEXF}}}parents") if parents_element is not None: - data['parents'] = [] - for p in parents_element.findall('{%s}parent' % self.NS_GEXF): - parent = p.get('for') - data['parents'].append(parent) + data["parents"] = [] + for p in parents_element.findall(f"{{{self.NS_GEXF}}}parent"): + parent = p.get("for") + data["parents"].append(parent) return data def add_slices(self, data, node_or_edge_xml): - slices_element = node_or_edge_xml.find('{%s}slices' % self.NS_GEXF) + slices_element = node_or_edge_xml.find(f"{{{self.NS_GEXF}}}slices") if slices_element is not None: - data['slices'] = [] - for s in slices_element.findall('{%s}slice' % self.NS_GEXF): - start = s.get('start') - end = s.get('end') - data['slices'].append((start, end)) + data["slices"] = [] + for s in slices_element.findall(f"{{{self.NS_GEXF}}}slice"): + start = s.get("start") + end = s.get("end") + data["slices"].append((start, end)) return data def add_spells(self, data, node_or_edge_xml): - spells_element = node_or_edge_xml.find('{%s}spells' % self.NS_GEXF) + spells_element = node_or_edge_xml.find(f"{{{self.NS_GEXF}}}spells") if spells_element is not None: - data['spells'] = [] + data["spells"] = [] ttype = self.timeformat - for s in spells_element.findall('{%s}spell' % self.NS_GEXF): - start = self.python_type[ttype](s.get('start')) - end = self.python_type[ttype](s.get('end')) - data['spells'].append((start, end)) + for s in spells_element.findall(f"{{{self.NS_GEXF}}}spell"): + start = self.python_type[ttype](s.get("start")) + end = self.python_type[ttype](s.get("end")) + data["spells"].append((start, end)) return data def add_edge(self, G, edge_element, edge_attr): # add an edge to the graph # raise error if we find mixed directed and undirected edges - edge_direction = edge_element.get('type') - if G.is_directed() and edge_direction == 'undirected': - raise nx.NetworkXError( - 'Undirected edge found in directed graph.') - if (not G.is_directed()) and edge_direction == 'directed': - raise nx.NetworkXError( - 'Directed edge found in undirected graph.') + edge_direction = edge_element.get("type") + if G.is_directed() and edge_direction == "undirected": + raise nx.NetworkXError("Undirected edge found in directed graph.") + if (not G.is_directed()) and edge_direction == "directed": + raise 
nx.NetworkXError("Directed edge found in undirected graph.") # Get source and target and recast type if required - source = edge_element.get('source') - target = edge_element.get('target') + source = edge_element.get("source") + target = edge_element.get("target") if self.node_type is not None: source = self.node_type(source) target = self.node_type(target) @@ -855,7 +912,7 @@ def add_edge(self, G, edge_element, edge_attr): data = self.decode_attr_elements(edge_attr, edge_element) data = self.add_start_end(data, edge_element) - if self.version == '1.1': + if self.VERSION == "1.1": data = self.add_slices(data, edge_element) # add slices else: data = self.add_spells(data, edge_element) # add spells @@ -863,55 +920,55 @@ def add_edge(self, G, edge_element, edge_attr): # GEXF stores edge ids as an attribute # NetworkX uses them as keys in multigraphs # if networkx_key is not specified as an attribute - edge_id = edge_element.get('id') + edge_id = edge_element.get("id") if edge_id is not None: - data['id'] = edge_id + data["id"] = edge_id # check if there is a 'multigraph_key' and use that as edge_id - multigraph_key = data.pop('networkx_key', None) + multigraph_key = data.pop("networkx_key", None) if multigraph_key is not None: edge_id = multigraph_key - weight = edge_element.get('weight') + weight = edge_element.get("weight") if weight is not None: - data['weight'] = float(weight) + data["weight"] = float(weight) - edge_label = edge_element.get('label') + edge_label = edge_element.get("label") if edge_label is not None: - data['label'] = edge_label + data["label"] = edge_label if G.has_edge(source, target): # seen this edge before - this is a multigraph self.simple_graph = False G.add_edge(source, target, key=edge_id, **data) - if edge_direction == 'mutual': + if edge_direction == "mutual": G.add_edge(target, source, key=edge_id, **data) def decode_attr_elements(self, gexf_keys, obj_xml): # Use the key information to decode the attr XML attr = {} # look for outer '' element - attr_element = obj_xml.find('{%s}attvalues' % self.NS_GEXF) + attr_element = obj_xml.find(f"{{{self.NS_GEXF}}}attvalues") if attr_element is not None: # loop over elements - for a in attr_element.findall('{%s}attvalue' % self.NS_GEXF): - key = a.get('for') # for is required + for a in attr_element.findall(f"{{{self.NS_GEXF}}}attvalue"): + key = a.get("for") # for is required try: # should be in our gexf_keys dictionary - title = gexf_keys[key]['title'] - except KeyError: - raise nx.NetworkXError('No attribute defined for=%s.' 
% key) - atype = gexf_keys[key]['type'] - value = a.get('value') - if atype == 'boolean': + title = gexf_keys[key]["title"] + except KeyError as e: + raise nx.NetworkXError(f"No attribute defined for={key}.") from e + atype = gexf_keys[key]["type"] + value = a.get("value") + if atype == "boolean": value = self.convert_bool[value] else: value = self.python_type[atype](value) - if gexf_keys[key]['mode'] == 'dynamic': + if gexf_keys[key]["mode"] == "dynamic": # for dynamic graphs use list of three-tuples # [(value1,start1,end1), (value2,start2,end2), etc] ttype = self.timeformat - start = self.python_type[ttype](a.get('start')) - end = self.python_type[ttype](a.get('end')) + start = self.python_type[ttype](a.get("start")) + end = self.python_type[ttype](a.get("end")) if title in attr: attr[title].append((value, start, end)) else: @@ -925,16 +982,16 @@ def find_gexf_attributes(self, attributes_element): # Extract all the attributes and defaults attrs = {} defaults = {} - mode = attributes_element.get('mode') - for k in attributes_element.findall('{%s}attribute' % self.NS_GEXF): - attr_id = k.get('id') - title = k.get('title') - atype = k.get('type') - attrs[attr_id] = {'title': title, 'type': atype, 'mode': mode} + mode = attributes_element.get("mode") + for k in attributes_element.findall(f"{{{self.NS_GEXF}}}attribute"): + attr_id = k.get("id") + title = k.get("title") + atype = k.get("type") + attrs[attr_id] = {"title": title, "type": atype, "mode": mode} # check for the 'default' subelement of key element and add - default = k.find('{%s}default' % self.NS_GEXF) + default = k.find(f"{{{self.NS_GEXF}}}default") if default is not None: - if atype == 'boolean': + if atype == "boolean": value = self.convert_bool[default.text] else: value = self.python_type[atype](default.text) @@ -968,43 +1025,27 @@ def relabel_gexf_graph(G): """ # build mapping of node labels, do some error checking try: - mapping = [(u, G.nodes[u]['label']) for u in G] - except KeyError: - raise nx.NetworkXError('Failed to relabel nodes: ' - 'missing node labels found. ' - 'Use relabel=False.') + mapping = [(u, G.nodes[u]["label"]) for u in G] + except KeyError as e: + raise nx.NetworkXError( + "Failed to relabel nodes: missing node labels found. Use relabel=False." + ) from e x, y = zip(*mapping) if len(set(y)) != len(G): - raise nx.NetworkXError('Failed to relabel nodes: ' - 'duplicate node labels found. ' - 'Use relabel=False.') + raise nx.NetworkXError( + "Failed to relabel nodes: " + "duplicate node labels found. " + "Use relabel=False." 
+ ) mapping = dict(mapping) H = nx.relabel_nodes(G, mapping) # relabel attributes for n in G: m = mapping[n] - H.nodes[m]['id'] = n - H.nodes[m].pop('label') - if 'pid' in H.nodes[m]: - H.nodes[m]['pid'] = mapping[G.nodes[n]['pid']] - if 'parents' in H.nodes[m]: - H.nodes[m]['parents'] = [mapping[p] for p in G.nodes[n]['parents']] + H.nodes[m]["id"] = n + H.nodes[m].pop("label") + if "pid" in H.nodes[m]: + H.nodes[m]["pid"] = mapping[G.nodes[n]["pid"]] + if "parents" in H.nodes[m]: + H.nodes[m]["parents"] = [mapping[p] for p in G.nodes[n]["parents"]] return H - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import xml.etree.cElementTree - except: - raise SkipTest('xml.etree.cElementTree not available.') - - -# fixture for nose tests -def teardown_module(module): - import os - try: - os.unlink('test.gexf') - except: - pass diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py index 3336a5a..ad31d47 100644 --- a/networkx/readwrite/gml.py +++ b/networkx/readwrite/gml.py @@ -1,16 +1,7 @@ -# encoding: utf-8 -# Copyright (C) 2008-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Author: Aric Hagberg (hagberg@lanl.gov) """ Read graphs in GML format. -"GML, the G>raph Modelling Language, is our proposal for a portable +"GML, the Graph Modelling Language, is our proposal for a portable file format for graphs. GML's key features are portability, simple syntax, extensibility and flexibility. A GML file consists of a hierarchical key-value lists. Graphs can be annotated with arbitrary @@ -30,57 +21,26 @@ specification. For other data types, you need to explicitly supply a `stringizer`/`destringizer`. -For better interoperability of data generated by Python 2 and Python 3, -we've provided `literal_stringizer` and `literal_destringizer`. - For additional documentation on the GML file format, please see the `GML website `_. Several example graphs in GML format may be found on Mark Newman's `Network data page `_. 
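
For illustration, a minimal sketch of the hierarchical key-value syntax
described above (the same shape that `generate_gml` produces for a
two-node graph)::

    graph [
      node [
        id 0
        label "a"
      ]
      node [
        id 1
        label "b"
      ]
      edge [
        source 0
        target 1
      ]
    ]
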
""" -try: - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO -except ImportError: - from io import StringIO +from io import StringIO from ast import literal_eval from collections import defaultdict +from enum import Enum +from typing import Any, NamedTuple import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file +import warnings import re -try: - import htmlentitydefs -except ImportError: - # Python 3.x - import html.entities as htmlentitydefs - -__all__ = ['read_gml', 'parse_gml', 'generate_gml', 'write_gml'] - - -try: - long -except NameError: - long = int -try: - unicode -except NameError: - unicode = str -try: - unichr -except NameError: - unichr = chr -try: - literal_eval(r"u'\u4444'") -except SyntaxError: - # Remove 'u' prefixes in unicode literals in Python 3 - def rtp_fix_unicode(s): return s[1:] -else: - rtp_fix_unicode = None +import html.entities as htmlentitydefs + +__all__ = ["read_gml", "parse_gml", "generate_gml", "write_gml"] def escape(text): @@ -89,9 +49,10 @@ def escape(text): Use XML character references for unprintable or non-ASCII characters, double quotes and ampersands in a string """ + def fixup(m): ch = m.group(0) - return '&#' + str(ord(ch)) + ';' + return "&#" + str(ord(ch)) + ";" text = re.sub('[^ -~]|[&"]', fixup, text) return text if isinstance(text, str) else str(text) @@ -99,11 +60,12 @@ def fixup(m): def unescape(text): """Replace XML character references with the referenced characters""" + def fixup(m): text = m.group(0) - if text[1] == '#': + if text[1] == "#": # Character reference - if text[2] == 'x': + if text[2] == "x": code = int(text[3:-1], 16) else: code = int(text[2:-1]) @@ -114,7 +76,7 @@ def fixup(m): except KeyError: return text # leave unchanged try: - return chr(code) if code < 256 else unichr(code) + return chr(code) except (ValueError, OverflowError): return text # leave unchanged @@ -139,20 +101,20 @@ def literal_destringizer(rep): ValueError If `rep` is not a Python literal. """ - if isinstance(rep, (str, unicode)): + msg = "literal_destringizer is deprecated and will be removed in 3.0." + warnings.warn(msg, DeprecationWarning) + if isinstance(rep, str): orig_rep = rep - if rtp_fix_unicode is not None: - rep = rtp_fix_unicode(rep) try: return literal_eval(rep) - except SyntaxError: - raise ValueError('%r is not a valid Python literal' % (orig_rep,)) + except SyntaxError as e: + raise ValueError(f"{orig_rep!r} is not a valid Python literal") from e else: - raise ValueError('%r is not a string' % (rep,)) + raise ValueError(f"{rep!r} is not a string") -@open_file(0, mode='rb') -def read_gml(path, label='label', destringizer=None): +@open_file(0, mode="rb") +def read_gml(path, label="label", destringizer=None): """Read graph in GML format from `path`. Parameters @@ -181,7 +143,7 @@ def read_gml(path, label='label', destringizer=None): See Also -------- - write_gml, parse_gml, literal_destringizer + write_gml, parse_gml Notes ----- @@ -193,25 +155,26 @@ def read_gml(path, label='label', destringizer=None): `stringizer`/`destringizer`. For additional documentation on the GML file format, please see the - `GML website `_. + `GML url `_. - See the module docstring :mod:`networkx.readwrite.gml` for additional details. + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
Examples -------- >>> G = nx.path_graph(4) - >>> nx.write_gml(G, 'test.gml') - >>> H = nx.read_gml('test.gml') + >>> nx.write_gml(G, "test.gml") + >>> H = nx.read_gml("test.gml") """ + def filter_lines(lines): for line in lines: try: - line = line.decode('ascii') - except UnicodeDecodeError: - raise NetworkXError('input is not ASCII-encoded') + line = line.decode("ascii") + except UnicodeDecodeError as e: + raise NetworkXError("input is not ASCII-encoded") from e if not isinstance(line, str): lines = str(lines) - if line and line[-1] == '\n': + if line and line[-1] == "\n": line = line[:-1] yield line @@ -219,7 +182,7 @@ def filter_lines(lines): return G -def parse_gml(lines, label='label', destringizer=None): +def parse_gml(lines, label="label", destringizer=None): """Parse GML graph from a string or iterable. Parameters @@ -248,7 +211,7 @@ def parse_gml(lines, label='label', destringizer=None): See Also -------- - write_gml, read_gml, literal_destringizer + write_gml, read_gml Notes ----- @@ -263,199 +226,245 @@ def parse_gml(lines, label='label', destringizer=None): `stringizer`/`destringizer`. For additional documentation on the GML file format, please see the - `GML website `_. + `GML url `_. - See the module docstring :mod:`networkx.readwrite.gml` for additional details. + See the module docstring :mod:`networkx.readwrite.gml` for more details. """ + def decode_line(line): if isinstance(line, bytes): try: - line.decode('ascii') - except UnicodeDecodeError: - raise NetworkXError('input is not ASCII-encoded') + line.decode("ascii") + except UnicodeDecodeError as e: + raise NetworkXError("input is not ASCII-encoded") from e if not isinstance(line, str): line = str(line) return line def filter_lines(lines): - if isinstance(lines, (str, unicode)): + if isinstance(lines, str): lines = decode_line(lines) lines = lines.splitlines() - for line in lines: - yield line + yield from lines else: for line in lines: line = decode_line(line) - if line and line[-1] == '\n': + if line and line[-1] == "\n": line = line[:-1] - if line.find('\n') != -1: - raise NetworkXError('input line contains newline') + if line.find("\n") != -1: + raise NetworkXError("input line contains newline") yield line G = parse_gml_lines(filter_lines(lines), label, destringizer) return G +class Pattern(Enum): + """ encodes the index of each token-matching pattern in `tokenize`. """ + + KEYS = 0 + REALS = 1 + INTS = 2 + STRINGS = 3 + DICT_START = 4 + DICT_END = 5 + COMMENT_WHITESPACE = 6 + + +class Token(NamedTuple): + category: Pattern + value: Any + line: int + position: int + + +LIST_START_VALUE = "_networkx_list_start" + + def parse_gml_lines(lines, label, destringizer): """Parse GML `lines` into a graph. 
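
    Parsing happens in two stages: ``tokenize`` turns the input lines into
    a stream of ``Token`` tuples (category, value, line, position), and a
    small recursive-descent parser (``parse_kv``, ``parse_dict`` and
    ``parse_graph``) assembles those tokens into nested dictionaries from
    which the graph is built.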
""" + def tokenize(): patterns = [ - r'[A-Za-z][0-9A-Za-z_]*\b', # keys - r'[+-]?(?:[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(?:[Ee][+-]?[0-9]+)?', # reals - r'[+-]?[0-9]+', # ints - r'".*?"', # strings - r'\[', # dict start - r'\]', # dict end - r'#.*$|\s+' # comments and whitespaces + r"[A-Za-z][0-9A-Za-z_]*\b", # keys + # reals + r"[+-]?(?:[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(?:[Ee][+-]?[0-9]+)?", + r"[+-]?[0-9]+", # ints + r'".*?"', # strings + r"\[", # dict start + r"\]", # dict end + r"#.*$|\s+", # comments and whitespaces ] - tokens = re.compile( - '|'.join('(' + pattern + ')' for pattern in patterns)) + tokens = re.compile("|".join(f"({pattern})" for pattern in patterns)) lineno = 0 for line in lines: length = len(line) pos = 0 while pos < length: match = tokens.match(line, pos) - if match is not None: - for i in range(len(patterns)): - group = match.group(i + 1) - if group is not None: - if i == 0: # keys - value = group.rstrip() - elif i == 1: # reals - value = float(group) - elif i == 2: # ints - value = int(group) - else: - value = group - if i != 6: # comments and whitespaces - yield (i, value, lineno + 1, pos + 1) - pos += len(group) - break - else: - raise NetworkXError('cannot tokenize %r at (%d, %d)' % - (line[pos:], lineno + 1, pos + 1)) + if match is None: + m = f"cannot tokenize {line[pos:]} at ({lineno + 1}, {pos + 1})" + raise NetworkXError(m) + for i in range(len(patterns)): + group = match.group(i + 1) + if group is not None: + if i == 0: # keys + value = group.rstrip() + elif i == 1: # reals + value = float(group) + elif i == 2: # ints + value = int(group) + else: + value = group + if i != 6: # comments and whitespaces + yield Token(Pattern(i), value, lineno + 1, pos + 1) + pos += len(group) + break lineno += 1 - yield (None, None, lineno + 1, 1) # EOF + yield Token(None, None, lineno + 1, 1) # EOF def unexpected(curr_token, expected): category, value, lineno, pos = curr_token - raise NetworkXError( - 'expected %s, found %s at (%d, %d)' % - (expected, repr(value) if value is not None else 'EOF', lineno, - pos)) + value = repr(value) if value is not None else "EOF" + raise NetworkXError(f"expected {expected}, found {value} at ({lineno}, {pos})") def consume(curr_token, category, expected): - if curr_token[0] == category: + if curr_token.category == category: return next(tokens) unexpected(curr_token, expected) def parse_kv(curr_token): dct = defaultdict(list) - while curr_token[0] == 0: # keys - key = curr_token[1] + while curr_token.category == Pattern.KEYS: + key = curr_token.value curr_token = next(tokens) - category = curr_token[0] - if category == 1 or category == 2: # reals or ints - value = curr_token[1] + category = curr_token.category + if category == Pattern.REALS or category == Pattern.INTS: + value = curr_token.value curr_token = next(tokens) - elif category == 3: # strings - value = unescape(curr_token[1][1:-1]) + elif category == Pattern.STRINGS: + value = unescape(curr_token.value[1:-1]) if destringizer: try: value = destringizer(value) except ValueError: pass curr_token = next(tokens) - elif category == 4: # dict start + elif category == Pattern.DICT_START: curr_token, value = parse_dict(curr_token) else: - unexpected(curr_token, "an int, float, string or '['") + # Allow for string convertible id and label values + if key in ("id", "label", "source", "target"): + try: + # String convert the token value + value = unescape(str(curr_token.value)) + if destringizer: + try: + value = destringizer(value) + except ValueError: + pass + curr_token = next(tokens) + except 
Exception: + msg = ( + "an int, float, string, '[' or string" + + " convertable ASCII value for node id or label" + ) + unexpected(curr_token, msg) + else: # Otherwise error out + unexpected(curr_token, "an int, float, string or '['") dct[key].append(value) - dct = {key: (value if not isinstance(value, list) or len(value) != 1 - else value[0]) for key, value in dct.items()} + + def clean_dict_value(value): + if not isinstance(value, list): + return value + if len(value) == 1: + return value[0] + if value[0] == LIST_START_VALUE: + return value[1:] + return value + + dct = {key: clean_dict_value(value) for key, value in dct.items()} return curr_token, dct def parse_dict(curr_token): - curr_token = consume(curr_token, 4, "'['") # dict start + # dict start + curr_token = consume(curr_token, Pattern.DICT_START, "'['") + # dict contents curr_token, dct = parse_kv(curr_token) - curr_token = consume(curr_token, 5, "']'") # dict end + # dict end + curr_token = consume(curr_token, Pattern.DICT_END, "']'") return curr_token, dct def parse_graph(): curr_token, dct = parse_kv(next(tokens)) - if curr_token[0] is not None: # EOF - unexpected(curr_token, 'EOF') - if 'graph' not in dct: - raise NetworkXError('input contains no graph') - graph = dct['graph'] + if curr_token.category is not None: # EOF + unexpected(curr_token, "EOF") + if "graph" not in dct: + raise NetworkXError("input contains no graph") + graph = dct["graph"] if isinstance(graph, list): - raise NetworkXError('input contains more than one graph') + raise NetworkXError("input contains more than one graph") return graph tokens = tokenize() graph = parse_graph() - directed = graph.pop('directed', False) - multigraph = graph.pop('multigraph', False) + directed = graph.pop("directed", False) + multigraph = graph.pop("multigraph", False) if not multigraph: G = nx.DiGraph() if directed else nx.Graph() else: G = nx.MultiDiGraph() if directed else nx.MultiGraph() - G.graph.update((key, value) for key, value in graph.items() - if key != 'node' and key != 'edge') + graph_attr = {k: v for k, v in graph.items() if k not in ("node", "edge")} + G.graph.update(graph_attr) def pop_attr(dct, category, attr, i): try: return dct.pop(attr) - except KeyError: - raise NetworkXError( - "%s #%d has no '%s' attribute" % (category, i, attr)) + except KeyError as e: + raise NetworkXError(f"{category} #{i} has no '{attr}' attribute") from e - nodes = graph.get('node', []) + nodes = graph.get("node", []) mapping = {} - labels = set() + node_labels = set() for i, node in enumerate(nodes if isinstance(nodes, list) else [nodes]): - id = pop_attr(node, 'node', 'id', i) + id = pop_attr(node, "node", "id", i) if id in G: - raise NetworkXError('node id %r is duplicated' % (id,)) - if label != 'id': - label = pop_attr(node, 'node', 'label', i) - if label in labels: - raise NetworkXError('node label %r is duplicated' % (label,)) - labels.add(label) - mapping[id] = label + raise NetworkXError(f"node id {id!r} is duplicated") + if label is not None and label != "id": + node_label = pop_attr(node, "node", label, i) + if node_label in node_labels: + raise NetworkXError(f"node label {node_label!r} is duplicated") + node_labels.add(node_label) + mapping[id] = node_label G.add_node(id, **node) - edges = graph.get('edge', []) + edges = graph.get("edge", []) for i, edge in enumerate(edges if isinstance(edges, list) else [edges]): - source = pop_attr(edge, 'edge', 'source', i) - target = pop_attr(edge, 'edge', 'target', i) + source = pop_attr(edge, "edge", "source", i) + target = 
pop_attr(edge, "edge", "target", i) if source not in G: - raise NetworkXError( - 'edge #%d has an undefined source %r' % (i, source)) + raise NetworkXError(f"edge #{i} has undefined source {source!r}") if target not in G: - raise NetworkXError( - 'edge #%d has an undefined target %r' % (i, target)) + raise NetworkXError(f"edge #{i} has undefined target {target!r}") if not multigraph: if not G.has_edge(source, target): G.add_edge(source, target, **edge) else: - raise nx.NetworkXError( - """edge #%d (%r%s%r) is duplicated - -Hint: If this is a multigraph, add "multigraph 1" to the header of the file.""" % - (i, source, '->' if directed else '--', target)) + arrow = "->" if directed else "--" + msg = f"edge #{i} ({source!r}{arrow}{target!r}) is duplicated" + raise nx.NetworkXError(msg) else: - key = edge.pop('key', None) + key = edge.pop("key", None) if key is not None and G.has_edge(source, target, key): - raise nx.NetworkXError( - 'edge #%d (%r%s%r, %r) is duplicated' % - (i, source, '->' if directed else '--', target, key)) + arrow = "->" if directed else "--" + msg = f"edge #{i} ({source!r}{arrow}{target!r}, {key!r})" + msg2 = 'Hint: If multigraph add "multigraph 1" to file header.' + raise nx.NetworkXError(msg + " is duplicated\n" + msg2) G.add_edge(source, target, key, **edge) - if label != 'id': + if label is not None and label != "id": G = nx.relabel_nodes(G, mapping) return G @@ -489,76 +498,79 @@ def literal_stringizer(value): The original value can be recovered using the :func:`networkx.readwrite.gml.literal_destringizer` function. """ + msg = "literal_stringizer is deprecated and will be removed in 3.0." + warnings.warn(msg, DeprecationWarning) + def stringize(value): - if isinstance(value, (int, long, bool)) or value is None: + if isinstance(value, (int, bool)) or value is None: if value is True: # GML uses 1/0 for boolean values. 
buf.write(str(1)) elif value is False: buf.write(str(0)) else: buf.write(str(value)) - elif isinstance(value, unicode): + elif isinstance(value, str): text = repr(value) - if text[0] != 'u': + if text[0] != "u": try: - value.encode('latin1') + value.encode("latin1") except UnicodeEncodeError: - text = 'u' + text + text = "u" + text buf.write(text) elif isinstance(value, (float, complex, str, bytes)): buf.write(repr(value)) elif isinstance(value, list): - buf.write('[') + buf.write("[") first = True for item in value: if not first: - buf.write(',') + buf.write(",") else: first = False stringize(item) - buf.write(']') + buf.write("]") elif isinstance(value, tuple): if len(value) > 1: - buf.write('(') + buf.write("(") first = True for item in value: if not first: - buf.write(',') + buf.write(",") else: first = False stringize(item) - buf.write(')') + buf.write(")") elif value: - buf.write('(') + buf.write("(") stringize(value[0]) - buf.write(',)') + buf.write(",)") else: - buf.write('()') + buf.write("()") elif isinstance(value, dict): - buf.write('{') + buf.write("{") first = True for key, value in value.items(): if not first: - buf.write(',') + buf.write(",") else: first = False stringize(key) - buf.write(':') + buf.write(":") stringize(value) - buf.write('}') + buf.write("}") elif isinstance(value, set): - buf.write('{') + buf.write("{") first = True for item in value: if not first: - buf.write(',') + buf.write(",") else: first = False stringize(item) - buf.write('}') + buf.write("}") else: - raise ValueError( - '%r cannot be converted into a Python literal' % (value,)) + msg = "{value!r} cannot be converted into a Python literal" + raise ValueError(msg) buf = StringIO() stringize(value) @@ -589,10 +601,6 @@ def generate_gml(G, stringizer=None): If `stringizer` cannot convert a value into a string, or the value to convert is not a string while `stringizer` is None. - See Also - -------- - literal_stringizer - Notes ----- Graph attributes named 'directed', 'multigraph', 'node' or @@ -609,9 +617,9 @@ def generate_gml(G, stringizer=None): `stringizer`/`destringizer`. For additional documentation on the GML file format, please see the - `GML website `_. + `GML url `_. - See the module docstring :mod:`networkx.readwrite.gml` for additional details. + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
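
    A `stringizer` is any callable that maps a value to a string and
    raises ``ValueError`` for values it cannot handle.  As a rough sketch,
    the builtin ``str`` works whenever a lossy, one-way representation of
    unsupported values is acceptable::

        linefeed = chr(10)  # linefeed = \n
        s = linefeed.join(nx.generate_gml(G, stringizer=str))
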
Examples -------- @@ -648,109 +656,111 @@ def generate_gml(G, stringizer=None): ] ] """ - valid_keys = re.compile('^[A-Za-z][0-9A-Za-z]*$') + valid_keys = re.compile("^[A-Za-z][0-9A-Za-z_]*$") def stringize(key, value, ignored_keys, indent, in_list=False): - if not isinstance(key, (str, unicode)): - raise NetworkXError('%r is not a string' % (key,)) + if not isinstance(key, str): + raise NetworkXError(f"{key!r} is not a string") if not valid_keys.match(key): - raise NetworkXError('%r is not a valid key' % (key,)) + raise NetworkXError(f"{key!r} is not a valid key") if not isinstance(key, str): key = str(key) if key not in ignored_keys: - if isinstance(value, (int, long, bool)): - if key == 'label': + if isinstance(value, (int, bool)): + if key == "label": yield indent + key + ' "' + str(value) + '"' elif value is True: # python bool is an instance of int - yield indent + key + ' 1' + yield indent + key + " 1" elif value is False: - yield indent + key + ' 0' + yield indent + key + " 0" + # GML only supports signed 32-bit integers + elif value < -(2 ** 31) or value >= 2 ** 31: + yield indent + key + ' "' + str(value) + '"' else: - yield indent + key + ' ' + str(value) + yield indent + key + " " + str(value) elif isinstance(value, float): text = repr(value).upper() # GML requires that a real literal contain a decimal point, but # repr may not output a decimal point when the mantissa is # integral and hence needs fixing. - epos = text.rfind('E') - if epos != -1 and text.find('.', 0, epos) == -1: - text = text[:epos] + '.' + text[epos:] - if key == 'label': + epos = text.rfind("E") + if epos != -1 and text.find(".", 0, epos) == -1: + text = text[:epos] + "." + text[epos:] + if key == "label": yield indent + key + ' "' + text + '"' else: - yield indent + key + ' ' + text + yield indent + key + " " + text elif isinstance(value, dict): - yield indent + key + ' [' - next_indent = indent + ' ' + yield indent + key + " [" + next_indent = indent + " " for key, value in value.items(): - for line in stringize(key, value, (), next_indent): - yield line - yield indent + ']' - elif isinstance(value, (list, tuple)) and key != 'label' \ - and value and not in_list: - next_indent = indent + ' ' + yield from stringize(key, value, (), next_indent) + yield indent + "]" + elif ( + isinstance(value, (list, tuple)) + and key != "label" + and value + and not in_list + ): + if len(value) == 1: + yield indent + key + " " + f'"{LIST_START_VALUE}"' for val in value: - for line in stringize(key, val, (), next_indent, True): - yield line + yield from stringize(key, val, (), indent, True) else: if stringizer: try: value = stringizer(value) - except ValueError: + except ValueError as e: raise NetworkXError( - '%r cannot be converted into a string' % (value,)) - if not isinstance(value, (str, unicode)): - raise NetworkXError('%r is not a string' % (value,)) + f"{value!r} cannot be converted into a string" + ) from e + if not isinstance(value, str): + raise NetworkXError(f"{value!r} is not a string") yield indent + key + ' "' + escape(value) + '"' multigraph = G.is_multigraph() - yield 'graph [' + yield "graph [" # Output graph attributes if G.is_directed(): - yield ' directed 1' + yield " directed 1" if multigraph: - yield ' multigraph 1' - ignored_keys = {'directed', 'multigraph', 'node', 'edge'} + yield " multigraph 1" + ignored_keys = {"directed", "multigraph", "node", "edge"} for attr, value in G.graph.items(): - for line in stringize(attr, value, ignored_keys, ' '): - yield line + yield from stringize(attr, value, 
ignored_keys, " ") # Output node data node_id = dict(zip(G, range(len(G)))) - ignored_keys = {'id', 'label'} + ignored_keys = {"id", "label"} for node, attrs in G.nodes.items(): - yield ' node [' - yield ' id ' + str(node_id[node]) - for line in stringize('label', node, (), ' '): - yield line + yield " node [" + yield " id " + str(node_id[node]) + yield from stringize("label", node, (), " ") for attr, value in attrs.items(): - for line in stringize(attr, value, ignored_keys, ' '): - yield line - yield ' ]' + yield from stringize(attr, value, ignored_keys, " ") + yield " ]" # Output edge data - ignored_keys = {'source', 'target'} - kwargs = {'data': True} + ignored_keys = {"source", "target"} + kwargs = {"data": True} if multigraph: - ignored_keys.add('key') - kwargs['keys'] = True + ignored_keys.add("key") + kwargs["keys"] = True for e in G.edges(**kwargs): - yield ' edge [' - yield ' source ' + str(node_id[e[0]]) - yield ' target ' + str(node_id[e[1]]) + yield " edge [" + yield " source " + str(node_id[e[0]]) + yield " target " + str(node_id[e[1]]) if multigraph: - for line in stringize('key', e[2], (), ' '): - yield line + yield from stringize("key", e[2], (), " ") for attr, value in e[-1].items(): - for line in stringize(attr, value, ignored_keys, ' '): - yield line - yield ' ]' - yield ']' + yield from stringize(attr, value, ignored_keys, " ") + yield " ]" + yield "]" -@open_file(1, mode='wb') +@open_file(1, mode="wb") def write_gml(G, path, stringizer=None): """Write a graph `G` in GML format to the file or file handle `path`. @@ -776,7 +786,7 @@ def write_gml(G, path, stringizer=None): See Also -------- - read_gml, generate_gml, literal_stringizer + read_gml, generate_gml Notes ----- @@ -793,10 +803,13 @@ def write_gml(G, path, stringizer=None): specification. For other data types, you need to explicitly supply a `stringizer`/`destringizer`. + Note that while we allow non-standard GML to be read from a file, we make + sure to write GML format. In particular, underscores are not allowed in + attribute names. For additional documentation on the GML file format, please see the - `GML website `_. + `GML url `_. - See the module docstring :mod:`networkx.readwrite.gml` for additional details. + See the module docstring :mod:`networkx.readwrite.gml` for more details. Examples -------- @@ -808,12 +821,4 @@ def write_gml(G, path, stringizer=None): >>> nx.write_gml(G, "test.gml.gz") """ for line in generate_gml(G, stringizer): - path.write((line + '\n').encode('ascii')) - - -# fixture for nose -def teardown_module(module): - import os - for fname in ['test.gml', 'test.gml.gz']: - if os.path.isfile(fname): - os.unlink(fname) + path.write((line + "\n").encode("ascii")) diff --git a/networkx/readwrite/gpickle.py b/networkx/readwrite/gpickle.py index c0b3923..9f8cd1d 100644 --- a/networkx/readwrite/gpickle.py +++ b/networkx/readwrite/gpickle.py @@ -18,28 +18,17 @@ Format ------ -See https://docs.python.org/2/library/pickle.html +See https://docs.python.org/3/library/pickle.html """ -__author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-__all__ = ['read_gpickle', 'write_gpickle'] +__all__ = ["read_gpickle", "write_gpickle"] -import networkx as nx from networkx.utils import open_file -try: - import cPickle as pickle -except ImportError: - import pickle +import pickle -@open_file(1, mode='wb') +@open_file(1, mode="wb") def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL): """Write graph in Python pickle format. @@ -65,12 +54,12 @@ def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL): References ---------- - .. [1] https://docs.python.org/2/library/pickle.html + .. [1] https://docs.python.org/3/library/pickle.html """ pickle.dump(G, path, protocol) -@open_file(0, mode='rb') +@open_file(0, mode="rb") def read_gpickle(path): """Read graph object in Python pickle format. @@ -96,13 +85,6 @@ def read_gpickle(path): References ---------- - .. [1] https://docs.python.org/2/library/pickle.html + .. [1] https://docs.python.org/3/library/pickle.html """ return pickle.load(path) - -# fixture for nose tests - - -def teardown_module(module): - import os - os.unlink('test.gpickle') diff --git a/networkx/readwrite/graph6.py b/networkx/readwrite/graph6.py index fb18925..a7517f8 100644 --- a/networkx/readwrite/graph6.py +++ b/networkx/readwrite/graph6.py @@ -1,15 +1,5 @@ # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Tomas Gavenciak -# All rights reserved. -# BSD license. -# -# Authors: Tomas Gavenciak -# Aric Hagberg """Functions for reading and writing graphs in the *graph6* format. The *graph6* file format is suitable for small graphs or large dense @@ -21,14 +11,12 @@ """ from itertools import islice -import sys import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file, not_implemented_for -__all__ = ['from_graph6_bytes', 'read_graph6', 'to_graph6_bytes', - 'write_graph6'] +__all__ = ["from_graph6_bytes", "read_graph6", "to_graph6_bytes", "write_graph6"] def _generate_graph6_bytes(G, nodes, header): @@ -54,10 +42,11 @@ def _generate_graph6_bytes(G, nodes, header): """ n = len(G) if n >= 2 ** 36: - raise ValueError('graph6 is only defined if number of nodes is less ' - 'than 2 ** 36') + raise ValueError( + "graph6 is only defined if number of nodes is less " "than 2 ** 36" + ) if header: - yield b'>>graph6<<' + yield b">>graph6<<" for d in n_to_data(n): yield str.encode(chr(d + 63)) # This generates the same as `(v in G[u] for u, v in combinations(G, 2))`, @@ -68,15 +57,15 @@ def _generate_graph6_bytes(G, nodes, header): d = sum(b << 5 - i for i, b in enumerate(chunk)) yield str.encode(chr(d + 63)) chunk = list(islice(bits, 6)) - yield b'\n' + yield b"\n" -def from_graph6_bytes(string): - """Read a simple undirected graph in graph6 format from string. +def from_graph6_bytes(bytes_in): + """Read a simple undirected graph in graph6 format from bytes. Parameters ---------- - string : string + bytes_in : bytes Data in graph6 format, without a trailing newline. Returns @@ -86,15 +75,15 @@ def from_graph6_bytes(string): Raises ------ NetworkXError - If the string is unable to be parsed in graph6 format + If bytes_in is unable to be parsed in graph6 format ValueError - If any character ``c`` in the input string does not satisfy + If any character ``c`` in bytes_in does not satisfy ``63 <= ord(c) < 127``. 
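
    A quick sketch of the encoding itself: after the optional
    ``>>graph6<<`` header, each byte stores six bits of data plus an
    offset of 63, first the number of nodes and then the upper triangle
    of the adjacency matrix.  For the graph in the example below,
    ``b"A_"`` decodes as ``ord("A") - 63 == 2`` nodes followed by
    ``ord("_") - 63 == 32`` (bit pattern ``100000``), which marks the
    single 0-1 edge.
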
Examples -------- - >>> G = nx.from_graph6_bytes(b'A_') + >>> G = nx.from_graph6_bytes(b"A_") >>> sorted(G.edges()) [(0, 1)] @@ -108,28 +97,27 @@ def from_graph6_bytes(string): """ + def bits(): - """Return sequence of individual bits from 6-bit-per-value + """Returns sequence of individual bits from 6-bit-per-value list of data values.""" for d in data: for i in [5, 4, 3, 2, 1, 0]: yield (d >> i) & 1 - if string.startswith(b'>>graph6<<'): - string = string[10:] + if bytes_in.startswith(b">>graph6<<"): + bytes_in = bytes_in[10:] - if sys.version_info < (3, ): - data = [ord(c) - 63 for c in string] - else: - data = [c - 63 for c in string] + data = [c - 63 for c in bytes_in] if any(c > 63 for c in data): - raise ValueError('each input character must be in range(63, 127)') + raise ValueError("each input character must be in range(63, 127)") n, data = data_to_n(data) nd = (n * (n - 1) // 2 + 5) // 6 if len(data) != nd: raise NetworkXError( - 'Expected %d bits but got %d in graph6' % (n * (n - 1) // 2, len(data) * 6)) + f"Expected {n * (n - 1) // 2} bits but got {len(data) * 6} in graph6" + ) G = nx.Graph() G.add_nodes_from(range(n)) @@ -165,7 +153,7 @@ def to_graph6_bytes(G, nodes=None, header=True): Examples -------- - >>> nx.to_graph6_bytes(nx.path_graph(2)) # doctest: +SKIP + >>> nx.to_graph6_bytes(nx.path_graph(2)) b'>>graph6<>> import tempfile >>> with tempfile.NamedTemporaryFile() as f: - ... _ = f.write(b'>>graph6<>graph6<>> list(G.edges()) @@ -227,7 +215,7 @@ def read_graph6(path): >>> import tempfile >>> with tempfile.NamedTemporaryFile() as f: - ... _ = f.write(b'>>graph6<>graph6<>> list(G.edges()) @@ -255,9 +243,9 @@ def read_graph6(path): return glist -@not_implemented_for('directed') -@not_implemented_for('multigraph') -@open_file(1, mode='wb') +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@open_file(1, mode="wb") def write_graph6(G, path, nodes=None, header=True): """Write a simple undirected graph to a path in graph6 format. @@ -292,7 +280,7 @@ def write_graph6(G, path, nodes=None, header=True): >>> with tempfile.NamedTemporaryFile() as f: ... nx.write_graph6(nx.path_graph(2), f.name) ... _ = f.seek(0) - ... print(f.read()) # doctest: +SKIP + ... print(f.read()) b'>>graph6<>> with tempfile.NamedTemporaryFile() as f: ... nx.write_graph6(nx.path_graph(2), f) ... _ = f.seek(0) - ... print(f.read()) # doctest: +SKIP + ... print(f.read()) b'>>graph6<> 12) & 0x3f, (n >> 6) & 0x3f, n & 0x3f] + return [63, (n >> 12) & 0x3F, (n >> 6) & 0x3F, n & 0x3F] else: # if n <= 68719476735: - return [63, 63, - (n >> 30) & 0x3f, (n >> 24) & 0x3f, (n >> 18) & 0x3f, - (n >> 12) & 0x3f, (n >> 6) & 0x3f, n & 0x3f] + return [ + 63, + 63, + (n >> 30) & 0x3F, + (n >> 24) & 0x3F, + (n >> 18) & 0x3F, + (n >> 12) & 0x3F, + (n >> 6) & 0x3F, + n & 0x3F, + ] diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py index 5bac4c8..f24307b 100644 --- a/networkx/readwrite/graphml.py +++ b/networkx/readwrite/graphml.py @@ -1,12 +1,3 @@ -# Copyright (C) 2008-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. 
-# -# Authors: Salim Fadhley -# Aric Hagberg (hagberg@lanl.gov) """ ******* GraphML @@ -46,15 +37,7 @@ import warnings from collections import defaultdict -try: - from xml.etree.cElementTree import Element, ElementTree - from xml.etree.cElementTree import tostring, fromstring -except ImportError: - try: - from xml.etree.ElementTree import Element, ElementTree - from xml.etree.ElementTree import tostring, fromstring - except ImportError: - pass +from xml.etree.ElementTree import Element, ElementTree, tostring, fromstring try: import lxml.etree as lxmletree @@ -62,16 +45,29 @@ lxmletree = None import networkx as nx -from networkx.utils import open_file, make_str - -__all__ = ['write_graphml', 'read_graphml', 'generate_graphml', - 'write_graphml_xml', 'write_graphml_lxml', - 'parse_graphml', 'GraphMLWriter', 'GraphMLReader'] - - -@open_file(1, mode='wb') -def write_graphml_xml(G, path, encoding='utf-8', prettyprint=True, - infer_numeric_types=False): +from networkx.utils import open_file + +__all__ = [ + "write_graphml", + "read_graphml", + "generate_graphml", + "write_graphml_xml", + "write_graphml_lxml", + "parse_graphml", + "GraphMLWriter", + "GraphMLReader", +] + + +@open_file(1, mode="wb") +def write_graphml_xml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, +): """Write G in GraphML XML format to path Parameters @@ -89,6 +85,8 @@ def write_graphml_xml(G, path, encoding='utf-8', prettyprint=True, Determine if numeric types should be generalized. For example, if edges have both int and float 'weight' attributes, we infer in GraphML that both are floats. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. Examples -------- @@ -97,22 +95,28 @@ def write_graphml_xml(G, path, encoding='utf-8', prettyprint=True, Notes ----- - It may be a good idea in Python2 to convert strings to unicode - before giving the graph to write_gml. At least the strings with - either many characters to escape. - This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. """ - writer = GraphMLWriter(encoding=encoding, prettyprint=prettyprint, - infer_numeric_types=infer_numeric_types) + writer = GraphMLWriter( + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + ) writer.add_graph_element(G) writer.dump(path) -@open_file(1, mode='wb') -def write_graphml_lxml(G, path, encoding='utf-8', prettyprint=True, - infer_numeric_types=False): +@open_file(1, mode="wb") +def write_graphml_lxml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, +): """Write G in GraphML XML format to path This function uses the LXML framework and should be faster than @@ -133,6 +137,8 @@ def write_graphml_lxml(G, path, encoding='utf-8', prettyprint=True, Determine if numeric types should be generalized. For example, if edges have both int and float 'weight' attributes, we infer in GraphML that both are floats. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. Examples -------- @@ -144,13 +150,18 @@ def write_graphml_lxml(G, path, encoding='utf-8', prettyprint=True, This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. 
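
    With ``named_key_ids=True`` the writer uses the attribute names
    themselves as the ``id`` of each GraphML ``<key>`` element, which can
    make the output easier to inspect.  A rough sketch, where
    ``"named.graphml"`` is just an example filename::

        G = nx.Graph()
        G.add_edge("a", "b", weight=2.0)
        nx.write_graphml_lxml(G, "named.graphml", named_key_ids=True)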
""" - writer = GraphMLWriterLxml(path, graph=G, encoding=encoding, - prettyprint=prettyprint, - infer_numeric_types=infer_numeric_types) + writer = GraphMLWriterLxml( + path, + graph=G, + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + ) writer.dump() -def generate_graphml(G, encoding='utf-8', prettyprint=True): +def generate_graphml(G, encoding="utf-8", prettyprint=True, named_key_ids=False): """Generate GraphML lines for G Parameters @@ -161,6 +172,8 @@ def generate_graphml(G, encoding='utf-8', prettyprint=True): Encoding for text data. prettyprint : bool (optional) If True use line breaks and indenting in output XML. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. Examples -------- @@ -168,21 +181,22 @@ def generate_graphml(G, encoding='utf-8', prettyprint=True): >>> linefeed = chr(10) # linefeed = \n >>> s = linefeed.join(nx.generate_graphml(G)) # doctest: +SKIP >>> for line in nx.generate_graphml(G): # doctest: +SKIP - ... print(line) + ... print(line) Notes ----- This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. """ - writer = GraphMLWriter(encoding=encoding, prettyprint=prettyprint) + writer = GraphMLWriter( + encoding=encoding, prettyprint=prettyprint, named_key_ids=named_key_ids + ) writer.add_graph_element(G) - for line in str(writer).splitlines(): - yield line + yield from str(writer).splitlines() -@open_file(0, mode='rb') -def read_graphml(path, node_type=str, edge_key_type=int): +@open_file(0, mode="rb") +def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False): """Read graph in GraphML format from path. Parameters @@ -195,14 +209,19 @@ def read_graphml(path, node_type=str, edge_key_type=int): Convert node ids to this type edge_key_type: Python type (default: int) - Convert graphml edge ids to this type as key of multi-edges + Convert graphml edge ids to this type. Multigraphs use id as edge key. + Non-multigraphs add to edge attribute dict with name "id". + force_multigraph : bool (default: False) + If True, return a multigraph with edge keys. If False (the default) + return a multigraph when multiedges are in the graph. Returns ------- graph: NetworkX graph - If no parallel edges are found a Graph or DiGraph is returned. - Otherwise a MultiGraph or MultiDiGraph is returned. + If parallel edges are present or `force_multigraph=True` then + a MultiGraph or MultiDiGraph is returned. Otherwise a Graph/DiGraph. + The returned graph is directed if the file indicates it should be. Notes ----- @@ -210,14 +229,14 @@ def read_graphml(path, node_type=str, edge_key_type=int): They can be obtained from `G.graph` and applied to node and edge attributes if desired using something like this: - >>> default_color = G.graph['node_default']['color'] # doctest: +SKIP + >>> default_color = G.graph["node_default"]["color"] # doctest: +SKIP >>> for node, data in G.nodes(data=True): # doctest: +SKIP - ... if 'color' not in data: - ... data['color']=default_color - >>> default_color = G.graph['edge_default']['color'] # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + >>> default_color = G.graph["edge_default"]["color"] # doctest: +SKIP >>> for u, v, data in G.edges(data=True): # doctest: +SKIP - ... if 'color' not in data: - ... data['color']=default_color + ... if "color" not in data: + ... 
data["color"] = default_color This implementation does not support mixed graphs (directed and unidirected edges together), hypergraphs, nested graphs, or ports. @@ -234,7 +253,7 @@ def read_graphml(path, node_type=str, edge_key_type=int): the file to "file.graphml.gz". """ - reader = GraphMLReader(node_type=node_type, edge_key_type=edge_key_type) + reader = GraphMLReader(node_type, edge_key_type, force_multigraph) # need to check for multiple graphs glist = list(reader(path=path)) if len(glist) == 0: @@ -242,14 +261,16 @@ def read_graphml(path, node_type=str, edge_key_type=int): header = b'' path.seek(0) old_bytes = path.read() - new_bytes = old_bytes.replace(b'', header) + new_bytes = old_bytes.replace(b"", header) glist = list(reader(string=new_bytes)) if len(glist) == 0: - raise nx.NetworkXError('file not successfully read as graphml') + raise nx.NetworkXError("file not successfully read as graphml") return glist[0] -def parse_graphml(graphml_string, node_type=str): +def parse_graphml( + graphml_string, node_type=str, edge_key_type=int, force_multigraph=False +): """Read graph in GraphML format from string. Parameters @@ -261,6 +282,15 @@ def parse_graphml(graphml_string, node_type=str): node_type: Python type (default: str) Convert node ids to this type + edge_key_type: Python type (default: int) + Convert graphml edge ids to this type. Multigraphs use id as edge key. + Non-multigraphs add to edge attribute dict with name "id". + + force_multigraph : bool (default: False) + If True, return a multigraph with edge keys. If False (the default) + return a multigraph when multiedges are in the graph. + + Returns ------- graph: NetworkX graph @@ -280,14 +310,14 @@ def parse_graphml(graphml_string, node_type=str): They can be obtained from `G.graph` and applied to node and edge attributes if desired using something like this: - >>> default_color = G.graph['node_default']['color'] # doctest: +SKIP + >>> default_color = G.graph["node_default"]["color"] # doctest: +SKIP >>> for node, data in G.nodes(data=True): # doctest: +SKIP - ... if 'color' not in data: - ... data['color']=default_color - >>> default_color = G.graph['edge_default']['color'] # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + >>> default_color = G.graph["edge_default"]["color"] # doctest: +SKIP >>> for u, v, data in G.edges(data=True): # doctest: +SKIP - ... if 'color' not in data: - ... data['color']=default_color + ... if "color" not in data: + ... data["color"] = default_color This implementation does not support mixed graphs (directed and unidirected edges together), hypergraphs, nested graphs, or ports. @@ -298,41 +328,66 @@ def parse_graphml(graphml_string, node_type=str): will be provided. 
""" - reader = GraphMLReader(node_type=node_type) + reader = GraphMLReader(node_type, edge_key_type, force_multigraph) # need to check for multiple graphs glist = list(reader(string=graphml_string)) if len(glist) == 0: # If no graph comes back, try looking for an incomplete header header = '' - new_string = graphml_string.replace('', header) + new_string = graphml_string.replace("", header) glist = list(reader(string=new_string)) if len(glist) == 0: - raise nx.NetworkXError('file not successfully read as graphml') + raise nx.NetworkXError("file not successfully read as graphml") return glist[0] -class GraphML(object): +class GraphML: NS_GRAPHML = "http://graphml.graphdrawing.org/xmlns" NS_XSI = "http://www.w3.org/2001/XMLSchema-instance" # xmlns:y="http://www.yworks.com/xml/graphml" NS_Y = "http://www.yworks.com/xml/graphml" - SCHEMALOCATION = \ - ' '.join(['http://graphml.graphdrawing.org/xmlns', - 'http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd']) - + SCHEMALOCATION = " ".join( + [ + "http://graphml.graphdrawing.org/xmlns", + "http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd", + ] + ) + + types = [ + (int, "integer"), # for Gephi GraphML bug + (str, "yfiles"), + (str, "string"), + (int, "int"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + ] + + # These additions to types allow writing numpy types try: - chr(12345) # Fails on Py!=3. - unicode = str # Py3k's str is our unicode type - long = int # Py3K's int is our long type - except ValueError: - # Python 2.x + import numpy as np + except: pass - - types = [(int, "integer"), # for Gephi GraphML bug - (str, "yfiles"), (str, "string"), (unicode, "string"), - (int, "int"), (long, "long"), - (float, "float"), (float, "double"), - (bool, "boolean")] + else: + # prepend so that python types are created upon read (last entry wins) + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.float_, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types xml_type = dict(types) python_type = dict(reversed(a) for a in types) @@ -343,30 +398,39 @@ class GraphML(object): # http://en.wikibooks.org/wiki/Java_Programming/Literals#Boolean_Literals convert_bool = { # We use data.lower() in actual use. - 'true': True, 'false': False, + "true": True, + "false": False, # Include integer strings for convenience. 
- '0': False, 0: False, - '1': True, 1: True + "0": False, + 0: False, + "1": True, + 1: True, } class GraphMLWriter(GraphML): - def __init__(self, graph=None, encoding="utf-8", prettyprint=True, - infer_numeric_types=False): - try: - import xml.etree.ElementTree - except ImportError: - msg = 'GraphML writer requires xml.elementtree.ElementTree' - raise ImportError(msg) + def __init__( + self, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + ): self.myElement = Element self.infer_numeric_types = infer_numeric_types self.prettyprint = prettyprint + self.named_key_ids = named_key_ids self.encoding = encoding - self.xml = self.myElement("graphml", - {'xmlns': self.NS_GRAPHML, - 'xmlns:xsi': self.NS_XSI, - 'xsi:schemaLocation': self.SCHEMALOCATION}) + self.xml = self.myElement( + "graphml", + { + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) self.keys = {} self.attributes = defaultdict(list) self.attribute_types = defaultdict(set) @@ -392,24 +456,12 @@ def attr_type(self, name, scope, value): if self.infer_numeric_types: types = self.attribute_types[(name, scope)] - try: - chr(12345) # Fails on Py<3. - local_long = int # Py3's int is Py2's long type - local_unicode = str # Py3's str is Py2's unicode type - except ValueError: - # Python 2.x - local_long = long - local_unicode = unicode - if len(types) > 1: - if str in types: + types = {self.xml_type[t] for t in types} + if "string" in types: return str - elif local_unicode in types: - return local_unicode - elif float in types: + elif "float" in types or "double" in types: return float - elif local_long in types: - return local_long else: return int else: @@ -422,34 +474,38 @@ def get_key(self, name, attr_type, scope, default): try: return self.keys[keys_key] except KeyError: - new_id = "d%i" % len(list(self.keys)) + if self.named_key_ids: + new_id = name + else: + new_id = f"d{len(list(self.keys))}" + self.keys[keys_key] = new_id - key_kwargs = {"id": new_id, - "for": scope, - "attr.name": name, - "attr.type": attr_type} + key_kwargs = { + "id": new_id, + "for": scope, + "attr.name": name, + "attr.type": attr_type, + } key_element = self.myElement("key", **key_kwargs) # add subelement for data default value if present if default is not None: default_element = self.myElement("default") - default_element.text = make_str(default) + default_element.text = str(default) key_element.append(default_element) self.xml.insert(0, key_element) return new_id - def add_data(self, name, element_type, value, - scope="all", - default=None): + def add_data(self, name, element_type, value, scope="all", default=None): """ Make a data element for an edge or a node. Keep a log of the type in the keys table. """ if element_type not in self.xml_type: - msg = 'GraphML writer does not support %s as data values.' - raise nx.NetworkXError(msg % element_type) + msg = f"GraphML writer does not support {element_type} as data values." + raise nx.NetworkXError(msg) keyid = self.get_key(name, self.xml_type[element_type], scope, default) data_element = self.myElement("data", key=keyid) - data_element.text = make_str(value) + data_element.text = str(value) return data_element def add_attributes(self, scope, xml_obj, data, default): @@ -457,30 +513,29 @@ def add_attributes(self, scope, xml_obj, data, default): to be added later. See add_graph_element. 
""" for k, v in data.items(): - self.attribute_types[(make_str(k), scope)].add(type(v)) + self.attribute_types[(str(k), scope)].add(type(v)) self.attributes[xml_obj].append([k, v, scope, default.get(k)]) def add_nodes(self, G, graph_element): - default = G.graph.get('node_default', {}) + default = G.graph.get("node_default", {}) for node, data in G.nodes(data=True): - node_element = self.myElement("node", id=make_str(node)) + node_element = self.myElement("node", id=str(node)) self.add_attributes("node", node_element, data, default) graph_element.append(node_element) def add_edges(self, G, graph_element): if G.is_multigraph(): for u, v, key, data in G.edges(data=True, keys=True): - edge_element = self.myElement("edge", source=make_str(u), - target=make_str(v), - id=make_str(key)) - default = G.graph.get('edge_default', {}) + edge_element = self.myElement( + "edge", source=str(u), target=str(v), id=str(key) + ) + default = G.graph.get("edge_default", {}) self.add_attributes("edge", edge_element, data, default) graph_element.append(edge_element) else: for u, v, data in G.edges(data=True): - edge_element = self.myElement("edge", source=make_str(u), - target=make_str(v)) - default = G.graph.get('edge_default', {}) + edge_element = self.myElement("edge", source=str(u), target=str(v)) + default = G.graph.get("edge_default", {}) self.add_attributes("edge", edge_element, data, default) graph_element.append(edge_element) @@ -489,21 +544,23 @@ def add_graph_element(self, G): Serialize graph G in GraphML to the stream. """ if G.is_directed(): - default_edge_type = 'directed' + default_edge_type = "directed" else: - default_edge_type = 'undirected' + default_edge_type = "undirected" - graphid = G.graph.pop('id', None) + graphid = G.graph.pop("id", None) if graphid is None: - graph_element = self.myElement("graph", - edgedefault=default_edge_type) + graph_element = self.myElement("graph", edgedefault=default_edge_type) else: - graph_element = self.myElement("graph", - edgedefault=default_edge_type, - id=graphid) + graph_element = self.myElement( + "graph", edgedefault=default_edge_type, id=graphid + ) default = {} - data = {k: v for (k, v) in G.graph.items() - if k not in ['node_default', 'edge_default']} + data = { + k: v + for (k, v) in G.graph.items() + if k not in ["node_default", "edge_default"] + } self.add_attributes("graph", graph_element, data, default) self.add_nodes(G, graph_element) self.add_edges(G, graph_element) @@ -514,9 +571,11 @@ def add_graph_element(self, G): # See self.attr_type for (xml_obj, data) in self.attributes.items(): for (k, v, scope, default) in data: - xml_obj.append(self.add_data(make_str(k), - self.attr_type(k, scope, v), - make_str(v), scope, default)) + xml_obj.append( + self.add_data( + str(k), self.attr_type(k, scope, v), str(v), scope, default + ) + ) self.xml.append(graph_element) def add_graphs(self, graph_list): @@ -547,7 +606,7 @@ def indent(self, elem, level=0): elem.tail = i -class IncrementalElement(object): +class IncrementalElement: """Wrapper for _IncrementalWriter providing an Element like interface. 
This wrapper does not intend to be a complete implementation but rather to @@ -563,12 +622,20 @@ def append(self, element): class GraphMLWriterLxml(GraphMLWriter): - def __init__(self, path, graph=None, encoding='utf-8', prettyprint=True, - infer_numeric_types=False): + def __init__( + self, + path, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + ): self.myElement = lxmletree.Element self._encoding = encoding self._prettyprint = prettyprint + self.named_key_ids = named_key_ids self.infer_numeric_types = infer_numeric_types self._xml_base = lxmletree.xmlfile(path, encoding=encoding) @@ -582,12 +649,13 @@ def __init__(self, path, graph=None, encoding='utf-8', prettyprint=True, self.xml = [] self._keys = self.xml self._graphml = self._xml.element( - 'graphml', + "graphml", { - 'xmlns': self.NS_GRAPHML, - 'xmlns:xsi': self.NS_XSI, - 'xsi:schemaLocation': self.SCHEMALOCATION - }) + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) self._graphml.__enter__() self.keys = {} self.attribute_types = defaultdict(set) @@ -600,63 +668,59 @@ def add_graph_element(self, G): """ Serialize graph G in GraphML to the stream. """ if G.is_directed(): - default_edge_type = 'directed' + default_edge_type = "directed" else: - default_edge_type = 'undirected' + default_edge_type = "undirected" - graphid = G.graph.pop('id', None) + graphid = G.graph.pop("id", None) if graphid is None: - graph_element = self._xml.element('graph', - edgedefault=default_edge_type) + graph_element = self._xml.element("graph", edgedefault=default_edge_type) else: - graph_element = self._xml.element('graph', - edgedefault=default_edge_type, - id=graphid) + graph_element = self._xml.element( + "graph", edgedefault=default_edge_type, id=graphid + ) # gather attribute types for the whole graph # to find the most general numeric format needed. # Then pass through attributes to create key_id for each.
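        # (Illustration: with infer_numeric_types=True, if one node carries
        #  weight=1 and another weight=2.5, the first pass records
        #  {int, float} for ("weight", "node") and attr_type() then widens
        #  the declared key type to float, written as "double" in the XML.)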
- graphdata = {k: v for k, v in G.graph.items() - if k not in ('node_default', 'edge_default')} - node_default = G.graph.get('node_default', {}) - edge_default = G.graph.get('edge_default', {}) + graphdata = { + k: v + for k, v in G.graph.items() + if k not in ("node_default", "edge_default") + } + node_default = G.graph.get("node_default", {}) + edge_default = G.graph.get("edge_default", {}) # Graph attributes for k, v in graphdata.items(): - self.attribute_types[(make_str(k), "graph")].add(type(v)) + self.attribute_types[(str(k), "graph")].add(type(v)) for k, v in graphdata.items(): element_type = self.xml_type[self.attr_type(k, "graph", v)] - self.get_key(make_str(k), element_type, "graph", None) + self.get_key(str(k), element_type, "graph", None) # Nodes and data - attributes = {} for node, d in G.nodes(data=True): for k, v in d.items(): - self.attribute_types[(make_str(k), "node")].add(type(v)) - if k not in attributes: - attributes[k] = v - for k, v in attributes.items(): - T = self.xml_type[self.attr_type(k, "node", v)] - self.get_key(make_str(k), T, "node", node_default.get(k)) + self.attribute_types[(str(k), "node")].add(type(v)) + for node, d in G.nodes(data=True): + for k, v in d.items(): + T = self.xml_type[self.attr_type(k, "node", v)] + self.get_key(str(k), T, "node", node_default.get(k)) # Edges and data if G.is_multigraph(): - attributes = {} for u, v, ekey, d in G.edges(keys=True, data=True): for k, v in d.items(): - self.attribute_types[(make_str(k), "edge")].add(type(v)) - if k not in attributes: - attributes[k] = v - for k, v in attributes.items(): - T = self.xml_type[self.attr_type(k, "edge", v)] - self.get_key(make_str(k), T, "edge", edge_default.get(k)) + self.attribute_types[(str(k), "edge")].add(type(v)) + for u, v, ekey, d in G.edges(keys=True, data=True): + for k, v in d.items(): + T = self.xml_type[self.attr_type(k, "edge", v)] + self.get_key(str(k), T, "edge", edge_default.get(k)) else: - attributes = {} for u, v, d in G.edges(data=True): for k, v in d.items(): - self.attribute_types[(make_str(k), "edge")].add(type(v)) - if k not in attributes: - attributes[k] = v - for k, v in attributes.items(): - T = self.xml_type[self.attr_type(k, "edge", v)] - self.get_key(make_str(k), T, "edge", edge_default.get(k)) + self.attribute_types[(str(k), "edge")].add(type(v)) + for u, v, d in G.edges(data=True): + for k, v in d.items(): + T = self.xml_type[self.attr_type(k, "edge", v)] + self.get_key(str(k), T, "edge", edge_default.get(k)) # Now add attribute keys to the xml file for key in self.xml: @@ -665,16 +729,16 @@ def add_graph_element(self, G): # The incremental_writer writes each node/edge as it is created incremental_writer = IncrementalElement(self._xml, self._prettyprint) with graph_element: - self.add_attributes('graph', incremental_writer, graphdata, {}) + self.add_attributes("graph", incremental_writer, graphdata, {}) self.add_nodes(G, incremental_writer) # adds attributes too self.add_edges(G, incremental_writer) # adds attributes too def add_attributes(self, scope, xml_obj, data, default): """Appends attribute data.""" for k, v in data.items(): - data_element = self.add_data(make_str(k), - self.attr_type(make_str(k), scope, v), - make_str(v), scope, default.get(k)) + data_element = self.add_data( + str(k), self.attr_type(str(k), scope, v), str(v), scope, default.get(k) + ) xml_obj.append(data_element) def __str__(self): @@ -695,16 +759,11 @@ def dump(self): class GraphMLReader(GraphML): """Read a GraphML document. 
Produces NetworkX graph objects.""" - def __init__(self, node_type=str, edge_key_type=int): - try: - import xml.etree.ElementTree - except ImportError: - msg = 'GraphML reader requires xml.elementtree.ElementTree' - raise ImportError(msg) + def __init__(self, node_type=str, edge_key_type=int, force_multigraph=False): self.node_type = node_type self.edge_key_type = edge_key_type - self.multigraph = False # assume multigraph and test for multiedges - self.edge_ids = {} # dict mapping (u,v) tuples to id edge attributes + self.multigraph = force_multigraph # If False, test for multiedges + self.edge_ids = {} # dict mapping (u,v) tuples to edge id attributes def __call__(self, path=None, string=None): if path is not None: @@ -714,57 +773,56 @@ def __call__(self, path=None, string=None): else: raise ValueError("Must specify either 'path' or 'string' as kwarg") (keys, defaults) = self.find_graphml_keys(self.xml) - for g in self.xml.findall("{%s}graph" % self.NS_GRAPHML): + for g in self.xml.findall(f"{{{self.NS_GRAPHML}}}graph"): yield self.make_graph(g, keys, defaults) def make_graph(self, graph_xml, graphml_keys, defaults, G=None): # set default graph type edgedefault = graph_xml.get("edgedefault", None) if G is None: - if edgedefault == 'directed': + if edgedefault == "directed": G = nx.MultiDiGraph() else: G = nx.MultiGraph() # set defaults for graph attributes - G.graph['node_default'] = {} - G.graph['edge_default'] = {} + G.graph["node_default"] = {} + G.graph["edge_default"] = {} for key_id, value in defaults.items(): - key_for = graphml_keys[key_id]['for'] - name = graphml_keys[key_id]['name'] - python_type = graphml_keys[key_id]['type'] - if key_for == 'node': - G.graph['node_default'].update({name: python_type(value)}) - if key_for == 'edge': - G.graph['edge_default'].update({name: python_type(value)}) + key_for = graphml_keys[key_id]["for"] + name = graphml_keys[key_id]["name"] + python_type = graphml_keys[key_id]["type"] + if key_for == "node": + G.graph["node_default"].update({name: python_type(value)}) + if key_for == "edge": + G.graph["edge_default"].update({name: python_type(value)}) # hyperedges are not supported - hyperedge = graph_xml.find("{%s}hyperedge" % self.NS_GRAPHML) + hyperedge = graph_xml.find(f"{{{self.NS_GRAPHML}}}hyperedge") if hyperedge is not None: raise nx.NetworkXError("GraphML reader doesn't support hyperedges") # add nodes - for node_xml in graph_xml.findall("{%s}node" % self.NS_GRAPHML): + for node_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}node"): self.add_node(G, node_xml, graphml_keys, defaults) # add edges - for edge_xml in graph_xml.findall("{%s}edge" % self.NS_GRAPHML): + for edge_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}edge"): self.add_edge(G, edge_xml, graphml_keys) # add graph data data = self.decode_data_elements(graphml_keys, graph_xml) G.graph.update(data) - # switch to Graph or DiGraph if no parallel edges were found. - if not self.multigraph: - if G.is_directed(): - G = nx.DiGraph(G) - else: - G = nx.Graph(G) - nx.set_edge_attributes(G, values=self.edge_ids, name='id') + # switch to Graph or DiGraph if no parallel edges were found + if self.multigraph: + return G + G = nx.DiGraph(G) if G.is_directed() else nx.Graph(G) + # add explicit edge "id" from file as attribute in NX graph. + nx.set_edge_attributes(G, values=self.edge_ids, name="id") return G def add_node(self, G, node_xml, graphml_keys, defaults): """Add a node to the graph. 
""" # warn on finding unsupported ports tag - ports = node_xml.find("{%s}port" % self.NS_GRAPHML) + ports = node_xml.find(f"{{{self.NS_GRAPHML}}}port") if ports is not None: warnings.warn("GraphML port tag not supported.") # find the node by id and cast it to the appropriate type @@ -773,24 +831,24 @@ def add_node(self, G, node_xml, graphml_keys, defaults): data = self.decode_data_elements(graphml_keys, node_xml) G.add_node(node_id, **data) # get child nodes - if node_xml.attrib.get('yfiles.foldertype') == 'group': - graph_xml = node_xml.find("{%s}graph" % self.NS_GRAPHML) + if node_xml.attrib.get("yfiles.foldertype") == "group": + graph_xml = node_xml.find(f"{{{self.NS_GRAPHML}}}graph") self.make_graph(graph_xml, graphml_keys, defaults, G) def add_edge(self, G, edge_element, graphml_keys): """Add an edge to the graph. """ # warn on finding unsupported ports tag - ports = edge_element.find("{%s}port" % self.NS_GRAPHML) + ports = edge_element.find(f"{{{self.NS_GRAPHML}}}port") if ports is not None: warnings.warn("GraphML port tag not supported.") # raise error if we find mixed directed and undirected edges directed = edge_element.get("directed") - if G.is_directed() and directed == 'false': + if G.is_directed() and directed == "false": msg = "directed=false edge found in directed graph." raise nx.NetworkXError(msg) - if (not G.is_directed()) and directed == 'true': + if (not G.is_directed()) and directed == "true": msg = "directed=true edge found in undirected graph." raise nx.NetworkXError(msg) @@ -809,25 +867,26 @@ def add_edge(self, G, edge_element, graphml_keys): except ValueError: # Could not convert. pass else: - edge_id = data.get('key') + edge_id = data.get("key") if G.has_edge(source, target): # mark this as a multigraph self.multigraph = True # Use add_edges_from to avoid error with add_edge when `'key' in data` + # Note there is only one edge here... 
G.add_edges_from([(source, target, edge_id, data)]) def decode_data_elements(self, graphml_keys, obj_xml): """Use the key information to decode the data XML if present.""" data = {} - for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML): + for data_element in obj_xml.findall(f"{{{self.NS_GRAPHML}}}data"): key = data_element.get("key") try: - data_name = graphml_keys[key]['name'] - data_type = graphml_keys[key]['type'] - except KeyError: - raise nx.NetworkXError("Bad GraphML data: no key %s" % key) + data_name = graphml_keys[key]["name"] + data_type = graphml_keys[key]["type"] + except KeyError as e: + raise nx.NetworkXError(f"Bad GraphML data: no key {key}") from e text = data_element.text # assume anything with subelements is a yfiles extension if text is not None and len(list(data_element)) == 0: @@ -841,27 +900,32 @@ def decode_data_elements(self, graphml_keys, obj_xml): elif len(list(data_element)) > 0: # Assume yfiles as subelements, try to extract node_label node_label = None - for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']: - pref = "{%s}%s/{%s}" % (self.NS_Y, node_type, self.NS_Y) - geometry = data_element.find("%sGeometry" % pref) + for node_type in ["ShapeNode", "SVGNode", "ImageNode"]: + pref = f"{{{self.NS_Y}}}{node_type}/{{{self.NS_Y}}}" + geometry = data_element.find(f"{pref}Geometry") if geometry is not None: - data['x'] = geometry.get('x') - data['y'] = geometry.get('y') + data["x"] = geometry.get("x") + data["y"] = geometry.get("y") if node_label is None: - node_label = data_element.find("%sNodeLabel" % pref) + node_label = data_element.find(f"{pref}NodeLabel") if node_label is not None: - data['label'] = node_label.text + data["label"] = node_label.text # check all the different types of edges available in yEd. - for e in ['PolyLineEdge', 'SplineEdge', 'QuadCurveEdge', - 'BezierEdge', 'ArcEdge']: - pref = "{%s}%s/{%s}" % (self.NS_Y, e, self.NS_Y) - edge_label = data_element.find("%sEdgeLabel" % pref) + for e in [ + "PolyLineEdge", + "SplineEdge", + "QuadCurveEdge", + "BezierEdge", + "ArcEdge", + ]: + pref = f"{{{self.NS_Y}}}{e}/{{{self.NS_Y}}}" + edge_label = data_element.find(f"{pref}EdgeLabel") if edge_label is not None: break if edge_label is not None: - data['label'] = edge_label.text + data["label"] = edge_label.text return data def find_graphml_keys(self, graph_element): @@ -869,42 +933,26 @@ def find_graphml_keys(self, graph_element): """ graphml_keys = {} graphml_key_defaults = {} - for k in graph_element.findall("{%s}key" % self.NS_GRAPHML): + for k in graph_element.findall(f"{{{self.NS_GRAPHML}}}key"): attr_id = k.get("id") - attr_type = k.get('attr.type') + attr_type = k.get("attr.type") attr_name = k.get("attr.name") yfiles_type = k.get("yfiles.type") if yfiles_type is not None: attr_name = yfiles_type - attr_type = 'yfiles' + attr_type = "yfiles" if attr_type is None: attr_type = "string" - warnings.warn("No key type for id %s. Using string" % attr_id) + warnings.warn(f"No key type for id {attr_id}. Using string") if attr_name is None: - raise nx.NetworkXError("Unknown key for id %s."
% attr_id) - graphml_keys[attr_id] = {"name": attr_name, - "type": self.python_type[attr_type], - "for": k.get("for")} + raise nx.NetworkXError(f"Unknown key for id {attr_id}.") + graphml_keys[attr_id] = { + "name": attr_name, + "type": self.python_type[attr_type], + "for": k.get("for"), + } # check for "default" subelement of key element - default = k.find("{%s}default" % self.NS_GRAPHML) + default = k.find(f"{{{self.NS_GRAPHML}}}default") if default is not None: graphml_key_defaults[attr_id] = default.text return graphml_keys, graphml_key_defaults - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import xml.etree.ElementTree - except: - raise SkipTest("xml.etree.ElementTree not available") - - -# fixture for nose tests -def teardown_module(module): - import os - try: - os.unlink('test.graphml') - except: - pass diff --git a/networkx/readwrite/json_graph/adjacency.py b/networkx/readwrite/json_graph/adjacency.py index 71f64dd..cf7c8c7 100644 --- a/networkx/readwrite/json_graph/adjacency.py +++ b/networkx/readwrite/json_graph/adjacency.py @@ -1,19 +1,13 @@ -# Copyright (C) 2011-2013 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. from itertools import chain import networkx as nx -__author__ = """Aric Hagberg """ -__all__ = ['adjacency_data', 'adjacency_graph'] -_attrs = dict(id='id', key='key') +__all__ = ["adjacency_data", "adjacency_graph"] + +_attrs = dict(id="id", key="key") def adjacency_data(G, attrs=_attrs): - """Return data in adjacency format that is suitable for JSON serialization + """Returns data in adjacency format that is suitable for JSON serialization and use in Javascript documents. Parameters @@ -42,7 +36,7 @@ def adjacency_data(G, attrs=_attrs): Examples -------- >>> from networkx.readwrite import json_graph - >>> G = nx.Graph([(1,2)]) + >>> G = nx.Graph([(1, 2)]) >>> data = json_graph.adjacency_data(G) To serialize with json @@ -63,19 +57,19 @@ def adjacency_data(G, attrs=_attrs): adjacency_graph, node_link_data, tree_data """ multigraph = G.is_multigraph() - id_ = attrs['id'] + id_ = attrs["id"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. - key = None if not multigraph else attrs['key'] + key = None if not multigraph else attrs["key"] if id_ == key: - raise nx.NetworkXError('Attribute names are not unique.') + raise nx.NetworkXError("Attribute names are not unique.") data = {} - data['directed'] = G.is_directed() - data['multigraph'] = multigraph - data['graph'] = list(G.graph.items()) - data['nodes'] = [] - data['adjacency'] = [] + data["directed"] = G.is_directed() + data["multigraph"] = multigraph + data["graph"] = list(G.graph.items()) + data["nodes"] = [] + data["adjacency"] = [] for n, nbrdict in G.adjacency(): - data['nodes'].append(dict(chain(G.nodes[n].items(), [(id_, n)]))) + data["nodes"].append(dict(chain(G.nodes[n].items(), [(id_, n)]))) adj = [] if multigraph: for nbr, keys in nbrdict.items(): @@ -84,12 +78,12 @@ def adjacency_data(G, attrs=_attrs): else: for nbr, d in nbrdict.items(): adj.append(dict(chain(d.items(), [(id_, nbr)]))) - data['adjacency'].append(adj) + data["adjacency"].append(adj) return data def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): - """Return graph from adjacency data format. + """Returns graph from adjacency data format. 
Parameters ---------- @@ -116,7 +110,7 @@ def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): Examples -------- >>> from networkx.readwrite import json_graph - >>> G = nx.Graph([(1,2)]) + >>> G = nx.Graph([(1, 2)]) >>> data = json_graph.adjacency_data(G) >>> H = json_graph.adjacency_graph(data) @@ -128,26 +122,26 @@ def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): -------- adjacency_graph, node_link_data, tree_data """ - multigraph = data.get('multigraph', multigraph) - directed = data.get('directed', directed) + multigraph = data.get("multigraph", multigraph) + directed = data.get("directed", directed) if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() - id_ = attrs['id'] + id_ = attrs["id"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. - key = None if not multigraph else attrs['key'] - graph.graph = dict(data.get('graph', [])) + key = None if not multigraph else attrs["key"] + graph.graph = dict(data.get("graph", [])) mapping = [] - for d in data['nodes']: + for d in data["nodes"]: node_data = d.copy() node = node_data.pop(id_) mapping.append(node) graph.add_node(node) graph.nodes[node].update(node_data) - for i, d in enumerate(data['adjacency']): + for i, d in enumerate(data["adjacency"]): source = mapping[i] for tdata in d: target_data = tdata.copy() diff --git a/networkx/readwrite/json_graph/cytoscape.py b/networkx/readwrite/json_graph/cytoscape.py index 6030d16..1a6f0c0 100644 --- a/networkx/readwrite/json_graph/cytoscape.py +++ b/networkx/readwrite/json_graph/cytoscape.py @@ -1,14 +1,12 @@ -# BSD license. - import networkx as nx -__all__ = ['cytoscape_data', 'cytoscape_graph'] +__all__ = ["cytoscape_data", "cytoscape_graph"] -_attrs = dict(name='name', ident='id') +_attrs = dict(name="name", ident="id") def cytoscape_data(G, attrs=None): - """Return data in Cytoscape JSON format (cyjs). + """Returns data in Cytoscape JSON format (cyjs). 
Parameters ---------- @@ -32,12 +30,12 @@ def cytoscape_data(G, attrs=None): name = attrs["name"] ident = attrs["ident"] - if len(set([name, ident])) < 2: - raise nx.NetworkXError('Attribute names are not unique.') + if len({name, ident}) < 2: + raise nx.NetworkXError("Attribute names are not unique.") jsondata = {"data": list(G.graph.items())} - jsondata['directed'] = G.is_directed() - jsondata['multigraph'] = G.is_multigraph() + jsondata["directed"] = G.is_directed() + jsondata["multigraph"] = G.is_multigraph() jsondata["elements"] = {"nodes": [], "edges": []} nodes = jsondata["elements"]["nodes"] edges = jsondata["elements"]["edges"] @@ -74,18 +72,18 @@ def cytoscape_graph(data, attrs=None): name = attrs["name"] ident = attrs["ident"] - if len(set([ident, name])) < 2: - raise nx.NetworkXError('Attribute names are not unique.') + if len({ident, name}) < 2: + raise nx.NetworkXError("Attribute names are not unique.") - multigraph = data.get('multigraph') - directed = data.get('directed') + multigraph = data.get("multigraph") + directed = data.get("directed") if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() - graph.graph = dict(data.get('data')) + graph.graph = dict(data.get("data")) for d in data["elements"]["nodes"]: node_data = d["data"].copy() node = d["data"]["value"] diff --git a/networkx/readwrite/json_graph/jit.py b/networkx/readwrite/json_graph/jit.py index 3dae584..77e9c1e 100644 --- a/networkx/readwrite/json_graph/jit.py +++ b/networkx/readwrite/json_graph/jit.py @@ -1,10 +1,3 @@ -# Copyright (C) 2011-2018 by -# Aric Hagberg <hagberg@lanl.gov> -# Dan Schult <dschult@colgate.edu> -# Pieter Swart <swart@lanl.gov> -# All rights reserved. -# BSD license. - """ Read and write NetworkX graphs as JavaScript InfoVis Toolkit (JIT) format JSON. @@ -37,7 +30,7 @@ import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['jit_graph', 'jit_data'] +__all__ = ["jit_graph", "jit_data"] def jit_graph(data, create_using=None): @@ -60,27 +53,34 @@ def jit_graph(data, create_using=None): G = create_using G.clear() + if isinstance(data, str): + data = json.loads(data) + for node in data: - G.add_node(node['id'], **node['data']) - if node.get('adjacencies') is not None: - for adj in node['adjacencies']: - G.add_edge(node['id'], adj['nodeTo'], **adj['data']) + G.add_node(node["id"], **node["data"]) + if node.get("adjacencies") is not None: + for adj in node["adjacencies"]: + G.add_edge(node["id"], adj["nodeTo"], **adj["data"]) return G -@not_implemented_for('multigraph') -def jit_data(G, indent=None): - """Return data in JIT JSON format. +@not_implemented_for("multigraph") +def jit_data(G, indent=None, default=None): + """Returns data in JIT JSON format. Parameters ---------- G : NetworkX Graph indent: optional, default=None - If indent is a non-negative integer, then JSON array elements and object - members will be pretty-printed with that indent level. An indent level - of 0, or negative, will only insert newlines. None (the default) selects - the most compact representation. + If indent is a non-negative integer, then JSON array elements and + object members will be pretty-printed with that indent level. + An indent level of 0, or negative, will only insert newlines. + None (the default) selects the most compact representation. + + default: optional, default=None + Passed through to the json.dumps function so that custom objects + used as nodes can be serialized.
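        For example (a sketch; ``default=str`` is one simple choice when
        nodes are instances of a custom class):

        >>> jit_data(G, default=str)  # doctest: +SKIP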
Returns ------- @@ -88,21 +88,16 @@ def jit_data(G, indent=None): """ json_graph = [] for node in G.nodes(): - json_node = { - "id": node, - "name": node - } + json_node = {"id": node, "name": node} # node data json_node["data"] = G.nodes[node] # adjacencies if G[node]: json_node["adjacencies"] = [] for neighbour in G[node]: - adjacency = { - "nodeTo": neighbour, - } + adjacency = {"nodeTo": neighbour} # adjacency data adjacency["data"] = G.edges[node, neighbour] json_node["adjacencies"].append(adjacency) json_graph.append(json_node) - return json.dumps(json_graph, indent=indent) + return json.dumps(json_graph, indent=indent, default=default) diff --git a/networkx/readwrite/json_graph/node_link.py b/networkx/readwrite/json_graph/node_link.py index c8e9c20..6ccc3b3 100644 --- a/networkx/readwrite/json_graph/node_link.py +++ b/networkx/readwrite/json_graph/node_link.py @@ -1,24 +1,15 @@ -# Copyright (C) 2011-2018 by -# -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Michael E. Rose -# -# All rights reserved. -# BSD license. from itertools import chain, count import networkx as nx -from networkx.utils import make_str, to_tuple -__all__ = ['node_link_data', 'node_link_graph'] +from networkx.utils import to_tuple +__all__ = ["node_link_data", "node_link_graph"] -_attrs = dict(source='source', target='target', name='id', - key='key', link='links') + +_attrs = dict(source="source", target="target", name="id", key="key", link="links") def node_link_data(G, attrs=None): - """Return data in node-link format that is suitable for JSON serialization + """Returns data in node-link format that is suitable for JSON serialization and use in Javascript documents. Parameters @@ -50,16 +41,20 @@ def node_link_data(G, attrs=None): Examples -------- >>> from networkx.readwrite import json_graph - >>> G = nx.Graph([('A', 'B')]) + >>> G = nx.Graph([("A", "B")]) >>> data1 = json_graph.node_link_data(G) >>> H = nx.gn_graph(2) - >>> data2 = json_graph.node_link_data(H, {'link': 'edges', 'source': 'from', 'target': 'to'}) + >>> data2 = json_graph.node_link_data( + ... H, {"link": "edges", "source": "from", "target": "to"} + ... ) To serialize with json >>> import json >>> s1 = json.dumps(data1) - >>> s2 = json.dumps(data2, default={'link': 'edges', 'source': 'from', 'target': 'to'}) + >>> s2 = json.dumps( + ... data2, default={"link": "edges", "source": "from", "target": "to"} + ... ) Notes ----- @@ -78,31 +73,35 @@ def node_link_data(G, attrs=None): attrs = _attrs else: attrs.update({k: v for (k, v) in _attrs.items() if k not in attrs}) - name = attrs['name'] - source = attrs['source'] - target = attrs['target'] - links = attrs['link'] + name = attrs["name"] + source = attrs["source"] + target = attrs["target"] + links = attrs["link"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. 
- key = None if not multigraph else attrs['key'] + key = None if not multigraph else attrs["key"] if len({source, target, key}) < 3: - raise nx.NetworkXError('Attribute names are not unique.') - data = {'directed': G.is_directed(), 'multigraph': multigraph, 'graph': G.graph, - 'nodes': [dict(chain(G.nodes[n].items(), [(name, n)])) for n in G]} + raise nx.NetworkXError("Attribute names are not unique.") + data = { + "directed": G.is_directed(), + "multigraph": multigraph, + "graph": G.graph, + "nodes": [dict(chain(G.nodes[n].items(), [(name, n)])) for n in G], + } if multigraph: data[links] = [ - dict(chain(d.items(), - [(source, u), (target, v), (key, k)])) - for u, v, k, d in G.edges(keys=True, data=True)] + dict(chain(d.items(), [(source, u), (target, v), (key, k)])) + for u, v, k, d in G.edges(keys=True, data=True) + ] else: data[links] = [ - dict(chain(d.items(), - [(source, u), (target, v)])) - for u, v, d in G.edges(data=True)] + dict(chain(d.items(), [(source, u), (target, v)])) + for u, v, d in G.edges(data=True) + ] return data def node_link_graph(data, directed=False, multigraph=True, attrs=None): - """Return graph from node-link data format. + """Returns graph from node-link data format. Parameters ---------- @@ -131,7 +130,7 @@ def node_link_graph(data, directed=False, multigraph=True, attrs=None): Examples -------- >>> from networkx.readwrite import json_graph - >>> G = nx.Graph([('A', 'B')]) + >>> G = nx.Graph([("A", "B")]) >>> data = json_graph.node_link_data(G) >>> H = json_graph.node_link_graph(data) @@ -148,36 +147,38 @@ def node_link_graph(data, directed=False, multigraph=True, attrs=None): attrs = _attrs else: attrs.update({k: v for k, v in _attrs.items() if k not in attrs}) - multigraph = data.get('multigraph', multigraph) - directed = data.get('directed', directed) + multigraph = data.get("multigraph", multigraph) + directed = data.get("directed", directed) if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() - name = attrs['name'] - source = attrs['source'] - target = attrs['target'] - links = attrs['link'] + name = attrs["name"] + source = attrs["source"] + target = attrs["target"] + links = attrs["link"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. 
- key = None if not multigraph else attrs['key'] - graph.graph = data.get('graph', {}) + key = None if not multigraph else attrs["key"] + graph.graph = data.get("graph", {}) c = count() - for d in data['nodes']: + for d in data["nodes"]: node = to_tuple(d.get(name, next(c))) - nodedata = dict((make_str(k), v) for k, v in d.items() if k != name) + nodedata = {str(k): v for k, v in d.items() if k != name} graph.add_node(node, **nodedata) for d in data[links]: src = tuple(d[source]) if isinstance(d[source], list) else d[source] tgt = tuple(d[target]) if isinstance(d[target], list) else d[target] if not multigraph: - edgedata = dict((make_str(k), v) for k, v in d.items() - if k != source and k != target) + edgedata = {str(k): v for k, v in d.items() if k != source and k != target} graph.add_edge(src, tgt, **edgedata) else: ky = d.get(key, None) - edgedata = dict((make_str(k), v) for k, v in d.items() - if k != source and k != target and k != key) + edgedata = { + str(k): v + for k, v in d.items() + if k != source and k != target and k != key + } graph.add_edge(src, tgt, ky, **edgedata) return graph diff --git a/networkx/readwrite/json_graph/tests/__init__.py b/networkx/readwrite/json_graph/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/readwrite/json_graph/tests/test_adjacency.py b/networkx/readwrite/json_graph/tests/test_adjacency.py index 5dbb84c..57a2a6b 100644 --- a/networkx/readwrite/json_graph/tests/test_adjacency.py +++ b/networkx/readwrite/json_graph/tests/test_adjacency.py @@ -1,11 +1,10 @@ import json -from nose.tools import assert_equal, assert_true, raises +import pytest import networkx as nx -from networkx.readwrite.json_graph import * +from networkx.readwrite.json_graph import adjacency_data, adjacency_graph class TestAdjacency: - def test_graph(self): G = nx.path_graph(4) H = adjacency_graph(adjacency_data(G)) @@ -13,47 +12,47 @@ def test_graph(self): def test_graph_attributes(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph['foo'] = 'bar' - G.graph[1] = 'one' + G.graph["foo"] = "bar" + G.graph[1] = "one" H = adjacency_graph(adjacency_data(G)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 d = json.dumps(adjacency_data(G)) H = adjacency_graph(json.loads(d)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.graph[1], 'one') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) + assert H.graph["foo"] == "bar" + assert H.graph[1] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 def test_digraph(self): G = nx.DiGraph() nx.add_path(G, [1, 2, 3]) H = adjacency_graph(adjacency_data(G)) - assert_true(H.is_directed()) + assert H.is_directed() nx.is_isomorphic(G, H) def test_multidigraph(self): G = nx.MultiDiGraph() nx.add_path(G, [1, 2, 3]) H = adjacency_graph(adjacency_data(G)) - assert_true(H.is_directed()) - assert_true(H.is_multigraph()) + assert H.is_directed() + assert H.is_multigraph() def test_multigraph(self): G = nx.MultiGraph() - G.add_edge(1, 2, key='first') - G.add_edge(1, 2, key='second', color='blue') + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") H = adjacency_graph(adjacency_data(G)) nx.is_isomorphic(G, H) - assert_equal(H[1][2]['second']['color'], 'blue') + assert H[1][2]["second"]["color"] == "blue" - 
@raises(nx.NetworkXError) def test_exception(self): - G = nx.MultiDiGraph() - attrs = dict(id='node', key='node') - adjacency_data(G, attrs) + with pytest.raises(nx.NetworkXError): + G = nx.MultiDiGraph() + attrs = dict(id="node", key="node") + adjacency_data(G, attrs) diff --git a/networkx/readwrite/json_graph/tests/test_cytoscape.py b/networkx/readwrite/json_graph/tests/test_cytoscape.py index 502d3af..ee4799f 100644 --- a/networkx/readwrite/json_graph/tests/test_cytoscape.py +++ b/networkx/readwrite/json_graph/tests/test_cytoscape.py @@ -1,11 +1,10 @@ import json -from nose.tools import assert_equal, assert_true, raises +import pytest import networkx as nx -from networkx.readwrite.json_graph import * +from networkx.readwrite.json_graph import cytoscape_data, cytoscape_graph class TestCytoscape: - def test_graph(self): G = nx.path_graph(4) H = cytoscape_graph(cytoscape_data(G)) @@ -13,52 +12,52 @@ def test_graph(self): def test_graph_attributes(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph['foo'] = 'bar' - G.graph[1] = 'one' + G.graph["foo"] = "bar" + G.graph[1] = "one" G.add_node(3, name="node", id="123") H = cytoscape_graph(cytoscape_data(G)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) - assert_equal(H.nodes[3]['name'], 'node') - assert_equal(H.nodes[3]['id'], '123') + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + assert H.nodes[3]["name"] == "node" + assert H.nodes[3]["id"] == "123" d = json.dumps(cytoscape_data(G)) H = cytoscape_graph(json.loads(d)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.graph[1], 'one') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) - assert_equal(H.nodes[3]['name'], 'node') - assert_equal(H.nodes[3]['id'], '123') + assert H.graph["foo"] == "bar" + assert H.graph[1] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + assert H.nodes[3]["name"] == "node" + assert H.nodes[3]["id"] == "123" def test_digraph(self): G = nx.DiGraph() nx.add_path(G, [1, 2, 3]) H = cytoscape_graph(cytoscape_data(G)) - assert_true(H.is_directed()) + assert H.is_directed() nx.is_isomorphic(G, H) def test_multidigraph(self): G = nx.MultiDiGraph() nx.add_path(G, [1, 2, 3]) H = cytoscape_graph(cytoscape_data(G)) - assert_true(H.is_directed()) - assert_true(H.is_multigraph()) + assert H.is_directed() + assert H.is_multigraph() def test_multigraph(self): G = nx.MultiGraph() - G.add_edge(1, 2, key='first') - G.add_edge(1, 2, key='second', color='blue') + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") H = cytoscape_graph(cytoscape_data(G)) - assert_true(nx.is_isomorphic(G, H)) - assert_equal(H[1][2]['second']['color'], 'blue') + assert nx.is_isomorphic(G, H) + assert H[1][2]["second"]["color"] == "blue" - @raises(nx.NetworkXError) def test_exception(self): - G = nx.MultiDiGraph() - attrs = dict(name='node', ident='node') - cytoscape_data(G, attrs) + with pytest.raises(nx.NetworkXError): + G = nx.MultiDiGraph() + attrs = dict(name="node", ident="node") + cytoscape_data(G, attrs) diff --git a/networkx/readwrite/json_graph/tests/test_jit.py b/networkx/readwrite/json_graph/tests/test_jit.py index e63c5d3..9a2ef68 100644 --- a/networkx/readwrite/json_graph/tests/test_jit.py +++ b/networkx/readwrite/json_graph/tests/test_jit.py @@ -1,21 +1,21 @@ import json -from nose.tools import assert_true, 
assert_false, assert_raises +import pytest import networkx as nx from networkx.readwrite.json_graph import jit_data, jit_graph -class TestJIT(object): +class TestJIT: def test_jit(self): G = nx.Graph() - G.add_node('Node1', node_data='foobar') - G.add_node('Node3', node_data='bar') - G.add_node('Node4') - G.add_edge('Node1', 'Node2', weight=9, something='isSomething') - G.add_edge('Node2', 'Node3', weight=4, something='isNotSomething') - G.add_edge('Node1', 'Node2') + G.add_node("Node1", node_data="foobar") + G.add_node("Node3", node_data="bar") + G.add_node("Node4") + G.add_edge("Node1", "Node2", weight=9, something="isSomething") + G.add_edge("Node2", "Node3", weight=4, something="isNotSomething") + G.add_edge("Node1", "Node2") d = jit_data(G) K = jit_graph(json.loads(d)) - assert_true(nx.is_isomorphic(G, K)) + assert nx.is_isomorphic(G, K) def test_jit_2(self): G = nx.Graph() @@ -26,7 +26,7 @@ def test_jit_2(self): G.add_edge(1, 2) d = jit_data(G) K = jit_graph(json.loads(d)) - assert_true(nx.is_isomorphic(G, K)) + assert nx.is_isomorphic(G, K) def test_jit_directed(self): G = nx.DiGraph() @@ -37,7 +37,7 @@ def test_jit_directed(self): G.add_edge(1, 2) d = jit_data(G) K = jit_graph(json.loads(d), create_using=nx.DiGraph()) - assert_true(nx.is_isomorphic(G, K)) + assert nx.is_isomorphic(G, K) def test_jit_multi_directed(self): G = nx.MultiDiGraph() @@ -46,12 +46,19 @@ def test_jit_multi_directed(self): G.add_edge(1, 2, weight=9, something=0) G.add_edge(2, 3, weight=4, something=3) G.add_edge(1, 2) - assert_raises(nx.NetworkXNotImplemented, jit_data, G) + pytest.raises(nx.NetworkXNotImplemented, jit_data, G) H = nx.DiGraph(G) d = jit_data(H) K = jit_graph(json.loads(d), create_using=nx.MultiDiGraph()) - assert_true(nx.is_isomorphic(H, K)) + assert nx.is_isomorphic(H, K) K.add_edge(1, 2) - assert_false(nx.is_isomorphic(H, K)) - assert_true(nx.is_isomorphic(G, K)) + assert not nx.is_isomorphic(H, K) + assert nx.is_isomorphic(G, K) + + def test_jit_round_trip(self): + G = nx.Graph() + d = nx.jit_data(G) + H = jit_graph(json.loads(d)) + K = jit_graph(d) + assert nx.is_isomorphic(H, K) diff --git a/networkx/readwrite/json_graph/tests/test_node_link.py b/networkx/readwrite/json_graph/tests/test_node_link.py index 92a565b..e5773d2 100644 --- a/networkx/readwrite/json_graph/tests/test_node_link.py +++ b/networkx/readwrite/json_graph/tests/test_node_link.py @@ -1,48 +1,46 @@ -# -*- coding: utf-8 -*- import json -from nose.tools import assert_equal, assert_true, raises +import pytest import networkx as nx -from networkx.readwrite.json_graph import * +from networkx.readwrite.json_graph import node_link_data, node_link_graph class TestNodeLink: - def test_graph(self): G = nx.path_graph(4) H = node_link_graph(node_link_data(G)) - assert_true(nx.is_isomorphic(G, H)) + assert nx.is_isomorphic(G, H) def test_graph_attributes(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph[1] = 'one' - G.graph['foo'] = 'bar' + G.graph[1] = "one" + G.graph["foo"] = "bar" H = node_link_graph(node_link_data(G)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 d = json.dumps(node_link_data(G)) H = node_link_graph(json.loads(d)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.graph['1'], 'one') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) + assert 
H.graph["foo"] == "bar" + assert H.graph["1"] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 def test_digraph(self): G = nx.DiGraph() H = node_link_graph(node_link_data(G)) - assert_true(H.is_directed()) + assert H.is_directed() def test_multigraph(self): G = nx.MultiGraph() - G.add_edge(1, 2, key='first') - G.add_edge(1, 2, key='second', color='blue') + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") H = node_link_graph(node_link_data(G)) nx.is_isomorphic(G, H) - assert_equal(H[1][2]['second']['color'], 'blue') + assert H[1][2]["second"]["color"] == "blue" def test_graph_with_tuple_nodes(self): G = nx.Graph() @@ -51,55 +49,56 @@ def test_graph_with_tuple_nodes(self): dumped_d = json.dumps(d) dd = json.loads(dumped_d) H = node_link_graph(dd) - assert_equal(H.nodes[(0, 0)], G.nodes[(0, 0)]) - assert_equal(H[(0, 0)][(1, 0)]['color'], [255, 255, 0]) + assert H.nodes[(0, 0)] == G.nodes[(0, 0)] + assert H[(0, 0)][(1, 0)]["color"] == [255, 255, 0] def test_unicode_keys(self): - try: - q = unicode("qualité", 'utf-8') - except NameError: - q = "qualité" + q = "qualité" G = nx.Graph() G.add_node(1, **{q: q}) s = node_link_data(G) output = json.dumps(s, ensure_ascii=False) data = json.loads(output) H = node_link_graph(data) - assert_equal(H.nodes[1][q], q) + assert H.nodes[1][q] == q - @raises(nx.NetworkXError) def test_exception(self): - G = nx.MultiDiGraph() - attrs = dict(name='node', source='node', target='node', key='node') - node_link_data(G, attrs) + with pytest.raises(nx.NetworkXError): + G = nx.MultiDiGraph() + attrs = dict(name="node", source="node", target="node", key="node") + node_link_data(G, attrs) def test_string_ids(self): - try: - q = unicode("qualité", 'utf-8') - except NameError: - q = "qualité" - + q = "qualité" G = nx.DiGraph() - G.add_node('A') + G.add_node("A") G.add_node(q) - G.add_edge('A', q) + G.add_edge("A", q) data = node_link_data(G) - assert_equal(data['links'][0]['source'], 'A') - assert_equal(data['links'][0]['target'], q) + assert data["links"][0]["source"] == "A" + assert data["links"][0]["target"] == q H = node_link_graph(data) - assert_true(nx.is_isomorphic(G, H)) + assert nx.is_isomorphic(G, H) def test_custom_attrs(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph[1] = 'one' - G.graph['foo'] = 'bar' - - attrs = dict(source='c_source', target='c_target', name='c_id', key='c_key', link='c_links') - - H = node_link_graph(node_link_data(G, attrs=attrs), multigraph=False, attrs=attrs) - assert_true(nx.is_isomorphic(G, H)) - assert_equal(H.graph['foo'], 'bar') - assert_equal(H.nodes[1]['color'], 'red') - assert_equal(H[1][2]['width'], 7) + G.graph[1] = "one" + G.graph["foo"] = "bar" + + attrs = dict( + source="c_source", + target="c_target", + name="c_id", + key="c_key", + link="c_links", + ) + + H = node_link_graph( + node_link_data(G, attrs=attrs), multigraph=False, attrs=attrs + ) + assert nx.is_isomorphic(G, H) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 diff --git a/networkx/readwrite/json_graph/tests/test_tree.py b/networkx/readwrite/json_graph/tests/test_tree.py index cb59532..8deda52 100644 --- a/networkx/readwrite/json_graph/tests/test_tree.py +++ b/networkx/readwrite/json_graph/tests/test_tree.py @@ -1,14 +1,13 @@ import json -from nose.tools import assert_equal, assert_true, raises +import pytest import networkx as nx -from networkx.readwrite.json_graph import * +from 
networkx.readwrite.json_graph import tree_data, tree_graph class TestTree: - def test_graph(self): G = nx.DiGraph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], color="red") G.add_edge(1, 2, foo=7) G.add_edge(1, 3, foo=10) G.add_edge(3, 4, foo=10) @@ -17,20 +16,20 @@ def test_graph(self): def test_graph_attributes(self): G = nx.DiGraph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], color="red") G.add_edge(1, 2, foo=7) G.add_edge(1, 3, foo=10) G.add_edge(3, 4, foo=10) H = tree_graph(tree_data(G, 1)) - assert_equal(H.nodes[1]['color'], 'red') + assert H.nodes[1]["color"] == "red" d = json.dumps(tree_data(G, 1)) H = tree_graph(json.loads(d)) - assert_equal(H.nodes[1]['color'], 'red') + assert H.nodes[1]["color"] == "red" - @raises(nx.NetworkXError) def test_exception(self): - G = nx.MultiDiGraph() - G.add_node(0) - attrs = dict(id='node', children='node') - tree_data(G, 0, attrs) + with pytest.raises(nx.NetworkXError): + G = nx.MultiDiGraph() + G.add_node(0) + attrs = dict(id="node", children="node") + tree_data(G, 0, attrs) diff --git a/networkx/readwrite/json_graph/tree.py b/networkx/readwrite/json_graph/tree.py index af49c9a..665f35f 100644 --- a/networkx/readwrite/json_graph/tree.py +++ b/networkx/readwrite/json_graph/tree.py @@ -1,20 +1,13 @@ -# Copyright (C) 2011 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. from itertools import chain import networkx as nx -from networkx.utils import make_str -__author__ = """Aric Hagberg (hagberg@lanl.gov))""" -__all__ = ['tree_data', 'tree_graph'] -_attrs = dict(id='id', children='children') +__all__ = ["tree_data", "tree_graph"] + +_attrs = dict(id="id", children="children") def tree_data(G, root, attrs=_attrs): - """Return data in tree format that is suitable for JSON serialization + """Returns data in tree format that is suitable for JSON serialization and use in Javascript documents. Parameters @@ -47,8 +40,8 @@ def tree_data(G, root, attrs=_attrs): Examples -------- >>> from networkx.readwrite import json_graph - >>> G = nx.DiGraph([(1,2)]) - >>> data = json_graph.tree_data(G,root=1) + >>> G = nx.DiGraph([(1, 2)]) + >>> data = json_graph.tree_data(G, root=1) To serialize with json @@ -73,10 +66,10 @@ def tree_data(G, root, attrs=_attrs): if not G.is_directed(): raise TypeError("G is not directed.") - id_ = attrs['id'] - children = attrs['children'] + id_ = attrs["id"] + children = attrs["children"] if id_ == children: - raise nx.NetworkXError('Attribute names are not unique.') + raise nx.NetworkXError("Attribute names are not unique.") def add_children(n, G): nbrs = G[n] @@ -97,7 +90,7 @@ def add_children(n, G): def tree_graph(data, attrs=_attrs): - """Return graph from tree data format. + """Returns graph from tree data format. 
Parameters ---------- @@ -117,8 +110,8 @@ def tree_graph(data, attrs=_attrs): Examples -------- >>> from networkx.readwrite import json_graph - >>> G = nx.DiGraph([(1,2)]) - >>> data = json_graph.tree_data(G,root=1) + >>> G = nx.DiGraph([(1, 2)]) + >>> data = json_graph.tree_data(G, root=1) >>> H = json_graph.tree_graph(data) Notes @@ -130,8 +123,8 @@ def tree_graph(data, attrs=_attrs): tree_graph, node_link_data, adjacency_data """ graph = nx.DiGraph() - id_ = attrs['id'] - children = attrs['children'] + id_ = attrs["id"] + children = attrs["children"] def add_children(parent, children_): for data in children_: @@ -140,14 +133,14 @@ def add_children(parent, children_): grandchildren = data.get(children, []) if grandchildren: add_children(child, grandchildren) - nodedata = dict((make_str(k), v) for k, v in data.items() - if k != id_ and k != children) + nodedata = { + str(k): v for k, v in data.items() if k != id_ and k != children + } graph.add_node(child, **nodedata) root = data[id_] children_ = data.get(children, []) - nodedata = dict((make_str(k), v) for k, v in data.items() - if k != id_ and k != children) + nodedata = {str(k): v for k, v in data.items() if k != id_ and k != children} graph.add_node(root, **nodedata) add_children(root, children_) return graph diff --git a/networkx/readwrite/leda.py b/networkx/readwrite/leda.py index d5869ec..a9b353c 100644 --- a/networkx/readwrite/leda.py +++ b/networkx/readwrite/leda.py @@ -10,23 +10,16 @@ """ # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. -__author__ = """Aric Hagberg (hagberg@lanl.gov)""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -__all__ = ['read_leda', 'parse_leda'] +__all__ = ["read_leda", "parse_leda"] import networkx as nx from networkx.exception import NetworkXError -from networkx.utils import open_file, is_string_like +from networkx.utils import open_file -@open_file(0, mode='rb') -def read_leda(path, encoding='UTF-8'): +@open_file(0, mode="rb") +def read_leda(path, encoding="UTF-8"): """Read graph in LEDA format from path. Parameters @@ -72,10 +65,15 @@ def parse_leda(lines): ---------- .. 
[1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html """ - if is_string_like(lines): - lines = iter(lines.split('\n')) - lines = iter([line.rstrip('\n') for line in lines - if not (line.startswith('#') or line.startswith('\n') or line == '')]) + if isinstance(lines, str): + lines = iter(lines.split("\n")) + lines = iter( + [ + line.rstrip("\n") + for line in lines + if not (line.startswith("#") or line.startswith("\n") or line == "") + ] + ) for i in range(3): next(lines) # Graph @@ -89,7 +87,7 @@ def parse_leda(lines): n = int(next(lines)) # number of nodes node = {} for i in range(1, n + 1): # LEDA counts from 1 to n - symbol = next(lines).rstrip().strip('|{}| ') + symbol = next(lines).rstrip().strip("|{}| ") if symbol == "": symbol = str(i) # use int if no label - could be trouble node[i] = symbol @@ -101,8 +99,8 @@ def parse_leda(lines): for i in range(m): try: s, t, reversal, label = next(lines).split() - except: - raise NetworkXError('Too few fields in LEDA.GRAPH edge %d' % (i + 1)) + except BaseException as e: + raise NetworkXError(f"Too few fields in LEDA.GRAPH edge {i+1}") from e # BEWARE: no handling of reversal edges G.add_edge(node[int(s)], node[int(t)], label=label[2:-2]) return G diff --git a/networkx/readwrite/multiline_adjlist.py b/networkx/readwrite/multiline_adjlist.py index f6d52d0..e12f900 100644 --- a/networkx/readwrite/multiline_adjlist.py +++ b/networkx/readwrite/multiline_adjlist.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ ************************* Multi-line Adjacency List @@ -25,26 +24,19 @@ d 1 e """ -__author__ = '\n'.join(['Aric Hagberg ', - 'Dan Schult ', - 'Loïc Séguin-C. ']) -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - -__all__ = ['generate_multiline_adjlist', - 'write_multiline_adjlist', - 'parse_multiline_adjlist', - 'read_multiline_adjlist'] - -from networkx.utils import make_str, open_file + +__all__ = [ + "generate_multiline_adjlist", + "write_multiline_adjlist", + "parse_multiline_adjlist", + "read_multiline_adjlist", +] + +from networkx.utils import open_file import networkx as nx -def generate_multiline_adjlist(G, delimiter=' '): +def generate_multiline_adjlist(G, delimiter=" "): """Generate a single line of the graph G in multiline adjacency list format. 
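As a quick illustration of what this generator emits (each node line is `node degree`, followed by one line per remaining neighbour with its edge-data dict; output below assumes the default space delimiter):

>>> import networkx as nx
>>> for line in nx.generate_multiline_adjlist(nx.path_graph(3)):
...     print(line)
0 1
1 {}
1 1
2 {}
2 0

The trailing `2 0` records the last node, whose only neighbour was already written.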
Parameters @@ -88,58 +80,61 @@ def generate_multiline_adjlist(G, delimiter=' '): if G.is_directed(): if G.is_multigraph(): for s, nbrs in G.adjacency(): - nbr_edges = [(u, data) - for u, datadict in nbrs.items() - for key, data in datadict.items()] + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + for key, data in datadict.items() + ] deg = len(nbr_edges) - yield make_str(s) + delimiter + str(deg) + yield str(s) + delimiter + str(deg) for u, d in nbr_edges: if d is None: - yield make_str(u) + yield str(u) else: - yield make_str(u) + delimiter + make_str(d) + yield str(u) + delimiter + str(d) else: # directed single edges for s, nbrs in G.adjacency(): deg = len(nbrs) - yield make_str(s) + delimiter + str(deg) + yield str(s) + delimiter + str(deg) for u, d in nbrs.items(): if d is None: - yield make_str(u) + yield str(u) else: - yield make_str(u) + delimiter + make_str(d) + yield str(u) + delimiter + str(d) else: # undirected if G.is_multigraph(): seen = set() # helper dict used to avoid duplicate edges for s, nbrs in G.adjacency(): - nbr_edges = [(u, data) - for u, datadict in nbrs.items() - if u not in seen - for key, data in datadict.items()] + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + if u not in seen + for key, data in datadict.items() + ] deg = len(nbr_edges) - yield make_str(s) + delimiter + str(deg) + yield str(s) + delimiter + str(deg) for u, d in nbr_edges: if d is None: - yield make_str(u) + yield str(u) else: - yield make_str(u) + delimiter + make_str(d) + yield str(u) + delimiter + str(d) seen.add(s) else: # undirected single edges seen = set() # helper dict used to avoid duplicate edges for s, nbrs in G.adjacency(): nbr_edges = [(u, d) for u, d in nbrs.items() if u not in seen] deg = len(nbr_edges) - yield make_str(s) + delimiter + str(deg) + yield str(s) + delimiter + str(deg) for u, d in nbr_edges: if d is None: - yield make_str(u) + yield str(u) else: - yield make_str(u) + delimiter + make_str(d) + yield str(u) + delimiter + str(d) seen.add(s) -@open_file(1, mode='wb') -def write_multiline_adjlist(G, path, delimiter=' ', - comments='#', encoding='utf-8'): +@open_file(1, mode="wb") +def write_multiline_adjlist(G, path, delimiter=" ", comments="#", encoding="utf-8"): """ Write the graph G in multiline adjacency list format to path Parameters @@ -157,18 +152,18 @@ def write_multiline_adjlist(G, path, delimiter=' ', Examples -------- - >>> G=nx.path_graph(4) - >>> nx.write_multiline_adjlist(G,"test.adjlist") + >>> G = nx.path_graph(4) + >>> nx.write_multiline_adjlist(G, "test.adjlist") The path can be a file handle or a string with the name of the file. If a file handle is provided, it has to be opened in 'wb' mode. - >>> fh=open("test.adjlist",'wb') - >>> nx.write_multiline_adjlist(G,fh) + >>> fh = open("test.adjlist", "wb") + >>> nx.write_multiline_adjlist(G, fh) Filenames ending in .gz or .bz2 will be compressed. 
- >>> nx.write_multiline_adjlist(G,"test.adjlist.gz") + >>> nx.write_multiline_adjlist(G, "test.adjlist.gz") See Also -------- @@ -178,19 +173,23 @@ def write_multiline_adjlist(G, path, delimiter=' ', import time pargs = comments + " ".join(sys.argv) - header = ("{}\n".format(pargs) - + comments + " GMT {}\n".format(time.asctime(time.gmtime())) - + comments + " {}\n".format(G.name)) + header = ( + f"{pargs}\n" + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) path.write(header.encode(encoding)) for multiline in generate_multiline_adjlist(G, delimiter): - multiline += '\n' + multiline += "\n" path.write(multiline.encode(encoding)) -def parse_multiline_adjlist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None, - edgetype=None): +def parse_multiline_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None +): """Parse lines of a multiline adjacency list representation of a graph. Parameters @@ -198,8 +197,8 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, lines : list or iterator of strings Input data in multiline adjlist format - create_using: NetworkX graph container - Use given NetworkX graph for holding nodes or edges. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. nodetype : Python type, optional Convert nodes to this type. @@ -217,26 +216,21 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, Examples -------- - >>> lines = ['1 2', - ... "2 {'weight':3, 'name': 'Frodo'}", - ... "3 {}", - ... "2 1", - ... "5 {'weight':6, 'name': 'Saruman'}"] + >>> lines = [ + ... "1 2", + ... "2 {'weight':3, 'name': 'Frodo'}", + ... "3 {}", + ... "2 1", + ... "5 {'weight':6, 'name': 'Saruman'}", + ... 
] >>> G = nx.parse_multiline_adjlist(iter(lines), nodetype=int) >>> list(G) [1, 2, 3, 5] """ from ast import literal_eval - if create_using is None: - G = nx.Graph() - else: - try: - G = create_using - G.clear() - except: - raise TypeError("Input graph is not a networkx graph type") + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) if p >= 0: @@ -246,22 +240,23 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, try: (u, deg) = line.strip().split(delimiter) deg = int(deg) - except: - raise TypeError("Failed to read node and degree on line ({})".format(line)) + except BaseException as e: + raise TypeError(f"Failed to read node and degree on line ({line})") from e if nodetype is not None: try: u = nodetype(u) - except: - raise TypeError("Failed to convert node ({}) to type {}" - .format(u, nodetype)) + except BaseException as e: + raise TypeError( + f"Failed to convert node ({u}) to " f"type {nodetype}" + ) from e G.add_node(u) for i in range(deg): while True: try: line = next(lines) - except StopIteration: - msg = "Failed to find neighbor for node ({})".format(u) - raise TypeError(msg) + except StopIteration as e: + msg = f"Failed to find neighbor for node ({u})" + raise TypeError(msg) from e p = line.find(comments) if p >= 0: line = line[:p] @@ -272,21 +267,21 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, if numb < 1: continue # isolated node v = vlist.pop(0) - data = ''.join(vlist) + data = "".join(vlist) if nodetype is not None: try: v = nodetype(v) - except: + except BaseException as e: raise TypeError( - "Failed to convert node ({}) to type {}" - .format(v, nodetype)) + f"Failed to convert node ({v}) " f"to type {nodetype}" + ) from e if edgetype is not None: try: - edgedata = {'weight': edgetype(data)} - except: + edgedata = {"weight": edgetype(data)} + except BaseException as e: raise TypeError( - "Failed to convert edge data ({}) to type {}" - .format(data, edgetype)) + f"Failed to convert edge data ({data}) " f"to type {edgetype}" + ) from e else: try: # try to evaluate edgedata = literal_eval(data) @@ -297,11 +292,16 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, return G -@open_file(0, mode='rb') -def read_multiline_adjlist(path, comments="#", delimiter=None, - create_using=None, - nodetype=None, edgetype=None, - encoding='utf-8'): +@open_file(0, mode="rb") +def read_multiline_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + edgetype=None, + encoding="utf-8", +): """Read graph in multi-line adjacency list format from path. Parameters @@ -310,8 +310,8 @@ def read_multiline_adjlist(path, comments="#", delimiter=None, Filename or file handle to read. Filenames ending in .gz or .bz2 will be uncompressed. - create_using: NetworkX graph container - Use given NetworkX graph for holding nodes or edges. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. nodetype : Python type, optional Convert nodes to this type. @@ -331,39 +331,39 @@ def read_multiline_adjlist(path, comments="#", delimiter=None, Examples -------- - >>> G=nx.path_graph(4) - >>> nx.write_multiline_adjlist(G,"test.adjlist") - >>> G=nx.read_multiline_adjlist("test.adjlist") + >>> G = nx.path_graph(4) + >>> nx.write_multiline_adjlist(G, "test.adjlist") + >>> G = nx.read_multiline_adjlist("test.adjlist") The path can be a file or a string with the name of the file. 
If a file is provided, it has to be opened in 'rb' mode. - >>> fh=open("test.adjlist", 'rb') - >>> G=nx.read_multiline_adjlist(fh) + >>> fh = open("test.adjlist", "rb") + >>> G = nx.read_multiline_adjlist(fh) Filenames ending in .gz or .bz2 will be compressed. - >>> nx.write_multiline_adjlist(G,"test.adjlist.gz") - >>> G=nx.read_multiline_adjlist("test.adjlist.gz") + >>> nx.write_multiline_adjlist(G, "test.adjlist.gz") + >>> G = nx.read_multiline_adjlist("test.adjlist.gz") The optional nodetype is a function to convert node strings to nodetype. For example - >>> G=nx.read_multiline_adjlist("test.adjlist", nodetype=int) + >>> G = nx.read_multiline_adjlist("test.adjlist", nodetype=int) will attempt to convert all nodes to integer type. The optional edgetype is a function to convert edge data strings to edgetype. - >>> G=nx.read_multiline_adjlist("test.adjlist") + >>> G = nx.read_multiline_adjlist("test.adjlist") The optional create_using parameter is a NetworkX graph container. The default is Graph(), an undirected graph. To read the data as a directed graph use - >>> G=nx.read_multiline_adjlist("test.adjlist", create_using=nx.DiGraph()) + >>> G = nx.read_multiline_adjlist("test.adjlist", create_using=nx.DiGraph) Notes ----- @@ -374,17 +374,11 @@ def read_multiline_adjlist(path, comments="#", delimiter=None, write_multiline_adjlist """ lines = (line.decode(encoding) for line in path) - return parse_multiline_adjlist(lines, - comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype, - edgetype=edgetype) - - -# fixture for nose tests -def teardown_module(module): - import os - for fname in ['test.adjlist', 'test.adjlist.gz']: - if os.path.isfile(fname): - os.unlink(fname) + return parse_multiline_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + edgetype=edgetype, + ) diff --git a/networkx/readwrite/nx_shp.py b/networkx/readwrite/nx_shp.py index 4b1a240..c1f4ce8 100644 --- a/networkx/readwrite/nx_shp.py +++ b/networkx/readwrite/nx_shp.py @@ -11,16 +11,9 @@ interoperability among Esri and other software products." See https://en.wikipedia.org/wiki/Shapefile for additional information. """ -# Copyright (C) 2004-2018 by -# Ben Reilly -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license.
import networkx as nx -__author__ = """Ben Reilly (benwreilly@gmail.com)""" -__all__ = ['read_shp', 'write_shp'] + +__all__ = ["read_shp", "write_shp"] def read_shp(path, simplify=True, geom_attrs=True, strict=True): @@ -76,7 +69,7 @@ def read_shp(path, simplify=True, geom_attrs=True, strict=True): Examples -------- - >>> G=nx.read_shp('test.shp') # doctest: +SKIP + >>> G = nx.read_shp("test.shp") # doctest: +SKIP References ---------- @@ -84,8 +77,8 @@ def read_shp(path, simplify=True, geom_attrs=True, strict=True): """ try: from osgeo import ogr - except ImportError: - raise ImportError("read_shp requires OGR: http://www.gdal.org/") + except ImportError as e: + raise ImportError("read_shp requires OGR: http://www.gdal.org/") from e if not isinstance(path, str): return @@ -93,7 +86,7 @@ def read_shp(path, simplify=True, geom_attrs=True, strict=True): net = nx.DiGraph() shp = ogr.Open(path) if shp is None: - raise RuntimeError("Unable to open {}".format(path)) + raise RuntimeError(f"Unable to open {path}") for lyr in shp: fields = [x.GetName() for x in lyr.schema] for f in lyr: @@ -109,17 +102,16 @@ def read_shp(path, simplify=True, geom_attrs=True, strict=True): # Note: Using layer level geometry type if g.GetGeometryType() == ogr.wkbPoint: net.add_node((g.GetPoint_2D(0)), **attributes) - elif g.GetGeometryType() in (ogr.wkbLineString, - ogr.wkbMultiLineString): - for edge in edges_from_line(g, attributes, simplify, - geom_attrs): + elif g.GetGeometryType() in (ogr.wkbLineString, ogr.wkbMultiLineString): + for edge in edges_from_line(g, attributes, simplify, geom_attrs): e1, e2, attr = edge net.add_edge(e1, e2) net[e1][e2].update(attr) else: if strict: - raise nx.NetworkXError("GeometryType {} not supported". - format(g.GetGeometryType())) + raise nx.NetworkXError( + "GeometryType {} not supported".format(g.GetGeometryType()) + ) return net @@ -154,8 +146,10 @@ def edges_from_line(geom, attrs, simplify=True, geom_attrs=True): """ try: from osgeo import ogr - except ImportError: - raise ImportError("edges_from_line requires OGR: http://www.gdal.org/") + except ImportError as e: + raise ImportError( + "edges_from_line requires OGR: " "http://www.gdal.org/" + ) from e if geom.GetGeometryType() == ogr.wkbLineString: if simplify: @@ -184,8 +178,7 @@ def edges_from_line(geom, attrs, simplify=True, geom_attrs=True): elif geom.GetGeometryType() == ogr.wkbMultiLineString: for i in range(geom.GetGeometryCount()): geom_i = geom.GetGeometryRef(i) - for edge in edges_from_line(geom_i, attrs, simplify, geom_attrs): - yield edge + yield from edges_from_line(geom_i, attrs, simplify, geom_attrs) def write_shp(G, outdir): @@ -216,17 +209,17 @@ def write_shp(G, outdir): """ try: from osgeo import ogr - except ImportError: - raise ImportError("write_shp requires OGR: http://www.gdal.org/") + except ImportError as e: + raise ImportError("write_shp requires OGR: http://www.gdal.org/") from e # easier to debug in python if ogr throws exceptions ogr.UseExceptions() def netgeometry(key, data): - if 'Wkb' in data: - geom = ogr.CreateGeometryFromWkb(data['Wkb']) - elif 'Wkt' in data: - geom = ogr.CreateGeometryFromWkt(data['Wkt']) - elif type(key[0]).__name__ == 'tuple': # edge keys are packed tuples + if "Wkb" in data: + geom = ogr.CreateGeometryFromWkb(data["Wkb"]) + elif "Wkt" in data: + geom = ogr.CreateGeometryFromWkt(data["Wkt"]) + elif type(key[0]).__name__ == "tuple": # edge keys are packed tuples geom = ogr.Geometry(ogr.wkbLineString) _from, _to = key[0], key[1] try: @@ -275,7 +268,6 @@ def 
add_fields_to_layer(key, value, fields, layer): newfield = ogr.FieldDefn(key, fields[key]) layer.CreateField(newfield) - drv = ogr.GetDriverByName("ESRI Shapefile") shpdir = drv.CreateDataSource(outdir) # delete pre-existing output first otherwise ogr chokes @@ -292,8 +284,7 @@ def create_attributes(data, fields, layer): attributes = {} # storage for attribute data (indexed by field names) for key, value in data.items(): # Reject spatial data not required for attribute table - if (key != 'Json' and key != 'Wkt' and key != 'Wkb' - and key != 'ShpName'): + if key != "Json" and key != "Wkt" and key != "Wkb" and key != "ShpName": # Check/add field and data type to fields dict if key not in fields: add_fields_to_layer(key, value, fields, layer) @@ -314,7 +305,7 @@ def create_attributes(data, fields, layer): edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString) # New edge attribute write support merged into edge loop - edge_fields = {} # storage for field names and their data types + edge_fields = {} # storage for field names and their data types for e in G.edges(data=True): data = G.get_edge_data(*e) @@ -323,12 +314,3 @@ def create_attributes(data, fields, layer): create_feature(g, edges, attributes) nodes, edges = None, None - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import ogr - except: - raise SkipTest("OGR not available") diff --git a/networkx/readwrite/nx_yaml.py b/networkx/readwrite/nx_yaml.py index 338849b..a8b7638 100644 --- a/networkx/readwrite/nx_yaml.py +++ b/networkx/readwrite/nx_yaml.py @@ -4,7 +4,7 @@ **** Read and write NetworkX graphs in YAML format. -"YAML is a data serialization format designed for human readability +"YAML is a data serialization format designed for human readability and interaction with scripting languages." See http://www.yaml.org for documentation. @@ -13,25 +13,17 @@ http://pyyaml.org/wiki/PyYAML """ -__author__ = """Aric Hagberg (hagberg@lanl.gov)""" -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -__all__ = ['read_yaml', 'write_yaml'] +__all__ = ["read_yaml", "write_yaml"] -import networkx as nx from networkx.utils import open_file -@open_file(1, mode='w') +@open_file(1, mode="w") def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): - """Write graph G in YAML format to path. + """Write graph G in YAML format to path. - YAML is a data serialization format designed for human readability + YAML is a data serialization format designed for human readability and interaction with scripting languages [1]_. Parameters @@ -39,7 +31,7 @@ def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): G : graph A NetworkX graph path : file or string - File or filename to write. + File or filename to write. Filenames ending in .gz or .bz2 will be compressed. 
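Assuming PyYAML is installed, the intended round trip pairs this function with `read_yaml`:

>>> G = nx.path_graph(4)  # doctest: +SKIP
>>> nx.write_yaml(G, "test.yaml")  # doctest: +SKIP
>>> H = nx.read_yaml("test.yaml")  # doctest: +SKIP

A caveat: `write_yaml` serializes the graph object itself (a `!!python/object` tag), so whether the `yaml.FullLoader` used by `read_yaml` in the hunk below can reconstruct it depends on the installed PyYAML release; newer releases restrict which Python tags the full loader will construct.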
Notes @@ -49,8 +41,8 @@ def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): Examples -------- - >>> G=nx.path_graph(4) - >>> nx.write_yaml(G,'test.yaml') + >>> G = nx.path_graph(4) + >>> nx.write_yaml(G, "test.yaml") References ---------- @@ -58,22 +50,22 @@ def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): """ try: import yaml - except ImportError: - raise ImportError("write_yaml() requires PyYAML: http://pyyaml.org/") + except ImportError as e: + raise ImportError("write_yaml() requires PyYAML: http://pyyaml.org/") from e yaml.dump(G_to_be_yaml, path_for_yaml_output, **kwds) -@open_file(0, mode='r') +@open_file(0, mode="r") def read_yaml(path): """Read graph in YAML format from path. - YAML is a data serialization format designed for human readability + YAML is a data serialization format designed for human readability and interaction with scripting languages [1]_. Parameters ---------- path : file or string - File or filename to read. Filenames ending in .gz or .bz2 + File or filename to read. Filenames ending in .gz or .bz2 will be uncompressed. Returns @@ -82,9 +74,9 @@ def read_yaml(path): Examples -------- - >>> G=nx.path_graph(4) - >>> nx.write_yaml(G,'test.yaml') - >>> G=nx.read_yaml('test.yaml') + >>> G = nx.path_graph(4) + >>> nx.write_yaml(G, "test.yaml") + >>> G = nx.read_yaml("test.yaml") References ---------- @@ -93,24 +85,8 @@ def read_yaml(path): """ try: import yaml - except ImportError: - raise ImportError("read_yaml() requires PyYAML: http://pyyaml.org/") + except ImportError as e: + raise ImportError("read_yaml() requires PyYAML: http://pyyaml.org/") from e - G = yaml.load(path) + G = yaml.load(path, Loader=yaml.FullLoader) return G - - -# fixture for nose tests -def setup_module(module): - from nose import SkipTest - try: - import yaml - except: - raise SkipTest("PyYAML not available") - -# fixture for nose tests - - -def teardown_module(module): - import os - os.unlink('test.yaml') diff --git a/networkx/readwrite/p2g.py b/networkx/readwrite/p2g.py index 934c456..6e23812 100644 --- a/networkx/readwrite/p2g.py +++ b/networkx/readwrite/p2g.py @@ -31,20 +31,12 @@ itself. Indeed, self-loops are allowed. Node index starts from 0. """ -# Copyright (C) 2008-2012 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx -from networkx.utils import is_string_like, open_file -__author__ = '\n'.join(['Willem Ligtenberg (w.p.a.ligtenberg@tue.nl)', - 'Aric Hagberg (aric.hagberg@gmail.com)']) +from networkx.utils import open_file -@open_file(1, mode='w') -def write_p2g(G, path, encoding='utf-8'): +@open_file(1, mode="w") +def write_p2g(G, path, encoding="utf-8"): """Write NetworkX graph in p2g format. Notes @@ -52,20 +44,20 @@ def write_p2g(G, path, encoding='utf-8'): This format is meant to be used with directed graphs with possible self loops. 
""" - path.write(("%s\n" % G.name).encode(encoding)) - path.write(("%s %s\n" % (G.order(), G.size())).encode(encoding)) + path.write((f"{G.name}\n").encode(encoding)) + path.write((f"{G.order()} {G.size()}\n").encode(encoding)) nodes = list(G) # make dictionary mapping nodes to integers nodenumber = dict(zip(nodes, range(len(nodes)))) for n in nodes: - path.write(("%s\n" % n).encode(encoding)) + path.write((f"{n}\n").encode(encoding)) for nbr in G.neighbors(n): - path.write(("%s " % nodenumber[nbr]).encode(encoding)) + path.write((f"{nodenumber[nbr]} ").encode(encoding)) path.write("\n".encode(encoding)) -@open_file(0, mode='r') -def read_p2g(path, encoding='utf-8'): +@open_file(0, mode="r") +def read_p2g(path, encoding="utf-8"): """Read graph in p2g format from path. Returns diff --git a/networkx/readwrite/pajek.py b/networkx/readwrite/pajek.py index 1409d89..762ad64 100644 --- a/networkx/readwrite/pajek.py +++ b/networkx/readwrite/pajek.py @@ -1,11 +1,3 @@ -# Copyright (C) 2008-2014 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) """ ***** Pajek @@ -22,10 +14,12 @@ """ +import warnings + import networkx as nx -from networkx.utils import is_string_like, open_file, make_str +from networkx.utils import open_file -__all__ = ['read_pajek', 'parse_pajek', 'generate_pajek', 'write_pajek'] +__all__ = ["read_pajek", "parse_pajek", "generate_pajek", "write_pajek"] def generate_pajek(G): @@ -41,8 +35,8 @@ def generate_pajek(G): See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ - if G.name == '': - name = 'NetworkX' + if G.name == "": + name = "NetworkX" else: name = G.name # Apparently many Pajek format readers can't process this line @@ -50,40 +44,60 @@ def generate_pajek(G): # yield '*network %s'%name # write nodes with attributes - yield '*vertices %s' % (G.order()) + yield f"*vertices {G.order()}" nodes = list(G) # make dictionary mapping nodes to integers nodenumber = dict(zip(nodes, range(1, len(nodes) + 1))) for n in nodes: - na = G.nodes.get(n, {}) - x = na.get('x', 0.0) - y = na.get('y', 0.0) - id = int(na.get('id', nodenumber[n])) + # copy node attributes and pop mandatory attributes + # to avoid duplication. + na = G.nodes.get(n, {}).copy() + x = na.pop("x", 0.0) + y = na.pop("y", 0.0) + try: + id = int(na.pop("id", nodenumber[n])) + except ValueError as e: + e.args += ( + ( + "Pajek format requires 'id' to be an int()." + " Refer to the 'Relabeling nodes' section." + ), + ) + raise nodenumber[n] = id - shape = na.get('shape', 'ellipse') - s = ' '.join(map(make_qstr, (id, n, x, y, shape))) + shape = na.pop("shape", "ellipse") + s = " ".join(map(make_qstr, (id, n, x, y, shape))) + # only optional attributes are left in na. for k, v in na.items(): - if v.strip() != '': - s += ' %s %s' % (make_qstr(k), make_qstr(v)) + if isinstance(v, str) and v.strip() != "": + s += f" {make_qstr(k)} {make_qstr(v)}" + else: + warnings.warn( + f"Node attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." 
+ ) yield s # write edges with attributes if G.is_directed(): - yield '*arcs' + yield "*arcs" else: - yield '*edges' + yield "*edges" for u, v, edgedata in G.edges(data=True): d = edgedata.copy() - value = d.pop('weight', 1.0) # use 1 as default edge value - s = ' '.join(map(make_qstr, (nodenumber[u], nodenumber[v], value))) + value = d.pop("weight", 1.0) # use 1 as default edge value + s = " ".join(map(make_qstr, (nodenumber[u], nodenumber[v], value))) for k, v in d.items(): - if v.strip() != '': - s += ' %s %s' % (make_qstr(k), make_qstr(v)) + if isinstance(v, str) and v.strip() != "": + s += f" {make_qstr(k)} {make_qstr(v)}" + else: + warnings.warn( + f"Edge attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." + ) yield s -@open_file(1, mode='wb') -def write_pajek(G, path, encoding='UTF-8'): +@open_file(1, mode="wb") +def write_pajek(G, path, encoding="UTF-8"): """Write graph in Pajek format to path. Parameters @@ -96,21 +110,27 @@ def write_pajek(G, path, encoding='UTF-8'): Examples -------- - >>> G=nx.path_graph(4) + >>> G = nx.path_graph(4) >>> nx.write_pajek(G, "test.net") + Warnings + -------- + Optional node attributes and edge attributes must be non-empty strings. + Otherwise it will not be written into the file. You will need to + convert those attributes to strings if you want to keep them. + References ---------- See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ for line in generate_pajek(G): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -@open_file(0, mode='rb') -def read_pajek(path, encoding='UTF-8'): +@open_file(0, mode="rb") +def read_pajek(path, encoding="UTF-8"): """Read graph in Pajek format from path. Parameters @@ -125,13 +145,13 @@ def read_pajek(path, encoding='UTF-8'): Examples -------- - >>> G=nx.path_graph(4) + >>> G = nx.path_graph(4) >>> nx.write_pajek(G, "test.net") - >>> G=nx.read_pajek("test.net") + >>> G = nx.read_pajek("test.net") To create a Graph instead of a MultiGraph use - >>> G1=nx.Graph(G) + >>> G1 = nx.Graph(G) References ---------- @@ -160,10 +180,11 @@ def parse_pajek(lines): """ import shlex + # multigraph=False - if is_string_like(lines): - lines = iter(lines.split('\n')) - lines = iter([line.rstrip('\n') for line in lines]) + if isinstance(lines, str): + lines = iter(lines.split("\n")) + lines = iter([line.rstrip("\n") for line in lines]) G = nx.MultiDiGraph() # are multiedges allowed in Pajek? 
assume yes labels = [] # in the order of the file, needed for matrix while lines: @@ -178,27 +199,28 @@ def parse_pajek(lines): # Line was not of the form: *network NAME pass else: - G.graph['name'] = name + G.graph["name"] = name elif l.lower().startswith("*vertices"): nodelabels = {} l, nnodes = l.split() for i in range(int(nnodes)): l = next(lines) try: - splitline = [x.decode('utf-8') for x in - shlex.split(make_str(l).encode('utf-8'))] + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] except AttributeError: splitline = shlex.split(str(l)) id, label = splitline[0:2] labels.append(label) G.add_node(label) nodelabels[id] = label - G.nodes[label]['id'] = id + G.nodes[label]["id"] = id try: x, y, shape = splitline[2:5] - G.nodes[label].update({'x': float(x), - 'y': float(y), - 'shape': shape}) + G.nodes[label].update( + {"x": float(x), "y": float(y), "shape": shape} + ) except: pass extra_attr = zip(splitline[5::2], splitline[6::2]) @@ -212,8 +234,9 @@ def parse_pajek(lines): G = G.to_directed() for l in lines: try: - splitline = [x.decode('utf-8') for x in - shlex.split(make_str(l).encode('utf-8'))] + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] except AttributeError: splitline = shlex.split(str(l)) @@ -227,11 +250,11 @@ def parse_pajek(lines): try: # there should always be a single value on the edge? w = splitline[2:3] - edge_data.update({'weight': float(w[0])}) + edge_data.update({"weight": float(w[0])}) except: pass # if there isn't, just assign a 1 -# edge_data.update({'value':1}) + # edge_data.update({'value':1}) extra_attr = zip(splitline[3::2], splitline[4::2]) edge_data.update(extra_attr) # if G.has_edge(u,v): @@ -239,27 +262,23 @@ def parse_pajek(lines): G.add_edge(u, v, **edge_data) elif l.lower().startswith("*matrix"): G = nx.DiGraph(G) - adj_list = ((labels[row], labels[col], {'weight': int(data)}) - for (row, line) in enumerate(lines) - for (col, data) in enumerate(line.split()) - if int(data) != 0) + adj_list = ( + (labels[row], labels[col], {"weight": int(data)}) + for (row, line) in enumerate(lines) + for (col, data) in enumerate(line.split()) + if int(data) != 0 + ) G.add_edges_from(adj_list) return G def make_qstr(t): - """Return the string representation of t. + """Returns the string representation of t. Add outer double-quotes if the string has a space. """ - if not is_string_like(t): + if not isinstance(t, str): t = str(t) if " " in t: - t = r'"%s"' % t + t = f'"{t}"' return t - - -# fixture for nose tests -def teardown_module(module): - import os - os.unlink('test.net') diff --git a/networkx/readwrite/sparse6.py b/networkx/readwrite/sparse6.py index aff6a83..f9ea9bb 100644 --- a/networkx/readwrite/sparse6.py +++ b/networkx/readwrite/sparse6.py @@ -1,15 +1,5 @@ # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# Tomas Gavenciak -# All rights reserved. -# BSD license. -# -# Authors: Tomas Gavenciak -# Aric Hagberg """Functions for reading and writing graphs in the *sparse6* format. The *sparse6* file format is a space-efficient format for large sparse @@ -21,17 +11,12 @@ .. 
_sparse6: http://users.cecs.anu.edu.au/~bdm/data/formats.html """ -from itertools import chain -import math -import sys - import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file, not_implemented_for from networkx.readwrite.graph6 import data_to_n, n_to_data -__all__ = ['from_sparse6_bytes', 'read_sparse6', 'to_sparse6_bytes', - 'write_sparse6'] +__all__ = ["from_sparse6_bytes", "read_sparse6", "to_sparse6_bytes", "write_sparse6"] def _generate_sparse6_bytes(G, nodes, header): @@ -57,11 +42,12 @@ def _generate_sparse6_bytes(G, nodes, header): """ n = len(G) if n >= 2 ** 36: - raise ValueError('sparse6 is only defined if number of nodes is less ' - 'than 2 ** 36') + raise ValueError( + "sparse6 is only defined if number of nodes is less " "than 2 ** 36" + ) if header: - yield b'>>sparse6<<' - yield b':' + yield b">>sparse6<<" + yield b":" for d in n_to_data(n): yield str.encode(chr(d + 63)) @@ -100,12 +86,19 @@ def enc(x): else: bits.extend([1] * ((-len(bits)) % 6)) - data = [(bits[i + 0] << 5) + (bits[i + 1] << 4) + (bits[i + 2] << 3) + (bits[i + 3] << 2) + - (bits[i + 4] << 1) + (bits[i + 5] << 0) for i in range(0, len(bits), 6)] + data = [ + (bits[i + 0] << 5) + + (bits[i + 1] << 4) + + (bits[i + 2] << 3) + + (bits[i + 3] << 2) + + (bits[i + 4] << 1) + + (bits[i + 5] << 0) + for i in range(0, len(bits), 6) + ] for d in data: yield str.encode(chr(d + 63)) - yield b'\n' + yield b"\n" def from_sparse6_bytes(string): @@ -127,7 +120,7 @@ def from_sparse6_bytes(string): Examples -------- - >>> G = nx.from_sparse6_bytes(b':A_') + >>> G = nx.from_sparse6_bytes(b":A_") >>> sorted(G.edges()) [(0, 1), (0, 1), (0, 1)] @@ -141,41 +134,44 @@ def from_sparse6_bytes(string): """ - if string.startswith(b'>>sparse6<<'): + if string.startswith(b">>sparse6<<"): string = string[11:] - if not string.startswith(b':'): - raise NetworkXError('Expected leading colon in sparse6') + if not string.startswith(b":"): + raise NetworkXError("Expected leading colon in sparse6") - if sys.version_info < (3, ): - chars = [ord(c) - 63 for c in string[1:]] - else: - chars = [c - 63 for c in string[1:]] + chars = [c - 63 for c in string[1:]] n, data = data_to_n(chars) k = 1 while 1 << k < n: k += 1 def parseData(): - """Return stream of pairs b[i], x[i] for sparse6 format.""" + """Returns stream of pairs b[i], x[i] for sparse6 format.""" chunks = iter(data) d = None # partial data word dLen = 0 # how many unparsed bits are left in d while 1: if dLen < 1: - d = next(chunks) + try: + d = next(chunks) + except StopIteration: + return dLen = 6 dLen -= 1 b = (d >> dLen) & 1 # grab top remaining bit x = d & ((1 << dLen) - 1) # partially built up value of x - xLen = dLen # how many bits included so far in x + xLen = dLen # how many bits included so far in x while xLen < k: # now grab full chunks until we have enough - d = next(chunks) + try: + d = next(chunks) + except StopIteration: + return dLen = 6 x = (x << 6) + d xLen += 6 - x = (x >> (xLen - k)) # shift back the extra bits + x = x >> (xLen - k) # shift back the extra bits dLen = xLen - k yield b, x @@ -227,7 +223,7 @@ def to_sparse6_bytes(G, nodes=None, header=True): Examples -------- - >>> nx.to_sparse6_bytes(nx.path_graph(2)) # doctest: +SKIP + >>> nx.to_sparse6_bytes(nx.path_graph(2)) b'>>sparse6<<:An\\n' See Also @@ -248,11 +244,11 @@ def to_sparse6_bytes(G, nodes=None, header=True): """ if nodes is not None: G = G.subgraph(nodes) - G = nx.convert_node_labels_to_integers(G, ordering='sorted') - return 
b''.join(_generate_sparse6_bytes(G, nodes, header)) + G = nx.convert_node_labels_to_integers(G, ordering="sorted") + return b"".join(_generate_sparse6_bytes(G, nodes, header)) -@open_file(0, mode='rb') +@open_file(0, mode="rb") def read_sparse6(path): """Read an undirected graph in sparse6 format from path. @@ -277,7 +273,7 @@ def read_sparse6(path): >>> import tempfile >>> with tempfile.NamedTemporaryFile() as f: - ... _ = f.write(b'>>sparse6<<:An\\n') + ... _ = f.write(b">>sparse6<<:An\\n") ... _ = f.seek(0) ... G = nx.read_sparse6(f.name) >>> list(G.edges()) @@ -287,7 +283,7 @@ def read_sparse6(path): >>> import tempfile >>> with tempfile.NamedTemporaryFile() as f: - ... _ = f.write(b'>>sparse6<<:An\\n') + ... _ = f.write(b">>sparse6<<:An\\n") ... _ = f.seek(0) ... G = nx.read_sparse6(f) >>> list(G.edges()) @@ -315,8 +311,8 @@ def read_sparse6(path): return glist -@not_implemented_for('directed') -@open_file(1, mode='wb') +@not_implemented_for("directed") +@open_file(1, mode="wb") def write_sparse6(G, path, nodes=None, header=True): """Write graph G to given path in sparse6 format. @@ -346,7 +342,7 @@ def write_sparse6(G, path, nodes=None, header=True): >>> import tempfile >>> with tempfile.NamedTemporaryFile() as f: ... nx.write_sparse6(nx.path_graph(2), f.name) - ... print(f.read()) # doctest: +SKIP + ... print(f.read()) b'>>sparse6<<:An\\n' You can also write a sparse6 file by giving an open file-like object:: @@ -354,7 +350,7 @@ def write_sparse6(G, path, nodes=None, header=True): >>> with tempfile.NamedTemporaryFile() as f: ... nx.write_sparse6(nx.path_graph(2), f) ... _ = f.seek(0) - ... print(f.read()) # doctest: +SKIP + ... print(f.read()) b'>>sparse6<<:An\\n' See Also @@ -373,6 +369,6 @@ def write_sparse6(G, path, nodes=None, header=True): """ if nodes is not None: G = G.subgraph(nodes) - G = nx.convert_node_labels_to_integers(G, ordering='sorted') + G = nx.convert_node_labels_to_integers(G, ordering="sorted") for b in _generate_sparse6_bytes(G, nodes, header): path.write(b) diff --git a/networkx/readwrite/tests/__init__.py b/networkx/readwrite/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/readwrite/tests/test_adjlist.py b/networkx/readwrite/tests/test_adjlist.py index e477617..e2a655e 100644 --- a/networkx/readwrite/tests/test_adjlist.py +++ b/networkx/readwrite/tests/test_adjlist.py @@ -1,27 +1,25 @@ -# -*- coding: utf-8 -*- """ Unit tests for adjlist. """ import io -from nose.tools import assert_equal, assert_raises, assert_not_equal +import pytest import os import tempfile import networkx as nx -from networkx.testing import (assert_nodes_equal, assert_edges_equal, - assert_graphs_equal) +from networkx.testing import assert_nodes_equal, assert_edges_equal, assert_graphs_equal -class TestAdjlist(): - - def setUp(self): - self.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] - self.G.add_edges_from(e) - self.G.add_node('g') - self.DG = nx.DiGraph(self.G) - self.XG = nx.MultiGraph() - self.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) - self. 
XDG = nx.MultiDiGraph(self.XG) +class TestAdjlist: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + cls.XG = nx.MultiGraph() + cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + cls.XDG = nx.MultiDiGraph(cls.XG) def test_read_multiline_adjlist_1(self): # Unit test for https://networkx.lanl.gov/trac/ticket/252 @@ -33,18 +31,14 @@ def test_read_multiline_adjlist_1(self): """ bytesIO = io.BytesIO(s) G = nx.read_multiline_adjlist(bytesIO) - adj = {'1': {'3': {}, '2': {}}, '3': {'1': {}}, '2': {'1': {}}} + adj = {"1": {"3": {}, "2": {}}, "3": {"1": {}}, "2": {"1": {}}} assert_graphs_equal(G, nx.Graph(adj)) def test_unicode(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname) @@ -54,44 +48,44 @@ def test_unicode(self): def test_latin1_err(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() - assert_raises(UnicodeEncodeError, - nx.write_multiline_adjlist, - G, fname, encoding='latin-1') + pytest.raises( + UnicodeEncodeError, nx.write_multiline_adjlist, G, fname, encoding="latin-1" + ) os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() - try: # Python 3.x - blurb = chr(1245) # just to trigger the exception - name1 = 'Bj' + chr(246) + 'rk' - name2 = chr(220) + 'ber' - except ValueError: # Python 2.6+ - name1 = 'Bj' + unichr(246) + 'rk' - name2 = unichr(220) + 'ber' - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() - nx.write_multiline_adjlist(G, fname, encoding='latin-1') - H = nx.read_multiline_adjlist(fname, encoding='latin-1') + nx.write_multiline_adjlist(G, fname, encoding="latin-1") + H = nx.read_multiline_adjlist(fname, encoding="latin-1") assert_graphs_equal(G, H) os.close(fd) os.unlink(fname) + def test_parse_adjlist(self): + lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] + nx.parse_adjlist(lines, nodetype=int) # smoke test + with pytest.raises(TypeError): + nx.parse_adjlist(lines, nodetype="int") + lines = ["1 2 5", "2 b", "c"] + with pytest.raises(TypeError): + nx.parse_adjlist(lines, nodetype=int) + def test_adjlist_graph(self): G = self.G (fd, fname) = tempfile.mkstemp() nx.write_adjlist(G, fname) H = nx.read_adjlist(fname) H2 = nx.read_adjlist(fname) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) 
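        # A side note on the recurring `assert H != H2` pattern in this file:
        # nx.Graph does not implement __eq__, so `!=` falls back to object
        # identity, and two graphs read independently from the same file always
        # compare unequal even when their nodes and edges match. For example
        # (names are illustrative):
        #
        #     G1 = nx.path_graph(2)
        #     G2 = nx.path_graph(2)
        #     G1 != G2                              # True: distinct objects
        #     list(G1.edges()) == list(G2.edges())  # True: same structure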
assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -103,7 +97,7 @@ def test_adjlist_digraph(self): nx.write_adjlist(G, fname) H = nx.read_adjlist(fname, create_using=nx.DiGraph()) H2 = nx.read_adjlist(fname, create_using=nx.DiGraph()) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -115,6 +109,7 @@ def test_adjlist_integers(self): nx.write_adjlist(G, fname) H = nx.read_adjlist(fname, nodetype=int) H2 = nx.read_adjlist(fname, nodetype=int) + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -124,11 +119,9 @@ def test_adjlist_multigraph(self): G = self.XG (fd, fname) = tempfile.mkstemp() nx.write_adjlist(G, fname) - H = nx.read_adjlist(fname, nodetype=int, - create_using=nx.MultiGraph()) - H2 = nx.read_adjlist(fname, nodetype=int, - create_using=nx.MultiGraph()) - assert_not_equal(H, H2) # they should be different graphs + H = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) + H2 = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -138,11 +131,9 @@ def test_adjlist_multidigraph(self): G = self.XDG (fd, fname) = tempfile.mkstemp() nx.write_adjlist(G, fname) - H = nx.read_adjlist(fname, nodetype=int, - create_using=nx.MultiDiGraph()) - H2 = nx.read_adjlist(fname, nodetype=int, - create_using=nx.MultiDiGraph()) - assert_not_equal(H, H2) # they should be different graphs + H = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiDiGraph()) + H2 = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiDiGraph()) + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -151,26 +142,53 @@ def test_adjlist_multidigraph(self): def test_adjlist_delimiter(self): fh = io.BytesIO() G = nx.path_graph(3) - nx.write_adjlist(G, fh, delimiter=':') + nx.write_adjlist(G, fh, delimiter=":") fh.seek(0) - H = nx.read_adjlist(fh, nodetype=int, delimiter=':') + H = nx.read_adjlist(fh, nodetype=int, delimiter=":") assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) -class TestMultilineAdjlist(): +class TestMultilineAdjlist: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + cls.DG.remove_edge("b", "a") + cls.DG.remove_edge("b", "c") + cls.XG = nx.MultiGraph() + cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + cls.XDG = nx.MultiDiGraph(cls.XG) - def setUp(self): - self.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] - self.G.add_edges_from(e) - self.G.add_node('g') - self.DG = nx.DiGraph(self.G) - self.DG.remove_edge('b', 'a') - self.DG.remove_edge('b', 'c') - self.XG = nx.MultiGraph() - self.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) - self. 
XDG = nx.MultiDiGraph(self.XG) + def test_parse_multiline_adjlist(self): + lines = [ + "1 2", + "b {'weight':3, 'name': 'Frodo'}", + "c {}", + "d 1", + "e {'weight':6, 'name': 'Saruman'}", + ] + nx.parse_multiline_adjlist(iter(lines)) # smoke test + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines), nodetype=int) + nx.parse_multiline_adjlist(iter(lines), edgetype=str) # smoke test + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines), nodetype=int) + lines = ["1 a"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines)) + lines = ["a 2"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines), nodetype=int) + lines = ["1 2"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines)) + lines = ["1 2", "2 {}"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines)) def test_multiline_adjlist_graph(self): G = self.G @@ -178,7 +196,7 @@ def test_multiline_adjlist_graph(self): nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname) H2 = nx.read_multiline_adjlist(fname) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -190,7 +208,7 @@ def test_multiline_adjlist_digraph(self): nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph()) H2 = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph()) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -202,6 +220,7 @@ def test_multiline_adjlist_integers(self): nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname, nodetype=int) H2 = nx.read_multiline_adjlist(fname, nodetype=int) + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -211,11 +230,11 @@ def test_multiline_adjlist_multigraph(self): G = self.XG (fd, fname) = tempfile.mkstemp() nx.write_multiline_adjlist(G, fname) - H = nx.read_multiline_adjlist(fname, nodetype=int, - create_using=nx.MultiGraph()) - H2 = nx.read_multiline_adjlist(fname, nodetype=int, - create_using=nx.MultiGraph()) - assert_not_equal(H, H2) # they should be different graphs + H = nx.read_multiline_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) + H2 = nx.read_multiline_adjlist( + fname, nodetype=int, create_using=nx.MultiGraph() + ) + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -225,11 +244,13 @@ def test_multiline_adjlist_multidigraph(self): G = self.XDG (fd, fname) = tempfile.mkstemp() nx.write_multiline_adjlist(G, fname) - H = nx.read_multiline_adjlist(fname, nodetype=int, - create_using=nx.MultiDiGraph()) - H2 = nx.read_multiline_adjlist(fname, nodetype=int, - create_using=nx.MultiDiGraph()) - assert_not_equal(H, H2) # they should be different graphs + H = nx.read_multiline_adjlist( + fname, nodetype=int, create_using=nx.MultiDiGraph() + ) + H2 = nx.read_multiline_adjlist( + fname, nodetype=int, create_using=nx.MultiDiGraph() + ) + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) 
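        # To make the fixture in test_parse_multiline_adjlist above concrete:
        # "1 2" declares node 1 followed by two neighbour lines, each carrying
        # an optional edge-data dict, and "d 1" then opens the next node.
        # Parsed without nodetype, every label stays a string (a sketch, with
        # the data dicts shortened to {}):
        #
        #     lines = ["1 2", "b {}", "c {}", "d 1", "e {}"]
        #     G = nx.parse_multiline_adjlist(iter(lines))
        #     sorted(G.edges())   # [('1', 'b'), ('1', 'c'), ('d', 'e')]
        #
        # This is also why the nodetype=int variants in that test raise
        # TypeError: "b" cannot be converted to int.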
os.close(fd) @@ -238,8 +259,8 @@ def test_multiline_adjlist_multidigraph(self): def test_multiline_adjlist_delimiter(self): fh = io.BytesIO() G = nx.path_graph(3) - nx.write_multiline_adjlist(G, fh, delimiter=':') + nx.write_multiline_adjlist(G, fh, delimiter=":") fh.seek(0) - H = nx.read_multiline_adjlist(fh, nodetype=int, delimiter=':') + H = nx.read_multiline_adjlist(fh, nodetype=int, delimiter=":") assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) diff --git a/networkx/readwrite/tests/test_edgelist.py b/networkx/readwrite/tests/test_edgelist.py index ba16f73..31f2145 100644 --- a/networkx/readwrite/tests/test_edgelist.py +++ b/networkx/readwrite/tests/test_edgelist.py @@ -1,27 +1,26 @@ """ Unit tests for edgelists. """ -from nose.tools import assert_equal, assert_raises, assert_not_equal +import pytest import io import tempfile import os import networkx as nx -from networkx.testing import (assert_edges_equal, assert_nodes_equal, - assert_graphs_equal) +from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal class TestEdgelist: - - def setUp(self): - self.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] - self.G.add_edges_from(e) - self.G.add_node('g') - self.DG = nx.DiGraph(self.G) - self.XG = nx.MultiGraph() - self.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) - self. XDG = nx.MultiDiGraph(self.XG) + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + cls.XG = nx.MultiGraph() + cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + cls.XDG = nx.MultiDiGraph(cls.XG) def test_read_edgelist_1(self): s = b"""\ @@ -47,8 +46,9 @@ def test_read_edgelist_2(self): bytesIO = io.BytesIO(s) G = nx.read_weighted_edgelist(bytesIO, nodetype=int) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) def test_read_edgelist_3(self): s = b"""\ @@ -63,8 +63,84 @@ def test_read_edgelist_3(self): bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO, nodetype=int, data=True) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) + + def test_read_edgelist_4(self): + s = b"""\ +# comment line +1 2 {'weight':2.0} +# comment line +2 3 {'weight':3.0} +""" + bytesIO = io.BytesIO(s) + G = nx.read_edgelist(bytesIO, nodetype=int, data=False) + assert_edges_equal(G.edges(), [(1, 2), (2, 3)]) + + bytesIO = io.BytesIO(s) + G = nx.read_edgelist(bytesIO, nodetype=int, data=True) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) + + s = """\ +# comment line +1 2 {'weight':2.0} +# comment line +2 3 {'weight':3.0} +""" + StringIO = io.StringIO(s) + G = nx.read_edgelist(StringIO, nodetype=int, data=False) + assert_edges_equal(G.edges(), [(1, 2), (2, 3)]) + + StringIO = io.StringIO(s) + G = nx.read_edgelist(StringIO, nodetype=int, data=True) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) + + def test_read_edgelist_5(self): + s = b"""\ +# comment line +1 2 {'weight':2.0, 'color':'green'} +# comment line 
+2 3 {'weight':3.0, 'color':'red'} +""" + bytesIO = io.BytesIO(s) + G = nx.read_edgelist(bytesIO, nodetype=int, data=False) + assert_edges_equal(G.edges(), [(1, 2), (2, 3)]) + + bytesIO = io.BytesIO(s) + G = nx.read_edgelist(bytesIO, nodetype=int, data=True) + assert_edges_equal( + G.edges(data=True), + [ + (1, 2, {"weight": 2.0, "color": "green"}), + (2, 3, {"weight": 3.0, "color": "red"}), + ], + ) + + def test_read_edgelist_6(self): + s = b"""\ +# comment line +1, 2, {'weight':2.0, 'color':'green'} +# comment line +2, 3, {'weight':3.0, 'color':'red'} +""" + bytesIO = io.BytesIO(s) + G = nx.read_edgelist(bytesIO, nodetype=int, data=False, delimiter=",") + assert_edges_equal(G.edges(), [(1, 2), (2, 3)]) + + bytesIO = io.BytesIO(s) + G = nx.read_edgelist(bytesIO, nodetype=int, data=True, delimiter=",") + assert_edges_equal( + G.edges(data=True), + [ + (1, 2, {"weight": 2.0, "color": "green"}), + (2, 3, {"weight": 3.0, "color": "red"}), + ], + ) def test_write_edgelist_1(self): fh = io.BytesIO() @@ -72,7 +148,7 @@ def test_write_edgelist_1(self): G.add_edges_from([(1, 2), (2, 3)]) nx.write_edgelist(G, fh, data=False) fh.seek(0) - assert_equal(fh.read(), b"1 2\n2 3\n") + assert fh.read() == b"1 2\n2 3\n" def test_write_edgelist_2(self): fh = io.BytesIO() @@ -80,7 +156,7 @@ def test_write_edgelist_2(self): G.add_edges_from([(1, 2), (2, 3)]) nx.write_edgelist(G, fh, data=True) fh.seek(0) - assert_equal(fh.read(), b"1 2 {}\n2 3 {}\n") + assert fh.read() == b"1 2 {}\n2 3 {}\n" def test_write_edgelist_3(self): fh = io.BytesIO() @@ -89,26 +165,22 @@ def test_write_edgelist_3(self): G.add_edge(2, 3, weight=3.0) nx.write_edgelist(G, fh, data=True) fh.seek(0) - assert_equal(fh.read(), b"1 2 {'weight': 2.0}\n2 3 {'weight': 3.0}\n") + assert fh.read() == b"1 2 {'weight': 2.0}\n2 3 {'weight': 3.0}\n" def test_write_edgelist_4(self): fh = io.BytesIO() G = nx.OrderedGraph() G.add_edge(1, 2, weight=2.0) G.add_edge(2, 3, weight=3.0) - nx.write_edgelist(G, fh, data=[('weight')]) + nx.write_edgelist(G, fh, data=[("weight")]) fh.seek(0) - assert_equal(fh.read(), b"1 2 2.0\n2 3 3.0\n") + assert fh.read() == b"1 2 2.0\n2 3 3.0\n" def test_unicode(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() nx.write_edgelist(G, fname) H = nx.read_edgelist(fname) @@ -118,33 +190,24 @@ def test_unicode(self): def test_latin1_issue(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() - assert_raises(UnicodeEncodeError, - nx.write_edgelist, - G, fname, encoding='latin-1') + pytest.raises( + UnicodeEncodeError, nx.write_edgelist, G, fname, encoding="latin-1" + ) os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() - try: # Python 3.x - blurb = chr(1245) # just to trigger the 
exception - name1 = 'Bj' + chr(246) + 'rk' - name2 = chr(220) + 'ber' - except ValueError: # Python 2.6+ - name1 = 'Bj' + unichr(246) + 'rk' - name2 = unichr(220) + 'ber' - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() - nx.write_edgelist(G, fname, encoding='latin-1') - H = nx.read_edgelist(fname, encoding='latin-1') + nx.write_edgelist(G, fname, encoding="latin-1") + H = nx.read_edgelist(fname, encoding="latin-1") assert_graphs_equal(G, H) os.close(fd) os.unlink(fname) @@ -155,8 +218,8 @@ def test_edgelist_graph(self): nx.write_edgelist(G, fname) H = nx.read_edgelist(fname) H2 = nx.read_edgelist(fname) - assert_not_equal(H, H2) # they should be different graphs - G.remove_node('g') # isolated nodes are not written in edgelist + assert H != H2 # they should be different graphs + G.remove_node("g") # isolated nodes are not written in edgelist assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -168,8 +231,8 @@ def test_edgelist_digraph(self): nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, create_using=nx.DiGraph()) H2 = nx.read_edgelist(fname, create_using=nx.DiGraph()) - assert_not_equal(H, H2) # they should be different graphs - G.remove_node('g') # isolated nodes are not written in edgelist + assert H != H2 # they should be different graphs + G.remove_node("g") # isolated nodes are not written in edgelist assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -193,7 +256,7 @@ def test_edgelist_multigraph(self): nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -205,7 +268,7 @@ def test_edgelist_multidigraph(self): nx.write_edgelist(G, fname) H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph()) H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph()) - assert_not_equal(H, H2) # they should be different graphs + assert H != H2 # they should be different graphs assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) diff --git a/networkx/readwrite/tests/test_gexf.py b/networkx/readwrite/tests/test_gexf.py index e05ffd0..9b43794 100644 --- a/networkx/readwrite/tests/test_gexf.py +++ b/networkx/readwrite/tests/test_gexf.py @@ -1,22 +1,17 @@ -#!/usr/bin/env python import io +import sys import time -from nose import SkipTest -from nose.tools import * +import pytest import networkx as nx -class TestGEXF(object): +class TestGEXF: @classmethod - def setupClass(cls): - try: - import xml.etree.ElementTree - except ImportError: - raise SkipTest('xml.etree.ElementTree not available.') + def setup_class(cls): + _ = pytest.importorskip("xml.etree.ElementTree") - def setUp(self): - self.simple_directed_data = """ + cls.simple_directed_data = """ @@ -29,16 +24,17 @@ def setUp(self): """ - self.simple_directed_graph = nx.DiGraph() - self.simple_directed_graph.add_node('0', label='Hello') - self.simple_directed_graph.add_node('1', label='World') - self.simple_directed_graph.add_edge('0', '1', id='0') + cls.simple_directed_graph = nx.DiGraph() 
+ cls.simple_directed_graph.add_node("0", label="Hello") + cls.simple_directed_graph.add_node("1", label="World") + cls.simple_directed_graph.add_edge("0", "1", id="0") - self.simple_directed_fh = \ - io.BytesIO(self.simple_directed_data.encode('UTF-8')) + cls.simple_directed_fh = io.BytesIO(cls.simple_directed_data.encode("UTF-8")) - self.attribute_data = """ - + cls.attribute_data = """\ + Gephi.org A Web network @@ -82,7 +78,7 @@ def setUp(self): - + @@ -91,32 +87,32 @@ def setUp(self): """ - self.attribute_graph = nx.DiGraph() - self.attribute_graph.graph['node_default'] = {'frog': True} - self.attribute_graph.add_node('0', - label='Gephi', - url='https://gephi.org', - indegree=1, frog=False) - self.attribute_graph.add_node('1', - label='Webatlas', - url='http://webatlas.fr', - indegree=2, frog=False) - self.attribute_graph.add_node('2', - label='RTGI', - url='http://rtgi.fr', - indegree=1, frog=True) - self.attribute_graph.add_node('3', - label='BarabasiLab', - url='http://barabasilab.com', - indegree=1, frog=True) - self.attribute_graph.add_edge('0', '1', id='0') - self.attribute_graph.add_edge('0', '2', id='1') - self.attribute_graph.add_edge('1', '0', id='2') - self.attribute_graph.add_edge('2', '1', id='3') - self.attribute_graph.add_edge('0', '3', id='4') - self.attribute_fh = io.BytesIO(self.attribute_data.encode('UTF-8')) - - self.simple_undirected_data = """ + cls.attribute_graph = nx.DiGraph() + cls.attribute_graph.graph["node_default"] = {"frog": True} + cls.attribute_graph.add_node( + "0", label="Gephi", url="https://gephi.org", indegree=1, frog=False + ) + cls.attribute_graph.add_node( + "1", label="Webatlas", url="http://webatlas.fr", indegree=2, frog=False + ) + cls.attribute_graph.add_node( + "2", label="RTGI", url="http://rtgi.fr", indegree=1, frog=True + ) + cls.attribute_graph.add_node( + "3", + label="BarabasiLab", + url="http://barabasilab.com", + indegree=1, + frog=True, + ) + cls.attribute_graph.add_edge("0", "1", id="0", label="foo") + cls.attribute_graph.add_edge("0", "2", id="1") + cls.attribute_graph.add_edge("1", "0", id="2") + cls.attribute_graph.add_edge("2", "1", id="3") + cls.attribute_graph.add_edge("0", "3", id="4") + cls.attribute_fh = io.BytesIO(cls.attribute_data.encode("UTF-8")) + + cls.simple_undirected_data = """ @@ -129,20 +125,21 @@ def setUp(self): """ - self.simple_undirected_graph = nx.Graph() - self.simple_undirected_graph.add_node('0', label='Hello') - self.simple_undirected_graph.add_node('1', label='World') - self.simple_undirected_graph.add_edge('0', '1', id='0') + cls.simple_undirected_graph = nx.Graph() + cls.simple_undirected_graph.add_node("0", label="Hello") + cls.simple_undirected_graph.add_node("1", label="World") + cls.simple_undirected_graph.add_edge("0", "1", id="0") - self.simple_undirected_fh = io.BytesIO(self.simple_undirected_data.encode('UTF-8')) + cls.simple_undirected_fh = io.BytesIO( + cls.simple_undirected_data.encode("UTF-8") + ) def test_read_simple_directed_graphml(self): G = self.simple_directed_graph H = nx.read_gexf(self.simple_directed_fh) - assert_equal(sorted(G.nodes()), sorted(H.nodes())) - assert_equal(sorted(G.edges()), sorted(H.edges())) - assert_equal(sorted(G.edges(data=True)), - sorted(H.edges(data=True))) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) self.simple_directed_fh.seek(0) def test_write_read_simple_directed_graphml(self): @@ -151,29 +148,28 @@ def 
test_write_read_simple_directed_graphml(self): nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh) - assert_equal(sorted(G.nodes()), sorted(H.nodes())) - assert_equal(sorted(G.edges()), sorted(H.edges())) - assert_equal(sorted(G.edges(data=True)), - sorted(H.edges(data=True))) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) self.simple_directed_fh.seek(0) def test_read_simple_undirected_graphml(self): G = self.simple_undirected_graph H = nx.read_gexf(self.simple_undirected_fh) - assert_equal(sorted(G.nodes()), sorted(H.nodes())) - assert_equal( - sorted(sorted(e) for e in G.edges()), - sorted(sorted(e) for e in H.edges())) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) self.simple_undirected_fh.seek(0) def test_read_attribute_graphml(self): G = self.attribute_graph H = nx.read_gexf(self.attribute_fh) - assert_equal(sorted(G.nodes(True)), sorted(H.nodes(data=True))) + assert sorted(G.nodes(True)) == sorted(H.nodes(data=True)) ge = sorted(G.edges(data=True)) he = sorted(H.edges(data=True)) for a, b in zip(ge, he): - assert_equal(a, b) + assert a == b self.attribute_fh.seek(0) def test_directed_edge_in_undirected(self): @@ -190,8 +186,8 @@ def test_directed_edge_in_undirected(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_gexf, fh) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_gexf, fh) def test_undirected_edge_in_directed(self): s = """ @@ -207,8 +203,8 @@ def test_undirected_edge_in_directed(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_gexf, fh) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_gexf, fh) def test_key_raises(self): s = """ @@ -228,8 +224,8 @@ def test_key_raises(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_gexf, fh) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_gexf, fh) def test_relabel(self): s = """ @@ -245,30 +241,30 @@ def test_relabel(self): """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_gexf(fh, relabel=True) - assert_equal(sorted(G.nodes()), ["Hello", "Word"]) + assert sorted(G.nodes()) == ["Hello", "Word"] def test_default_attribute(self): G = nx.Graph() - G.add_node(1, label='1', color='green') + G.add_node(1, label="1", color="green") nx.add_path(G, [0, 1, 2, 3]) G.add_edge(1, 2, foo=3) - G.graph['node_default'] = {'color': 'yellow'} - G.graph['edge_default'] = {'foo': 7} + G.graph["node_default"] = {"color": "yellow"} + G.graph["edge_default"] = {"foo": 7} fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) - assert_equal(sorted(G.nodes()), sorted(H.nodes())) - assert_equal( - sorted(sorted(e) for e in G.edges()), - sorted(sorted(e) for e in H.edges())) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) # Reading a gexf graph always sets mode attribute to either # 'static' or 'dynamic'. Remove the mode attribute from the # read graph for the sake of comparing remaining attributes. 
- del H.graph['mode'] - assert_equal(G.graph, H.graph) + del H.graph["mode"] + assert G.graph == H.graph def test_serialize_ints_to_strings(self): G = nx.Graph() @@ -277,26 +273,41 @@ def test_serialize_ints_to_strings(self): nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) - assert_equal(list(H), [7]) - assert_equal(H.nodes[7]['label'], '77') + assert list(H) == [7] + assert H.nodes[7]["label"] == "77" + + # FIXME: We should test xml without caring about their order This is causing a + # problem b/c of a change in Python 3.8 + # + # "Prior to Python 3.8, the serialisation order of the XML attributes of + # elements was artificially made predictable by sorting the attributes by their + # name. Based on the now guaranteed ordering of dicts, this arbitrary + # reordering was removed in Python 3.8 to preserve the order in which + # attributes were originally parsed or created by user code." + # + # https://docs.python.org/3.8/library/xml.etree.elementtree.html + # https://bugs.python.org/issue34160 def test_write_with_node_attributes(self): # Addresses #673. G = nx.OrderedGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3)]) for i in range(4): - G.nodes[i]['id'] = i - G.nodes[i]['label'] = i - G.nodes[i]['pid'] = i - G.nodes[i]['start'] = i - G.nodes[i]['end'] = i + 1 + G.nodes[i]["id"] = i + G.nodes[i]["label"] = i + G.nodes[i]["pid"] = i + G.nodes[i]["start"] = i + G.nodes[i]["end"] = i + 1 - expected = """ + if sys.version_info < (3, 8): + expected = f""" + + NetworkX {nx.__version__} + - - NetworkX {} - {} - @@ -309,9 +320,189 @@ def test_write_with_node_attributes(self): -""".format(nx.__version__, time.strftime('%d/%m/%Y')) - obtained = '\n'.join(nx.generate_gexf(G)) - assert_equal(expected, obtained) +""" + else: + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + +""" + obtained = "\n".join(nx.generate_gexf(G)) + assert expected == obtained + + def test_edge_id_construct(self): + G = nx.Graph() + G.add_edges_from([(0, 1, {"id": 0}), (1, 2, {"id": 2}), (2, 3)]) + + if sys.version_info < (3, 8): + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + +""" + else: + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + +""" + + obtained = "\n".join(nx.generate_gexf(G)) + assert expected == obtained + + def test_numpy_type(self): + G = nx.path_graph(4) + try: + import numpy + except ImportError: + return + nx.set_node_attributes(G, {n: n for n in numpy.arange(4)}, "number") + G[0][1]["edge-number"] = numpy.float64(1.1) + + if sys.version_info < (3, 8): + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + else: + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + obtained = "\n".join(nx.generate_gexf(G)) + assert expected == obtained def test_bool(self): G = nx.Graph() @@ -320,4 +511,148 @@ def test_bool(self): nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) - assert_equal(H.nodes[1]['testattr'], True) + assert H.nodes[1]["testattr"] + + # Test for NaN, INF and -INF + def test_specials(self): + from math import isnan + + inf, nan = float("inf"), float("nan") + G = nx.Graph() + G.add_node(1, testattr=inf, strdata="inf", key="a") + G.add_node(2, testattr=nan, strdata="nan", key="b") + G.add_node(3, testattr=-inf, strdata="-inf", key="c") + + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + filetext = fh.read() + fh.seek(0) + 
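# Read the same buffer back to check that the special float values survive a round trip. +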
H = nx.read_gexf(fh, node_type=int) + + assert b"INF" in filetext + assert b"NaN" in filetext + assert b"-INF" in filetext + + assert H.nodes[1]["testattr"] == inf + assert isnan(H.nodes[2]["testattr"]) + assert H.nodes[3]["testattr"] == -inf + + assert H.nodes[1]["strdata"] == "inf" + assert H.nodes[2]["strdata"] == "nan" + assert H.nodes[3]["strdata"] == "-inf" + + assert H.nodes[1]["networkx_key"] == "a" + assert H.nodes[2]["networkx_key"] == "b" + assert H.nodes[3]["networkx_key"] == "c" + + def test_simple_list(self): + G = nx.Graph() + list_value = [(1, 2, 3), (9, 1, 2)] + G.add_node(1, key=list_value) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert H.nodes[1]["networkx_key"] == list_value + + def test_dynamic_mode(self): + G = nx.Graph() + G.add_node(1, label="1", color="green") + G.graph["mode"] = "dynamic" + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_multigraph_with_missing_attributes(self): + G = nx.MultiGraph() + G.add_node(0, label="1", color="green") + G.add_node(1, label="2", color="green") + G.add_edge(0, 1, id="0", wight=3, type="undirected", start=0, end=1) + G.add_edge(0, 1, id="1", label="foo", start=0, end=1) + G.add_edge(0, 1) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_missing_viz_attributes(self): + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["viz"] = {"size": 54} + G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1, "z": 0} + G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256} + G.nodes[0]["viz"]["shape"] = "http://random.url" + G.nodes[0]["viz"]["thickness"] = 2 + fh = io.BytesIO() + nx.write_gexf(G, fh, version="1.1draft") + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + # Second graph for the other branch + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["viz"] = {"size": 54} + G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1, "z": 0} + G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256, "a": 0.5} + G.nodes[0]["viz"]["shape"] = "ftp://random.url" + G.nodes[0]["viz"]["thickness"] = 2 + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_slice_and_spell(self): + # Test spell first, so version = 1.2 + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["spells"] = [(1, 2)] + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["slices"] = [(1, 2)] + fh = io.BytesIO() + nx.write_gexf(G, fh, version="1.1draft") + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + 
sorted(e) for e in H.edges() + ) + + def test_add_parent(self): + G = nx.Graph() + G.add_node(0, label="1", color="green", parents=[1, 2]) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) diff --git a/networkx/readwrite/tests/test_gml.py b/networkx/readwrite/tests/test_gml.py index aab4df3..d18b594 100644 --- a/networkx/readwrite/tests/test_gml.py +++ b/networkx/readwrite/tests/test_gml.py @@ -1,30 +1,19 @@ -#!/usr/bin/env python -# encoding: utf-8 - from ast import literal_eval import codecs +from contextlib import contextmanager import io -from nose.tools import * -from nose import SkipTest +import pytest import networkx as nx from networkx.readwrite.gml import literal_stringizer, literal_destringizer import os import tempfile +from textwrap import dedent -try: - unicode -except NameError: - unicode = str -try: - unichr -except NameError: - unichr = chr - - -class TestGraph(object): - def setUp(self): - self.simple_data = """Creator "me" +class TestGraph: + @classmethod + def setup_class(cls): + cls.simple_data = """Creator "me" Version "xx" graph [ comment "This is a sample graph" @@ -152,32 +141,36 @@ def test_parse_gml_cytoscape_bug(self): nx.parse_gml(cytoscape_example) def test_parse_gml(self): - G = nx.parse_gml(self.simple_data, label='label') - assert_equals(sorted(G.nodes()), - ['Node 1', 'Node 2', 'Node 3']) - assert_equals([e for e in sorted(G.edges())], - [('Node 1', 'Node 2'), - ('Node 2', 'Node 3'), - ('Node 3', 'Node 1')]) - - assert_equals([e for e in sorted(G.edges(data=True))], - [('Node 1', 'Node 2', - {'color': {'line': 'blue', 'thickness': 3}, - 'label': 'Edge from node 1 to node 2'}), - ('Node 2', 'Node 3', - {'label': 'Edge from node 2 to node 3'}), - ('Node 3', 'Node 1', - {'label': 'Edge from node 3 to node 1'})]) + G = nx.parse_gml(self.simple_data, label="label") + assert sorted(G.nodes()) == ["Node 1", "Node 2", "Node 3"] + assert [e for e in sorted(G.edges())] == [ + ("Node 1", "Node 2"), + ("Node 2", "Node 3"), + ("Node 3", "Node 1"), + ] + + assert [e for e in sorted(G.edges(data=True))] == [ + ( + "Node 1", + "Node 2", + { + "color": {"line": "blue", "thickness": 3}, + "label": "Edge from node 1 to node 2", + }, + ), + ("Node 2", "Node 3", {"label": "Edge from node 2 to node 3"}), + ("Node 3", "Node 1", {"label": "Edge from node 3 to node 1"}), + ] def test_read_gml(self): (fd, fname) = tempfile.mkstemp() - fh = open(fname, 'w') + fh = open(fname, "w") fh.write(self.simple_data) fh.close() - Gin = nx.read_gml(fname, label='label') - G = nx.parse_gml(self.simple_data, label='label') - assert_equals(sorted(G.nodes(data=True)), sorted(Gin.nodes(data=True))) - assert_equals(sorted(G.edges(data=True)), sorted(Gin.edges(data=True))) + Gin = nx.read_gml(fname, label="label") + G = nx.parse_gml(self.simple_data, label="label") + assert sorted(G.nodes(data=True)) == sorted(Gin.nodes(data=True)) + assert sorted(G.edges(data=True)) == sorted(Gin.edges(data=True)) os.close(fd) os.unlink(fname) @@ -191,8 +184,8 @@ def test_labels_are_strings(self): ]""" G = nx.Graph() G.add_node(1203) - data = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer)) - assert_equal(data, answer) + data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer)) + assert data == answer def test_relabel_duplicate(self): data = """ @@ -212,17 +205,16 @@ def test_relabel_duplicate(self): ] ] """ - fh = 
io.BytesIO(data.encode('UTF-8')) + fh = io.BytesIO(data.encode("UTF-8")) fh.seek(0) - assert_raises( - nx.NetworkXError, nx.read_gml, fh, label='label') + pytest.raises(nx.NetworkXError, nx.read_gml, fh, label="label") def test_tuplelabels(self): # https://github.com/networkx/networkx/pull/1048 # Writing tuple labels to GML failed. G = nx.OrderedGraph() G.add_edge((0, 1), (1, 0)) - data = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer)) + data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer)) answer = """graph [ node [ id 0 @@ -237,20 +229,20 @@ def test_tuplelabels(self): target 1 ] ]""" - assert_equal(data, answer) + assert data == answer def test_quotes(self): # https://github.com/networkx/networkx/issues/1061 # Encoding quotes as HTML entities. G = nx.path_graph(1) G.name = "path_graph(1)" - attr = 'This is "quoted" and this is a copyright: ' + unichr(169) - G.nodes[0]['demo'] = attr + attr = 'This is "quoted" and this is a copyright: ' + chr(169) + G.nodes[0]["demo"] = attr fobj = tempfile.NamedTemporaryFile() nx.write_gml(G, fobj) fobj.seek(0) # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode('ascii') + data = fobj.read().strip().decode("ascii") answer = """graph [ name "path_graph(1)" node [ id 0 label "0" demo "This is &#34;quoted&#34; and this is a copyright: &#169;" ] ]""" - assert_equal(data, answer) + assert data == answer def test_unicode_node(self): - node = 'node' + unichr(169) + node = "node" + chr(169) G = nx.Graph() G.add_node(node) fobj = tempfile.NamedTemporaryFile() nx.write_gml(G, fobj) fobj.seek(0) # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode('ascii') + data = fobj.read().strip().decode("ascii") answer = """graph [ node [ id 0 label "node&#169;" ] ]""" - assert_equal(data, answer) + assert data == answer def test_float_label(self): node = 1.0 @@ -286,41 +278,41 @@ def test_float_label(self): nx.write_gml(G, fobj) fobj.seek(0) # Should be bytes in 2.x and 3.x - data = fobj.read().strip().decode('ascii') + data = fobj.read().strip().decode("ascii") answer = """graph [ node [ id 0 label "1.0" ] ]""" - assert_equal(data, answer) + assert data == answer def test_name(self): G = nx.parse_gml('graph [ name "x" node [ id 0 label "x" ] ]') - assert_equal('x', G.graph['name']) + assert "x" == G.graph["name"] G = nx.parse_gml('graph [ node [ id 0 label "x" ] ]') - assert_equal('', G.name) - assert_not_in('name', G.graph) + assert "" == G.name + assert "name" not in G.graph def test_graph_types(self): for directed in [None, False, True]: for multigraph in [None, False, True]: - gml = 'graph [' + gml = "graph [" if directed is not None: - gml += ' directed ' + str(int(directed)) + gml += " directed " + str(int(directed)) if multigraph is not None: - gml += ' multigraph ' + str(int(multigraph)) + gml += " multigraph " + str(int(multigraph)) gml += ' node [ id 0 label "0" ]' - gml += ' edge [ source 0 target 0 ]' - gml += ' ]' + gml += " edge [ source 0 target 0 ]" + gml += " ]" G = nx.parse_gml(gml) - assert_equal(bool(directed), G.is_directed()) - assert_equal(bool(multigraph), G.is_multigraph()) - gml = 'graph [\n' + assert bool(directed) == G.is_directed() + assert bool(multigraph) == G.is_multigraph() + gml = "graph [\n" if directed is True: - gml += ' directed 1\n' + gml += " directed 1\n" if multigraph is True: - gml += ' multigraph 1\n' + gml += " multigraph 1\n" gml += """ node [ id 0 label "0" @@ -330,129 +322,291 @@ def test_graph_types(self): target 0 """ if multigraph: - gml += ' key 0\n' - 
gml += ' ]\n]' - assert_equal(gml, '\n'.join(nx.generate_gml(G))) + gml += " key 0\n" + gml += " ]\n]" + assert gml == "\n".join(nx.generate_gml(G)) def test_data_types(self): - data = [True, False, 10 ** 20, -2e33, "'", '"&&&""', - [{(b'\xfd',): '\x7f', unichr(0x4444): (1, 2)}, (2, "3")]] - try: - data.append(unichr(0x14444)) # fails under IronPython - except ValueError: - data.append(unichr(0x1444)) - try: - data.append(literal_eval('{2.3j, 1 - 2.3j, ()}')) # fails under Python 2.7 + data = [ + True, + False, + 10 ** 20, + -2e33, + "'", + '"&&&""', + [{(b"\xfd",): "\x7f", chr(0x4444): (1, 2)}, (2, "3")], + ] + try: # fails under IronPython + data.append(chr(0x14444)) except ValueError: - data.append([2.3j, 1 - 2.3j, ()]) + data.append(chr(0x1444)) + data.append(literal_eval("{2.3j, 1 - 2.3j, ()}")) G = nx.Graph() G.name = data - G.graph['data'] = data + G.graph["data"] = data G.add_node(0, int=-1, data=dict(data=data)) G.add_edge(0, 0, float=-2.5, data=data) - gml = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer)) + gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer)) G = nx.parse_gml(gml, destringizer=literal_destringizer) - assert_equal(data, G.name) - assert_equal({'name': data, unicode('data'): data}, G.graph) - assert_equal(list(G.nodes(data=True)), - [(0, dict(int=-1, data=dict(data=data)))]) - assert_equal(list(G.edges(data=True)), [(0, 0, dict(float=-2.5, data=data))]) + assert data == G.name + assert {"name": data, "data": data} == G.graph + assert list(G.nodes(data=True)) == [(0, dict(int=-1, data=dict(data=data)))] + assert list(G.edges(data=True)) == [(0, 0, dict(float=-2.5, data=data))] G = nx.Graph() - G.graph['data'] = 'frozenset([1, 2, 3])' + G.graph["data"] = "frozenset([1, 2, 3])" G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval) - assert_equal(G.graph['data'], 'frozenset([1, 2, 3])') + assert G.graph["data"] == "frozenset([1, 2, 3])" def test_escape_unescape(self): gml = """graph [ name "&"䑄��&unknown;" ]""" G = nx.parse_gml(gml) - assert_equal( - '&"\x0f' + unichr(0x4444) + '��&unknown;', - G.name) - gml = '\n'.join(nx.generate_gml(G)) - assert_equal("""graph [ - name "&"䑄&#1234567890;&#x1234567890abcdef;&unknown;" -]""", gml) + assert ( + '&"\x0f' + chr(0x4444) + "��&unknown;" + == G.name + ) + gml = "\n".join(nx.generate_gml(G)) + alnu = "#1234567890;&#x1234567890abcdef" + answer = ( + """graph [ + name "&"䑄&""" + + alnu + + """;&unknown;" +]""" + ) + assert answer == gml def test_exceptions(self): - assert_raises(ValueError, literal_destringizer, '(') - assert_raises(ValueError, literal_destringizer, 'frozenset([1, 2, 3])') - assert_raises(ValueError, literal_destringizer, literal_destringizer) - assert_raises(ValueError, literal_stringizer, frozenset([1, 2, 3])) - assert_raises(ValueError, literal_stringizer, literal_stringizer) + pytest.raises(ValueError, literal_destringizer, "(") + pytest.raises(ValueError, literal_destringizer, "frozenset([1, 2, 3])") + pytest.raises(ValueError, literal_destringizer, literal_destringizer) + pytest.raises(ValueError, literal_stringizer, frozenset([1, 2, 3])) + pytest.raises(ValueError, literal_stringizer, literal_stringizer) with tempfile.TemporaryFile() as f: - f.write(codecs.BOM_UTF8 + 'graph[]'.encode('ascii')) + f.write(codecs.BOM_UTF8 + b"graph[]") f.seek(0) - assert_raises(nx.NetworkXError, nx.read_gml, f) + pytest.raises(nx.NetworkXError, nx.read_gml, f) def assert_parse_error(gml): - assert_raises(nx.NetworkXError, nx.parse_gml, gml) + pytest.raises(nx.NetworkXError, nx.parse_gml, 
gml) - assert_parse_error(['graph [\n\n', unicode(']')]) - assert_parse_error('') + assert_parse_error(["graph [\n\n", "]"]) + assert_parse_error("") assert_parse_error('Creator ""') - assert_parse_error('0') - assert_parse_error('graph ]') - assert_parse_error('graph [ 1 ]') - assert_parse_error('graph [ 1.E+2 ]') + assert_parse_error("0") + assert_parse_error("graph ]") + assert_parse_error("graph [ 1 ]") + assert_parse_error("graph [ 1.E+2 ]") assert_parse_error('graph [ "A" ]') - assert_parse_error('graph [ ] graph ]') - assert_parse_error('graph [ ] graph [ ]') - assert_parse_error('graph [ data [1, 2, 3] ]') - assert_parse_error('graph [ node [ ] ]') - assert_parse_error('graph [ node [ id 0 ] ]') - nx.parse_gml('graph [ node [ id "a" ] ]', label='id') - assert_parse_error( - 'graph [ node [ id 0 label 0 ] node [ id 0 label 1 ] ]') + assert_parse_error("graph [ ] graph ]") + assert_parse_error("graph [ ] graph [ ]") + assert_parse_error("graph [ data [1, 2, 3] ]") + assert_parse_error("graph [ node [ ] ]") + assert_parse_error("graph [ node [ id 0 ] ]") + nx.parse_gml('graph [ node [ id "a" ] ]', label="id") + assert_parse_error("graph [ node [ id 0 label 0 ] node [ id 0 label 1 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] node [ id 1 label 0 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ source 0 ] ]") + nx.parse_gml("graph [edge [ source 0 target 0 ] node [ id 0 label 0 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ source 1 target 0 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ source 0 target 1 ] ]") assert_parse_error( - 'graph [ node [ id 0 label 0 ] node [ id 1 label 0 ] ]') - assert_parse_error('graph [ node [ id 0 label 0 ] edge [ ] ]') - assert_parse_error('graph [ node [ id 0 label 0 ] edge [ source 0 ] ]') + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 ] edge [ source 1 target 0 ] ]" + ) nx.parse_gml( - 'graph [edge [ source 0 target 0 ] node [ id 0 label 0 ] ]') - assert_parse_error( - 'graph [ node [ id 0 label 0 ] edge [ source 1 target 0 ] ]') - assert_parse_error( - 'graph [ node [ id 0 label 0 ] edge [ source 0 target 1 ] ]') - assert_parse_error( - 'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] ' - 'edge [ source 0 target 1 ] edge [ source 1 target 0 ] ]') + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 ] edge [ source 1 target 0 ] " + "directed 1 ]" + ) nx.parse_gml( - 'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] ' - 'edge [ source 0 target 1 ] edge [ source 1 target 0 ] ' - 'directed 1 ]') + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 ] edge [ source 0 target 1 ]" + "multigraph 1 ]" + ) nx.parse_gml( - 'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] ' - 'edge [ source 0 target 1 ] edge [ source 0 target 1 ]' - 'multigraph 1 ]') - nx.parse_gml( - 'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] ' - 'edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 ]' - 'multigraph 1 ]') + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 ]" + "multigraph 1 ]" + ) assert_parse_error( - 'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] ' - 'edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 key 0 ]' - 'multigraph 1 ]') + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 
key 0 ]" + "multigraph 1 ]" + ) + nx.parse_gml( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 key 0 ] edge [ source 1 target 0 key 0 ]" + "directed 1 multigraph 1 ]" + ) + + # Tests for string convertible alphanumeric id and label values + nx.parse_gml("graph [edge [ source a target a ] node [ id a label b ] ]") nx.parse_gml( + "graph [ node [ id n42 label 0 ] node [ id x43 label 1 ]" + "edge [ source n42 target x43 key 0 ]" + "edge [ source x43 target n42 key 0 ]" + "directed 1 multigraph 1 ]" + ) + assert_parse_error( + "graph [edge [ source u'u\4200' target u'u\4200' ] " + + "node [ id u'u\4200' label b ] ]" + ) def assert_generate_error(*args, **kwargs): - assert_raises(nx.NetworkXError, - lambda: list(nx.generate_gml(*args, **kwargs))) + pytest.raises( + nx.NetworkXError, lambda: list(nx.generate_gml(*args, **kwargs)) + ) G = nx.Graph() G.graph[3] = 3 assert_generate_error(G) G = nx.Graph() - G.graph['3'] = 3 + G.graph["3"] = 3 assert_generate_error(G) G = nx.Graph() - G.graph['data'] = frozenset([1, 2, 3]) + G.graph["data"] = frozenset([1, 2, 3]) assert_generate_error(G, stringizer=literal_stringizer) G = nx.Graph() - G.graph['data'] = [] + G.graph["data"] = [] assert_generate_error(G) assert_generate_error(G, stringizer=len) + + def test_label_kwarg(self): + G = nx.parse_gml(self.simple_data, label="id") + assert sorted(G.nodes) == [1, 2, 3] + labels = [G.nodes[n]["label"] for n in sorted(G.nodes)] + assert labels == ["Node 1", "Node 2", "Node 3"] + + G = nx.parse_gml(self.simple_data, label=None) + assert sorted(G.nodes) == [1, 2, 3] + labels = [G.nodes[n]["label"] for n in sorted(G.nodes)] + assert labels == ["Node 1", "Node 2", "Node 3"] + + def test_outofrange_integers(self): + # GML restricts integers to signed 32-bit values. 
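+ # (Values outside [-2**31, 2**31 - 1] have to be written as strings.)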
+ # Check that we honor this restriction on export + G = nx.Graph() + # Test export for numbers that barely fit or don't fit into 32 bits, + # and 3 numbers in the middle + numbers = { + "toosmall": (-(2 ** 31)) - 1, + "small": -(2 ** 31), + "med1": -4, + "med2": 0, + "med3": 17, + "big": (2 ** 31) - 1, + "toobig": 2 ** 31, + } + G.add_node("Node", **numbers) + + fd, fname = tempfile.mkstemp() + try: + nx.write_gml(G, fname) + # Check that the export wrote the nonfitting numbers as strings + G2 = nx.read_gml(fname) + for attr, value in G2.nodes["Node"].items(): + if attr == "toosmall" or attr == "toobig": + assert type(value) == str + else: + assert type(value) == int + finally: + os.close(fd) + os.unlink(fname) + + +@contextmanager +def byte_file(): + _file_handle = io.BytesIO() + yield _file_handle + _file_handle.seek(0) + + +class TestPropertyLists: + def test_writing_graph_with_multi_element_property_list(self): + g = nx.Graph() + g.add_node("n1", properties=["element", 0, 1, 2.5, True, False]) + with byte_file() as f: + nx.write_gml(g, f) + result = f.read().decode() + + assert result == dedent( + """\ + graph [ + node [ + id 0 + label "n1" + properties "element" + properties 0 + properties 1 + properties 2.5 + properties 1 + properties 0 + ] + ] + """ + ) + + def test_writing_graph_with_one_element_property_list(self): + g = nx.Graph() + g.add_node("n1", properties=["element"]) + with byte_file() as f: + nx.write_gml(g, f) + result = f.read().decode() + + assert result == dedent( + """\ + graph [ + node [ + id 0 + label "n1" + properties "_networkx_list_start" + properties "element" + ] + ] + """ + ) + + def test_reading_graph_with_list_property(self): + with byte_file() as f: + f.write( + dedent( + """ + graph [ + node [ + id 0 + label "n1" + properties "element" + properties 0 + properties 1 + properties 2.5 + ] + ] + """ + ).encode("ascii") + ) + f.seek(0) + graph = nx.read_gml(f) + assert graph.nodes(data=True)["n1"] == {"properties": ["element", 0, 1, 2.5]} + + def test_reading_graph_with_single_element_list_property(self): + with byte_file() as f: + f.write( + dedent( + """ + graph [ + node [ + id 0 + label "n1" + properties "_networkx_list_start" + properties "element" + ] + ] + """ + ).encode("ascii") + ) + f.seek(0) + graph = nx.read_gml(f) + assert graph.nodes(data=True)["n1"] == {"properties": ["element"]} diff --git a/networkx/readwrite/tests/test_gpickle.py b/networkx/readwrite/tests/test_gpickle.py index c03b2ec..e92991d 100644 --- a/networkx/readwrite/tests/test_gpickle.py +++ b/networkx/readwrite/tests/test_gpickle.py @@ -1,24 +1,27 @@ -#!/usr/bin/env python -from nose.tools import assert_equal import os import tempfile import networkx as nx -from networkx.testing.utils import * +from networkx.testing.utils import ( + assert_graphs_equal, + assert_edges_equal, + assert_nodes_equal, +) -class TestGpickle(object): - def setUp(self): +class TestGpickle: + @classmethod + def setup_class(cls): G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] G.add_edges_from(e, width=10) - G.add_node('g', color='green') - G.graph['number'] = 1 + G.add_node("g", color="green") + G.graph["number"] = 1 DG = nx.DiGraph(G) MG = nx.MultiGraph(G) - MG.add_edge('a', 'a') + MG.add_edge("a", "a") MDG = nx.MultiDiGraph(G) - MDG.add_edge('a', 'a') + MDG.add_edge("a", "a") fG = G.copy() fDG = DG.copy() fMG = MG.copy() @@ -27,38 +30,50 @@ def setUp(self): nx.freeze(fDG) 
nx.freeze(fMG) nx.freeze(fMDG) - self.G = G - self.DG = DG - self.MG = MG - self.MDG = MDG - self.fG = fG - self.fDG = fDG - self.fMG = fMG - self.fMDG = fMDG + cls.G = G + cls.DG = DG + cls.MG = MG + cls.MDG = MDG + cls.fG = fG + cls.fDG = fDG + cls.fMG = fMG + cls.fMDG = fMDG def test_gpickle(self): - for G in [self.G, self.DG, self.MG, self.MDG, - self.fG, self.fDG, self.fMG, self.fMDG]: + for G in [ + self.G, + self.DG, + self.MG, + self.MDG, + self.fG, + self.fDG, + self.fMG, + self.fMDG, + ]: (fd, fname) = tempfile.mkstemp() nx.write_gpickle(G, fname) Gin = nx.read_gpickle(fname) - assert_nodes_equal(list(G.nodes(data=True)), - list(Gin.nodes(data=True))) - assert_edges_equal(list(G.edges(data=True)), - list(Gin.edges(data=True))) + assert_nodes_equal(list(G.nodes(data=True)), list(Gin.nodes(data=True))) + assert_edges_equal(list(G.edges(data=True)), list(Gin.edges(data=True))) assert_graphs_equal(G, Gin) os.close(fd) os.unlink(fname) def test_protocol(self): - for G in [self.G, self.DG, self.MG, self.MDG, - self.fG, self.fDG, self.fMG, self.fMDG]: + for G in [ + self.G, + self.DG, + self.MG, + self.MDG, + self.fG, + self.fDG, + self.fMG, + self.fMDG, + ]: with tempfile.TemporaryFile() as f: nx.write_gpickle(G, f, 0) f.seek(0) Gin = nx.read_gpickle(f) - assert_nodes_equal(list(G.nodes(data=True)), - list(Gin.nodes(data=True))) - assert_edges_equal(list(G.edges(data=True)), - list(Gin.edges(data=True))) + assert_nodes_equal(list(G.nodes(data=True)), list(Gin.nodes(data=True))) + assert_edges_equal(list(G.edges(data=True)), list(Gin.edges(data=True))) assert_graphs_equal(G, Gin) diff --git a/networkx/readwrite/tests/test_graph6.py b/networkx/readwrite/tests/test_graph6.py index d7643d2..74d7cea 100644 --- a/networkx/readwrite/tests/test_graph6.py +++ b/networkx/readwrite/tests/test_graph6.py @@ -1,39 +1,32 @@ -from __future__ import division - from io import BytesIO import tempfile -from unittest import TestCase - -from nose.tools import assert_equal +import pytest import networkx as nx import networkx.readwrite.graph6 as g6 from networkx.testing.utils import assert_edges_equal -from networkx.testing.utils import assert_graphs_equal from networkx.testing.utils import assert_nodes_equal -class TestGraph6Utils(object): - +class TestGraph6Utils: def test_n_data_n_conversion(self): for i in [0, 1, 42, 62, 63, 64, 258047, 258048, 7744773, 68719476735]: - assert_equal(g6.data_to_n(g6.n_to_data(i))[0], i) - assert_equal(g6.data_to_n(g6.n_to_data(i))[1], []) - assert_equal(g6.data_to_n(g6.n_to_data(i) + [42, 43])[1], - [42, 43]) + assert g6.data_to_n(g6.n_to_data(i))[0] == i + assert g6.data_to_n(g6.n_to_data(i))[1] == [] + assert g6.data_to_n(g6.n_to_data(i) + [42, 43])[1] == [42, 43] -class TestFromGraph6Bytes(TestCase): - +class TestFromGraph6Bytes: def test_from_graph6_bytes(self): - data = b'DF{' + data = b"DF{" G = nx.from_graph6_bytes(data) assert_nodes_equal(G.nodes(), [0, 1, 2, 3, 4]) - assert_edges_equal(G.edges(), - [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]) + assert_edges_equal( + G.edges(), [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + ) def test_read_equals_from_bytes(self): - data = b'DF{' + data = b"DF{" G = nx.from_graph6_bytes(data) fh = BytesIO(data) Gin = nx.read_graph6(fh) @@ -41,55 +34,54 @@ def test_read_equals_from_bytes(self): assert_edges_equal(G.edges(), Gin.edges()) -class TestReadGraph6(TestCase): - +class TestReadGraph6: def test_read_many_graph6(self): """Test for reading many graphs from a file into a list.""" - data = 
b'DF{\nD`{\nDqK\nD~{\n' + data = b"DF{\nD`{\nDqK\nD~{\n" fh = BytesIO(data) glist = nx.read_graph6(fh) - assert_equal(len(glist), 4) + assert len(glist) == 4 for G in glist: - assert_equal(sorted(G), list(range(5))) + assert sorted(G) == list(range(5)) -class TestWriteGraph6(TestCase): +class TestWriteGraph6: """Unit tests for writing a graph to a file in graph6 format.""" def test_null_graph(self): result = BytesIO() nx.write_graph6(nx.null_graph(), result) - self.assertEqual(result.getvalue(), b'>>graph6<>graph6<>graph6<<@\n') + assert result.getvalue() == b">>graph6<<@\n" def test_complete_graph(self): result = BytesIO() nx.write_graph6(nx.complete_graph(4), result) - self.assertEqual(result.getvalue(), b'>>graph6<>graph6<>graph6<>graph6<>graph6<>graph6<>graph6<>graph6<>graph6<>graph6< +class BaseGraphML: + @classmethod + def setup_class(cls): + cls.simple_directed_data = """ """ - self.simple_directed_graph = nx.DiGraph() - self.simple_directed_graph.add_node('n10') - self.simple_directed_graph.add_edge('n0', 'n2', id='foo') - self.simple_directed_graph.add_edges_from([('n1', 'n2'), - ('n2', 'n3'), - ('n3', 'n5'), - ('n3', 'n4'), - ('n4', 'n6'), - ('n6', 'n5'), - ('n5', 'n7'), - ('n6', 'n8'), - ('n8', 'n7'), - ('n8', 'n9'), - ]) - self.simple_directed_fh = \ - io.BytesIO(self.simple_directed_data.encode('UTF-8')) - - self.attribute_data = """ + cls.simple_directed_graph = nx.DiGraph() + cls.simple_directed_graph.add_node("n10") + cls.simple_directed_graph.add_edge("n0", "n2", id="foo") + cls.simple_directed_graph.add_edge("n0", "n2") + cls.simple_directed_graph.add_edges_from( + [ + ("n1", "n2"), + ("n2", "n3"), + ("n3", "n5"), + ("n3", "n4"), + ("n4", "n6"), + ("n6", "n5"), + ("n5", "n7"), + ("n6", "n8"), + ("n8", "n7"), + ("n8", "n9"), + ] + ) + cls.simple_directed_fh = io.BytesIO(cls.simple_directed_data.encode("UTF-8")) + + cls.attribute_data = """ + cls.attribute_graph = nx.DiGraph(id="G") + cls.attribute_graph.graph["node_default"] = {"color": "yellow"} + cls.attribute_graph.add_node("n0", color="green") + cls.attribute_graph.add_node("n2", color="blue") + cls.attribute_graph.add_node("n3", color="red") + cls.attribute_graph.add_node("n4") + cls.attribute_graph.add_node("n5", color="turquoise") + cls.attribute_graph.add_edge("n0", "n2", id="e0", weight=1.0) + cls.attribute_graph.add_edge("n0", "n1", id="e1", weight=1.0) + cls.attribute_graph.add_edge("n1", "n3", id="e2", weight=2.0) + cls.attribute_graph.add_edge("n3", "n2", id="e3") + cls.attribute_graph.add_edge("n2", "n4", id="e4") + cls.attribute_graph.add_edge("n3", "n5", id="e5") + cls.attribute_graph.add_edge("n5", "n4", id="e6", weight=1.1) + cls.attribute_fh = io.BytesIO(cls.attribute_data.encode("UTF-8")) + + cls.attribute_named_key_ids_data = """ + + + + + + + val1 + val2 + + + val_one + val2 + + + edge_value + + + +""" + cls.attribute_named_key_ids_graph = nx.DiGraph() + cls.attribute_named_key_ids_graph.add_node("0", prop1="val1", prop2="val2") + cls.attribute_named_key_ids_graph.add_node("1", prop1="val_one", prop2="val2") + cls.attribute_named_key_ids_graph.add_edge("0", "1", edge_prop="edge_value") + fh = io.BytesIO(cls.attribute_named_key_ids_data.encode("UTF-8")) + cls.attribute_named_key_ids_fh = fh + + cls.attribute_numeric_type_data = """ + cls.attribute_numeric_type_graph = nx.DiGraph() + cls.attribute_numeric_type_graph.add_node("n0", weight=1) + cls.attribute_numeric_type_graph.add_node("n1", weight=2.0) + cls.attribute_numeric_type_graph.add_edge("n0", "n1", weight=1) + 
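# Mixing an int and a float weight here forces readers to infer a common numeric (double) type. +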
cls.attribute_numeric_type_graph.add_edge("n1", "n1", weight=1.0) + fh = io.BytesIO(cls.attribute_numeric_type_data.encode("UTF-8")) + cls.attribute_numeric_type_fh = fh + + cls.simple_undirected_data = """ """ -# - self.simple_undirected_graph = nx.Graph() - self.simple_undirected_graph.add_node('n10') - self.simple_undirected_graph.add_edge('n0', 'n2', id='foo') - self.simple_undirected_graph.add_edges_from([('n1', 'n2'), - ('n2', 'n3'), - ]) - fh = io.BytesIO(self.simple_undirected_data.encode('UTF-8')) - self.simple_undirected_fh = fh + # + cls.simple_undirected_graph = nx.Graph() + cls.simple_undirected_graph.add_node("n10") + cls.simple_undirected_graph.add_edge("n0", "n2", id="foo") + cls.simple_undirected_graph.add_edges_from([("n1", "n2"), ("n2", "n3")]) + fh = io.BytesIO(cls.simple_undirected_data.encode("UTF-8")) + cls.simple_undirected_fh = fh + + cls.undirected_multigraph_data = """ + + + + + + + + + + +""" + cls.undirected_multigraph = nx.MultiGraph() + cls.undirected_multigraph.add_node("n10") + cls.undirected_multigraph.add_edge("n0", "n2", id="e0") + cls.undirected_multigraph.add_edge("n1", "n2", id="e1") + cls.undirected_multigraph.add_edge("n2", "n1", id="e2") + fh = io.BytesIO(cls.undirected_multigraph_data.encode("UTF-8")) + cls.undirected_multigraph_fh = fh + + cls.undirected_multigraph_no_multiedge_data = """ + + + + + + + + + + +""" + cls.undirected_multigraph_no_multiedge = nx.MultiGraph() + cls.undirected_multigraph_no_multiedge.add_node("n10") + cls.undirected_multigraph_no_multiedge.add_edge("n0", "n2", id="e0") + cls.undirected_multigraph_no_multiedge.add_edge("n1", "n2", id="e1") + cls.undirected_multigraph_no_multiedge.add_edge("n2", "n3", id="e2") + fh = io.BytesIO(cls.undirected_multigraph_no_multiedge_data.encode("UTF-8")) + cls.undirected_multigraph_no_multiedge_fh = fh + + cls.multigraph_only_ids_for_multiedges_data = """ + + + + + + + + + + +""" + cls.multigraph_only_ids_for_multiedges = nx.MultiGraph() + cls.multigraph_only_ids_for_multiedges.add_node("n10") + cls.multigraph_only_ids_for_multiedges.add_edge("n0", "n2") + cls.multigraph_only_ids_for_multiedges.add_edge("n1", "n2", id="e1") + cls.multigraph_only_ids_for_multiedges.add_edge("n2", "n1", id="e2") + fh = io.BytesIO(cls.multigraph_only_ids_for_multiedges_data.encode("UTF-8")) + cls.multigraph_only_ids_for_multiedges_fh = fh class TestReadGraphML(BaseGraphML): def test_read_simple_directed_graphml(self): G = self.simple_directed_graph H = nx.read_graphml(self.simple_directed_fh) - assert_equal(sorted(G.nodes()), sorted(H.nodes())) - assert_equal(sorted(G.edges()), sorted(H.edges())) - assert_equal(sorted(G.edges(data=True)), - sorted(H.edges(data=True))) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) self.simple_directed_fh.seek(0) - I = nx.parse_graphml(self.simple_directed_data) - assert_equal(sorted(G.nodes()), sorted(I.nodes())) - assert_equal(sorted(G.edges()), sorted(I.edges())) - assert_equal(sorted(G.edges(data=True)), - sorted(I.edges(data=True))) + PG = nx.parse_graphml(self.simple_directed_data) + assert sorted(G.nodes()) == sorted(PG.nodes()) + assert sorted(G.edges()) == sorted(PG.edges()) + assert sorted(G.edges(data=True)) == sorted(PG.edges(data=True)) def test_read_simple_undirected_graphml(self): G = self.simple_undirected_graph @@ -199,9 +297,42 @@ def test_read_simple_undirected_graphml(self): assert_edges_equal(G.edges(), H.edges()) 
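+ # Rewind the shared class-level buffer so it can be read again by later assertions.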
self.simple_undirected_fh.seek(0) - I = nx.parse_graphml(self.simple_undirected_data) - assert_nodes_equal(G.nodes(), I.nodes()) - assert_edges_equal(G.edges(), I.edges()) + PG = nx.parse_graphml(self.simple_undirected_data) + assert_nodes_equal(G.nodes(), PG.nodes()) + assert_edges_equal(G.edges(), PG.edges()) + + def test_read_undirected_multigraph_graphml(self): + G = self.undirected_multigraph + H = nx.read_graphml(self.undirected_multigraph_fh) + assert_nodes_equal(G.nodes(), H.nodes()) + assert_edges_equal(G.edges(), H.edges()) + self.undirected_multigraph_fh.seek(0) + + PG = nx.parse_graphml(self.undirected_multigraph_data) + assert_nodes_equal(G.nodes(), PG.nodes()) + assert_edges_equal(G.edges(), PG.edges()) + + def test_read_undirected_multigraph_no_multiedge_graphml(self): + G = self.undirected_multigraph_no_multiedge + H = nx.read_graphml(self.undirected_multigraph_no_multiedge_fh) + assert_nodes_equal(G.nodes(), H.nodes()) + assert_edges_equal(G.edges(), H.edges()) + self.undirected_multigraph_no_multiedge_fh.seek(0) + + PG = nx.parse_graphml(self.undirected_multigraph_no_multiedge_data) + assert_nodes_equal(G.nodes(), PG.nodes()) + assert_edges_equal(G.edges(), PG.edges()) + + def test_read_undirected_multigraph_only_ids_for_multiedges_graphml(self): + G = self.multigraph_only_ids_for_multiedges + H = nx.read_graphml(self.multigraph_only_ids_for_multiedges_fh) + assert_nodes_equal(G.nodes(), H.nodes()) + assert_edges_equal(G.edges(), H.edges()) + self.multigraph_only_ids_for_multiedges_fh.seek(0) + + PG = nx.parse_graphml(self.multigraph_only_ids_for_multiedges_data) + assert_nodes_equal(G.nodes(), PG.nodes()) + assert_edges_equal(G.edges(), PG.edges()) def test_read_attribute_graphml(self): G = self.attribute_graph @@ -210,15 +341,15 @@ def test_read_attribute_graphml(self): ge = sorted(G.edges(data=True)) he = sorted(H.edges(data=True)) for a, b in zip(ge, he): - assert_equal(a, b) + assert a == b self.attribute_fh.seek(0) - I = nx.parse_graphml(self.attribute_data) - assert_equal(sorted(G.nodes(True)), sorted(I.nodes(data=True))) + PG = nx.parse_graphml(self.attribute_data) + assert sorted(G.nodes(True)) == sorted(PG.nodes(data=True)) ge = sorted(G.edges(data=True)) - he = sorted(I.edges(data=True)) + he = sorted(PG.edges(data=True)) for a, b in zip(ge, he): - assert_equal(a, b) + assert a == b def test_directed_edge_in_undirected(self): s = """ @@ -234,9 +365,9 @@ def test_directed_edge_in_undirected(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_graphml, fh) - assert_raises(nx.NetworkXError, nx.parse_graphml, s) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) def test_undirected_edge_in_directed(self): s = """ @@ -252,9 +383,9 @@ def test_undirected_edge_in_directed(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_graphml, fh) - assert_raises(nx.NetworkXError, nx.parse_graphml, s) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) def test_key_raise(self): s = """ @@ -280,9 +411,9 @@ def test_key_raise(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_graphml, fh) - assert_raises(nx.NetworkXError, nx.parse_graphml, s) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, 
nx.parse_graphml, s) def test_hyperedge_raise(self): s = """ @@ -310,9 +441,9 @@ def test_hyperedge_raise(self): """ - fh = io.BytesIO(s.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_graphml, fh) - assert_raises(nx.NetworkXError, nx.parse_graphml, s) + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) def test_multigraph_keys(self): # Test that reading multigraphs uses edge id attributes as keys @@ -329,13 +460,13 @@ def test_multigraph_keys(self): """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_graphml(fh) expected = [("n0", "n1", "e0"), ("n0", "n1", "e1")] - assert_equal(sorted(G.edges(keys=True)), expected) + assert sorted(G.edges(keys=True)) == expected fh.seek(0) H = nx.parse_graphml(s) - assert_equal(sorted(H.edges(keys=True)), expected) + assert sorted(H.edges(keys=True)) == expected def test_preserve_multi_edge_data(self): """ @@ -345,26 +476,42 @@ def test_preserve_multi_edge_data(self): G = nx.MultiGraph() G.add_node(1) G.add_node(2) - G.add_edges_from([ - # edges with no data, no keys: - (1, 2), - # edges with only data: - (1, 2, dict(key='data_key1')), - (1, 2, dict(id='data_id2')), - (1, 2, dict(key='data_key3', id='data_id3')), - # edges with both data and keys: - (1, 2, 103, dict(key='data_key4')), - (1, 2, 104, dict(id='data_id5')), - (1, 2, 105, dict(key='data_key6', id='data_id7')), - ]) + G.add_edges_from( + [ + # edges with no data, no keys: + (1, 2), + # edges with only data: + (1, 2, dict(key="data_key1")), + (1, 2, dict(id="data_id2")), + (1, 2, dict(key="data_key3", id="data_id3")), + # edges with both data and keys: + (1, 2, 103, dict(key="data_key4")), + (1, 2, 104, dict(id="data_id5")), + (1, 2, 105, dict(key="data_key6", id="data_id7")), + ] + ) fh = io.BytesIO() nx.write_graphml(G, fh) fh.seek(0) H = nx.read_graphml(fh, node_type=int) - assert_edges_equal( - G.edges(data=True, keys=True), H.edges(data=True, keys=True) - ) - assert_equal(G._adj, H._adj) + assert_edges_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) + assert G._adj == H._adj + + Gadj = { + str(node): { + str(nbr): {str(ekey): dd for ekey, dd in key_dict.items()} + for nbr, key_dict in nbr_dict.items() + } + for node, nbr_dict in G._adj.items() + } + fh.seek(0) + HH = nx.read_graphml(fh, node_type=str, edge_key_type=str) + assert Gadj == HH._adj + + fh.seek(0) + string_fh = fh.read() + HH = nx.parse_graphml(string_fh, node_type=str, edge_key_type=str) + assert Gadj == HH._adj def test_yfiles_extension(self): data = """ @@ -434,18 +581,30 @@ def test_yfiles_extension(self): """ - fh = io.BytesIO(data.encode('UTF-8')) + fh = io.BytesIO(data.encode("UTF-8")) + G = nx.read_graphml(fh, force_multigraph=True) + assert list(G.edges()) == [("n0", "n1")] + assert G.has_edge("n0", "n1", key="e0") + assert G.nodes["n0"]["label"] == "1" + assert G.nodes["n1"]["label"] == "2" + fh.seek(0) G = nx.read_graphml(fh) - assert_equal(list(G.edges()), [('n0', 'n1')]) - assert_equal(G['n0']['n1']['id'], 'e0') - assert_equal(G.nodes['n0']['label'], '1') - assert_equal(G.nodes['n1']['label'], '2') + assert list(G.edges()) == [("n0", "n1")] + assert G["n0"]["n1"]["id"] == "e0" + assert G.nodes["n0"]["label"] == "1" + assert G.nodes["n1"]["label"] == "2" + + H = nx.parse_graphml(data, force_multigraph=True) + assert list(H.edges()) == [("n0", "n1")] + assert H.has_edge("n0", "n1", key="e0") + assert H.nodes["n0"]["label"] == "1" + assert 
H.nodes["n1"]["label"] == "2" H = nx.parse_graphml(data) - assert_equal(list(H.edges()), [('n0', 'n1')]) - assert_equal(H['n0']['n1']['id'], 'e0') - assert_equal(H.nodes['n0']['label'], '1') - assert_equal(H.nodes['n1']['label'], '2') + assert list(H.edges()) == [("n0", "n1")] + assert H["n0"]["n1"]["id"] == "e0" + assert H.nodes["n0"]["label"] == "1" + assert H.nodes["n1"]["label"] == "2" def test_bool(self): s = """ @@ -479,16 +638,16 @@ def test_bool(self): """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_graphml(fh) H = nx.parse_graphml(s) for graph in [G, H]: - assert_equal(graph.nodes['n0']['test'], True) - assert_equal(graph.nodes['n2']['test'], False) - assert_equal(graph.nodes['n3']['test'], False) - assert_equal(graph.nodes['n4']['test'], True) - assert_equal(graph.nodes['n5']['test'], False) - assert_equal(graph.nodes['n6']['test'], True) + assert graph.nodes["n0"]["test"] + assert not graph.nodes["n2"]["test"] + assert not graph.nodes["n3"]["test"] + assert graph.nodes["n4"]["test"] + assert not graph.nodes["n5"]["test"] + assert graph.nodes["n6"]["test"] def test_graphml_header_line(self): good = """ @@ -531,15 +690,15 @@ def test_graphml_header_line(self): """ for s in (good, bad): - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_graphml(fh) H = nx.parse_graphml(s) for graph in [G, H]: - assert_equal(graph.nodes['n0']['test'], True) + assert graph.nodes["n0"]["test"] - fh = io.BytesIO(ugly.encode('UTF-8')) - assert_raises(nx.NetworkXError, nx.read_graphml, fh) - assert_raises(nx.NetworkXError, nx.parse_graphml, ugly) + fh = io.BytesIO(ugly.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, ugly) def test_read_attributes_with_groups(self): data = """\ @@ -826,43 +985,87 @@ def test_read_attributes_with_groups(self): """ # verify that nodes / attributes are correctly read when part of a group - fh = io.BytesIO(data.encode('UTF-8')) + fh = io.BytesIO(data.encode("UTF-8")) G = nx.read_graphml(fh) data = [x for _, x in G.nodes(data=True)] - assert_equal(len(data), 9) + assert len(data) == 9 for node_data in data: - assert_not_equal(node_data['CustomProperty'], '') + assert node_data["CustomProperty"] != "" class TestWriteGraphML(BaseGraphML): writer = staticmethod(nx.write_graphml_lxml) @classmethod - def setupClass(cls): - try: - import lxml.etree - except ImportError: - raise SkipTest('lxml.etree not available.') + def setup_class(cls): + BaseGraphML.setup_class() + _ = pytest.importorskip("lxml.etree") def test_write_interface(self): try: import lxml.etree - assert_equal(nx.write_graphml, nx.write_graphml_lxml) + + assert nx.write_graphml == nx.write_graphml_lxml except ImportError: - assert_equal(nx.write_graphml, nx.write_graphml_xml) + assert nx.write_graphml == nx.write_graphml_xml def test_write_read_simple_directed_graphml(self): G = self.simple_directed_graph - G.graph['hi'] = 'there' + G.graph["hi"] = "there" fh = io.BytesIO() self.writer(G, fh) fh.seek(0) H = nx.read_graphml(fh) - assert_equal(sorted(G.nodes()), sorted(H.nodes())) - assert_equal(sorted(G.edges()), sorted(H.edges())) - assert_equal(sorted(G.edges(data=True)), sorted(H.edges(data=True))) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) self.simple_directed_fh.seek(0) + def test_write_read_attribute_named_key_ids_graphml(self): + from 
xml.etree.ElementTree import parse + + G = self.attribute_named_key_ids_graph + fh = io.BytesIO() + self.writer(G, fh, named_key_ids=True) + fh.seek(0) + H = nx.read_graphml(fh) + fh.seek(0) + + assert_nodes_equal(G.nodes(), H.nodes()) + assert_edges_equal(G.edges(), H.edges()) + assert_edges_equal(G.edges(data=True), H.edges(data=True)) + self.attribute_named_key_ids_fh.seek(0) + + xml = parse(fh) + # Children are the key elements, and the graph element + children = list(xml.getroot()) + assert len(children) == 4 + + keys = [child.items() for child in children[:3]] + + assert len(keys) == 3 + assert ("id", "edge_prop") in keys[0] + assert ("attr.name", "edge_prop") in keys[0] + assert ("id", "prop2") in keys[1] + assert ("attr.name", "prop2") in keys[1] + assert ("id", "prop1") in keys[2] + assert ("attr.name", "prop1") in keys[2] + + # Confirm the read graph nodes/edge are identical when compared to + # default writing behavior. + default_behavior_fh = io.BytesIO() + nx.write_graphml(G, default_behavior_fh) + default_behavior_fh.seek(0) + H = nx.read_graphml(default_behavior_fh) + + named_key_ids_behavior_fh = io.BytesIO() + nx.write_graphml(G, named_key_ids_behavior_fh, named_key_ids=True) + named_key_ids_behavior_fh.seek(0) + J = nx.read_graphml(named_key_ids_behavior_fh) + + assert all(n1 == n2 for (n1, n2) in zip(H.nodes, J.nodes)) + assert all(e1 == e2 for (e1, e2) in zip(H.edges, J.edges)) + def test_write_read_attribute_numeric_type_graphml(self): from xml.etree.ElementTree import parse @@ -880,100 +1083,172 @@ def test_write_read_attribute_numeric_type_graphml(self): xml = parse(fh) # Children are the key elements, and the graph element - children = xml.getroot().getchildren() - assert_equal(len(children), 3) + children = list(xml.getroot()) + assert len(children) == 3 keys = [child.items() for child in children[:2]] - assert_equal(len(keys), 2) - assert_in(('attr.type', 'double'), keys[0]) - assert_in(('attr.type', 'double'), keys[1]) + assert len(keys) == 2 + assert ("attr.type", "double") in keys[0] + assert ("attr.type", "double") in keys[1] def test_more_multigraph_keys(self): """Writing keys as edge id attributes means keys become strings. The original keys are stored as data, so read them back in - if `make_str(key) == edge_id` + if `str(key) == edge_id` This allows the adjacency to remain the same. 
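+ For example, an edge stored with integer key 2 is written with edge id "2" and recovered with key 2 on read.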
""" G = nx.MultiGraph() - G.add_edges_from([('a', 'b', 2), ('a', 'b', 3)]) + G.add_edges_from([("a", "b", 2), ("a", "b", 3)]) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname) - assert_true(H.is_multigraph()) + assert H.is_multigraph() assert_edges_equal(G.edges(keys=True), H.edges(keys=True)) - assert_equal(G._adj, H._adj) + assert G._adj == H._adj os.close(fd) os.unlink(fname) def test_default_attribute(self): G = nx.Graph(name="Fred") - G.add_node(1, label=1, color='green') + G.add_node(1, label=1, color="green") nx.add_path(G, [0, 1, 2, 3]) G.add_edge(1, 2, weight=3) - G.graph['node_default'] = {'color': 'yellow'} - G.graph['edge_default'] = {'weight': 7} + G.graph["node_default"] = {"color": "yellow"} + G.graph["edge_default"] = {"weight": 7} fh = io.BytesIO() self.writer(G, fh) fh.seek(0) H = nx.read_graphml(fh, node_type=int) assert_nodes_equal(G.nodes(), H.nodes()) assert_edges_equal(G.edges(), H.edges()) - assert_equal(G.graph, H.graph) + assert G.graph == H.graph + + def test_mixed_type_attributes(self): + G = nx.MultiGraph() + G.add_node("n0", special=False) + G.add_node("n1", special=0) + G.add_edge("n0", "n1", special=False) + G.add_edge("n0", "n1", special=0) + fh = io.BytesIO() + self.writer(G, fh) + fh.seek(0) + H = nx.read_graphml(fh) + assert not H.nodes["n0"]["special"] + assert H.nodes["n1"]["special"] == 0 + assert not H.edges["n0", "n1", 0]["special"] + assert H.edges["n0", "n1", 1]["special"] == 0 + + def test_numpy_float(self): + np = pytest.importorskip("numpy") + wt = np.float(3.4) + G = nx.Graph([(1, 2, {"weight": wt})]) + fd, fname = tempfile.mkstemp() + self.writer(G, fname) + H = nx.read_graphml(fname, node_type=int) + assert G._adj == H._adj + os.close(fd) + os.unlink(fname) def test_multigraph_to_graph(self): # test converting multigraph to graph if no parallel edges found G = nx.MultiGraph() - G.add_edges_from([('a', 'b', 2), ('b', 'c', 3)]) # no multiedges + G.add_edges_from([("a", "b", 2), ("b", "c", 3)]) # no multiedges fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname) - assert_false(H.is_multigraph()) + assert not H.is_multigraph() + H = nx.read_graphml(fname, force_multigraph=True) + assert H.is_multigraph() + os.close(fd) + os.unlink(fname) + + # add a multiedge + G.add_edge("a", "b", "e-id") + fd, fname = tempfile.mkstemp() + self.writer(G, fname) + H = nx.read_graphml(fname) + assert H.is_multigraph() + H = nx.read_graphml(fname, force_multigraph=True) + assert H.is_multigraph() + os.close(fd) + os.unlink(fname) + + def test_numpy_float64(self): + np = pytest.importorskip("numpy") + wt = np.float64(3.4) + G = nx.Graph([(1, 2, {"weight": wt})]) + fd, fname = tempfile.mkstemp() + self.writer(G, fname) + H = nx.read_graphml(fname, node_type=int) + assert G.edges == H.edges + wtG = G[1][2]["weight"] + wtH = H[1][2]["weight"] + assert almost_equal(wtG, wtH, places=6) + assert type(wtG) == np.float64 + assert type(wtH) == float + os.close(fd) + os.unlink(fname) + + def test_numpy_float32(self): + np = pytest.importorskip("numpy") + wt = np.float32(3.4) + G = nx.Graph([(1, 2, {"weight": wt})]) + fd, fname = tempfile.mkstemp() + self.writer(G, fname) + H = nx.read_graphml(fname, node_type=int) + assert G.edges == H.edges + wtG = G[1][2]["weight"] + wtH = H[1][2]["weight"] + assert almost_equal(wtG, wtH, places=6) + assert type(wtG) == np.float32 + assert type(wtH) == float + os.close(fd) + os.unlink(fname) + + def test_numpy_float64_inference(self): + np = pytest.importorskip("numpy") + G = 
self.attribute_numeric_type_graph + G.edges[("n1", "n1")]["weight"] = np.float64(1.1) + fd, fname = tempfile.mkstemp() + self.writer(G, fname, infer_numeric_types=True) + H = nx.read_graphml(fname) + assert G._adj == H._adj os.close(fd) os.unlink(fname) def test_unicode_attributes(self): G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - node_type = str - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - node_type = unicode - G.add_edge(name1, 'Radiohead', foo=name2) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + node_type = str + G.add_edge(name1, "Radiohead", foo=name2) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname, node_type=node_type) - assert_equal(G._adj, H._adj) + assert G._adj == H._adj os.close(fd) os.unlink(fname) def test_unicode_escape(self): # test for handling json escaped stings in python 2 Issue #1880 import json + a = dict(a='{"a": "123"}') # an object with many chars to escape - try: # Python 3.x - chr(2344) - sa = json.dumps(a) - except ValueError: # Python 2.6+ - sa = unicode(json.dumps(a)) + sa = json.dumps(a) G = nx.Graph() - G.graph['test'] = sa + G.graph["test"] = sa fh = io.BytesIO() self.writer(G, fh) fh.seek(0) H = nx.read_graphml(fh) - assert_equal(G.graph['test'], H.graph['test']) + assert G.graph["test"] == H.graph["test"] class TestXMLGraphML(TestWriteGraphML): writer = staticmethod(nx.write_graphml_xml) @classmethod - def setupClass(cls): - try: - import xml.etree.ElementTree - except ImportError: - raise SkipTest('xml.etree.ElementTree not available.') + def setup_class(cls): + TestWriteGraphML.setup_class() + pytest.importorskip("xml.etree.ElementTree") diff --git a/networkx/readwrite/tests/test_leda.py b/networkx/readwrite/tests/test_leda.py index e161b06..03e2b68 100644 --- a/networkx/readwrite/tests/test_leda.py +++ b/networkx/readwrite/tests/test_leda.py @@ -1,34 +1,29 @@ -#!/usr/bin/env python -from nose.tools import * import networkx as nx import io -import os -import tempfile -class TestLEDA(object): - +class TestLEDA: def test_parse_leda(self): - data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" + data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" G = nx.parse_leda(data) - G = nx.parse_leda(data.split('\n')) - assert_equal(sorted(G.nodes()), - ['v1', 'v2', 'v3', 'v4', 'v5']) - assert_equal(sorted(G.edges(data=True)), - [('v1', 'v2', {'label': '4'}), - ('v1', 'v3', {'label': '3'}), - ('v2', 'v3', {'label': '2'}), - ('v3', 'v4', {'label': '3'}), - ('v3', 'v5', {'label': '7'}), - ('v4', 'v5', {'label': '6'}), - ('v5', 'v1', {'label': 'foo'})]) + G = nx.parse_leda(data.split("\n")) + assert sorted(G.nodes()) == ["v1", "v2", "v3", "v4", "v5"] + assert sorted(G.edges(data=True)) == [ + ("v1", "v2", {"label": "4"}), + ("v1", "v3", {"label": "3"}), + ("v2", "v3", {"label": "2"}), + ("v3", "v4", {"label": "3"}), + ("v3", "v5", {"label": "7"}), + ("v4", "v5", {"label": "6"}), + ("v5", "v1", {"label": "foo"}), + ] def test_read_LEDA(self): fh = 
io.BytesIO() - data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" + data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" G = nx.parse_leda(data) - fh.write(data.encode('UTF-8')) + fh.write(data.encode("UTF-8")) fh.seek(0) Gin = nx.read_leda(fh) - assert_equal(sorted(G.nodes()), sorted(Gin.nodes())) - assert_equal(sorted(G.edges()), sorted(Gin.edges())) + assert sorted(G.nodes()) == sorted(Gin.nodes()) + assert sorted(G.edges()) == sorted(Gin.edges()) diff --git a/networkx/readwrite/tests/test_p2g.py b/networkx/readwrite/tests/test_p2g.py index 5960e6e..0b1a910 100644 --- a/networkx/readwrite/tests/test_p2g.py +++ b/networkx/readwrite/tests/test_p2g.py @@ -1,20 +1,17 @@ -from nose.tools import assert_equal, assert_raises, assert_not_equal import networkx as nx import io -import tempfile -import os -from networkx.readwrite.p2g import * -from networkx.testing import * +from networkx.readwrite.p2g import read_p2g, write_p2g +from networkx.testing import assert_edges_equal class TestP2G: - - def setUp(self): - self.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] - self.G.add_edges_from(e) - self.G.add_node('g') - self.DG = nx.DiGraph(self.G) + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) def test_read_p2g(self): s = b"""\ @@ -29,10 +26,10 @@ def test_read_p2g(self): """ bytesIO = io.BytesIO(s) G = read_p2g(bytesIO) - assert_equal(G.name, 'name') - assert_equal(sorted(G), ['a', 'b', 'c']) + assert G.name == "name" + assert sorted(G) == ["a", "b", "c"] edges = [(str(u), str(v)) for u, v in G.edges()] - assert_edges_equal(G.edges(), [('a', 'c'), ('a', 'b'), ('c', 'a'), ('c', 'c')]) + assert_edges_equal(G.edges(), [("a", "c"), ("a", "b"), ("c", "a"), ("c", "c")]) def test_write_p2g(self): s = b"""foo @@ -46,18 +43,18 @@ def test_write_p2g(self): """ fh = io.BytesIO() G = nx.OrderedDiGraph() - G.name = 'foo' + G.name = "foo" G.add_edges_from([(1, 2), (2, 3)]) write_p2g(G, fh) fh.seek(0) r = fh.read() - assert_equal(r, s) + assert r == s def test_write_read_p2g(self): fh = io.BytesIO() G = nx.DiGraph() - G.name = 'foo' - G.add_edges_from([('a', 'b'), ('b', 'c')]) + G.name = "foo" + G.add_edges_from([("a", "b"), ("b", "c")]) write_p2g(G, fh) fh.seek(0) H = read_p2g(fh) diff --git a/networkx/readwrite/tests/test_pajek.py b/networkx/readwrite/tests/test_pajek.py index ca168a8..7d7e4c4 100644 --- a/networkx/readwrite/tests/test_pajek.py +++ b/networkx/readwrite/tests/test_pajek.py @@ -1,84 +1,129 @@ -#!/usr/bin/env python """ Pajek tests """ -from nose.tools import assert_equal -from networkx import * +import networkx as nx import os import tempfile -from io import open -from networkx.testing import * +from networkx.testing import assert_edges_equal, assert_nodes_equal -class TestPajek(object): - def setUp(self): - self.data = """*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 
1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180""" - self.G = nx.MultiDiGraph() - self.G.add_nodes_from(['A1', 'Bb', 'C', 'D2']) - self.G.add_edges_from([('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'), - ('Bb', 'A1'), ('C', 'C'), ('C', 'D2'), - ('D2', 'Bb')]) +class TestPajek: + @classmethod + def setup_class(cls): + cls.data = """*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180""" + cls.G = nx.MultiDiGraph() + cls.G.add_nodes_from(["A1", "Bb", "C", "D2"]) + cls.G.add_edges_from( + [ + ("A1", "A1"), + ("A1", "Bb"), + ("A1", "C"), + ("Bb", "A1"), + ("C", "C"), + ("C", "D2"), + ("D2", "Bb"), + ] + ) - self.G.graph['name'] = 'Tralala' - (fd, self.fname) = tempfile.mkstemp() - with os.fdopen(fd, 'wb') as fh: - fh.write(self.data.encode('UTF-8')) + cls.G.graph["name"] = "Tralala" + (fd, cls.fname) = tempfile.mkstemp() + with os.fdopen(fd, "wb") as fh: + fh.write(cls.data.encode("UTF-8")) - def tearDown(self): - os.unlink(self.fname) + @classmethod + def teardown_class(cls): + os.unlink(cls.fname) def test_parse_pajek_simple(self): # Example without node positions or shape data = """*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1""" - G = parse_pajek(data) - assert_equal(sorted(G.nodes()), ['1', '2']) - assert_edges_equal(G.edges(), [('1', '2'), ('1', '2')]) + G = nx.parse_pajek(data) + assert sorted(G.nodes()) == ["1", "2"] + assert_edges_equal(G.edges(), [("1", "2"), ("1", "2")]) def test_parse_pajek(self): - G = parse_pajek(self.data) - assert_equal(sorted(G.nodes()), ['A1', 'Bb', 'C', 'D2']) - assert_edges_equal(G.edges(), [('A1', 'A1'), ('A1', 'Bb'), - ('A1', 'C'), ('Bb', 'A1'), - ('C', 'C'), ('C', 'D2'), ('D2', 'Bb')]) + G = nx.parse_pajek(self.data) + assert sorted(G.nodes()) == ["A1", "Bb", "C", "D2"] + assert_edges_equal( + G.edges(), + [ + ("A1", "A1"), + ("A1", "Bb"), + ("A1", "C"), + ("Bb", "A1"), + ("C", "C"), + ("C", "D2"), + ("D2", "Bb"), + ], + ) def test_parse_pajet_mat(self): data = """*Vertices 3\n1 "one"\n2 "two"\n3 "three"\n*Matrix\n1 1 0\n0 1 0\n0 1 0\n""" - G = parse_pajek(data) - assert_equal(set(G.nodes()), {'one', 'two', 'three'}) - assert_equal(G.nodes['two'], {'id': '2'}) - assert_edges_equal(set(G.edges()), {('one', 
'one'), ('two', 'one'), ('two', 'two'), ('two', 'three')}) + G = nx.parse_pajek(data) + assert set(G.nodes()) == {"one", "two", "three"} + assert G.nodes["two"] == {"id": "2"} + assert_edges_equal( + set(G.edges()), + {("one", "one"), ("two", "one"), ("two", "two"), ("two", "three")}, + ) def test_read_pajek(self): - G = parse_pajek(self.data) - Gin = read_pajek(self.fname) - assert_equal(sorted(G.nodes()), sorted(Gin.nodes())) + G = nx.parse_pajek(self.data) + Gin = nx.read_pajek(self.fname) + assert sorted(G.nodes()) == sorted(Gin.nodes()) assert_edges_equal(G.edges(), Gin.edges()) - assert_equal(self.G.graph, Gin.graph) + assert self.G.graph == Gin.graph for n in G: - assert_equal(G.nodes[n], Gin.nodes[n]) + assert G.nodes[n] == Gin.nodes[n] + + def test_write_pajek(self): + import io + + G = nx.parse_pajek(self.data) + fh = io.BytesIO() + nx.write_pajek(G, fh) + fh.seek(0) + H = nx.read_pajek(fh) + assert_nodes_equal(list(G), list(H)) + assert_edges_equal(list(G.edges()), list(H.edges())) + # Graph name is left out for now, therefore it is not tested. + # assert_equal(G.graph, H.graph) + + def test_ignored_attribute(self): + import io + + G = nx.Graph() + fh = io.BytesIO() + G.add_node(1, int_attr=1) + G.add_node(2, empty_attr=" ") + G.add_edge(1, 2, int_attr=2) + G.add_edge(2, 3, empty_attr=" ") + + import warnings + + with warnings.catch_warnings(record=True) as w: + nx.write_pajek(G, fh) + assert len(w) == 4 def test_noname(self): # Make sure we can parse a line such as: *network # Issue #952 line = "*network\n" - other_lines = self.data.split('\n')[1:] - data = line + '\n'.join(other_lines) - G = parse_pajek(data) + other_lines = self.data.split("\n")[1:] + data = line + "\n".join(other_lines) + G = nx.parse_pajek(data) def test_unicode(self): import io + G = nx.Graph() - try: # Python 3.x - name1 = chr(2344) + chr(123) + chr(6543) - name2 = chr(5543) + chr(1543) + chr(324) - except ValueError: # Python 2.6+ - name1 = unichr(2344) + unichr(123) + unichr(6543) - name2 = unichr(5543) + unichr(1543) + unichr(324) - G.add_edge(name1, 'Radiohead', foo=name2) + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", foo=name2) fh = io.BytesIO() nx.write_pajek(G, fh) fh.seek(0) H = nx.read_pajek(fh) assert_nodes_equal(list(G), list(H)) assert_edges_equal(list(G.edges()), list(H.edges())) - assert_equal(G.graph, H.graph) + assert G.graph == H.graph diff --git a/networkx/readwrite/tests/test_shp.py b/networkx/readwrite/tests/test_shp.py index f7f6dac..25df61d 100644 --- a/networkx/readwrite/tests/test_shp.py +++ b/networkx/readwrite/tests/test_shp.py @@ -3,29 +3,15 @@ import os import tempfile -from nose import SkipTest -from nose.tools import assert_equal -from nose.tools import raises +import pytest -import networkx as nx - - -class TestShp(object): - @classmethod - def setupClass(cls): - global ogr - try: - from osgeo import ogr - except ImportError: - raise SkipTest('ogr not available.') +ogr = pytest.importorskip("osgeo.ogr") - def deletetmp(self, drv, *paths): - for p in paths: - if os.path.exists(p): - drv.DeleteDataSource(p) +import networkx as nx - def setUp(self): +class TestShp: + def setup_method(self): def createlayer(driver, layerType=ogr.wkbLineString): lyr = driver.CreateLayer("edges", None, layerType) namedef = ogr.FieldDefn("Name", ogr.OFTString) @@ -35,24 +21,28 @@ def createlayer(driver, layerType=ogr.wkbLineString): drv = ogr.GetDriverByName("ESRI Shapefile") - testdir = os.path.join(tempfile.gettempdir(), 
'shpdir') - shppath = os.path.join(tempfile.gettempdir(), 'tmpshp.shp') - multi_shppath = os.path.join(tempfile.gettempdir(), 'tmp_mshp.shp') + testdir = os.path.join(tempfile.gettempdir(), "shpdir") + shppath = os.path.join(tempfile.gettempdir(), "tmpshp.shp") + multi_shppath = os.path.join(tempfile.gettempdir(), "tmp_mshp.shp") self.deletetmp(drv, testdir, shppath, multi_shppath) os.mkdir(testdir) - self.names = ['a', 'b', 'c', 'c'] # edgenames - self.paths = ([(1.0, 1.0), (2.0, 2.0)], - [(2.0, 2.0), (3.0, 3.0)], - [(0.9, 0.9), (4.0, 0.9), (4.0, 2.0)]) + self.names = ["a", "b", "c", "c"] # edgenames + self.paths = ( + [(1.0, 1.0), (2.0, 2.0)], + [(2.0, 2.0), (3.0, 3.0)], + [(0.9, 0.9), (4.0, 0.9), (4.0, 2.0)], + ) - self.simplified_names = ['a', 'b', 'c'] # edgenames - self.simplified_paths = ([(1.0, 1.0), (2.0, 2.0)], - [(2.0, 2.0), (3.0, 3.0)], - [(0.9, 0.9), (4.0, 2.0)]) + self.simplified_names = ["a", "b", "c"] # edgenames + self.simplified_paths = ( + [(1.0, 1.0), (2.0, 2.0)], + [(2.0, 2.0), (3.0, 3.0)], + [(0.9, 0.9), (4.0, 2.0)], + ) - self.multi_names = ['a', 'a', 'a', 'a'] # edgenames + self.multi_names = ["a", "a", "a", "a"] # edgenames shp = drv.CreateDataSource(shppath) lyr = createlayer(shp) @@ -81,7 +71,7 @@ def createlayer(driver, layerType=ogr.wkbLineString): multi_feat = ogr.Feature(multi_lyr.GetLayerDefn()) multi_feat.SetGeometry(multi_g) - multi_feat.SetField("Name", 'a') + multi_feat.SetField("Name", "a") multi_lyr.CreateFeature(multi_feat) self.shppath = shppath @@ -89,21 +79,24 @@ def createlayer(driver, layerType=ogr.wkbLineString): self.testdir = testdir self.drv = drv - def testload(self): + def deletetmp(self, drv, *paths): + for p in paths: + if os.path.exists(p): + drv.DeleteDataSource(p) + def testload(self): def compare_graph_paths_names(g, paths, names): expected = nx.DiGraph() for p in paths: nx.add_path(expected, p) - assert_equal(sorted(expected.nodes), sorted(g.nodes)) - assert_equal(sorted(expected.edges()), sorted(g.edges())) - g_names = [g.get_edge_data(s, e)['Name'] for s, e in g.edges()] - assert_equal(names, sorted(g_names)) + assert sorted(expected.nodes) == sorted(g.nodes) + assert sorted(expected.edges()) == sorted(g.edges()) + g_names = [g.get_edge_data(s, e)["Name"] for s, e in g.edges()] + assert names == sorted(g_names) # simplified G = nx.read_shp(self.shppath) - compare_graph_paths_names(G, self.simplified_paths, - self.simplified_names) + compare_graph_paths_names(G, self.simplified_paths, self.simplified_names) # unsimplified G = nx.read_shp(self.shppath, simplify=False) @@ -119,7 +112,7 @@ def checkgeom(self, lyr, expected): while feature: actualwkt.append(feature.GetGeometryRef().ExportToWkt()) feature = lyr.GetNextFeature() - assert_equal(sorted(expected), sorted(actualwkt)) + assert sorted(expected) == sorted(actualwkt) def test_geometryexport(self): expectedpoints_simple = ( @@ -127,12 +120,12 @@ def test_geometryexport(self): "POINT (2 2)", "POINT (3 3)", "POINT (0.9 0.9)", - "POINT (4 2)" + "POINT (4 2)", ) expectedlines_simple = ( "LINESTRING (1 1,2 2)", "LINESTRING (2 2,3 3)", - "LINESTRING (0.9 0.9,4.0 0.9,4 2)" + "LINESTRING (0.9 0.9,4.0 0.9,4 2)", ) expectedpoints = ( "POINT (1 1)", @@ -140,16 +133,16 @@ def test_geometryexport(self): "POINT (3 3)", "POINT (0.9 0.9)", "POINT (4.0 0.9)", - "POINT (4 2)" + "POINT (4 2)", ) expectedlines = ( "LINESTRING (1 1,2 2)", "LINESTRING (2 2,3 3)", "LINESTRING (0.9 0.9,4.0 0.9)", - "LINESTRING (4.0 0.9,4 2)" + "LINESTRING (4.0 0.9,4 2)", ) - tpath = os.path.join(tempfile.gettempdir(), 
'shpdir') + tpath = os.path.join(tempfile.gettempdir(), "shpdir") G = nx.read_shp(self.shppath) nx.write_shp(G, tpath) shpdir = ogr.Open(tpath) @@ -173,11 +166,11 @@ def testattributes(lyr, graph): ref = feature.GetGeometryRef() last = ref.GetPointCount() - 1 edge_nodes = (ref.GetPoint_2D(0), ref.GetPoint_2D(last)) - name = feature.GetFieldAsString('Name') - assert_equal(graph.get_edge_data(*edge_nodes)['Name'], name) + name = feature.GetFieldAsString("Name") + assert graph.get_edge_data(*edge_nodes)["Name"] == name feature = lyr.GetNextFeature() - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') + tpath = os.path.join(tempfile.gettempdir(), "shpdir") G = nx.read_shp(self.shppath) nx.write_shp(G, tpath) @@ -187,7 +180,7 @@ def testattributes(lyr, graph): # Test export of node attributes in nx.write_shp (#2778) def test_nodeattributeexport(self): - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') + tpath = os.path.join(tempfile.gettempdir(), "shpdir") G = nx.DiGraph() A = (0, 0) @@ -195,25 +188,20 @@ def test_nodeattributeexport(self): C = (2, 2) G.add_edge(A, B) G.add_edge(A, C) - label = 'node_label' + label = "node_label" for n, d in G.nodes(data=True): - d['label'] = label + d["label"] = label nx.write_shp(G, tpath) H = nx.read_shp(tpath) for n, d in H.nodes(data=True): - assert_equal(d['label'], label) + assert d["label"] == label def test_wkt_export(self): G = nx.DiGraph() - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') - points = ( - "POINT (0.9 0.9)", - "POINT (4 2)" - ) - line = ( - "LINESTRING (0.9 0.9,4 2)", - ) + tpath = os.path.join(tempfile.gettempdir(), "shpdir") + points = ("POINT (0.9 0.9)", "POINT (4 2)") + line = ("LINESTRING (0.9 0.9,4 2)",) G.add_node(1, Wkt=points[0]) G.add_node(2, Wkt=points[1]) G.add_edge(1, 2, Wkt=line[0]) @@ -225,38 +213,26 @@ def test_wkt_export(self): self.checkgeom(shpdir.GetLayerByName("nodes"), points) self.checkgeom(shpdir.GetLayerByName("edges"), line) - def tearDown(self): + def teardown_method(self): self.deletetmp(self.drv, self.testdir, self.shppath) -@raises(RuntimeError) def test_read_shp_nofile(): - try: - from osgeo import ogr - except ImportError: - raise SkipTest('ogr not available.') - G = nx.read_shp("hopefully_this_file_will_not_be_available") - + with pytest.raises(RuntimeError): + G = nx.read_shp("hopefully_this_file_will_not_be_available") -class TestMissingGeometry(object): - @classmethod - def setup_class(cls): - global ogr - try: - from osgeo import ogr - except ImportError: - raise SkipTest('ogr not available.') - def setUp(self): +class TestMissingGeometry: + def setup_method(self): self.setup_path() self.delete_shapedir() self.create_shapedir() - def tearDown(self): + def teardown_method(self): self.delete_shapedir() def setup_path(self): - self.path = os.path.join(tempfile.gettempdir(), 'missing_geometry') + self.path = os.path.join(tempfile.gettempdir(), "missing_geometry") def create_shapedir(self): drv = ogr.GetDriverByName("ESRI Shapefile") @@ -272,29 +248,21 @@ def delete_shapedir(self): if os.path.exists(self.path): drv.DeleteDataSource(self.path) - @raises(nx.NetworkXError) def test_missing_geometry(self): - G = nx.read_shp(self.path) - + with pytest.raises(nx.NetworkXError): + G = nx.read_shp(self.path) -class TestMissingAttrWrite(object): - @classmethod - def setup_class(cls): - global ogr - try: - from osgeo import ogr - except ImportError: - raise SkipTest('ogr not available.') - def setUp(self): +class TestMissingAttrWrite: + def setup_method(self): self.setup_path() self.delete_shapedir() 
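    # The nose ``@raises(...)`` decorator is replaced throughout this
    # module by the pytest context-manager form, which scopes the
    # expected exception to a single statement (the pattern used in
    # test_missing_geometry above):
    #
    #     with pytest.raises(nx.NetworkXError):
    #         nx.read_shp(self.path)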
- def tearDown(self): + def teardown_method(self): self.delete_shapedir() def setup_path(self): - self.path = os.path.join(tempfile.gettempdir(), 'missing_attributes') + self.path = os.path.join(tempfile.gettempdir(), "missing_attributes") def delete_shapedir(self): drv = ogr.GetDriverByName("ESRI Shapefile") @@ -314,6 +282,6 @@ def test_missing_attributes(self): for u, v, d in H.edges(data=True): if u == A and v == B: - assert_equal(d['foo'], 100) + assert d["foo"] == 100 if u == A and v == C: - assert_equal(d['foo'], None) + assert d["foo"] is None diff --git a/networkx/readwrite/tests/test_sparse6.py b/networkx/readwrite/tests/test_sparse6.py index 526a9dd..141d823 100644 --- a/networkx/readwrite/tests/test_sparse6.py +++ b/networkx/readwrite/tests/test_sparse6.py @@ -1,40 +1,63 @@ from io import BytesIO import tempfile -from unittest import TestCase - -from nose.tools import assert_equal -from nose.tools import assert_true +import pytest import networkx as nx from networkx.testing.utils import assert_edges_equal from networkx.testing.utils import assert_nodes_equal -class TestSparseGraph6(object): - +class TestSparseGraph6: def test_from_sparse6_bytes(self): - data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM' + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM" G = nx.from_sparse6_bytes(data) - assert_nodes_equal(sorted(G.nodes()), - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17]) - assert_edges_equal(G.edges(), - [(0, 1), (0, 2), (0, 3), (1, 12), (1, 14), (2, 13), - (2, 15), (3, 16), (3, 17), (4, 7), (4, 9), (4, 11), - (5, 6), (5, 8), (5, 9), (6, 10), (6, 11), (7, 8), - (7, 10), (8, 12), (9, 15), (10, 14), (11, 13), - (12, 16), (13, 17), (14, 17), (15, 16)]) + assert_nodes_equal( + sorted(G.nodes()), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + ) + assert_edges_equal( + G.edges(), + [ + (0, 1), + (0, 2), + (0, 3), + (1, 12), + (1, 14), + (2, 13), + (2, 15), + (3, 16), + (3, 17), + (4, 7), + (4, 9), + (4, 11), + (5, 6), + (5, 8), + (5, 9), + (6, 10), + (6, 11), + (7, 8), + (7, 10), + (8, 12), + (9, 15), + (10, 14), + (11, 13), + (12, 16), + (13, 17), + (14, 17), + (15, 16), + ], + ) def test_from_bytes_multigraph_graph(self): - graph_data = b':An' + graph_data = b":An" G = nx.from_sparse6_bytes(graph_data) - assert_true(type(G), nx.Graph) - multigraph_data = b':Ab' + assert type(G) == nx.Graph + multigraph_data = b":Ab" M = nx.from_sparse6_bytes(multigraph_data) - assert_true(type(M), nx.MultiGraph) + assert type(M) == nx.MultiGraph def test_read_sparse6(self): - data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM' + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM" G = nx.from_sparse6_bytes(data) fh = BytesIO(data) Gin = nx.read_sparse6(fh) @@ -43,18 +66,18 @@ def test_read_sparse6(self): def test_read_many_graph6(self): # Read many graphs into list - data = (b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n' - b':Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM') + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n" b":Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM" fh = BytesIO(data) glist = nx.read_sparse6(fh) - assert_equal(len(glist), 2) + assert len(glist) == 2 for G in glist: - assert_nodes_equal(G.nodes(), - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17]) + assert_nodes_equal( + G.nodes(), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + ) -class TestWriteSparse6(TestCase): +class TestWriteSparse6: """Unit tests for writing graphs in the sparse6 format. Most of the test cases were checked against the sparse6 encoder in Sage. 
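Each case below follows the same pattern: write a known graph to an
in-memory buffer and compare the raw sparse6 bytes, or invert the trip.
A minimal sketch of that round trip, assuming only the public read/write
API exercised in this patch (the path graph is an arbitrary choice for
illustration):

import networkx as nx
from io import BytesIO

G = nx.path_graph(5)
buf = BytesIO()
nx.write_sparse6(G, buf)  # emits b">>sparse6<<:..." plus a trailing b"\n"
# from_sparse6_bytes accepts the ">>sparse6<<" header but not the newline
H = nx.from_sparse6_bytes(buf.getvalue().rstrip())
assert sorted(G.edges()) == sorted(H.edges())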
@@ -65,59 +88,59 @@ def test_null_graph(self): G = nx.null_graph() result = BytesIO() nx.write_sparse6(G, result) - self.assertEqual(result.getvalue(), b'>>sparse6<<:?\n') + assert result.getvalue() == b">>sparse6<<:?\n" def test_trivial_graph(self): G = nx.trivial_graph() result = BytesIO() nx.write_sparse6(G, result) - self.assertEqual(result.getvalue(), b'>>sparse6<<:@\n') + assert result.getvalue() == b">>sparse6<<:@\n" def test_empty_graph(self): G = nx.empty_graph(5) result = BytesIO() nx.write_sparse6(G, result) - self.assertEqual(result.getvalue(), b'>>sparse6<<:D\n') + assert result.getvalue() == b">>sparse6<<:D\n" def test_large_empty_graph(self): G = nx.empty_graph(68) result = BytesIO() nx.write_sparse6(G, result) - self.assertEqual(result.getvalue(), b'>>sparse6<<:~?@C\n') + assert result.getvalue() == b">>sparse6<<:~?@C\n" def test_very_large_empty_graph(self): G = nx.empty_graph(258049) result = BytesIO() nx.write_sparse6(G, result) - self.assertEqual(result.getvalue(), b'>>sparse6<<:~~???~?@\n') + assert result.getvalue() == b">>sparse6<<:~~???~?@\n" def test_complete_graph(self): G = nx.complete_graph(4) result = BytesIO() nx.write_sparse6(G, result) - self.assertEqual(result.getvalue(), b'>>sparse6<<:CcKI\n') + assert result.getvalue() == b">>sparse6<<:CcKI\n" def test_no_header(self): G = nx.complete_graph(4) result = BytesIO() nx.write_sparse6(G, result, header=False) - self.assertEqual(result.getvalue(), b':CcKI\n') + assert result.getvalue() == b":CcKI\n" def test_padding(self): - codes = (b':Cdv', b':DaYn', b':EaYnN', b':FaYnL', b':GaYnLz') + codes = (b":Cdv", b":DaYn", b":EaYnN", b":FaYnL", b":GaYnLz") for n, code in enumerate(codes, start=4): G = nx.path_graph(n) result = BytesIO() nx.write_sparse6(G, result, header=False) - self.assertEqual(result.getvalue(), code + b'\n') + assert result.getvalue() == code + b"\n" def test_complete_bipartite(self): G = nx.complete_bipartite_graph(6, 9) result = BytesIO() nx.write_sparse6(G, result) # Compared with sage - expected = b'>>sparse6<<:Nk' + b'?G`cJ' * 9 + b'\n' - assert_equal(result.getvalue(), expected) + expected = b">>sparse6<<:Nk" + b"?G`cJ" * 9 + b"\n" + assert result.getvalue() == expected def test_read_write_inverse(self): for i in list(range(13)) + [31, 47, 62, 63, 64, 72]: @@ -128,11 +151,11 @@ def test_read_write_inverse(self): # Strip the trailing newline. 
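            # (write_sparse6 terminates every record with b"\n", as the
            # expected byte strings above show, while from_sparse6_bytes
            # expects the bare encoding)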
gstr = gstr.getvalue().rstrip() g2 = nx.from_sparse6_bytes(gstr) - assert_equal(g2.order(), g.order()) + assert g2.order() == g.order() assert_edges_equal(g2.edges(), g.edges()) - def no_directed_graphs(self): - with self.assertRaises(nx.NetworkXNotImplemented): + def test_no_directed_graphs(self): + with pytest.raises(nx.NetworkXNotImplemented): nx.write_sparse6(nx.DiGraph(), BytesIO()) def test_write_path(self): @@ -142,8 +165,9 @@ def test_write_path(self): fullfilename = f.name # file should be closed now, so write_sparse6 can open it nx.write_sparse6(nx.null_graph(), fullfilename) - fh = open(fullfilename, mode='rb') - self.assertEqual(fh.read(), b'>>sparse6<<:?\n') + fh = open(fullfilename, mode="rb") + assert fh.read() == b">>sparse6<<:?\n" fh.close() import os + os.remove(fullfilename) diff --git a/networkx/readwrite/tests/test_yaml.py b/networkx/readwrite/tests/test_yaml.py index 5298922..c1238fa 100644 --- a/networkx/readwrite/tests/test_yaml.py +++ b/networkx/readwrite/tests/test_yaml.py @@ -4,35 +4,30 @@ import os import tempfile -from nose import SkipTest -from nose.tools import assert_equal +import pytest + +yaml = pytest.importorskip("yaml") import networkx as nx from networkx.testing import assert_edges_equal, assert_nodes_equal -class TestYaml(object): +class TestYaml: + @classmethod + def setup_class(cls): + cls.build_graphs() + @classmethod - def setupClass(cls): - global yaml - try: - import yaml - except ImportError: - raise SkipTest('yaml not available.') - - def setUp(self): - self.build_graphs() - - def build_graphs(self): - self.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] - self.G.add_edges_from(e) - self.G.add_node('g') - - self.DG = nx.DiGraph(self.G) - - self.MG = nx.MultiGraph() - self.MG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + def build_graphs(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + + cls.DG = nx.DiGraph(cls.G) + + cls.MG = nx.MultiGraph() + cls.MG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) def assert_equal(self, G, data=False): (fd, fname) = tempfile.mkstemp() diff --git a/networkx/relabel.py b/networkx/relabel.py index 2d3dc25..424a828 100644 --- a/networkx/relabel.py +++ b/networkx/relabel.py @@ -1,12 +1,6 @@ -# Copyright (C) 2006-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. import networkx as nx -__all__ = ['convert_node_labels_to_integers', 'relabel_nodes'] +__all__ = ["convert_node_labels_to_integers", "relabel_nodes"] def relabel_nodes(G, mapping, copy=True): @@ -19,7 +13,7 @@ def relabel_nodes(G, mapping, copy=True): mapping : dictionary A dictionary with the old labels as keys and new labels as values. - A partial mapping is allowed. + A partial mapping is allowed. Mapping 2 nodes to a single node is allowed. copy : bool (optional, default=True) If True return a copy, or if False relabel the nodes in place. 
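The sentence added above ("Mapping 2 nodes to a single node is allowed")
is the subtle part of this hunk; a minimal sketch of that many-to-one
case, with graph and labels invented for illustration:

import networkx as nx

G = nx.path_graph(4)                       # 0-1-2-3
H = nx.relabel_nodes(G, {1: "m", 2: "m"})  # partial mapping; 1 and 2 merge
assert sorted(H, key=str) == [0, 3, "m"]
assert H.has_edge(0, "m") and H.has_edge("m", 3)
assert H.has_edge("m", "m")                # the old 1-2 edge becomes a self-loop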
@@ -32,7 +26,7 @@ def relabel_nodes(G, mapping, copy=True): >>> G = nx.path_graph(3) >>> sorted(G) [0, 1, 2] - >>> mapping = {0: 'a', 1: 'b', 2: 'c'} + >>> mapping = {0: "a", 1: "b", 2: "c"} >>> H = nx.relabel_nodes(G, mapping) >>> sorted(H) ['a', 'b', 'c'] @@ -45,7 +39,7 @@ def relabel_nodes(G, mapping, copy=True): >>> sorted(G)[:3] [0, 1, 2] >>> mapping = dict(zip(G, string.ascii_lowercase)) - >>> G = nx.relabel_nodes(G, mapping) # nodes are characters a through z + >>> G = nx.relabel_nodes(G, mapping) # nodes are characters a through z >>> sorted(G)[:3] ['a', 'b', 'c'] >>> mapping = dict(zip(G, range(1, 27))) @@ -58,7 +52,7 @@ def relabel_nodes(G, mapping, copy=True): argument to False: >>> G = nx.path_graph(3) # nodes 0-1-2 - >>> mapping = {0: 'a', 1: 'b'} # 0->'a' and 1->'b' + >>> mapping = {0: "a", 1: "b"} # 0->'a' and 1->'b' >>> G = nx.relabel_nodes(G, mapping, copy=False) >>> sorted(G, key=str) [2, 'a', 'b'] @@ -70,6 +64,27 @@ def relabel_nodes(G, mapping, copy=True): >>> list(H) [0, 1, 4] + In a multigraph, relabeling two or more nodes to the same new node + will retain all edges, but may change the edge keys in the process: + + >>> G = nx.MultiGraph() + >>> G.add_edge(0, 1, value="a") # returns the key for this edge + 0 + >>> G.add_edge(0, 2, value="b") + 0 + >>> G.add_edge(0, 3, value="c") + 0 + >>> mapping = {1: 4, 2: 4, 3: 4} + >>> H = nx.relabel_nodes(G, mapping, copy=True) + >>> print(H[0]) + {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}} + + This works for in-place relabeling too: + + >>> G = nx.relabel_nodes(G, mapping, copy=False) + >>> print(G[0]) + {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}} + Notes ----- Only the nodes specified in the mapping will be relabeled. @@ -83,6 +98,13 @@ def relabel_nodes(G, mapping, copy=True): graph is not possible in-place and an exception is raised. In that case, use copy=True. + If a relabel operation on a multigraph would cause two or more + edges to have the same source, target and key, the second edge must + be assigned a new key to retain all edges. The new key is set + to the lowest non-negative integer not already used as a key + for edges between these two nodes. Note that this means non-numeric + keys may be replaced by numeric keys. + See Also -------- convert_node_labels_to_integers @@ -109,10 +131,11 @@ def _relabel_inplace(G, mapping): D.remove_edges_from(nx.selfloop_edges(D)) try: nodes = reversed(list(nx.topological_sort(D))) - except nx.NetworkXUnfeasible: - raise nx.NetworkXUnfeasible('The node label sets are overlapping ' - 'and no ordering can resolve the ' - 'mapping. Use copy=True.') + except nx.NetworkXUnfeasible as e: + raise nx.NetworkXUnfeasible( + "The node label sets are overlapping and no ordering can " + "resolve the mapping. Use copy=True." 
+ ) from e else: # non-overlapping label sets nodes = old_labels @@ -129,44 +152,79 @@ def _relabel_inplace(G, mapping): continue try: G.add_node(new, **G.nodes[old]) - except KeyError: - raise KeyError("Node %s is not in the graph" % old) + except KeyError as e: + raise KeyError(f"Node {old} is not in the graph") from e if multigraph: - new_edges = [(new, new if old == target else target, key, data) - for (_, target, key, data) - in G.edges(old, data=True, keys=True)] + new_edges = [ + (new, new if old == target else target, key, data) + for (_, target, key, data) in G.edges(old, data=True, keys=True) + ] if directed: - new_edges += [(new if old == source else source, new, key, data) - for (source, _, key, data) - in G.in_edges(old, data=True, keys=True)] + new_edges += [ + (new if old == source else source, new, key, data) + for (source, _, key, data) in G.in_edges(old, data=True, keys=True) + ] + # Ensure new edges won't overwrite existing ones + seen = set() + for i, (source, target, key, data) in enumerate(new_edges): + if target in G[source] and key in G[source][target]: + new_key = 0 if not isinstance(key, (int, float)) else key + while new_key in G[source][target] or (target, new_key) in seen: + new_key += 1 + new_edges[i] = (source, target, new_key, data) + seen.add((target, new_key)) else: - new_edges = [(new, new if old == target else target, data) - for (_, target, data) in G.edges(old, data=True)] + new_edges = [ + (new, new if old == target else target, data) + for (_, target, data) in G.edges(old, data=True) + ] if directed: - new_edges += [(new if old == source else source, new, data) - for (source, _, data) in G.in_edges(old, data=True)] + new_edges += [ + (new if old == source else source, new, data) + for (source, _, data) in G.in_edges(old, data=True) + ] G.remove_node(old) G.add_edges_from(new_edges) return G def _relabel_copy(G, mapping): - H = G.fresh_copy() + H = G.__class__() H.add_nodes_from(mapping.get(n, n) for n in G) H._node.update((mapping.get(n, n), d.copy()) for n, d in G.nodes.items()) if G.is_multigraph(): - H.add_edges_from((mapping.get(n1, n1), mapping.get(n2, n2), k, d.copy()) - for (n1, n2, k, d) in G.edges(keys=True, data=True)) + new_edges = [ + (mapping.get(n1, n1), mapping.get(n2, n2), k, d.copy()) + for (n1, n2, k, d) in G.edges(keys=True, data=True) + ] + + # check for conflicting edge-keys + undirected = not G.is_directed() + seen_edges = set() + for i, (source, target, key, data) in enumerate(new_edges): + while (source, target, key) in seen_edges: + if not isinstance(key, (int, float)): + key = 0 + key += 1 + seen_edges.add((source, target, key)) + if undirected: + seen_edges.add((target, source, key)) + new_edges[i] = (source, target, key, data) + + H.add_edges_from(new_edges) else: - H.add_edges_from((mapping.get(n1, n1), mapping.get(n2, n2), d.copy()) - for (n1, n2, d) in G.edges(data=True)) + H.add_edges_from( + (mapping.get(n1, n1), mapping.get(n2, n2), d.copy()) + for (n1, n2, d) in G.edges(data=True) + ) H.graph.update(G.graph) return H -def convert_node_labels_to_integers(G, first_label=0, ordering="default", - label_attribute=None): - """Return a copy of the graph G with the nodes relabeled using +def convert_node_labels_to_integers( + G, first_label=0, ordering="default", label_attribute=None +): + """Returns a copy of the graph G with the nodes relabeled using consecutive integers. 
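    For example (illustrative): with ``ordering="sorted"`` and the
    default ``first_label=0``, a graph on nodes ``"a"``, ``"b"``, ``"c"``
    is relabeled so that ``"a" -> 0``, ``"b" -> 1`` and ``"c" -> 2``.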
Parameters @@ -216,10 +274,9 @@ def convert_node_labels_to_integers(G, first_label=0, ordering="default", dv_pairs.reverse() mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N))) else: - raise nx.NetworkXError('Unknown node ordering: %s' % ordering) + raise nx.NetworkXError(f"Unknown node ordering: {ordering}") H = relabel_nodes(G, mapping) # create node attribute with the old label if label_attribute is not None: - nx.set_node_attributes(H, {v: k for k, v in mapping.items()}, - label_attribute) + nx.set_node_attributes(H, {v: k for k, v in mapping.items()}, label_attribute) return H diff --git a/networkx/release.py b/networkx/release.py index 9365288..675b13e 100644 --- a/networkx/release.py +++ b/networkx/release.py @@ -29,15 +29,6 @@ """ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. - -from __future__ import absolute_import - import os import sys import time @@ -48,7 +39,7 @@ def write_versionfile(): """Creates a static file containing version information.""" - versionfile = os.path.join(basedir, 'version.py') + versionfile = os.path.join(basedir, "version.py") text = '''""" Version information for NetworkX, created during installation. @@ -81,19 +72,19 @@ def write_versionfile(): date, date_info, version, version_info, vcs_info = get_info(dynamic=True) def writefile(): - fh = open(versionfile, 'w') + fh = open(versionfile, "w") subs = { - 'dev': dev, - 'version': version, - 'version_info': version_info, - 'date': date, - 'date_info': date_info, - 'vcs_info': vcs_info + "dev": dev, + "version": version, + "version_info": version_info, + "date": date, + "date_info": date_info, + "vcs_info": vcs_info, } fh.write(text % subs) fh.close() - if vcs_info[0] == 'mercurial': + if vcs_info[0] == "mercurial": # Then, we want to update version.py. writefile() else: @@ -103,6 +94,7 @@ def writefile(): # Grab the version so that setup can use it. # sys.path.insert(0, basedir) from version import version + # del sys.path[0] else: # This is *bad*. It means the user might have a tarball that @@ -122,10 +114,10 @@ def get_revision(): """Returns revision and vcs information, dynamically obtained.""" vcs, revision, tag = None, None, None - gitdir = os.path.join(basedir, '..', '.git') + gitdir = os.path.join(basedir, "..", ".git") if os.path.isdir(gitdir): - vcs = 'git' + vcs = "git" # For now, we are not bothering with revision and tag. 
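    # (revision and tag therefore stay None; only the vcs name "git"
    # is recorded here)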
vcs_info = (vcs, (revision, tag)) @@ -135,7 +127,9 @@ def get_revision(): def get_info(dynamic=True): # Date information - date_info = datetime.datetime.utcfromtimestamp(int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))) + date_info = datetime.datetime.utcfromtimestamp( + int(os.environ.get("SOURCE_DATE_EPOCH", time.time())) + ) date = time.asctime(date_info.timetuple()) revision, version, version_info, vcs_info = None, None, None, None @@ -160,70 +154,75 @@ def get_info(dynamic=True): vcs_info = (None, (None, None)) else: revision = vcs_info[1][0] - #del sys.path[0] + # del sys.path[0] if import_failed or (dynamic and not dynamic_failed): # We are here if: # we failed to determine static versioning info, or # we successfully obtained dynamic revision info - version = ''.join([str(major), '.', str(minor)]) + version = "".join([str(major), ".", str(minor)]) if dev: - version += '.dev_' + date_info.strftime("%Y%m%d%H%M%S") + version += ".dev_" + date_info.strftime("%Y%m%d%H%M%S") version_info = (name, major, minor, revision) return date, date_info, version, version_info, vcs_info # Version information -name = 'networkx' +name = "networkx" major = "2" -minor = "2rc1" +minor = "5" # Declare current release as a development release. # Change to False before tagging a release; then change back. -dev = True +dev = False description = "Python package for creating and manipulating graphs and networks" - -long_description = \ - """ -NetworkX is a Python package for the creation, manipulation, and -study of the structure, dynamics, and functions of complex networks. - -""" -license = 'BSD' -authors = {'Hagberg': ('Aric Hagberg', 'hagberg@lanl.gov'), - 'Schult': ('Dan Schult', 'dschult@colgate.edu'), - 'Swart': ('Pieter Swart', 'swart@lanl.gov')} +authors = { + "Hagberg": ("Aric Hagberg", "hagberg@lanl.gov"), + "Schult": ("Dan Schult", "dschult@colgate.edu"), + "Swart": ("Pieter Swart", "swart@lanl.gov"), +} maintainer = "NetworkX Developers" maintainer_email = "networkx-discuss@googlegroups.com" -url = 'http://networkx.github.io/' -download_url = 'https://pypi.python.org/pypi/networkx/' -platforms = ['Linux', 'Mac OSX', 'Windows', 'Unix'] -keywords = ['Networks', 'Graph Theory', 'Mathematics', - 'network', 'graph', 'discrete mathematics', 'math'] +url = "http://networkx.github.io/" +project_urls = { + "Bug Tracker": "https://github.com/networkx/networkx/issues", + "Documentation": "https://networkx.github.io/documentation/stable/", + "Source Code": "https://github.com/networkx/networkx", +} +platforms = ["Linux", "Mac OSX", "Windows", "Unix"] +keywords = [ + "Networks", + "Graph Theory", + "Mathematics", + "network", + "graph", + "discrete mathematics", + "math", +] classifiers = [ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Scientific/Engineering :: Bio-Informatics', - 'Topic :: Scientific/Engineering :: Information Analysis', - 'Topic :: Scientific/Engineering :: Mathematics', - 'Topic :: Scientific/Engineering :: Physics'] + "Development Status :: 5 - Production/Stable", + "Intended Audience :: 
Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3 :: Only", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Scientific/Engineering :: Bio-Informatics", + "Topic :: Scientific/Engineering :: Information Analysis", + "Topic :: Scientific/Engineering :: Mathematics", + "Topic :: Scientific/Engineering :: Physics", +] date, date_info, version, version_info, vcs_info = get_info() -if __name__ == '__main__': +if __name__ == "__main__": # Write versionfile for nightly snapshots. write_versionfile() diff --git a/networkx/testing/__init__.py b/networkx/testing/__init__.py index db57076..884ac83 100644 --- a/networkx/testing/__init__.py +++ b/networkx/testing/__init__.py @@ -1 +1,2 @@ from networkx.testing.utils import * +from networkx.testing.test import run diff --git a/networkx/testing/test.py b/networkx/testing/test.py new file mode 100644 index 0000000..ce2c636 --- /dev/null +++ b/networkx/testing/test.py @@ -0,0 +1,34 @@ +def run(verbosity=1, doctest=False): + """Run NetworkX tests. + + Parameters + ---------- + verbosity: integer, optional + Level of detail in test reports. Higher numbers provide more detail. + + doctest: bool, optional + True to run doctests in code modules + """ + + import pytest + + pytest_args = ["-l"] + + if verbosity and int(verbosity) > 1: + pytest_args += ["-" + "v" * (int(verbosity) - 1)] + + if doctest: + pytest_args += ["--doctest-modules"] + + pytest_args += ["--pyargs", "networkx"] + + try: + code = pytest.main(pytest_args) + except SystemExit as exc: + code = exc.code + + return code == 0 + + +if __name__ == "__main__": + run() diff --git a/networkx/testing/tests/__init__.py b/networkx/testing/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/testing/tests/test_utils.py b/networkx/testing/tests/test_utils.py index b77007a..3a7d76d 100644 --- a/networkx/testing/tests/test_utils.py +++ b/networkx/testing/tests/test_utils.py @@ -1,17 +1,18 @@ -from nose.tools import * import networkx as nx -from networkx.testing import * +from networkx.testing import assert_graphs_equal, assert_edges_equal, assert_nodes_equal # thanks to numpy for this GenericTest class (numpy/testing/test_utils.py) -class _GenericTest(object): - def _test_equal(self, a, b): - self._assert_func(a, b) +class _GenericTest: + @classmethod + def _test_equal(cls, a, b): + cls._assert_func(a, b) - def _test_not_equal(self, a, b): + @classmethod + def _test_not_equal(cls, a, b): try: - self._assert_func(a, b) + cls._assert_func(a, b) passed = True except AssertionError: pass @@ -20,8 +21,7 @@ def _test_not_equal(self, a, b): class TestNodesEqual(_GenericTest): - def setUp(self): - self._assert_func = assert_nodes_equal + _assert_func = assert_nodes_equal def test_nodes_equal(self): a = [1, 2, 5, 4] @@ -35,22 +35,21 @@ def test_nodes_not_equal(self): def test_nodes_with_data_equal(self): G = nx.Graph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], color="red") H = nx.Graph() - H.add_nodes_from([1, 2, 3], color='red') + H.add_nodes_from([1, 2, 3], color="red") self._test_equal(G.nodes(data=True), H.nodes(data=True)) def test_edges_with_data_not_equal(self): G = nx.Graph() - G.add_nodes_from([1, 2, 3], color='red') + 
G.add_nodes_from([1, 2, 3], color="red") H = nx.Graph() - H.add_nodes_from([1, 2, 3], color='blue') + H.add_nodes_from([1, 2, 3], color="blue") self._test_not_equal(G.nodes(data=True), H.nodes(data=True)) class TestEdgesEqual(_GenericTest): - def setUp(self): - self._assert_func = assert_edges_equal + _assert_func = assert_edges_equal def test_edges_equal(self): a = [(1, 2), (5, 4)] @@ -67,22 +66,21 @@ def test_edges_with_data_equal(self): nx.add_path(G, [0, 1, 2], weight=1) H = nx.MultiGraph() nx.add_path(H, [0, 1, 2], weight=1) - self._test_equal(G.edges(data=True, keys=True), - H.edges(data=True, keys=True)) + self._test_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) def test_edges_with_data_not_equal(self): G = nx.MultiGraph() nx.add_path(G, [0, 1, 2], weight=1) H = nx.MultiGraph() nx.add_path(H, [0, 1, 2], weight=2) - self._test_not_equal(G.edges(data=True, keys=True), - H.edges(data=True, keys=True)) + self._test_not_equal( + G.edges(data=True, keys=True), H.edges(data=True, keys=True) + ) def test_no_edges(self): G = nx.MultiGraph() H = nx.MultiGraph() - self._test_equal(G.edges(data=True, keys=True), - H.edges(data=True, keys=True)) + self._test_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) def test_duplicate_edges(self): a = [(1, 2), (5, 4), (1, 2)] @@ -90,34 +88,33 @@ def test_duplicate_edges(self): self._test_not_equal(a, b) def test_duplicate_edges_with_data(self): - a = [(1, 2, {'weight': 10}), (5, 4), (1, 2, {'weight': 1})] - b = [(4, 5), (1, 2), (1, 2, {'weight': 1})] + a = [(1, 2, {"weight": 10}), (5, 4), (1, 2, {"weight": 1})] + b = [(4, 5), (1, 2), (1, 2, {"weight": 1})] self._test_not_equal(a, b) def test_order_of_edges_with_data(self): - a = [(1, 2, {'weight': 10}), (1, 2, {'weight': 1})] - b = [(1, 2, {'weight': 1}), (1, 2, {'weight': 10})] + a = [(1, 2, {"weight": 10}), (1, 2, {"weight": 1})] + b = [(1, 2, {"weight": 1}), (1, 2, {"weight": 10})] self._test_equal(a, b) def test_order_of_multiedges(self): - wt1 = {'weight': 1} - wt2 = {'weight': 2} + wt1 = {"weight": 1} + wt2 = {"weight": 2} a = [(1, 2, wt1), (1, 2, wt1), (1, 2, wt2)] b = [(1, 2, wt1), (1, 2, wt2), (1, 2, wt2)] self._test_not_equal(a, b) def test_order_of_edges_with_keys(self): - a = [(1, 2, 0, {'weight': 10}), (1, 2, 1, {'weight': 1}), (1, 2, 2)] - b = [(1, 2, 1, {'weight': 1}), (1, 2, 2), (1, 2, 0, {'weight': 10})] + a = [(1, 2, 0, {"weight": 10}), (1, 2, 1, {"weight": 1}), (1, 2, 2)] + b = [(1, 2, 1, {"weight": 1}), (1, 2, 2), (1, 2, 0, {"weight": 10})] self._test_equal(a, b) - a = [(1, 2, 1, {'weight': 10}), (1, 2, 0, {'weight': 1}), (1, 2, 2)] - b = [(1, 2, 1, {'weight': 1}), (1, 2, 2), (1, 2, 0, {'weight': 10})] + a = [(1, 2, 1, {"weight": 10}), (1, 2, 0, {"weight": 1}), (1, 2, 2)] + b = [(1, 2, 1, {"weight": 1}), (1, 2, 2), (1, 2, 0, {"weight": 10})] self._test_not_equal(a, b) class TestGraphsEqual(_GenericTest): - def setUp(self): - self._assert_func = assert_graphs_equal + _assert_func = assert_graphs_equal def test_graphs_equal(self): G = nx.path_graph(4) @@ -159,5 +156,5 @@ def test_graphs_not_equal3(self): G = nx.path_graph(4) H = nx.Graph() nx.add_path(H, range(4)) - H.name = 'path_graph(4)' + H.name = "path_graph(4)" self._test_not_equal(G, H) diff --git a/networkx/testing/utils.py b/networkx/testing/utils.py index 818f672..795cefa 100644 --- a/networkx/testing/utils.py +++ b/networkx/testing/utils.py @@ -1,6 +1,13 @@ -from nose.tools import assert_equal, assert_in +__all__ = [ + "assert_nodes_equal", + "assert_edges_equal", + 
"assert_graphs_equal", + "almost_equal", +] -__all__ = ['assert_nodes_equal', 'assert_edges_equal', 'assert_graphs_equal'] + +def almost_equal(x, y, places=7): + return round(abs(x - y), places) == 0 def assert_nodes_equal(nodes1, nodes2): @@ -13,7 +20,7 @@ def assert_nodes_equal(nodes1, nodes2): except (ValueError, TypeError): d1 = dict.fromkeys(nlist1) d2 = dict.fromkeys(nlist2) - assert_equal(d1, d2) + assert d1 == d2 def assert_edges_equal(edges1, edges2): @@ -22,6 +29,7 @@ def assert_edges_equal(edges1, edges2): # edge tuples with data dicts (u,v,d), or # edge tuples with keys and data dicts (u,v,k, d) from collections import defaultdict + d1 = defaultdict(dict) d2 = defaultdict(dict) c1 = 0 @@ -40,18 +48,18 @@ def assert_edges_equal(edges1, edges2): data = d2[u][v] + data d2[u][v] = data d2[v][u] = data - assert_equal(c1, c2) + assert c1 == c2 # can check one direction because lengths are the same. for n, nbrdict in d1.items(): for nbr, datalist in nbrdict.items(): - assert_in(n, d2) - assert_in(nbr, d2[n]) + assert n in d2 + assert nbr in d2[n] d2datalist = d2[n][nbr] for data in datalist: - assert_equal(datalist.count(data), d2datalist.count(data)) + assert datalist.count(data) == d2datalist.count(data) def assert_graphs_equal(graph1, graph2): - assert_equal(graph1.adj, graph2.adj) - assert_equal(graph1.nodes, graph2.nodes) - assert_equal(graph1.graph, graph2.graph) + assert graph1.adj == graph2.adj + assert graph1.nodes == graph2.nodes + assert graph1.graph == graph2.graph diff --git a/networkx/tests/README b/networkx/tests/README deleted file mode 100644 index cc299aa..0000000 --- a/networkx/tests/README +++ /dev/null @@ -1,24 +0,0 @@ -These files are for testing the methods and functions in NetworkX -The nose testing package is required for all tests: -http://nose.readthedocs.io/en/latest/ - -The tests also demonstrate the usage of many of the features of NetworkX. - -There are a few ways to run the tests. - -The simplest way is to import networkx and run the test() function. - ->>> import networkx ->>> networkx.test() - -or:: - - python -c "import networkx; networkx.test() - -If you have the source package and the nose testing package you -can test the complete package from the unpacked source directory with:: - - python setup_egg.py nosetests - -The python module benchmark.py can be used to compare relative speed of small -code bits using the timeit module for different graph classes. diff --git a/networkx/tests/test.py b/networkx/tests/test.py deleted file mode 100644 index 7941051..0000000 --- a/networkx/tests/test.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -import sys -from os import path, getcwd - - -def run(verbosity=1, doctest=False, numpy=True): - """Run NetworkX tests. - - Parameters - ---------- - verbosity: integer, optional - Level of detail in test reports. Higher numbers provide more detail. 
- - doctest: bool, optional - True to run doctests in code modules - - numpy: bool, optional - True to test modules dependent on numpy - """ - try: - import nose - except ImportError: - raise ImportError( - "The nose package is needed to run the NetworkX tests.") - - sys.stderr.write("Running NetworkX tests:") - nx_install_dir = path.join(path.dirname(__file__), path.pardir) - # stop if running from source directory - if getcwd() == path.abspath(path.join(nx_install_dir, path.pardir)): - raise RuntimeError("Can't run tests from source directory.\n" - "Run 'nosetests' from the command line.") - - argv = [' ', '--verbosity=%d' % verbosity, - '-w', nx_install_dir, - '-exe'] - if doctest: - argv.extend(['--with-doctest', '--doctest-extension=txt']) - if not numpy: - argv.extend(['-A not numpy']) - - nose.run(argv=argv) - - -if __name__ == "__main__": - run() diff --git a/networkx/tests/test_all_random_functions.py b/networkx/tests/test_all_random_functions.py new file mode 100644 index 0000000..0611462 --- /dev/null +++ b/networkx/tests/test_all_random_functions.py @@ -0,0 +1,233 @@ +import pytest + +np = pytest.importorskip("numpy") +import random + +import networkx as nx +from networkx.algorithms import approximation as approx +from networkx.algorithms import threshold + +progress = 0 + +# store the random numbers after setting a global seed +np.random.seed(42) +np_rv = np.random.rand() +random.seed(42) +py_rv = random.random() + + +def t(f, *args, **kwds): + """call one function and check if global RNG changed""" + global progress + progress += 1 + print(progress, ",", end="") + + f(*args, **kwds) + + after_np_rv = np.random.rand() + # if np_rv != after_np_rv: + # print(np_rv, after_np_rv, "don't match np!") + assert np_rv == after_np_rv + np.random.seed(42) + + after_py_rv = random.random() + # if py_rv != after_py_rv: + # print(py_rv, after_py_rv, "don't match py!") + assert py_rv == after_py_rv + random.seed(42) + + +def run_all_random_functions(seed): + n = 20 + m = 10 + k = l = 2 + s = v = 10 + p = q = p1 = p2 = p_in = p_out = 0.4 + alpha = radius = theta = 0.75 + sizes = (20, 20, 10) + colors = [1, 2, 3] + G = nx.barbell_graph(12, 20) + deg_sequence = [3, 2, 1, 3, 2, 1, 3, 2, 1, 2, 1, 2, 1] + in_degree_sequence = w = sequence = aseq = bseq = deg_sequence + + # print("starting...") + t(nx.maximal_independent_set, G, seed=seed) + t(nx.rich_club_coefficient, G, seed=seed, normalized=False) + t(nx.random_reference, G, seed=seed) + t(nx.lattice_reference, G, seed=seed) + t(nx.sigma, G, 1, 2, seed=seed) + t(nx.omega, G, 1, 2, seed=seed) + # print("out of smallworld.py") + t(nx.double_edge_swap, G, seed=seed) + # print("starting connected_double_edge_swap") + t(nx.connected_double_edge_swap, nx.complete_graph(9), seed=seed) + # print("ending connected_double_edge_swap") + t(nx.random_layout, G, seed=seed) + t(nx.fruchterman_reingold_layout, G, seed=seed) + t(nx.algebraic_connectivity, G, seed=seed) + t(nx.fiedler_vector, G, seed=seed) + t(nx.spectral_ordering, G, seed=seed) + # print('starting average_clustering') + t(approx.average_clustering, G, seed=seed) + t(nx.betweenness_centrality, G, seed=seed) + t(nx.edge_betweenness_centrality, G, seed=seed) + t(nx.edge_betweenness, G, seed=seed) + t(nx.approximate_current_flow_betweenness_centrality, G, seed=seed) + # print("kernighan") + t(nx.algorithms.community.kernighan_lin_bisection, G, seed=seed) + # nx.algorithms.community.asyn_lpa_communities(G, seed=seed) + t(nx.algorithms.tree.greedy_branching, G, seed=seed) + 
t(nx.algorithms.tree.Edmonds, G, seed=seed) + # print('done with graph argument functions') + + t(nx.spectral_graph_forge, G, alpha, seed=seed) + t(nx.algorithms.community.asyn_fluidc, G, k, max_iter=1, seed=seed) + t( + nx.algorithms.connectivity.edge_augmentation.greedy_k_edge_augmentation, + G, + k, + seed=seed, + ) + t(nx.algorithms.coloring.strategy_random_sequential, G, colors, seed=seed) + + cs = ["d", "i", "i", "d", "d", "i"] + t(threshold.swap_d, cs, seed=seed) + t(nx.configuration_model, deg_sequence, seed=seed) + t( + nx.directed_configuration_model, + in_degree_sequence, + in_degree_sequence, + seed=seed, + ) + t(nx.expected_degree_graph, w, seed=seed) + t(nx.random_degree_sequence_graph, sequence, seed=seed) + joint_degrees = { + 1: {4: 1}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 1, 2: 2, 3: 1}, + } + t(nx.joint_degree_graph, joint_degrees, seed=seed) + joint_degree_sequence = [ + (1, 0), + (1, 0), + (1, 0), + (2, 0), + (1, 0), + (2, 1), + (0, 1), + (0, 1), + ] + t(nx.random_clustered_graph, joint_degree_sequence, seed=seed) + constructor = [(3, 3, 0.5), (10, 10, 0.7)] + t(nx.random_shell_graph, constructor, seed=seed) + mapping = {1: 0.4, 2: 0.3, 3: 0.3} + t(nx.utils.random_weighted_sample, mapping, k, seed=seed) + t(nx.utils.weighted_choice, mapping, seed=seed) + t(nx.algorithms.bipartite.configuration_model, aseq, bseq, seed=seed) + t(nx.algorithms.bipartite.preferential_attachment_graph, aseq, p, seed=seed) + + def kernel_integral(u, w, z): + return z - w + + t(nx.random_kernel_graph, n, kernel_integral, seed=seed) + + sizes = [75, 75, 300] + probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + t(nx.stochastic_block_model, sizes, probs, seed=seed) + t(nx.random_partition_graph, sizes, p_in, p_out, seed=seed) + + # print("starting generator functions") + t(threshold.random_threshold_sequence, n, p, seed=seed) + t(nx.tournament.random_tournament, n, seed=seed) + t(nx.relaxed_caveman_graph, l, k, p, seed=seed) + t(nx.planted_partition_graph, l, k, p_in, p_out, seed=seed) + t(nx.gaussian_random_partition_graph, n, s, v, p_in, p_out, seed=seed) + t(nx.gn_graph, n, seed=seed) + t(nx.gnr_graph, n, p, seed=seed) + t(nx.gnc_graph, n, seed=seed) + t(nx.scale_free_graph, n, seed=seed) + t(nx.directed.random_uniform_k_out_graph, n, k, seed=seed) + t(nx.random_k_out_graph, n, k, alpha, seed=seed) + N = 1000 + t(nx.partial_duplication_graph, N, n, p, q, seed=seed) + t(nx.duplication_divergence_graph, n, p, seed=seed) + t(nx.random_geometric_graph, n, radius, seed=seed) + t(nx.soft_random_geometric_graph, n, radius, seed=seed) + t(nx.geographical_threshold_graph, n, theta, seed=seed) + t(nx.waxman_graph, n, seed=seed) + t(nx.navigable_small_world_graph, n, seed=seed) + t(nx.thresholded_random_geometric_graph, n, radius, theta, seed=seed) + t(nx.uniform_random_intersection_graph, n, m, p, seed=seed) + t(nx.k_random_intersection_graph, n, m, k, seed=seed) + + t(nx.general_random_intersection_graph, n, 2, [0.1, 0.5], seed=seed) + t(nx.fast_gnp_random_graph, n, p, seed=seed) + t(nx.gnp_random_graph, n, p, seed=seed) + t(nx.dense_gnm_random_graph, n, m, seed=seed) + t(nx.gnm_random_graph, n, m, seed=seed) + t(nx.newman_watts_strogatz_graph, n, k, p, seed=seed) + t(nx.watts_strogatz_graph, n, k, p, seed=seed) + t(nx.connected_watts_strogatz_graph, n, k, p, seed=seed) + t(nx.random_regular_graph, 3, n, seed=seed) + t(nx.barabasi_albert_graph, n, m, seed=seed) + t(nx.extended_barabasi_albert_graph, n, m, p, q, seed=seed) + t(nx.powerlaw_cluster_graph, n, m, p, 
seed=seed) + t(nx.random_lobster, n, p1, p2, seed=seed) + t(nx.random_powerlaw_tree, n, seed=seed, tries=5000) + t(nx.random_powerlaw_tree_sequence, 10, seed=seed, tries=5000) + t(nx.random_tree, n, seed=seed) + t(nx.utils.powerlaw_sequence, n, seed=seed) + t(nx.utils.zipf_rv, 2.3, seed=seed) + cdist = [0.2, 0.4, 0.5, 0.7, 0.9, 1.0] + t(nx.utils.discrete_sequence, n, cdistribution=cdist, seed=seed) + t(nx.algorithms.bipartite.random_graph, n, m, p, seed=seed) + t(nx.algorithms.bipartite.gnmk_random_graph, n, m, k, seed=seed) + LFR = nx.generators.LFR_benchmark_graph + t( + LFR, + 25, + 3, + 1.5, + 0.1, + average_degree=3, + min_community=10, + seed=seed, + max_community=20, + ) + t(nx.random_internet_as_graph, n, seed=seed) + # print("done") + + +# choose to test an integer seed, or whether a single RNG can be everywhere +# np_rng = np.random.RandomState(14) +# seed = np_rng +# seed = 14 + + +@pytest.mark.slow +# print("NetworkX Version:", nx.__version__) +def test_rng_interface(): + global progress + + # try different kinds of seeds + for seed in [14, np.random.RandomState(14)]: + np.random.seed(42) + random.seed(42) + run_all_random_functions(seed) + progress = 0 + + # check that both global RNGs are unaffected + after_np_rv = np.random.rand() + # if np_rv != after_np_rv: + # print(np_rv, after_np_rv, "don't match np!") + assert np_rv == after_np_rv + after_py_rv = random.random() + # if py_rv != after_py_rv: + # print(py_rv, after_py_rv, "don't match py!") + assert py_rv == after_py_rv + + +# print("\nDone testing seed:", seed) + +# test_rng_interface() diff --git a/networkx/tests/test_convert.py b/networkx/tests/test_convert.py index b18aaf5..72c9fee 100644 --- a/networkx/tests/test_convert.py +++ b/networkx/tests/test_convert.py @@ -1,25 +1,26 @@ -#!/usr/bin/env python -from nose.tools import (assert_equal, assert_not_equal, - assert_true, assert_false, - assert_raises) +import pytest import networkx as nx from networkx.testing import assert_nodes_equal, assert_edges_equal, assert_graphs_equal -from networkx.convert import (to_networkx_graph, - to_dict_of_dicts, - from_dict_of_dicts, - to_dict_of_lists, - from_dict_of_lists) +from networkx.convert import ( + to_networkx_graph, + to_dict_of_dicts, + from_dict_of_dicts, + to_dict_of_lists, + from_dict_of_lists, +) from networkx.generators.classic import barbell_graph, cycle_graph -class TestConvert(): +class TestConvert: def edgelists_equal(self, e1, e2): return sorted(sorted(e) for e in e1) == sorted(sorted(e) for e in e2) def test_simple_graphs(self): - for dest, source in [(to_dict_of_dicts, from_dict_of_dicts), - (to_dict_of_lists, from_dict_of_lists)]: + for dest, source in [ + (to_dict_of_dicts, from_dict_of_dicts), + (to_dict_of_lists, from_dict_of_lists), + ]: G = barbell_graph(10, 3) G.graph = {} dod = dest(G) @@ -42,40 +43,36 @@ def test_simple_graphs(self): assert_graphs_equal(Gdod, P3) def test_exceptions(self): - # _prep_create_using - G = {"a": "a"} - H = nx.to_networkx_graph(G) - assert_graphs_equal(H, nx.Graph([('a', 'a')])) - assert_raises(TypeError, to_networkx_graph, G, create_using=0.0) - # NX graph - class G(object): + class G: adj = None - assert_raises(nx.NetworkXError, to_networkx_graph, G) + pytest.raises(nx.NetworkXError, to_networkx_graph, G) # pygraphviz agraph - class G(object): + class G: is_strict = None - assert_raises(nx.NetworkXError, to_networkx_graph, G) + pytest.raises(nx.NetworkXError, to_networkx_graph, G) # Dict of [dicts, lists] G = {"a": 0} - assert_raises(TypeError, to_networkx_graph, G) + 
pytest.raises(TypeError, to_networkx_graph, G) # list or generator of edges - class G(object): + class G: next = None - assert_raises(nx.NetworkXError, to_networkx_graph, G) + pytest.raises(nx.NetworkXError, to_networkx_graph, G) # no match - assert_raises(nx.NetworkXError, to_networkx_graph, "a") + pytest.raises(nx.NetworkXError, to_networkx_graph, "a") def test_digraphs(self): - for dest, source in [(to_dict_of_dicts, from_dict_of_dicts), - (to_dict_of_lists, from_dict_of_lists)]: + for dest, source in [ + (to_dict_of_dicts, from_dict_of_dicts), + (to_dict_of_lists, from_dict_of_lists), + ]: G = cycle_graph(10) # Dict of [dicts, lists] @@ -90,17 +87,17 @@ def test_digraphs(self): assert_nodes_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_edges_equal(sorted(G.edges()), sorted(GI.edges())) - G = cycle_graph(10, create_using=nx.DiGraph()) + G = cycle_graph(10, create_using=nx.DiGraph) dod = dest(G) - GG = source(dod, create_using=nx.DiGraph()) - assert_equal(sorted(G.nodes()), sorted(GG.nodes())) - assert_equal(sorted(G.edges()), sorted(GG.edges())) - GW = to_networkx_graph(dod, create_using=nx.DiGraph()) - assert_equal(sorted(G.nodes()), sorted(GW.nodes())) - assert_equal(sorted(G.edges()), sorted(GW.edges())) + GG = source(dod, create_using=nx.DiGraph) + assert sorted(G.nodes()) == sorted(GG.nodes()) + assert sorted(G.edges()) == sorted(GG.edges()) + GW = to_networkx_graph(dod, create_using=nx.DiGraph) + assert sorted(G.nodes()) == sorted(GW.nodes()) + assert sorted(G.edges()) == sorted(GW.edges()) GI = nx.DiGraph(dod) - assert_equal(sorted(G.nodes()), sorted(GI.nodes())) - assert_equal(sorted(G.edges()), sorted(GI.edges())) + assert sorted(G.nodes()) == sorted(GI.nodes()) + assert sorted(G.edges()) == sorted(GI.edges()) def test_graph(self): g = nx.cycle_graph(10) @@ -110,24 +107,24 @@ def test_graph(self): # Dict of dicts dod = to_dict_of_dicts(G) - GG = from_dict_of_dicts(dod, create_using=nx.Graph()) + GG = from_dict_of_dicts(dod, create_using=nx.Graph) assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_edges_equal(sorted(G.edges()), sorted(GG.edges())) - GW = to_networkx_graph(dod, create_using=nx.Graph()) + GW = to_networkx_graph(dod, create_using=nx.Graph) assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_edges_equal(sorted(G.edges()), sorted(GW.edges())) GI = nx.Graph(dod) - assert_equal(sorted(G.nodes()), sorted(GI.nodes())) - assert_equal(sorted(G.edges()), sorted(GI.edges())) + assert sorted(G.nodes()) == sorted(GI.nodes()) + assert sorted(G.edges()) == sorted(GI.edges()) # Dict of lists dol = to_dict_of_lists(G) - GG = from_dict_of_lists(dol, create_using=nx.Graph()) + GG = from_dict_of_lists(dol, create_using=nx.Graph) # dict of lists throws away edge data so set it to none enone = [(u, v, {}) for (u, v, d) in G.edges(data=True)] assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_edges_equal(enone, sorted(GG.edges(data=True))) - GW = to_networkx_graph(dol, create_using=nx.Graph()) + GW = to_networkx_graph(dol, create_using=nx.Graph) assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_edges_equal(enone, sorted(GW.edges(data=True))) GI = nx.Graph(dol) @@ -151,10 +148,10 @@ def test_with_multiedges_self_loops(self): # Dict of dicts # with self loops, OK dod = to_dict_of_dicts(XGS) - GG = from_dict_of_dicts(dod, create_using=nx.Graph()) + GG = from_dict_of_dicts(dod, create_using=nx.Graph) assert_nodes_equal(XGS.nodes(), GG.nodes()) assert_edges_equal(XGS.edges(), GG.edges()) - GW = to_networkx_graph(dod, 
create_using=nx.Graph()) + GW = to_networkx_graph(dod, create_using=nx.Graph) assert_nodes_equal(XGS.nodes(), GW.nodes()) assert_edges_equal(XGS.edges(), GW.edges()) GI = nx.Graph(dod) @@ -164,12 +161,12 @@ def test_with_multiedges_self_loops(self): # Dict of lists # with self loops, OK dol = to_dict_of_lists(XGS) - GG = from_dict_of_lists(dol, create_using=nx.Graph()) + GG = from_dict_of_lists(dol, create_using=nx.Graph) # dict of lists throws away edge data so set it to none enone = [(u, v, {}) for (u, v, d) in XGS.edges(data=True)] assert_nodes_equal(sorted(XGS.nodes()), sorted(GG.nodes())) assert_edges_equal(enone, sorted(GG.edges(data=True))) - GW = to_networkx_graph(dol, create_using=nx.Graph()) + GW = to_networkx_graph(dol, create_using=nx.Graph) assert_nodes_equal(sorted(XGS.nodes()), sorted(GW.nodes())) assert_edges_equal(enone, sorted(GW.edges(data=True))) GI = nx.Graph(dol) @@ -179,21 +176,19 @@ def test_with_multiedges_self_loops(self): # Dict of dicts # with multiedges, OK dod = to_dict_of_dicts(XGM) - GG = from_dict_of_dicts(dod, create_using=nx.MultiGraph(), - multigraph_input=True) + GG = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=True) assert_nodes_equal(sorted(XGM.nodes()), sorted(GG.nodes())) assert_edges_equal(sorted(XGM.edges()), sorted(GG.edges())) - GW = to_networkx_graph(dod, create_using=nx.MultiGraph(), multigraph_input=True) + GW = to_networkx_graph(dod, create_using=nx.MultiGraph, multigraph_input=True) assert_nodes_equal(sorted(XGM.nodes()), sorted(GW.nodes())) assert_edges_equal(sorted(XGM.edges()), sorted(GW.edges())) GI = nx.MultiGraph(dod) # convert can't tell whether to duplicate edges! assert_nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes())) - #assert_not_equal(sorted(XGM.edges()), sorted(GI.edges())) - assert_false(sorted(XGM.edges()) == sorted(GI.edges())) - GE = from_dict_of_dicts(dod, create_using=nx.MultiGraph(), - multigraph_input=False) + # assert_not_equal(sorted(XGM.edges()), sorted(GI.edges())) + assert not sorted(XGM.edges()) == sorted(GI.edges()) + GE = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=False) assert_nodes_equal(sorted(XGM.nodes()), sorted(GE.nodes())) - assert_not_equal(sorted(XGM.edges()), sorted(GE.edges())) + assert sorted(XGM.edges()) != sorted(GE.edges()) GI = nx.MultiGraph(XGM) assert_nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes())) assert_edges_equal(sorted(XGM.edges()), sorted(GI.edges())) @@ -205,10 +200,10 @@ def test_with_multiedges_self_loops(self): # with multiedges, OK, but better write as DiGraph else you'll # get double edges dol = to_dict_of_lists(G) - GG = from_dict_of_lists(dol, create_using=nx.MultiGraph()) + GG = from_dict_of_lists(dol, create_using=nx.MultiGraph) assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_edges_equal(sorted(G.edges()), sorted(GG.edges())) - GW = to_networkx_graph(dol, create_using=nx.MultiGraph()) + GW = to_networkx_graph(dol, create_using=nx.MultiGraph) assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_edges_equal(sorted(G.edges()), sorted(GW.edges())) GI = nx.MultiGraph(dol) @@ -238,29 +233,49 @@ def test_edgelists(self): def test_directed_to_undirected(self): edges1 = [(0, 1), (1, 2), (2, 0)] edges2 = [(0, 1), (1, 2), (0, 2)] - assert_true(self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(), edges1)) - assert_true(self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(), edges1)) - assert_true(self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(), edges1)) - 
assert_true(self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(), edges1)) + assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(), edges1) + assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(), edges1) + assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(), edges1) + assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(), edges1) - assert_true(self.edgelists_equal(nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(), - edges1)) - assert_true(self.edgelists_equal(nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(), - edges1)) + assert self.edgelists_equal( + nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(), edges1 + ) + assert self.edgelists_equal( + nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(), edges1 + ) - assert_true(self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(), edges1)) - assert_true(self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(), edges1)) + assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(), edges1) + assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(), edges1) def test_attribute_dict_integrity(self): # we must not replace dict-like graph data structures with dicts G = nx.OrderedGraph() G.add_nodes_from("abc") - H = to_networkx_graph(G, create_using=nx.OrderedGraph()) - assert_equal(list(H.nodes), list(G.nodes)) + H = to_networkx_graph(G, create_using=nx.OrderedGraph) + assert list(H.nodes) == list(G.nodes) H = nx.OrderedDiGraph(G) - assert_equal(list(H.nodes), list(G.nodes)) + assert list(H.nodes) == list(G.nodes) def test_to_edgelist(self): G = nx.Graph([(1, 1)]) elist = nx.to_edgelist(G, nodelist=list(G)) assert_edges_equal(G.edges(data=True), elist) + + def test_custom_node_attr_dict_safekeeping(self): + class custom_dict(dict): + pass + + class Custom(nx.Graph): + node_attr_dict_factory = custom_dict + + g = nx.Graph() + g.add_node(1, weight=1) + + h = Custom(g) + assert isinstance(g._node[1], dict) + assert isinstance(h._node[1], custom_dict) + + # this raises an exception + # h._node.update((n, dd.copy()) for n, dd in g.nodes.items()) + # assert isinstance(h._node[1], custom_dict) diff --git a/networkx/tests/test_convert_numpy.py b/networkx/tests/test_convert_numpy.py index f9fb9fe..4672030 100644 --- a/networkx/tests/test_convert_numpy.py +++ b/networkx/tests/test_convert_numpy.py @@ -1,34 +1,24 @@ -from nose import SkipTest -from nose.tools import assert_raises, assert_true, assert_equal +import pytest + +np = pytest.importorskip("numpy") +np_assert_equal = np.testing.assert_equal import networkx as nx from networkx.generators.classic import barbell_graph, cycle_graph, path_graph from networkx.testing.utils import assert_graphs_equal -class TestConvertNumpy(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - - @classmethod - def setupClass(cls): - global np - global np_assert_equal - try: - import numpy as np - np_assert_equal = np.testing.assert_equal - except ImportError: - raise SkipTest('NumPy not available.') - - def __init__(self): +class TestConvertNumpy: + def setup_method(self): self.G1 = barbell_graph(10, 3) - self.G2 = cycle_graph(10, create_using=nx.DiGraph()) + self.G2 = cycle_graph(10, create_using=nx.DiGraph) self.G3 = self.create_weighted(nx.Graph()) self.G4 = self.create_weighted(nx.DiGraph()) def test_exceptions(self): G = np.array("a") - assert_raises(nx.NetworkXError, nx.to_networkx_graph, G) + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) def create_weighted(self, G): g = cycle_graph(4) @@
-37,22 +27,22 @@ def create_weighted(self, G): return G def assert_equal(self, G1, G2): - assert_true(sorted(G1.nodes()) == sorted(G2.nodes())) - assert_true(sorted(G1.edges()) == sorted(G2.edges())) + assert sorted(G1.nodes()) == sorted(G2.nodes()) + assert sorted(G1.edges()) == sorted(G2.edges()) def identity_conversion(self, G, A, create_using): - assert(A.sum() > 0) + assert A.sum() > 0 GG = nx.from_numpy_matrix(A, create_using=create_using) self.assert_equal(G, GG) GW = nx.to_networkx_graph(A, create_using=create_using) self.assert_equal(G, GW) - GI = create_using.__class__(A) + GI = nx.empty_graph(0, create_using).__class__(A) self.assert_equal(G, GI) def test_shape(self): "Conversion from non-square array." A = np.array([[1, 2, 3], [4, 5, 6]]) - assert_raises(nx.NetworkXError, nx.from_numpy_matrix, A) + pytest.raises(nx.NetworkXError, nx.from_numpy_matrix, A) def test_identity_graph_matrix(self): "Conversion from graph to matrix to graph." @@ -109,7 +99,7 @@ def test_nodelist(self): # Make nodelist ambiguous by containing duplicates. nodelist += [nodelist[0]] - assert_raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist) + pytest.raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist) def test_weight_keyword(self): WP4 = nx.Graph() @@ -118,61 +108,68 @@ def test_weight_keyword(self): A = nx.to_numpy_matrix(P4) np_assert_equal(A, nx.to_numpy_matrix(WP4, weight=None)) np_assert_equal(0.5 * A, nx.to_numpy_matrix(WP4)) - np_assert_equal(0.3 * A, nx.to_numpy_matrix(WP4, weight='other')) + np_assert_equal(0.3 * A, nx.to_numpy_matrix(WP4, weight="other")) def test_from_numpy_matrix_type(self): A = np.matrix([[1]]) G = nx.from_numpy_matrix(A) - assert_equal(type(G[0][0]['weight']), int) + assert type(G[0][0]["weight"]) == int A = np.matrix([[1]]).astype(np.float) G = nx.from_numpy_matrix(A) - assert_equal(type(G[0][0]['weight']), float) + assert type(G[0][0]["weight"]) == float A = np.matrix([[1]]).astype(np.str) G = nx.from_numpy_matrix(A) - assert_equal(type(G[0][0]['weight']), str) + assert type(G[0][0]["weight"]) == str A = np.matrix([[1]]).astype(np.bool) G = nx.from_numpy_matrix(A) - assert_equal(type(G[0][0]['weight']), bool) + assert type(G[0][0]["weight"]) == bool A = np.matrix([[1]]).astype(np.complex) G = nx.from_numpy_matrix(A) - assert_equal(type(G[0][0]['weight']), complex) + assert type(G[0][0]["weight"]) == complex A = np.matrix([[1]]).astype(np.object) - assert_raises(TypeError, nx.from_numpy_matrix, A) + pytest.raises(TypeError, nx.from_numpy_matrix, A) + + G = nx.cycle_graph(3) + A = nx.adj_matrix(G).todense() + H = nx.from_numpy_matrix(A) + assert all(type(m) == int and type(n) == int for m, n in H.edges()) + H = nx.from_numpy_array(A) + assert all(type(m) == int and type(n) == int for m, n in H.edges()) def test_from_numpy_matrix_dtype(self): - dt = [('weight', float), ('cost', int)] + dt = [("weight", float), ("cost", int)] A = np.matrix([[(1.0, 2)]], dtype=dt) G = nx.from_numpy_matrix(A) - assert_equal(type(G[0][0]['weight']), float) - assert_equal(type(G[0][0]['cost']), int) - assert_equal(G[0][0]['cost'], 2) - assert_equal(G[0][0]['weight'], 1.0) + assert type(G[0][0]["weight"]) == float + assert type(G[0][0]["cost"]) == int + assert G[0][0]["cost"] == 2 + assert G[0][0]["weight"] == 1.0 def test_to_numpy_recarray(self): G = nx.Graph() G.add_edge(1, 2, weight=7.0, cost=5) - A = nx.to_numpy_recarray(G, dtype=[('weight', float), ('cost', int)]) - assert_equal(sorted(A.dtype.names), ['cost', 'weight']) - assert_equal(A.weight[0, 1], 7.0) - 
assert_equal(A.weight[0, 0], 0.0) - assert_equal(A.cost[0, 1], 5) - assert_equal(A.cost[0, 0], 0) + A = nx.to_numpy_recarray(G, dtype=[("weight", float), ("cost", int)]) + assert sorted(A.dtype.names) == ["cost", "weight"] + assert A.weight[0, 1] == 7.0 + assert A.weight[0, 0] == 0.0 + assert A.cost[0, 1] == 5 + assert A.cost[0, 0] == 0 def test_numpy_multigraph(self): G = nx.MultiGraph() G.add_edge(1, 2, weight=7) G.add_edge(1, 2, weight=70) A = nx.to_numpy_matrix(G) - assert_equal(A[1, 0], 77) + assert A[1, 0] == 77 A = nx.to_numpy_matrix(G, multigraph_weight=min) - assert_equal(A[1, 0], 7) + assert A[1, 0] == 7 A = nx.to_numpy_matrix(G, multigraph_weight=max) - assert_equal(A[1, 0], 70) + assert A[1, 0] == 70 def test_from_numpy_matrix_parallel_edges(self): """Tests that the :func:`networkx.from_numpy_matrix` function @@ -187,11 +184,9 @@ def test_from_numpy_matrix_parallel_edges(self): edges = [(0, 0), (0, 1), (1, 0)] expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) expected.add_edge(1, 1, weight=2) - actual = nx.from_numpy_matrix(A, parallel_edges=True, - create_using=nx.DiGraph()) + actual = nx.from_numpy_matrix(A, parallel_edges=True, create_using=nx.DiGraph) assert_graphs_equal(actual, expected) - actual = nx.from_numpy_matrix(A, parallel_edges=False, - create_using=nx.DiGraph()) + actual = nx.from_numpy_matrix(A, parallel_edges=False, create_using=nx.DiGraph) assert_graphs_equal(actual, expected) # Now each integer entry in the adjacency matrix is interpreted as the # number of parallel edges in the graph if the appropriate keyword @@ -199,15 +194,17 @@ def test_from_numpy_matrix_parallel_edges(self): edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)] expected = nx.MultiDiGraph() expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) - actual = nx.from_numpy_matrix(A, parallel_edges=True, - create_using=nx.MultiDiGraph()) + actual = nx.from_numpy_matrix( + A, parallel_edges=True, create_using=nx.MultiDiGraph + ) assert_graphs_equal(actual, expected) expected = nx.MultiDiGraph() expected.add_edges_from(set(edges), weight=1) # The sole self-loop (edge 0) on vertex 1 should have weight 2. - expected[1][1][0]['weight'] = 2 - actual = nx.from_numpy_matrix(A, parallel_edges=False, - create_using=nx.MultiDiGraph()) + expected[1][1][0]["weight"] = 2 + actual = nx.from_numpy_matrix( + A, parallel_edges=False, create_using=nx.MultiDiGraph + ) assert_graphs_equal(actual, expected) def test_symmetric(self): @@ -216,7 +213,7 @@ def test_symmetric(self): """ A = np.matrix([[0, 1], [1, 0]]) - G = nx.from_numpy_matrix(A, create_using=nx.MultiGraph()) + G = nx.from_numpy_matrix(A, create_using=nx.MultiGraph) expected = nx.MultiGraph() expected.add_edge(0, 1, weight=1) assert_graphs_equal(G, expected) @@ -229,7 +226,7 @@ def test_dtype_int_graph(self): """ G = nx.complete_graph(3) A = nx.to_numpy_matrix(G, dtype=int) - assert_equal(A.dtype, int) + assert A.dtype == int def test_dtype_int_multigraph(self): """Test that setting dtype int actually gives an integer matrix. 
@@ -239,26 +236,13 @@ def test_dtype_int_multigraph(self): """ G = nx.MultiGraph(nx.complete_graph(3)) A = nx.to_numpy_matrix(G, dtype=int) - assert_equal(A.dtype, int) + assert A.dtype == int -class TestConvertNumpyArray(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test - - @classmethod - def setupClass(cls): - global np - global np_assert_equal - try: - import numpy as np - np_assert_equal = np.testing.assert_equal - except ImportError: - raise SkipTest('NumPy not available.') - - def __init__(self): +class TestConvertNumpyArray: + def setup_method(self): self.G1 = barbell_graph(10, 3) - self.G2 = cycle_graph(10, create_using=nx.DiGraph()) - + self.G2 = cycle_graph(10, create_using=nx.DiGraph) self.G3 = self.create_weighted(nx.Graph()) self.G4 = self.create_weighted(nx.DiGraph()) @@ -269,22 +253,22 @@ def create_weighted(self, G): return G def assert_equal(self, G1, G2): - assert_true(sorted(G1.nodes()) == sorted(G2.nodes())) - assert_true(sorted(G1.edges()) == sorted(G2.edges())) + assert sorted(G1.nodes()) == sorted(G2.nodes()) + assert sorted(G1.edges()) == sorted(G2.edges()) def identity_conversion(self, G, A, create_using): - assert(A.sum() > 0) + assert A.sum() > 0 GG = nx.from_numpy_array(A, create_using=create_using) self.assert_equal(G, GG) GW = nx.to_networkx_graph(A, create_using=create_using) self.assert_equal(G, GW) - GI = create_using.__class__(A) + GI = nx.empty_graph(0, create_using).__class__(A) self.assert_equal(G, GI) def test_shape(self): "Conversion from non-square array." A = np.array([[1, 2, 3], [4, 5, 6]]) - assert_raises(nx.NetworkXError, nx.from_numpy_array, A) + pytest.raises(nx.NetworkXError, nx.from_numpy_array, A) def test_identity_graph_array(self): "Conversion from graph to array to graph." @@ -317,7 +301,7 @@ def test_nodelist(self): # Make nodelist ambiguous by containing duplicates. 
nodelist += [nodelist[0]] - assert_raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist) + pytest.raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist) def test_weight_keyword(self): WP4 = nx.Graph() @@ -326,61 +310,61 @@ def test_weight_keyword(self): A = nx.to_numpy_array(P4) np_assert_equal(A, nx.to_numpy_array(WP4, weight=None)) np_assert_equal(0.5 * A, nx.to_numpy_array(WP4)) - np_assert_equal(0.3 * A, nx.to_numpy_array(WP4, weight='other')) + np_assert_equal(0.3 * A, nx.to_numpy_array(WP4, weight="other")) def test_from_numpy_array_type(self): A = np.array([[1]]) G = nx.from_numpy_array(A) - assert_equal(type(G[0][0]['weight']), int) + assert type(G[0][0]["weight"]) == int A = np.array([[1]]).astype(np.float) G = nx.from_numpy_array(A) - assert_equal(type(G[0][0]['weight']), float) + assert type(G[0][0]["weight"]) == float A = np.array([[1]]).astype(np.str) G = nx.from_numpy_array(A) - assert_equal(type(G[0][0]['weight']), str) + assert type(G[0][0]["weight"]) == str A = np.array([[1]]).astype(np.bool) G = nx.from_numpy_array(A) - assert_equal(type(G[0][0]['weight']), bool) + assert type(G[0][0]["weight"]) == bool A = np.array([[1]]).astype(np.complex) G = nx.from_numpy_array(A) - assert_equal(type(G[0][0]['weight']), complex) + assert type(G[0][0]["weight"]) == complex A = np.array([[1]]).astype(np.object) - assert_raises(TypeError, nx.from_numpy_array, A) + pytest.raises(TypeError, nx.from_numpy_array, A) def test_from_numpy_array_dtype(self): - dt = [('weight', float), ('cost', int)] + dt = [("weight", float), ("cost", int)] A = np.array([[(1.0, 2)]], dtype=dt) G = nx.from_numpy_array(A) - assert_equal(type(G[0][0]['weight']), float) - assert_equal(type(G[0][0]['cost']), int) - assert_equal(G[0][0]['cost'], 2) - assert_equal(G[0][0]['weight'], 1.0) + assert type(G[0][0]["weight"]) == float + assert type(G[0][0]["cost"]) == int + assert G[0][0]["cost"] == 2 + assert G[0][0]["weight"] == 1.0 def test_to_numpy_recarray(self): G = nx.Graph() G.add_edge(1, 2, weight=7.0, cost=5) - A = nx.to_numpy_recarray(G, dtype=[('weight', float), ('cost', int)]) - assert_equal(sorted(A.dtype.names), ['cost', 'weight']) - assert_equal(A.weight[0, 1], 7.0) - assert_equal(A.weight[0, 0], 0.0) - assert_equal(A.cost[0, 1], 5) - assert_equal(A.cost[0, 0], 0) + A = nx.to_numpy_recarray(G, dtype=[("weight", float), ("cost", int)]) + assert sorted(A.dtype.names) == ["cost", "weight"] + assert A.weight[0, 1] == 7.0 + assert A.weight[0, 0] == 0.0 + assert A.cost[0, 1] == 5 + assert A.cost[0, 0] == 0 def test_numpy_multigraph(self): G = nx.MultiGraph() G.add_edge(1, 2, weight=7) G.add_edge(1, 2, weight=70) A = nx.to_numpy_array(G) - assert_equal(A[1, 0], 77) + assert A[1, 0] == 77 A = nx.to_numpy_array(G, multigraph_weight=min) - assert_equal(A[1, 0], 7) + assert A[1, 0] == 7 A = nx.to_numpy_array(G, multigraph_weight=max) - assert_equal(A[1, 0], 70) + assert A[1, 0] == 70 def test_from_numpy_array_parallel_edges(self): """Tests that the :func:`networkx.from_numpy_array` function @@ -395,11 +379,9 @@ def test_from_numpy_array_parallel_edges(self): edges = [(0, 0), (0, 1), (1, 0)] expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) expected.add_edge(1, 1, weight=2) - actual = nx.from_numpy_array(A, parallel_edges=True, - create_using=nx.DiGraph()) + actual = nx.from_numpy_array(A, parallel_edges=True, create_using=nx.DiGraph) assert_graphs_equal(actual, expected) - actual = nx.from_numpy_array(A, parallel_edges=False, - create_using=nx.DiGraph()) + actual = 
nx.from_numpy_array(A, parallel_edges=False, create_using=nx.DiGraph) assert_graphs_equal(actual, expected) # Now each integer entry in the adjacency matrix is interpreted as the # number of parallel edges in the graph if the appropriate keyword @@ -407,15 +389,17 @@ def test_from_numpy_array_parallel_edges(self): edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)] expected = nx.MultiDiGraph() expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) - actual = nx.from_numpy_array(A, parallel_edges=True, - create_using=nx.MultiDiGraph()) + actual = nx.from_numpy_array( + A, parallel_edges=True, create_using=nx.MultiDiGraph + ) assert_graphs_equal(actual, expected) expected = nx.MultiDiGraph() expected.add_edges_from(set(edges), weight=1) # The sole self-loop (edge 0) on vertex 1 should have weight 2. - expected[1][1][0]['weight'] = 2 - actual = nx.from_numpy_array(A, parallel_edges=False, - create_using=nx.MultiDiGraph()) + expected[1][1][0]["weight"] = 2 + actual = nx.from_numpy_array( + A, parallel_edges=False, create_using=nx.MultiDiGraph + ) assert_graphs_equal(actual, expected) def test_symmetric(self): @@ -424,7 +408,7 @@ def test_symmetric(self): """ A = np.array([[0, 1], [1, 0]]) - G = nx.from_numpy_array(A, create_using=nx.MultiGraph()) + G = nx.from_numpy_array(A, create_using=nx.MultiGraph) expected = nx.MultiGraph() expected.add_edge(0, 1, weight=1) assert_graphs_equal(G, expected) @@ -437,7 +421,7 @@ def test_dtype_int_graph(self): """ G = nx.complete_graph(3) A = nx.to_numpy_array(G, dtype=int) - assert_equal(A.dtype, int) + assert A.dtype == int def test_dtype_int_multigraph(self): """Test that setting dtype int actually gives an integer array. @@ -447,4 +431,4 @@ def test_dtype_int_multigraph(self): """ G = nx.MultiGraph(nx.complete_graph(3)) A = nx.to_numpy_array(G, dtype=int) - assert_equal(A.dtype, int) + assert A.dtype == int diff --git a/networkx/tests/test_convert_pandas.py b/networkx/tests/test_convert_pandas.py index f20ea91..b06b96b 100644 --- a/networkx/tests/test_convert_pandas.py +++ b/networkx/tests/test_convert_pandas.py @@ -1,129 +1,210 @@ -from nose import SkipTest -from nose.tools import assert_raises - +import pytest import networkx as nx -from networkx.testing import assert_nodes_equal, assert_edges_equal, assert_graphs_equal - +from networkx.testing import assert_nodes_equal +from networkx.testing import assert_edges_equal +from networkx.testing import assert_graphs_equal -class TestConvertPandas(object): - numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test +np = pytest.importorskip("numpy") +pd = pytest.importorskip("pandas") - @classmethod - def setupClass(cls): - try: - import pandas as pd - except ImportError: - raise SkipTest('Pandas not available.') - def __init__(self): - global pd - import pandas as pd - - self.r = pd.np.random.RandomState(seed=5) - ints = self.r.random_integers(1, 10, size=(3, 2)) - a = ['A', 'B', 'C'] - b = ['D', 'A', 'E'] - df = pd.DataFrame(ints, columns=['weight', 'cost']) +class TestConvertPandas: + def setup_method(self): + self.rng = np.random.RandomState(seed=5) + ints = self.rng.randint(1, 11, size=(3, 2)) + a = ["A", "B", "C"] + b = ["D", "A", "E"] + df = pd.DataFrame(ints, columns=["weight", "cost"]) df[0] = a # Column label 0 (int) - df['b'] = b # Column label 'b' (str) + df["b"] = b # Column label 'b' (str) self.df = df - mdf = pd.DataFrame([[4, 16, 'A', 'D']], - columns=['weight', 'cost', 0, 'b']) + + mdf = pd.DataFrame([[4, 16, "A", "D"]], columns=["weight", "cost", 0, "b"]) self.mdf = 
df.append(mdf) def test_exceptions(self): G = pd.DataFrame(["a"]) # adj - assert_raises(nx.NetworkXError, nx.to_networkx_graph, G) + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) G = pd.DataFrame(["a", 0.0]) # elist - assert_raises(nx.NetworkXError, nx.to_networkx_graph, G) + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) df = pd.DataFrame([[1, 1], [1, 0]], dtype=int, index=[1, 2], columns=["a", "b"]) - assert_raises(nx.NetworkXError, nx.from_pandas_adjacency, df) + pytest.raises(nx.NetworkXError, nx.from_pandas_adjacency, df) def test_from_edgelist_all_attr(self): - Gtrue = nx.Graph([('E', 'C', {'cost': 9, 'weight': 10}), - ('B', 'A', {'cost': 1, 'weight': 7}), - ('A', 'D', {'cost': 7, 'weight': 4})]) - G = nx.from_pandas_edgelist(self.df, 0, 'b', True) + Gtrue = nx.Graph( + [ + ("E", "C", {"cost": 9, "weight": 10}), + ("B", "A", {"cost": 1, "weight": 7}), + ("A", "D", {"cost": 7, "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", True) assert_graphs_equal(G, Gtrue) # MultiGraph MGtrue = nx.MultiGraph(Gtrue) - MGtrue.add_edge('A', 'D', cost=16, weight=4) - MG = nx.from_pandas_edgelist(self.mdf, 0, 'b', True, nx.MultiGraph()) + MGtrue.add_edge("A", "D", cost=16, weight=4) + MG = nx.from_pandas_edgelist(self.mdf, 0, "b", True, nx.MultiGraph()) assert_graphs_equal(MG, MGtrue) def test_from_edgelist_multi_attr(self): - Gtrue = nx.Graph([('E', 'C', {'cost': 9, 'weight': 10}), - ('B', 'A', {'cost': 1, 'weight': 7}), - ('A', 'D', {'cost': 7, 'weight': 4})]) - G = nx.from_pandas_edgelist(self.df, 0, 'b', ['weight', 'cost']) + Gtrue = nx.Graph( + [ + ("E", "C", {"cost": 9, "weight": 10}), + ("B", "A", {"cost": 1, "weight": 7}), + ("A", "D", {"cost": 7, "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", ["weight", "cost"]) + assert_graphs_equal(G, Gtrue) + + def test_from_edgelist_multi_attr_incl_target(self): + Gtrue = nx.Graph( + [ + ("E", "C", {0: "C", "b": "E", "weight": 10}), + ("B", "A", {0: "B", "b": "A", "weight": 7}), + ("A", "D", {0: "A", "b": "D", "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", [0, "b", "weight"]) assert_graphs_equal(G, Gtrue) def test_from_edgelist_multidigraph_and_edge_attr(self): # example from issue #2374 - Gtrue = nx.MultiDiGraph([('X1', 'X4', {'Co': 'zA', 'Mi': 0, 'St': 'X1'}), - ('X1', 'X4', {'Co': 'zB', 'Mi': 54, 'St': 'X2'}), - ('X1', 'X4', {'Co': 'zB', 'Mi': 49, 'St': 'X3'}), - ('X1', 'X4', {'Co': 'zB', 'Mi': 44, 'St': 'X4'}), - ('Y1', 'Y3', {'Co': 'zC', 'Mi': 0, 'St': 'Y1'}), - ('Y1', 'Y3', {'Co': 'zC', 'Mi': 34, 'St': 'Y2'}), - ('Y1', 'Y3', {'Co': 'zC', 'Mi': 29, 'St': 'X2'}), - ('Y1', 'Y3', {'Co': 'zC', 'Mi': 24, 'St': 'Y3'}), - ('Z1', 'Z3', {'Co': 'zD', 'Mi': 0, 'St': 'Z1'}), - ('Z1', 'Z3', {'Co': 'zD', 'Mi': 14, 'St': 'X3'}), - ('Z1', 'Z3', {'Co': 'zE', 'Mi': 9, 'St': 'Z2'}), - ('Z1', 'Z3', {'Co': 'zE', 'Mi': 4, 'St': 'Z3'})]) - df = pd.DataFrame.from_items([ - ('O', ['X1', 'X1', 'X1', 'X1', 'Y1', 'Y1', 'Y1', 'Y1', 'Z1', 'Z1', 'Z1', 'Z1']), - ('D', ['X4', 'X4', 'X4', 'X4', 'Y3', 'Y3', 'Y3', 'Y3', 'Z3', 'Z3', 'Z3', 'Z3']), - ('St', ['X1', 'X2', 'X3', 'X4', 'Y1', 'Y2', 'X2', 'Y3', 'Z1', 'X3', 'Z2', 'Z3']), - ('Co', ['zA', 'zB', 'zB', 'zB', 'zC', 'zC', 'zC', 'zC', 'zD', 'zD', 'zE', 'zE']), - ('Mi', [0, 54, 49, 44, 0, 34, 29, 24, 0, 14, 9, 4])]) - G1 = nx.from_pandas_edgelist(df, source='O', target='D', - edge_attr=True, - create_using=nx.MultiDiGraph()) - G2 = nx.from_pandas_edgelist(df, source='O', target='D', - edge_attr=['St', 'Co', 'Mi'], - create_using=nx.MultiDiGraph()) + 
edges = [ + ("X1", "X4", {"Co": "zA", "Mi": 0, "St": "X1"}), + ("X1", "X4", {"Co": "zB", "Mi": 54, "St": "X2"}), + ("X1", "X4", {"Co": "zB", "Mi": 49, "St": "X3"}), + ("X1", "X4", {"Co": "zB", "Mi": 44, "St": "X4"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 0, "St": "Y1"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 34, "St": "Y2"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 29, "St": "X2"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 24, "St": "Y3"}), + ("Z1", "Z3", {"Co": "zD", "Mi": 0, "St": "Z1"}), + ("Z1", "Z3", {"Co": "zD", "Mi": 14, "St": "X3"}), + ("Z1", "Z3", {"Co": "zE", "Mi": 9, "St": "Z2"}), + ("Z1", "Z3", {"Co": "zE", "Mi": 4, "St": "Z3"}), + ] + Gtrue = nx.MultiDiGraph(edges) + data = { + "O": ["X1", "X1", "X1", "X1", "Y1", "Y1", "Y1", "Y1", "Z1", "Z1", "Z1", "Z1"], + "D": ["X4", "X4", "X4", "X4", "Y3", "Y3", "Y3", "Y3", "Z3", "Z3", "Z3", "Z3"], + "St": ["X1", "X2", "X3", "X4", "Y1", "Y2", "X2", "Y3", "Z1", "X3", "Z2", "Z3"], + "Co": ["zA", "zB", "zB", "zB", "zC", "zC", "zC", "zC", "zD", "zD", "zE", "zE"], + "Mi": [0, 54, 49, 44, 0, 34, 29, 24, 0, 14, 9, 4], + } + df = pd.DataFrame.from_dict(data) + G1 = nx.from_pandas_edgelist( + df, source="O", target="D", edge_attr=True, create_using=nx.MultiDiGraph + ) + G2 = nx.from_pandas_edgelist( + df, + source="O", + target="D", + edge_attr=["St", "Co", "Mi"], + create_using=nx.MultiDiGraph, + ) assert_graphs_equal(G1, Gtrue) assert_graphs_equal(G2, Gtrue) def test_from_edgelist_one_attr(self): - Gtrue = nx.Graph([('E', 'C', {'weight': 10}), - ('B', 'A', {'weight': 7}), - ('A', 'D', {'weight': 4})]) - G = nx.from_pandas_edgelist(self.df, 0, 'b', 'weight') + Gtrue = nx.Graph( + [ + ("E", "C", {"weight": 10}), + ("B", "A", {"weight": 7}), + ("A", "D", {"weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", "weight") assert_graphs_equal(G, Gtrue) + def test_from_edgelist_int_attr_name(self): + # note: this also tests that edge_attr can be `source` + Gtrue = nx.Graph( + [("E", "C", {0: "C"}), ("B", "A", {0: "B"}), ("A", "D", {0: "A"})] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", 0) + assert_graphs_equal(G, Gtrue) + + def test_from_edgelist_invalid_attr(self): + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", "misspell" + ) + pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", 1) + # see Issue #3562 + edgeframe = pd.DataFrame([[0, 1], [1, 2], [2, 0]], columns=["s", "t"]) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", True + ) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", "weight" + ) + pytest.raises( + nx.NetworkXError, + nx.from_pandas_edgelist, + edgeframe, + "s", + "t", + ["weight", "size"], + ) + def test_from_edgelist_no_attr(self): - Gtrue = nx.Graph([('E', 'C', {}), - ('B', 'A', {}), - ('A', 'D', {})]) - G = nx.from_pandas_edgelist(self.df, 0, 'b',) + Gtrue = nx.Graph([("E", "C", {}), ("B", "A", {}), ("A", "D", {})]) + G = nx.from_pandas_edgelist(self.df, 0, "b") assert_graphs_equal(G, Gtrue) def test_from_edgelist(self): # Pandas DataFrame - g = nx.cycle_graph(10) - G = nx.Graph() - G.add_nodes_from(g) - G.add_weighted_edges_from((u, v, u) for u, v in g.edges()) + G = nx.cycle_graph(10) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) + edgelist = nx.to_edgelist(G) source = [s for s, t, d in edgelist] target = [t for s, t, d in edgelist] - weight = [d['weight'] for s, t, d in edgelist] - edges = pd.DataFrame({'source': source, - 'target': target, - 'weight': weight}) - GG = nx.from_pandas_edgelist(edges, edge_attr='weight') + weight = [d["weight"] for s, t, d in edgelist] + edges = pd.DataFrame({"source": source, "target": target, "weight": weight}) + + GG
= nx.from_pandas_edgelist(edges, edge_attr="weight") assert_nodes_equal(G.nodes(), GG.nodes()) assert_edges_equal(G.edges(), GG.edges()) - GW = nx.to_networkx_graph(edges, create_using=nx.Graph()) + GW = nx.to_networkx_graph(edges, create_using=nx.Graph) assert_nodes_equal(G.nodes(), GW.nodes()) assert_edges_equal(G.edges(), GW.edges()) + def test_to_edgelist_default_source_or_target_col_exists(self): + + G = nx.path_graph(10) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) + nx.set_edge_attributes(G, 0, name="source") + pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G) + + # drop source column to test an exception raised for the target column + for u, v, d in G.edges(data=True): + d.pop("source", None) + + nx.set_edge_attributes(G, 0, name="target") + pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G) + + def test_to_edgelist_custom_source_or_target_col_exists(self): + + G = nx.path_graph(10) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) + nx.set_edge_attributes(G, 0, name="source_col_name") + pytest.raises( + nx.NetworkXError, nx.to_pandas_edgelist, G, source="source_col_name" + ) + + # drop source column to test an exception raised for the target column + for u, v, d in G.edges(data=True): + d.pop("source_col_name", None) + + nx.set_edge_attributes(G, 0, name="target_col_name") + pytest.raises( + nx.NetworkXError, nx.to_pandas_edgelist, G, target="target_col_name" + ) + def test_from_adjacency(self): nodelist = [1, 2] - dftrue = pd.DataFrame([[1, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist) + dftrue = pd.DataFrame( + [[1, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist + ) G = nx.Graph([(1, 1), (1, 2)]) df = nx.to_pandas_adjacency(G, dtype=int) pd.testing.assert_frame_equal(df, dftrue) @@ -135,7 +216,71 @@ def test_roundtrip(self): G = nx.from_pandas_edgelist(df) assert_graphs_equal(Gtrue, G) # adjacency - Gtrue = nx.Graph(({1: {1: {'weight': 1}, 2: {'weight': 1}}, 2: {1: {'weight': 1}}})) + adj = {1: {1: {"weight": 1}, 2: {"weight": 1}}, 2: {1: {"weight": 1}}} + Gtrue = nx.Graph(adj) df = nx.to_pandas_adjacency(Gtrue, dtype=int) G = nx.from_pandas_adjacency(df) assert_graphs_equal(Gtrue, G) + + def test_from_adjacency_named(self): + # example from issue #3105 + data = { + "A": {"A": 0, "B": 0, "C": 0}, + "B": {"A": 1, "B": 0, "C": 0}, + "C": {"A": 0, "B": 1, "C": 0}, + } + dftrue = pd.DataFrame(data) + df = dftrue[["A", "C", "B"]] + G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph()) + df = nx.to_pandas_adjacency(G, dtype=np.intp) + pd.testing.assert_frame_equal(df, dftrue) + + def test_edgekey_with_multigraph(self): + df = pd.DataFrame( + { + "attr1": {"A": "F1", "B": "F2", "C": "F3"}, + "attr2": {"A": 1, "B": 0, "C": 0}, + "attr3": {"A": 0, "B": 1, "C": 0}, + "source": {"A": "N1", "B": "N2", "C": "N1"}, + "target": {"A": "N2", "B": "N3", "C": "N1"}, + } + ) + Gtrue = nx.Graph( + [ + ("N1", "N2", {"F1": {"attr2": 1, "attr3": 0}}), + ("N2", "N3", {"F2": {"attr2": 0, "attr3": 1}}), + ("N1", "N1", {"F3": {"attr2": 0, "attr3": 0}}), + ] + ) + # example from issue #4065 + G = nx.from_pandas_edgelist( + df, + source="source", + target="target", + edge_attr=["attr2", "attr3"], + edge_key="attr1", + create_using=nx.MultiGraph(), + ) + assert_graphs_equal(G, Gtrue) + + def test_edgekey_with_normal_graph_no_action(self): + Gtrue = nx.Graph( + [ + ("E", "C", {"cost": 9, "weight": 10}), + ("B", "A", {"cost": 1, "weight": 7}), + ("A", "D", {"cost": 7, "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, 
"b", True, edge_key="weight") + assert_graphs_equal(G, Gtrue) + + def test_nonexisting_edgekey_raises(self): + with pytest.raises(nx.exception.NetworkXError): + nx.from_pandas_edgelist( + self.df, + source="source", + target="target", + edge_key="Not_real", + edge_attr=True, + create_using=nx.MultiGraph(), + ) diff --git a/networkx/tests/test_convert_scipy.py b/networkx/tests/test_convert_scipy.py index c423bed..9cce6c0 100644 --- a/networkx/tests/test_convert_scipy.py +++ b/networkx/tests/test_convert_scipy.py @@ -1,35 +1,31 @@ -from nose import SkipTest -from nose.tools import assert_raises, assert_true, raises +import pytest import networkx as nx from networkx.testing import assert_graphs_equal from networkx.generators.classic import barbell_graph, cycle_graph, path_graph -class TestConvertNumpy(object): +class TestConvertNumpy: @classmethod - def setupClass(cls): + def setup_class(cls): global np, sp, sparse, np_assert_equal - try: - import numpy as np - import scipy as sp - import scipy.sparse as sparse - np_assert_equal = np.testing.assert_equal - except ImportError: - raise SkipTest('SciPy sparse library not available.') - - def __init__(self): + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") + sparse = sp.sparse + np_assert_equal = np.testing.assert_equal + + def setup_method(self): self.G1 = barbell_graph(10, 3) - self.G2 = cycle_graph(10, create_using=nx.DiGraph()) + self.G2 = cycle_graph(10, create_using=nx.DiGraph) self.G3 = self.create_weighted(nx.Graph()) self.G4 = self.create_weighted(nx.DiGraph()) def test_exceptions(self): - class G(object): + class G: format = None - assert_raises(nx.NetworkXError, nx.to_networkx_graph, G) + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) def create_weighted(self, G): g = cycle_graph(4) @@ -42,7 +38,7 @@ def create_weighted(self, G): return G def assert_isomorphic(self, G1, G2): - assert_true(nx.is_isomorphic(G1, G2)) + assert nx.is_isomorphic(G1, G2) def identity_conversion(self, G, A, create_using): GG = nx.from_scipy_sparse_matrix(A, create_using=create_using) @@ -51,33 +47,33 @@ def identity_conversion(self, G, A, create_using): GW = nx.to_networkx_graph(A, create_using=create_using) self.assert_isomorphic(G, GW) - GI = create_using.__class__(A) + GI = nx.empty_graph(0, create_using).__class__(A) self.assert_isomorphic(G, GI) ACSR = A.tocsr() - GI = create_using.__class__(ACSR) + GI = nx.empty_graph(0, create_using).__class__(ACSR) self.assert_isomorphic(G, GI) ACOO = A.tocoo() - GI = create_using.__class__(ACOO) + GI = nx.empty_graph(0, create_using).__class__(ACOO) self.assert_isomorphic(G, GI) ACSC = A.tocsc() - GI = create_using.__class__(ACSC) + GI = nx.empty_graph(0, create_using).__class__(ACSC) self.assert_isomorphic(G, GI) AD = A.todense() - GI = create_using.__class__(AD) + GI = nx.empty_graph(0, create_using).__class__(AD) self.assert_isomorphic(G, GI) AA = A.toarray() - GI = create_using.__class__(AA) + GI = nx.empty_graph(0, create_using).__class__(AA) self.assert_isomorphic(G, GI) def test_shape(self): "Conversion from non-square sparse array." A = sp.sparse.lil_matrix([[1, 2, 3], [4, 5, 6]]) - assert_raises(nx.NetworkXError, nx.from_scipy_sparse_matrix, A) + pytest.raises(nx.NetworkXError, nx.from_scipy_sparse_matrix, A) def test_identity_graph_matrix(self): "Conversion from graph to sparse matrix to graph." @@ -110,66 +106,72 @@ def test_nodelist(self): # Make nodelist ambiguous by containing duplicates. 
nodelist += [nodelist[0]] - assert_raises(nx.NetworkXError, nx.to_numpy_matrix, P3, - nodelist=nodelist) + pytest.raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist) def test_weight_keyword(self): WP4 = nx.Graph() - WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) - for n in range(3)) + WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3)) P4 = path_graph(4) A = nx.to_scipy_sparse_matrix(P4) - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - np_assert_equal(0.5 * A.todense(), - nx.to_scipy_sparse_matrix(WP4).todense()) - np_assert_equal(0.3 * A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight='other').todense()) + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + np_assert_equal(0.5 * A.todense(), nx.to_scipy_sparse_matrix(WP4).todense()) + np_assert_equal( + 0.3 * A.todense(), nx.to_scipy_sparse_matrix(WP4, weight="other").todense() + ) def test_format_keyword(self): WP4 = nx.Graph() - WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) - for n in range(3)) + WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3)) P4 = path_graph(4) - A = nx.to_scipy_sparse_matrix(P4, format='csr') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - - A = nx.to_scipy_sparse_matrix(P4, format='csc') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - - A = nx.to_scipy_sparse_matrix(P4, format='coo') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - - A = nx.to_scipy_sparse_matrix(P4, format='bsr') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - - A = nx.to_scipy_sparse_matrix(P4, format='lil') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - - A = nx.to_scipy_sparse_matrix(P4, format='dia') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) + A = nx.to_scipy_sparse_matrix(P4, format="csr") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_matrix(P4, format="csc") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_matrix(P4, format="coo") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_matrix(P4, format="bsr") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_matrix(P4, format="lil") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_matrix(P4, format="dia") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_matrix(P4, format="dok") + np_assert_equal( + A.todense(), nx.to_scipy_sparse_matrix(WP4, weight=None).todense() + ) - A = nx.to_scipy_sparse_matrix(P4, format='dok') - np_assert_equal(A.todense(), - nx.to_scipy_sparse_matrix(WP4, weight=None).todense()) - - @raises(nx.NetworkXError) def test_format_keyword_raise(self): - WP4 = nx.Graph() - WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) - for n in range(3)) - P4 = path_graph(4) - nx.to_scipy_sparse_matrix(P4, format='any_other') + with pytest.raises(nx.NetworkXError): + WP4 = nx.Graph() + WP4.add_edges_from( + (n, n + 1, dict(weight=0.5, 
other=0.3)) for n in range(3) + ) + P4 = path_graph(4) + nx.to_scipy_sparse_matrix(P4, format="any_other") - @raises(nx.NetworkXError) def test_null_raise(self): - nx.to_scipy_sparse_matrix(nx.Graph()) + with pytest.raises(nx.NetworkXError): + nx.to_scipy_sparse_matrix(nx.Graph()) def test_empty(self): G = nx.Graph() @@ -190,11 +192,19 @@ def test_selfloop_graph(self): M = nx.to_scipy_sparse_matrix(G) np_assert_equal(M.todense(), np.matrix([[1]])) + G.add_edges_from([(2, 3), (3, 4)]) + M = nx.to_scipy_sparse_matrix(G, nodelist=[2, 3, 4]) + np_assert_equal(M.todense(), np.matrix([[0, 1, 0], [1, 0, 1], [0, 1, 0]])) + def test_selfloop_digraph(self): G = nx.DiGraph([(1, 1)]) M = nx.to_scipy_sparse_matrix(G) np_assert_equal(M.todense(), np.matrix([[1]])) + G.add_edges_from([(2, 3), (3, 4)]) + M = nx.to_scipy_sparse_matrix(G, nodelist=[2, 3, 4]) + np_assert_equal(M.todense(), np.matrix([[0, 1, 0], [0, 0, 1], [0, 0, 0]])) + def test_from_scipy_sparse_matrix_parallel_edges(self): """Tests that the :func:`networkx.from_scipy_sparse_matrix` function interprets integer weights as the number of parallel edges when @@ -208,11 +218,13 @@ def test_from_scipy_sparse_matrix_parallel_edges(self): edges = [(0, 0), (0, 1), (1, 0)] expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) expected.add_edge(1, 1, weight=2) - actual = nx.from_scipy_sparse_matrix(A, parallel_edges=True, - create_using=nx.DiGraph()) + actual = nx.from_scipy_sparse_matrix( + A, parallel_edges=True, create_using=nx.DiGraph + ) assert_graphs_equal(actual, expected) - actual = nx.from_scipy_sparse_matrix(A, parallel_edges=False, - create_using=nx.DiGraph()) + actual = nx.from_scipy_sparse_matrix( + A, parallel_edges=False, create_using=nx.DiGraph + ) assert_graphs_equal(actual, expected) # Now each integer entry in the adjacency matrix is interpreted as the # number of parallel edges in the graph if the appropriate keyword @@ -220,15 +232,17 @@ def test_from_scipy_sparse_matrix_parallel_edges(self): edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)] expected = nx.MultiDiGraph() expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) - actual = nx.from_scipy_sparse_matrix(A, parallel_edges=True, - create_using=nx.MultiDiGraph()) + actual = nx.from_scipy_sparse_matrix( + A, parallel_edges=True, create_using=nx.MultiDiGraph + ) assert_graphs_equal(actual, expected) expected = nx.MultiDiGraph() expected.add_edges_from(set(edges), weight=1) # The sole self-loop (edge 0) on vertex 1 should have weight 2. 
- expected[1][1][0]['weight'] = 2 - actual = nx.from_scipy_sparse_matrix(A, parallel_edges=False, - create_using=nx.MultiDiGraph()) + expected[1][1][0]["weight"] = 2 + actual = nx.from_scipy_sparse_matrix( + A, parallel_edges=False, create_using=nx.MultiDiGraph + ) assert_graphs_equal(actual, expected) def test_symmetric(self): @@ -238,7 +252,7 @@ def test_symmetric(self): """ A = sparse.csr_matrix([[0, 1], [1, 0]]) - G = nx.from_scipy_sparse_matrix(A, create_using=nx.MultiGraph()) + G = nx.from_scipy_sparse_matrix(A, create_using=nx.MultiGraph) expected = nx.MultiGraph() expected.add_edge(0, 1, weight=1) assert_graphs_equal(G, expected) diff --git a/networkx/tests/test_exceptions.py b/networkx/tests/test_exceptions.py index 2e63fc4..980a2f0 100644 --- a/networkx/tests/test_exceptions.py +++ b/networkx/tests/test_exceptions.py @@ -1,39 +1,39 @@ -from nose.tools import raises +import pytest import networkx as nx # smoke tests for exceptions -@raises(nx.NetworkXException) def test_raises_networkxexception(): - raise nx.NetworkXException + with pytest.raises(nx.NetworkXException): + raise nx.NetworkXException -@raises(nx.NetworkXError) def test_raises_networkxerr(): - raise nx.NetworkXError + with pytest.raises(nx.NetworkXError): + raise nx.NetworkXError -@raises(nx.NetworkXPointlessConcept) def test_raises_networkx_pointless_concept(): - raise nx.NetworkXPointlessConcept + with pytest.raises(nx.NetworkXPointlessConcept): + raise nx.NetworkXPointlessConcept -@raises(nx.NetworkXAlgorithmError) def test_raises_networkxalgorithmerr(): - raise nx.NetworkXAlgorithmError + with pytest.raises(nx.NetworkXAlgorithmError): + raise nx.NetworkXAlgorithmError -@raises(nx.NetworkXUnfeasible) def test_raises_networkx_unfeasible(): - raise nx.NetworkXUnfeasible + with pytest.raises(nx.NetworkXUnfeasible): + raise nx.NetworkXUnfeasible -@raises(nx.NetworkXNoPath) def test_raises_networkx_no_path(): - raise nx.NetworkXNoPath + with pytest.raises(nx.NetworkXNoPath): + raise nx.NetworkXNoPath -@raises(nx.NetworkXUnbounded) def test_raises_networkx_unbounded(): - raise nx.NetworkXUnbounded + with pytest.raises(nx.NetworkXUnbounded): + raise nx.NetworkXUnbounded diff --git a/networkx/tests/test_relabel.py b/networkx/tests/test_relabel.py index b90c79c..1dc9ff4 100644 --- a/networkx/tests/test_relabel.py +++ b/networkx/tests/test_relabel.py @@ -1,185 +1,289 @@ -#!/usr/bin/env python -from nose.tools import * -from networkx import * -from networkx.convert import * -from networkx.algorithms.operators import * -from networkx.generators.classic import barbell_graph, cycle_graph -from networkx.testing import * +import pytest +import networkx as nx +from networkx.generators.classic import empty_graph +from networkx.testing import assert_nodes_equal, assert_edges_equal -class TestRelabel(): +class TestRelabel: def test_convert_node_labels_to_integers(self): # test that empty graph converts fine for all options G = empty_graph() - H = convert_node_labels_to_integers(G, 100) - assert_equal(list(H.nodes()), []) - assert_equal(list(H.edges()), []) + H = nx.convert_node_labels_to_integers(G, 100) + assert list(H.nodes()) == [] + assert list(H.edges()) == [] for opt in ["default", "sorted", "increasing degree", "decreasing degree"]: G = empty_graph() - H = convert_node_labels_to_integers(G, 100, ordering=opt) - assert_equal(list(H.nodes()), []) - assert_equal(list(H.edges()), []) + H = nx.convert_node_labels_to_integers(G, 100, ordering=opt) + assert list(H.nodes()) == [] + assert list(H.edges()) == [] G = empty_graph() - 
G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D')]) - H = convert_node_labels_to_integers(G) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + H = nx.convert_node_labels_to_integers(G) degH = (d for n, d in H.degree()) degG = (d for n, d in G.degree()) - assert_equal(sorted(degH), sorted(degG)) + assert sorted(degH) == sorted(degG) - H = convert_node_labels_to_integers(G, 1000) + H = nx.convert_node_labels_to_integers(G, 1000) degH = (d for n, d in H.degree()) degG = (d for n, d in G.degree()) - assert_equal(sorted(degH), sorted(degG)) + assert sorted(degH) == sorted(degG) assert_nodes_equal(H.nodes(), [1000, 1001, 1002, 1003]) - H = convert_node_labels_to_integers(G, ordering="increasing degree") + H = nx.convert_node_labels_to_integers(G, ordering="increasing degree") degH = (d for n, d in H.degree()) degG = (d for n, d in G.degree()) - assert_equal(sorted(degH), sorted(degG)) - assert_equal(degree(H, 0), 1) - assert_equal(degree(H, 1), 2) - assert_equal(degree(H, 2), 2) - assert_equal(degree(H, 3), 3) + assert sorted(degH) == sorted(degG) + assert H.degree(0) == 1 + assert H.degree(1) == 2 + assert H.degree(2) == 2 + assert H.degree(3) == 3 - H = convert_node_labels_to_integers(G, ordering="decreasing degree") + H = nx.convert_node_labels_to_integers(G, ordering="decreasing degree") degH = (d for n, d in H.degree()) degG = (d for n, d in G.degree()) - assert_equal(sorted(degH), sorted(degG)) - assert_equal(degree(H, 0), 3) - assert_equal(degree(H, 1), 2) - assert_equal(degree(H, 2), 2) - assert_equal(degree(H, 3), 1) - - H = convert_node_labels_to_integers(G, ordering="increasing degree", - label_attribute='label') + assert sorted(degH) == sorted(degG) + assert H.degree(0) == 3 + assert H.degree(1) == 2 + assert H.degree(2) == 2 + assert H.degree(3) == 1 + + H = nx.convert_node_labels_to_integers( + G, ordering="increasing degree", label_attribute="label" + ) degH = (d for n, d in H.degree()) degG = (d for n, d in G.degree()) - assert_equal(sorted(degH), sorted(degG)) - assert_equal(degree(H, 0), 1) - assert_equal(degree(H, 1), 2) - assert_equal(degree(H, 2), 2) - assert_equal(degree(H, 3), 3) + assert sorted(degH) == sorted(degG) + assert H.degree(0) == 1 + assert H.degree(1) == 2 + assert H.degree(2) == 2 + assert H.degree(3) == 3 # check mapping - assert_equal(H.nodes[3]['label'], 'C') - assert_equal(H.nodes[0]['label'], 'D') - assert_true(H.nodes[1]['label'] == 'A' or H.nodes[2]['label'] == 'A') - assert_true(H.nodes[1]['label'] == 'B' or H.nodes[2]['label'] == 'B') + assert H.nodes[3]["label"] == "C" + assert H.nodes[0]["label"] == "D" + assert H.nodes[1]["label"] == "A" or H.nodes[2]["label"] == "A" + assert H.nodes[1]["label"] == "B" or H.nodes[2]["label"] == "B" def test_convert_to_integers2(self): G = empty_graph() - G.add_edges_from([('C', 'D'), ('A', 'B'), ('A', 'C'), ('B', 'C')]) - H = convert_node_labels_to_integers(G, ordering="sorted") + G.add_edges_from([("C", "D"), ("A", "B"), ("A", "C"), ("B", "C")]) + H = nx.convert_node_labels_to_integers(G, ordering="sorted") degH = (d for n, d in H.degree()) degG = (d for n, d in G.degree()) - assert_equal(sorted(degH), sorted(degG)) + assert sorted(degH) == sorted(degG) - H = convert_node_labels_to_integers(G, ordering="sorted", - label_attribute='label') - assert_equal(H.nodes[0]['label'], 'A') - assert_equal(H.nodes[1]['label'], 'B') - assert_equal(H.nodes[2]['label'], 'C') - assert_equal(H.nodes[3]['label'], 'D') + H = nx.convert_node_labels_to_integers( + G, ordering="sorted", 
label_attribute="label" + ) + assert H.nodes[0]["label"] == "A" + assert H.nodes[1]["label"] == "B" + assert H.nodes[2]["label"] == "C" + assert H.nodes[3]["label"] == "D" - @raises(nx.NetworkXError) def test_convert_to_integers_raise(self): - G = nx.Graph() - H = convert_node_labels_to_integers(G, ordering="increasing age") + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.convert_node_labels_to_integers(G, ordering="increasing age") def test_relabel_nodes_copy(self): - G = empty_graph() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D')]) - mapping = {'A': 'aardvark', 'B': 'bear', 'C': 'cat', 'D': 'dog'} - H = relabel_nodes(G, mapping) - assert_nodes_equal(H.nodes(), ['aardvark', 'bear', 'cat', 'dog']) + G = nx.empty_graph() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"} + H = nx.relabel_nodes(G, mapping) + assert_nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"]) def test_relabel_nodes_function(self): - G = empty_graph() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D')]) + G = nx.empty_graph() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) # function mapping no longer encouraged but works def mapping(n): return ord(n) - H = relabel_nodes(G, mapping) + + H = nx.relabel_nodes(G, mapping) assert_nodes_equal(H.nodes(), [65, 66, 67, 68]) def test_relabel_nodes_graph(self): - G = Graph([('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D')]) - mapping = {'A': 'aardvark', 'B': 'bear', 'C': 'cat', 'D': 'dog'} - H = relabel_nodes(G, mapping) - assert_nodes_equal(H.nodes(), ['aardvark', 'bear', 'cat', 'dog']) + G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"} + H = nx.relabel_nodes(G, mapping) + assert_nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"]) def test_relabel_nodes_orderedgraph(self): - G = OrderedGraph() + G = nx.OrderedGraph() G.add_nodes_from([1, 2, 3]) G.add_edges_from([(1, 3), (2, 3)]) - mapping = {1: 'a', 2: 'b', 3: 'c'} - H = relabel_nodes(G, mapping) - assert list(H.nodes) == ['a', 'b', 'c'] + mapping = {1: "a", 2: "b", 3: "c"} + H = nx.relabel_nodes(G, mapping) + assert list(H.nodes) == ["a", "b", "c"] def test_relabel_nodes_digraph(self): - G = DiGraph([('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D')]) - mapping = {'A': 'aardvark', 'B': 'bear', 'C': 'cat', 'D': 'dog'} - H = relabel_nodes(G, mapping, copy=False) - assert_nodes_equal(H.nodes(), ['aardvark', 'bear', 'cat', 'dog']) + G = nx.DiGraph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"} + H = nx.relabel_nodes(G, mapping, copy=False) + assert_nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"]) def test_relabel_nodes_multigraph(self): - G = MultiGraph([('a', 'b'), ('a', 'b')]) - mapping = {'a': 'aardvark', 'b': 'bear'} - G = relabel_nodes(G, mapping, copy=False) - assert_nodes_equal(G.nodes(), ['aardvark', 'bear']) - assert_edges_equal(G.edges(), [('aardvark', 'bear'), ('aardvark', 'bear')]) + G = nx.MultiGraph([("a", "b"), ("a", "b")]) + mapping = {"a": "aardvark", "b": "bear"} + G = nx.relabel_nodes(G, mapping, copy=False) + assert_nodes_equal(G.nodes(), ["aardvark", "bear"]) + assert_edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")]) def test_relabel_nodes_multidigraph(self): - G = MultiDiGraph([('a', 'b'), ('a', 'b')]) - mapping = {'a': 'aardvark', 'b': 'bear'} - G = 
relabel_nodes(G, mapping, copy=False) - assert_nodes_equal(G.nodes(), ['aardvark', 'bear']) - assert_edges_equal(G.edges(), [('aardvark', 'bear'), ('aardvark', 'bear')]) + G = nx.MultiDiGraph([("a", "b"), ("a", "b")]) + mapping = {"a": "aardvark", "b": "bear"} + G = nx.relabel_nodes(G, mapping, copy=False) + assert_nodes_equal(G.nodes(), ["aardvark", "bear"]) + assert_edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")]) def test_relabel_isolated_nodes_to_same(self): - G = Graph() + G = nx.Graph() G.add_nodes_from(range(4)) mapping = {1: 1} - H = relabel_nodes(G, mapping, copy=False) + H = nx.relabel_nodes(G, mapping, copy=False) assert_nodes_equal(H.nodes(), list(range(4))) - @raises(KeyError) def test_relabel_nodes_missing(self): - G = Graph([('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D')]) - mapping = {0: 'aardvark'} - G = relabel_nodes(G, mapping, copy=False) + with pytest.raises(KeyError): + G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {0: "aardvark"} + G = nx.relabel_nodes(G, mapping, copy=False) def test_relabel_copy_name(self): - G = Graph() - H = relabel_nodes(G, {}, copy=True) - assert_equal(H.graph, G.graph) - H = relabel_nodes(G, {}, copy=False) - assert_equal(H.graph, G.graph) + G = nx.Graph() + H = nx.relabel_nodes(G, {}, copy=True) + assert H.graph == G.graph + H = nx.relabel_nodes(G, {}, copy=False) + assert H.graph == G.graph G.name = "first" - H = relabel_nodes(G, {}, copy=True) - assert_equal(H.graph, G.graph) - H = relabel_nodes(G, {}, copy=False) - assert_equal(H.graph, G.graph) + H = nx.relabel_nodes(G, {}, copy=True) + assert H.graph == G.graph + H = nx.relabel_nodes(G, {}, copy=False) + assert H.graph == G.graph def test_relabel_toposort(self): K5 = nx.complete_graph(4) G = nx.complete_graph(4) - G = nx.relabel_nodes(G, dict([(i, i + 1) for i in range(4)]), copy=False) + G = nx.relabel_nodes(G, {i: i + 1 for i in range(4)}, copy=False) nx.is_isomorphic(K5, G) G = nx.complete_graph(4) - G = nx.relabel_nodes(G, dict([(i, i - 1) for i in range(4)]), copy=False) + G = nx.relabel_nodes(G, {i: i - 1 for i in range(4)}, copy=False) nx.is_isomorphic(K5, G) def test_relabel_selfloop(self): G = nx.DiGraph([(1, 1), (1, 2), (2, 3)]) - G = nx.relabel_nodes(G, {1: 'One', 2: 'Two', 3: 'Three'}, copy=False) - assert_nodes_equal(G.nodes(), ['One', 'Three', 'Two']) + G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False) + assert_nodes_equal(G.nodes(), ["One", "Three", "Two"]) G = nx.MultiDiGraph([(1, 1), (1, 2), (2, 3)]) - G = nx.relabel_nodes(G, {1: 'One', 2: 'Two', 3: 'Three'}, copy=False) - assert_nodes_equal(G.nodes(), ['One', 'Three', 'Two']) + G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False) + assert_nodes_equal(G.nodes(), ["One", "Three", "Two"]) G = nx.MultiDiGraph([(1, 1)]) G = nx.relabel_nodes(G, {1: 0}, copy=False) assert_nodes_equal(G.nodes(), [0]) + + def test_relabel_multidigraph_inout_merge_nodes(self): + for MG in (nx.MultiGraph, nx.MultiDiGraph): + for cc in (True, False): + G = MG([(0, 4), (1, 4), (4, 2), (4, 3)]) + G[0][4][0]["value"] = "a" + G[1][4][0]["value"] = "b" + G[4][2][0]["value"] = "c" + G[4][3][0]["value"] = "d" + G.add_edge(0, 4, key="x", value="e") + G.add_edge(4, 3, key="x", value="f") + mapping = {0: 9, 1: 9, 2: 9, 3: 9} + H = nx.relabel_nodes(G, mapping, copy=cc) + # No ordering on keys enforced + assert {"value": "a"} in H[9][4].values() + assert {"value": "b"} in H[9][4].values() + assert {"value": "c"} in H[4][9].values() + assert len(H[4][9]) == 3 if 
G.is_directed() else 6 + assert {"value": "d"} in H[4][9].values() + assert {"value": "e"} in H[9][4].values() + assert {"value": "f"} in H[4][9].values() + assert len(H[9][4]) == 3 if G.is_directed() else 6 + + def test_relabel_multigraph_merge_inplace(self): + G = nx.MultiGraph([(0, 1), (0, 2), (0, 3), (0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + nx.relabel_nodes(G, mapping, copy=False) + # No ordering on keys enforced + assert {"value": "a"} in G[0][4].values() + assert {"value": "b"} in G[0][4].values() + assert {"value": "c"} in G[0][4].values() + + def test_relabel_multidigraph_merge_inplace(self): + G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + nx.relabel_nodes(G, mapping, copy=False) + # No ordering on keys enforced + assert {"value": "a"} in G[0][4].values() + assert {"value": "b"} in G[0][4].values() + assert {"value": "c"} in G[0][4].values() + + def test_relabel_multidigraph_inout_copy(self): + G = nx.MultiDiGraph([(0, 4), (1, 4), (4, 2), (4, 3)]) + G[0][4][0]["value"] = "a" + G[1][4][0]["value"] = "b" + G[4][2][0]["value"] = "c" + G[4][3][0]["value"] = "d" + G.add_edge(0, 4, key="x", value="e") + G.add_edge(4, 3, key="x", value="f") + mapping = {0: 9, 1: 9, 2: 9, 3: 9} + H = nx.relabel_nodes(G, mapping, copy=True) + # No ordering on keys enforced + assert {"value": "a"} in H[9][4].values() + assert {"value": "b"} in H[9][4].values() + assert {"value": "c"} in H[4][9].values() + assert len(H[4][9]) == 3 + assert {"value": "d"} in H[4][9].values() + assert {"value": "e"} in H[9][4].values() + assert {"value": "f"} in H[4][9].values() + assert len(H[9][4]) == 3 + + def test_relabel_multigraph_merge_copy(self): + G = nx.MultiGraph([(0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + H = nx.relabel_nodes(G, mapping, copy=True) + assert {"value": "a"} in H[0][4].values() + assert {"value": "b"} in H[0][4].values() + assert {"value": "c"} in H[0][4].values() + + def test_relabel_multidigraph_merge_copy(self): + G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + H = nx.relabel_nodes(G, mapping, copy=True) + assert {"value": "a"} in H[0][4].values() + assert {"value": "b"} in H[0][4].values() + assert {"value": "c"} in H[0][4].values() + + def test_relabel_multigraph_nonnumeric_key(self): + for MG in (nx.MultiGraph, nx.MultiDiGraph): + for cc in (True, False): + G = nx.MultiGraph() + G.add_edge(0, 1, key="I", value="a") + G.add_edge(0, 2, key="II", value="b") + G.add_edge(0, 3, key="II", value="c") + mapping = {1: 4, 2: 4, 3: 4} + nx.relabel_nodes(G, mapping, copy=False) + assert {"value": "a"} in G[0][4].values() + assert {"value": "b"} in G[0][4].values() + assert {"value": "c"} in G[0][4].values() + assert 0 in G[0][4] + assert "I" in G[0][4] + assert "II" in G[0][4] diff --git a/networkx/utils/contextmanagers.py b/networkx/utils/contextmanagers.py index b525a5a..870fecb 100644 --- a/networkx/utils/contextmanagers.py +++ b/networkx/utils/contextmanagers.py @@ -1,10 +1,7 @@ -from __future__ import absolute_import - from contextlib import contextmanager +import warnings -__all__ = [ - 'reversed', -] +__all__ = ["reversed"] @contextmanager @@ -17,7 +14,20 @@ def reversed(G): 
---------- G : graph A NetworkX graph. + + Warning + ------- + The reversed context manager is deprecated in favor + of G.reverse(copy=False). The view allows multiple threads to use the + same graph without confusion while the context manager does not. + This context manager is scheduled to be removed in version 3.0. """ + msg = ( + "context manager reversed is deprecated and to be removed in 3.0." + "Use G.reverse(copy=False) if G.is_directed() else G instead." + ) + warnings.warn(msg, DeprecationWarning) + directed = G.is_directed() if directed: G._pred, G._succ = G._succ, G._pred diff --git a/networkx/utils/decorators.py b/networkx/utils/decorators.py index 9f1f988..8c2a12d 100644 --- a/networkx/utils/decorators.py +++ b/networkx/utils/decorators.py @@ -1,19 +1,20 @@ -import sys - from collections import defaultdict from os.path import splitext from contextlib import contextmanager +from pathlib import Path import networkx as nx from decorator import decorator -from networkx.utils import is_string_like +from networkx.utils import create_random_state, create_py_random_state __all__ = [ - 'not_implemented_for', - 'open_file', - 'nodes_or_number', - 'preserve_random_state', - 'random_state', + "not_implemented_for", + "open_file", + "nodes_or_number", + "preserve_random_state", + "random_state", + "np_random_state", + "py_random_state", ] @@ -52,47 +53,54 @@ def sp_function(G): def sp_np_function(G): pass """ + @decorator def _not_implemented_for(not_implement_for_func, *args, **kwargs): graph = args[0] - terms = {'directed': graph.is_directed(), - 'undirected': not graph.is_directed(), - 'multigraph': graph.is_multigraph(), - 'graph': not graph.is_multigraph()} + terms = { + "directed": graph.is_directed(), + "undirected": not graph.is_directed(), + "multigraph": graph.is_multigraph(), + "graph": not graph.is_multigraph(), + } match = True try: for t in graph_types: match = match and terms[t] - except KeyError: - raise KeyError('use one or more of ', - 'directed, undirected, multigraph, graph') + except KeyError as e: + raise KeyError( + "use one or more of " "directed, undirected, multigraph, graph" + ) from e if match: - msg = 'not implemented for %s type' % ' '.join(graph_types) + msg = f"not implemented for {' '.join(graph_types)} type" raise nx.NetworkXNotImplemented(msg) else: return not_implement_for_func(*args, **kwargs) + return _not_implemented_for def _open_gz(path, mode): import gzip + return gzip.open(path, mode=mode) def _open_bz2(path, mode): import bz2 + return bz2.BZ2File(path, mode=mode) # To handle new extensions, define a function accepting a `path` and `mode`. # Then add the extension to _dispatch_dict. _dispatch_dict = defaultdict(lambda: open) -_dispatch_dict['.gz'] = _open_gz -_dispatch_dict['.bz2'] = _open_bz2 -_dispatch_dict['.gzip'] = _open_gz +_dispatch_dict[".gz"] = _open_gz +_dispatch_dict[".bz2"] = _open_bz2 +_dispatch_dict[".gzip"] = _open_gz -def open_file(path_arg, mode='r'): +def open_file(path_arg, mode="r"): """Decorator to ensure clean opening and closing of files. Parameters @@ -181,33 +189,37 @@ def _open_file(func_to_be_decorated, *args, **kwargs): # or it could have been explicitly set by the user. try: path = kwargs[path_arg] - except KeyError: + except KeyError as e: # Could not find the keyword. Thus, no default was specified # in the function signature and the user did not provide it. 
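[Editor's illustration, not part of the patch: the deprecation warning above recommends the view-based idiom. A small sketch of the two styles on a throwaway graph:]

```python
import networkx as nx

G = nx.DiGraph([("A", "B")])

# Deprecated: temporarily swaps G's internal _pred/_succ, so any other
# thread reading G inside the block also sees the reversed graph.
with nx.utils.reversed(G):
    assert "A" in G["B"]

# Preferred: a reversed *view*; G itself is never mutated.
R = G.reverse(copy=False)
assert "A" in R["B"]
assert "B" in G["A"]
```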
- msg = 'Missing required keyword argument: {0}' - raise nx.NetworkXError(msg.format(path_arg)) + msg = f"Missing required keyword argument: {path_arg}" + raise nx.NetworkXError(msg) from e else: is_kwarg = True - except IndexError: + except IndexError as e: # A "required" argument was missing. This can only happen if # the decorator of the function was incorrectly specified. # So this probably is not a user error, but a developer error. msg = "path_arg of open_file decorator is incorrect" - raise nx.NetworkXError(msg) + raise nx.NetworkXError(msg) from e else: is_kwarg = False # Now we have the path_arg. There are two types of input to consider: # 1) string representing a path that should be opened # 2) an already opened file object - if is_string_like(path): + if isinstance(path, str): ext = splitext(path)[1] fobj = _dispatch_dict[ext](path, mode=mode) close_fobj = True - elif hasattr(path, 'read'): + elif hasattr(path, "read"): # path is already a file-like object fobj = path close_fobj = False + elif isinstance(path, Path): + # path is a pathlib reference to a filename + fobj = _dispatch_dict[path.suffix](str(path), mode=mode) + close_fobj = True else: # could be None, in which case the algorithm will deal with it fobj = path @@ -267,6 +279,7 @@ def full_rary_tree(r, n) # r is a number. n can be a number of a list of nodes pass """ + @decorator def _nodes_or_number(func_to_be_decorated, *args, **kw): # form tuple of arg positions to be converted. @@ -284,10 +297,11 @@ def _nodes_or_number(func_to_be_decorated, *args, **kw): nodes = tuple(n) else: if n < 0: - msg = "Negative number of nodes not valid: %i" % n + msg = f"Negative number of nodes not valid: {n}" raise nx.NetworkXError(msg) new_args[i] = (n, nodes) return func_to_be_decorated(*new_args, **kw) + return _nodes_or_number @@ -332,6 +346,7 @@ def wrapper(*args, **kwargs): with save_random_state(): seed(1234567890) return func(*args, **kwargs) + wrapper.__name__ = func.__name__ return wrapper except ImportError: @@ -342,6 +357,7 @@ def random_state(random_state_index): """Decorator to generate a numpy.random.RandomState instance. Argument position `random_state_index` is processed by create_random_state. + The result is a numpy.random.RandomState instance. Parameters ---------- @@ -360,29 +376,96 @@ -------- Decorate functions like this:: - @random_state(0) + @np_random_state(0) def random_float(random_state=None): return random_state.rand() - @random_state(1) + @np_random_state(1) def random_array(dims, random_state=1): return random_state.rand(*dims) + + See Also + -------- + py_random_state """ + @decorator def _random_state(func, *args, **kwargs): # Parse the decorator arguments. try: random_state_arg = args[random_state_index] - except TypeError: - raise nx.NetworkXError("random_state_arg must be an integer") - except IndexError: - raise nx.NetworkXError("random_state_arg is incorrect") + except TypeError as e: + raise nx.NetworkXError("random_state_index must be an integer") from e + except IndexError as e: + raise nx.NetworkXError("random_state_index is incorrect") from e # Create a numpy.random.RandomState instance - random_state_instance = nx.utils.create_random_state(random_state_arg) + random_state = create_random_state(random_state_arg) # args is a tuple, so we must convert to list before modifying it.
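[Editor's illustration, not part of the patch: for callers, the effect of the `random_state`/`np_random_state` decorator is that one argument position accepts a seed, a generator instance, or None interchangeably. A minimal sketch, with a hypothetical decorated function:]

```python
import numpy as np
from networkx.utils import np_random_state

@np_random_state(0)
def two_floats(random_state=None):
    # Inside the body, random_state is always a numpy.random.RandomState,
    # whether the caller passed None, an int seed, or an instance.
    return random_state.rand(), random_state.rand()

# The same seed yields the same stream either way.
assert two_floats(42) == two_floats(np.random.RandomState(42))
```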
new_args = list(args) - new_args[random_state_index] = random_state_instance + new_args[random_state_index] = random_state return func(*new_args, **kwargs) + + return _random_state + + +np_random_state = random_state + + +def py_random_state(random_state_index): + """Decorator to generate a random.Random instance (or equiv). + + Argument position `random_state_index` processed by create_py_random_state. + The result is either a random.Random instance, or numpy.random.RandomState + instance with additional attributes to mimic basic methods of Random. + + Parameters + ---------- + random_state_index : int + Location of the random_state argument in args that is to be used to + generate the numpy.random.RandomState instance. Even if the argument is + a named positional argument (with a default value), you must specify + its index as a positional argument. + + Returns + ------- + _random_state : function + Function whose random_state keyword argument is a RandomState instance. + + Examples + -------- + Decorate functions like this:: + + @py_random_state(0) + def random_float(random_state=None): + return random_state.rand() + + @py_random_state(1) + def random_array(dims, random_state=1): + return random_state.rand(*dims) + + See Also + -------- + np_random_state + """ + + @decorator + def _random_state(func, *args, **kwargs): + # Parse the decorator arguments. + try: + random_state_arg = args[random_state_index] + except TypeError as e: + raise nx.NetworkXError("random_state_index must be an integer") from e + except IndexError as e: + raise nx.NetworkXError("random_state_index is incorrect") from e + + # Create a numpy.random.RandomState instance + random_state = create_py_random_state(random_state_arg) + + # args is a tuple, so we must convert to list before modifying it. + new_args = list(args) + new_args[random_state_index] = random_state + return func(*new_args, **kwargs) + return _random_state diff --git a/networkx/utils/heaps.py b/networkx/utils/heaps.py index 023e6ce..d07b155 100644 --- a/networkx/utils/heaps.py +++ b/networkx/utils/heaps.py @@ -2,19 +2,14 @@ Min-heaps. """ -__author__ = """ysitu """ -# Copyright (C) 2014 ysitu -# All rights reserved. -# BSD license. - from heapq import heappop, heappush from itertools import count import networkx as nx -__all__ = ['MinHeap', 'PairingHeap', 'BinaryHeap'] +__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"] -class MinHeap(object): +class MinHeap: """Base class for min-heaps. A MinHeap stores a collection of key-value pairs ordered by their values. @@ -22,10 +17,11 @@ class MinHeap(object): value in an existing pair and deleting the minimum pair. """ - class _Item(object): + class _Item: """Used by subclassess to represent a key-value pair. """ - __slots__ = ('key', 'value') + + __slots__ = ("key", "value") def __init__(self, key, value): self.key = key @@ -70,7 +66,7 @@ def pop(self): raise NotImplementedError def get(self, key, default=None): - """Return the value associated with a key. + """Returns the value associated with a key. Parameters ---------- @@ -112,22 +108,22 @@ def insert(self, key, value, allow_increase=False): raise NotImplementedError def __nonzero__(self): - """Return whether the heap if empty. + """Returns whether the heap if empty. """ return bool(self._dict) def __bool__(self): - """Return whether the heap if empty. + """Returns whether the heap if empty. """ return bool(self._dict) def __len__(self): - """Return the number of key-value pairs in the heap. + """Returns the number of key-value pairs in the heap. 
""" return len(self._dict) def __contains__(self, key): - """Return whether a key exists in the heap. + """Returns whether a key exists in the heap. Parameters ---------- @@ -140,9 +136,11 @@ def __contains__(self, key): def _inherit_doc(cls): """Decorator for inheriting docstrings from base classes. """ + def func(fn): fn.__doc__ = cls.__dict__[fn.__name__].__doc__ return fn + return func @@ -156,7 +154,8 @@ class _Node(MinHeap._Item): A tree in a pairing heap is stored using the left-child, right-sibling representation. """ - __slots__ = ('left', 'next', 'prev', 'parent') + + __slots__ = ("left", "next", "prev", "parent") def __init__(self, key, value): super(PairingHeap._Node, self).__init__(key, value) @@ -172,19 +171,19 @@ def __init__(self, key, value): def __init__(self): """Initialize a pairing heap. """ - super(PairingHeap, self).__init__() + super().__init__() self._root = None @_inherit_doc(MinHeap) def min(self): if self._root is None: - raise nx.NetworkXError('heap is empty.') + raise nx.NetworkXError("heap is empty.") return (self._root.key, self._root.value) @_inherit_doc(MinHeap) def pop(self): if self._root is None: - raise nx.NetworkXError('heap is empty.') + raise nx.NetworkXError("heap is empty.") min_node = self._root self._root = self._merge_children(self._root) del self._dict[min_node.key] @@ -308,7 +307,7 @@ class BinaryHeap(MinHeap): def __init__(self): """Initialize a binary heap. """ - super(BinaryHeap, self).__init__() + super().__init__() self._heap = [] self._count = count() @@ -316,7 +315,7 @@ def __init__(self): def min(self): dict = self._dict if not dict: - raise nx.NetworkXError('heap is empty') + raise nx.NetworkXError("heap is empty") heap = self._heap pop = heappop # Repeatedly remove stale key-value pairs until a up-to-date one is @@ -332,7 +331,7 @@ def min(self): def pop(self): dict = self._dict if not dict: - raise nx.NetworkXError('heap is empty') + raise nx.NetworkXError("heap is empty") heap = self._heap pop = heappop # Repeatedly remove stale key-value pairs until a up-to-date one is diff --git a/networkx/utils/mapped_queue.py b/networkx/utils/mapped_queue.py index 888a310..d633c74 100644 --- a/networkx/utils/mapped_queue.py +++ b/networkx/utils/mapped_queue.py @@ -1,26 +1,12 @@ -# -*- coding: utf-8 -*- -# -# priorityq: An object-oriented priority queue with updatable priorities. -# -# Copyright 2018 Edward L. Platt -# -# This file is part of NetworkX -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Authors: -# Edward L. Platt -# """Priority queue class with updatable priorities. """ import heapq -__all__ = ['MappedQueue'] +__all__ = ["MappedQueue"] -class MappedQueue(object): +class MappedQueue: """The MappedQueue class implements an efficient minimum heap. 
The smallest element can be popped in O(1) time, new elements can be pushed in O(log n) time, and any element can be removed or updated in O(log n) @@ -75,7 +61,7 @@ def __len__(self): def _heapify(self): """Restore heap invariant and recalculate map.""" heapq.heapify(self.h) - self.d = dict([(elt, pos) for pos, elt in enumerate(self.h)]) + self.d = {elt: pos for pos, elt in enumerate(self.h)} if len(self.h) != len(self.d): raise AssertionError("Heap contains duplicate elements") diff --git a/networkx/utils/misc.py b/networkx/utils/misc.py index aa76247..2e30b4f 100644 --- a/networkx/utils/misc.py +++ b/networkx/utils/misc.py @@ -5,54 +5,20 @@ can be accessed, for example, as >>> import networkx ->>> networkx.utils.is_string_like('spam') +>>> networkx.utils.is_list_of_ints([1, 2, 3]) True +>>> networkx.utils.is_list_of_ints([1, 2, "spam"]) +False """ -# Authors: Aric Hagberg (hagberg@lanl.gov), -# Dan Schult(dschult@colgate.edu), -# Ben Edwards(bedwards@cs.unm.edu) - -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. + from collections import defaultdict from collections import deque +import warnings import sys import uuid from itertools import tee, chain +import networkx as nx -# itertools.accumulate is only available on Python 3.2 or later. -# -# Once support for Python versions less than 3.2 is dropped, this code should -# be removed. -try: - from itertools import accumulate -except ImportError: - import operator - - # The code for this function is from the Python 3.5 documentation, - # distributed under the PSF license: - # - def accumulate(iterable, func=operator.add): - it = iter(iterable) - try: - total = next(it) - except StopIteration: - return - yield total - for element in it: - total = func(total, element) - yield total - -# 2.x/3.x compatibility -try: - basestring -except NameError: - basestring = str - unicode = str # some cookbook stuff # used in deciding whether something is a bunch of nodes, edges, etc. @@ -61,7 +27,12 @@ def accumulate(iterable, func=operator.add): def is_string_like(obj): # from John Hunter, types-free version """Check if obj is string.""" - return isinstance(obj, basestring) + msg = ( + "is_string_like is deprecated and will be removed in 3.0." + "Use isinstance(obj, str) instead." + ) + warnings.warn(msg, DeprecationWarning) + return isinstance(obj, str) def iterable(obj): @@ -75,6 +46,11 @@ def iterable(obj): return True +def empty_generator(): + """ Return a generator with no members """ + yield from () + + def flatten(obj, result=None): """ Return flattened version of (possibly nested) iterable object. """ if not iterable(obj) or is_string_like(obj): @@ -89,6 +65,42 @@ def flatten(obj, result=None): return obj.__class__(result) +def make_list_of_ints(sequence): + """Return list of ints from sequence of integral numbers. + + All elements of the sequence must satisfy int(element) == element + or a ValueError is raised. Sequence is iterated through once. + + If sequence is a list, the non-int values are replaced with ints. + So, no new list is created + """ + if not isinstance(sequence, list): + result = [] + for i in sequence: + errmsg = f"sequence is not all integers: {i}" + try: + ii = int(i) + except ValueError: + raise nx.NetworkXError(errmsg) from None + if ii != i: + raise nx.NetworkXError(errmsg) + result.append(ii) + return result + # original sequence is a list... 
in-place conversion to ints + for indx, i in enumerate(sequence): + errmsg = f"sequence is not all integers: {i}" + if isinstance(i, int): + continue + try: + ii = int(i) + except ValueError: + raise nx.NetworkXError(errmsg) from None + if ii != i: + raise nx.NetworkXError(errmsg) + sequence[indx] = ii + return sequence + + def is_list_of_ints(intlist): """ Return True if list is a list of ints. """ if not isinstance(intlist, list): @@ -99,26 +111,11 @@ def is_list_of_ints(intlist): return True -PY2 = sys.version_info[0] == 2 -if PY2: - def make_str(x): - """Return the string representation of t.""" - if isinstance(x, unicode): - return x - else: - # Note, this will not work unless x is ascii-encoded. - # That is good, since we should be working with unicode anyway. - # Essentially, unless we are reading a file, we demand that users - # convert any encoded strings to unicode before using the library. - # - # Also, the str() is necessary to convert integers, etc. - # unicode(3) works, but unicode(3, 'unicode-escape') wants a buffer. - # - return unicode(str(x), 'unicode-escape') -else: - def make_str(x): - """Return the string representation of t.""" - return str(x) +def make_str(x): + """Returns the string representation of t.""" + msg = "make_str is deprecated and will be removed in 3.0. Use str instead." + warnings.warn(msg, DeprecationWarning) + return str(x) def generate_unique_node(): @@ -137,10 +134,12 @@ def default_opener(filename): """ from subprocess import call - cmds = {'darwin': ['open'], - 'linux': ['xdg-open'], - 'linux2': ['xdg-open'], - 'win32': ['cmd.exe', '/C', 'start', '']} + cmds = { + "darwin": ["open"], + "linux": ["xdg-open"], + "linux2": ["xdg-open"], + "win32": ["cmd.exe", "/C", "start", ""], + } cmd = cmds[sys.platform] + [filename] call(cmd) @@ -162,6 +161,7 @@ def dict_to_numpy_array2(d, mapping=None): """ import numpy + if mapping is None: s = set(d.keys()) for k, v in d.items(): @@ -184,6 +184,7 @@ def dict_to_numpy_array1(d, mapping=None): """ import numpy + if mapping is None: s = set(d.keys()) mapping = dict(zip(s, range(len(s)))) @@ -200,7 +201,7 @@ def is_iterator(obj): object. """ - has_next_attr = hasattr(obj, '__next__') or hasattr(obj, 'next') + has_next_attr = hasattr(obj, "__next__") or hasattr(obj, "next") return iter(obj) is obj and has_next_attr @@ -212,7 +213,7 @@ def arbitrary_element(iterable): >>> arbitrary_element({3, 2, 1}) 1 - >>> arbitrary_element('hello') + >>> arbitrary_element("hello") 'h' This function raises a :exc:`ValueError` if `iterable` is an @@ -227,7 +228,7 @@ def arbitrary_element(iterable): """ if is_iterator(iterable): - raise ValueError('cannot return an arbitrary item from an iterator') + raise ValueError("cannot return an arbitrary item from an iterator") # Another possible implementation is ``for x in iterable: return x``. 
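[Editor's illustration, not part of the patch: `make_list_of_ints` above has a slightly unusual contract: lists are normalized in place, any other sequence produces a new list. A short sketch of the behavior its docstring describes:]

```python
import networkx as nx
from networkx.utils.misc import make_list_of_ints

seq = [1, 2.0, 3]
out = make_list_of_ints(seq)
assert out is seq and seq == [1, 2, 3]      # list: converted in place

assert make_list_of_ints((1, 2)) == [1, 2]  # other sequences: new list

try:
    make_list_of_ints([1, 2.5])             # int(2.5) != 2.5
except nx.NetworkXError:
    pass
```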
return next(iter(iterable)) @@ -261,7 +262,7 @@ def groups(many_to_one): For example:: >>> from networkx.utils import groups - >>> many_to_one = {'a': 1, 'b': 1, 'c': 2, 'd': 3, 'e': 3} + >>> many_to_one = {"a": 1, "b": 1, "c": 2, "d": 3, "e": 3} >>> groups(many_to_one) # doctest: +SKIP {1: {'a', 'b'}, 2: {'c'}, 3: {'d', 'e'}} @@ -294,10 +295,9 @@ def create_random_state(random_state=None): Parameters ---------- random_state : int or RandomState instance or None optional (default=None) - If int, `random_state` is the seed used by the random number generator, - if numpy.random.RandomState instance, `random_state` is the random - number generator, - if None, the random number generator is the RandomState instance used + If int, return a numpy.random.RandomState instance set with seed=int. + if numpy.random.RandomState instance, return it. + if None or numpy.random, return the global random number generator used by numpy.random. """ import numpy as np @@ -308,5 +308,108 @@ def create_random_state(random_state=None): return random_state if isinstance(random_state, int): return np.random.RandomState(random_state) - msg = '%r cannot be used to generate a numpy.random.RandomState instance' - raise ValueError(msg % random_state) + msg = ( + f"{random_state} cannot be used to generate a numpy.random.RandomState instance" + ) + raise ValueError(msg) + + +class PythonRandomInterface: + try: + + def __init__(self, rng=None): + import numpy + + if rng is None: + self._rng = numpy.random.mtrand._rand + self._rng = rng + + except ImportError: + msg = "numpy not found, only random.random available." + warnings.warn(msg, ImportWarning) + + def random(self): + return self._rng.random_sample() + + def uniform(self, a, b): + return a + (b - a) * self._rng.random_sample() + + def randrange(self, a, b=None): + return self._rng.randint(a, b) + + def choice(self, seq): + return seq[self._rng.randint(0, len(seq))] + + def gauss(self, mu, sigma): + return self._rng.normal(mu, sigma) + + def shuffle(self, seq): + return self._rng.shuffle(seq) + + # Some methods don't match API for numpy RandomState. + # Commented out versions are not used by NetworkX + + def sample(self, seq, k): + return self._rng.choice(list(seq), size=(k,), replace=False) + + def randint(self, a, b): + return self._rng.randint(a, b + 1) + + # exponential as expovariate with 1/argument, + def expovariate(self, scale): + return self._rng.exponential(1 / scale) + + # pareto as paretovariate with 1/argument, + def paretovariate(self, shape): + return self._rng.pareto(shape) + + +# weibull as weibullvariate multiplied by beta, +# def weibullvariate(self, alpha, beta): +# return self._rng.weibull(alpha) * beta +# +# def triangular(self, low, high, mode): +# return self._rng.triangular(low, mode, high) +# +# def choices(self, seq, weights=None, cum_weights=None, k=1): +# return self._rng.choice(seq + + +def create_py_random_state(random_state=None): + """Returns a random.Random instance depending on input. + + Parameters + ---------- + random_state : int or random number generator or None (default=None) + If int, return a random.Random instance set with seed=int. + if random.Random instance, return it. + if None or the `random` package, return the global random number + generator used by `random`. + if np.random package, return the global numpy random number + generator wrapped in a PythonRandomInterface class. 
+ if np.random.RandomState instance, return it wrapped in + PythonRandomInterface + if a PythonRandomInterface instance, return it + """ + import random + + try: + import numpy as np + + if random_state is np.random: + return PythonRandomInterface(np.random.mtrand._rand) + if isinstance(random_state, np.random.RandomState): + return PythonRandomInterface(random_state) + if isinstance(random_state, PythonRandomInterface): + return random_state + except ImportError: + pass + + if random_state is None or random_state is random: + return random._inst + if isinstance(random_state, random.Random): + return random_state + if isinstance(random_state, int): + return random.Random(random_state) + msg = f"{random_state} cannot be used to generate a random.Random instance" + raise ValueError(msg) diff --git a/networkx/utils/random_sequence.py b/networkx/utils/random_sequence.py index b8e3531..7bd68c7 100644 --- a/networkx/utils/random_sequence.py +++ b/networkx/utils/random_sequence.py @@ -1,36 +1,28 @@ -# Copyright (C) 2004-2018 by -# Aric Hagberg -# Dan Schult -# Pieter Swart -# All rights reserved. -# BSD license. -# -# Authors: Aric Hagberg (hagberg@lanl.gov) -# Dan Schult (dschult@colgate.edu) -# Ben Edwards (bedwards@cs.unm.edu) """ Utilities for generating random numbers, random sequences, and random selections. """ -import random -import sys import networkx as nx +from networkx.utils import py_random_state # The same helpers for choosing random sequences from distributions # uses Python's random module -# https://docs.python.org/2/library/random.html +# https://docs.python.org/3/library/random.html -def powerlaw_sequence(n, exponent=2.0): + +@py_random_state(2) +def powerlaw_sequence(n, exponent=2.0, seed=None): """ Return sample sequence of length n from a power law distribution. """ - return [random.paretovariate(exponent - 1) for i in range(n)] + return [seed.paretovariate(exponent - 1) for i in range(n)] +@py_random_state(2) def zipf_rv(alpha, xmin=1, seed=None): - r"""Return a random value chosen from the Zipf distribution. + r"""Returns a random value chosen from the Zipf distribution. The return value is an integer drawn from the probability distribution @@ -46,8 +38,9 @@ def zipf_rv(alpha, xmin=1, seed=None): Exponent value of the distribution xmin : int Minimum value - seed : int - Seed value for random number generator + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
Returns ------- @@ -68,7 +61,8 @@ def zipf_rv(alpha, xmin=1, seed=None): Examples -------- - >>> nx.zipf_rv(alpha=2, xmin=3, seed=42) # doctest: +SKIP + >>> nx.utils.zipf_rv(alpha=2, xmin=3, seed=42) + 8 References ---------- @@ -79,22 +73,20 @@ def zipf_rv(alpha, xmin=1, seed=None): raise ValueError("xmin < 1") if alpha <= 1: raise ValueError("a <= 1.0") - if seed is not None: - random.seed(seed) a1 = alpha - 1.0 - b = 2**a1 + b = 2 ** a1 while True: - u = 1.0 - random.random() # u in (0,1] - v = random.random() # v in [0,1) - x = int(xmin * u**-(1.0 / a1)) - t = (1.0 + (1.0 / x))**a1 + u = 1.0 - seed.random() # u in (0,1] + v = seed.random() # v in [0,1) + x = int(xmin * u ** -(1.0 / a1)) + t = (1.0 + (1.0 / x)) ** a1 if v * x * (t - 1.0) / (b - 1.0) <= t / b: break return x def cumulative_distribution(distribution): - """Return normalized cumulative distribution from discrete distribution.""" + """Returns normalized cumulative distribution from discrete distribution.""" cdf = [0.0] psum = float(sum(distribution)) @@ -103,7 +95,8 @@ def cumulative_distribution(distribution): return cdf -def discrete_sequence(n, distribution=None, cdistribution=None): +@py_random_state(3) +def discrete_sequence(n, distribution=None, cdistribution=None, seed=None): """ Return sample sequence of length n from a given discrete distribution or discrete cumulative distribution. @@ -123,18 +116,20 @@ def discrete_sequence(n, distribution=None, cdistribution=None): cdf = cumulative_distribution(distribution) else: raise nx.NetworkXError( - "discrete_sequence: distribution or cdistribution missing") + "discrete_sequence: distribution or cdistribution missing" + ) # get a uniform random number - inputseq = [random.random() for i in range(n)] + inputseq = [seed.random() for i in range(n)] # choose from CDF seq = [bisect.bisect_left(cdf, s) - 1 for s in inputseq] return seq -def random_weighted_sample(mapping, k): - """Return k items without replacement from a weighted sample. +@py_random_state(2) +def random_weighted_sample(mapping, k, seed=None): + """Returns k items without replacement from a weighted sample. The input is a dictionary of items with weights as values. """ @@ -142,17 +137,18 @@ def random_weighted_sample(mapping, k): raise ValueError("sample larger than population") sample = set() while len(sample) < k: - sample.add(weighted_choice(mapping)) + sample.add(weighted_choice(mapping, seed)) return list(sample) -def weighted_choice(mapping): - """Return a single element from a weighted sample. +@py_random_state(1) +def weighted_choice(mapping, seed=None): + """Returns a single element from a weighted sample. The input is a dictionary of items with weights as values. """ # use roulette method - rnd = random.random() * sum(mapping.values()) + rnd = seed.random() * sum(mapping.values()) for k, w in mapping.items(): rnd -= w if rnd < 0: diff --git a/networkx/utils/rcm.py b/networkx/utils/rcm.py index 8939d8b..6c7094f 100644 --- a/networkx/utils/rcm.py +++ b/networkx/utils/rcm.py @@ -1,19 +1,13 @@ """ Cuthill-McKee ordering of graph nodes to produce sparse matrices """ -# Copyright (C) 2011-2014 by -# Aric Hagberg -# All rights reserved. -# BSD license. 
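[Editor's illustration, not part of the patch: with `py_random_state` applied as above, every `seed` argument in this module accepts an int, a `random.Random`, a `numpy.random.RandomState`, or None. A sketch of the resulting call patterns:]

```python
import random
from networkx.utils import powerlaw_sequence, zipf_rv

# An int seed creates a fresh random.Random(seed) on each call,
# so repeated calls with the same seed are reproducible.
assert powerlaw_sequence(5, seed=42) == powerlaw_sequence(5, seed=42)

# An existing random.Random instance is used as-is; its state advances.
rng = random.Random(42)
assert zipf_rv(alpha=2, xmin=3, seed=rng) >= 3
```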
from collections import deque from operator import itemgetter import networkx as nx from ..utils import arbitrary_element -__author__ = """\n""".join(['Aric Hagberg ']) -__all__ = ['cuthill_mckee_ordering', - 'reverse_cuthill_mckee_ordering'] +__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"] def cuthill_mckee_ordering(G, heuristic=None): @@ -42,7 +36,7 @@ def cuthill_mckee_ordering(G, heuristic=None): >>> from networkx.utils import cuthill_mckee_ordering >>> G = nx.path_graph(4) >>> rcm = list(cuthill_mckee_ordering(G)) - >>> A = nx.adjacency_matrix(G, nodelist=rcm) # doctest: +SKIP + >>> A = nx.adjacency_matrix(G, nodelist=rcm) Smallest degree node as heuristic function: @@ -70,8 +64,7 @@ def cuthill_mckee_ordering(G, heuristic=None): Springer-Verlag New York, Inc., New York, NY, USA. """ for c in nx.connected_components(G): - for n in connected_cuthill_mckee_ordering(G.subgraph(c), heuristic): - yield n + yield from connected_cuthill_mckee_ordering(G.subgraph(c), heuristic) def reverse_cuthill_mckee_ordering(G, heuristic=None): @@ -101,7 +94,7 @@ def reverse_cuthill_mckee_ordering(G, heuristic=None): >>> from networkx.utils import reverse_cuthill_mckee_ordering >>> G = nx.path_graph(4) >>> rcm = list(reverse_cuthill_mckee_ordering(G)) - >>> A = nx.adjacency_matrix(G, nodelist=rcm) # doctest: +SKIP + >>> A = nx.adjacency_matrix(G, nodelist=rcm) Smallest degree node as heuristic function: @@ -141,8 +134,7 @@ def connected_cuthill_mckee_ordering(G, heuristic=None): while queue: parent = queue.popleft() yield parent - nd = sorted(list(G.degree(set(G[parent]) - visited)), - key=itemgetter(1)) + nd = sorted(list(G.degree(set(G[parent]) - visited)), key=itemgetter(1)) children = [n for n, d in nd] visited.update(children) queue.extend(children) diff --git a/networkx/utils/tests/__init__.py b/networkx/utils/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/networkx/utils/tests/test.txt b/networkx/utils/tests/test.txt index 20b88ee..63b8d6e 100644 --- a/networkx/utils/tests/test.txt +++ b/networkx/utils/tests/test.txt @@ -1 +1 @@ -Blah... BLAH BLAH!!!! \ No newline at end of file +Blah... BLAH BLAH!!!! 
\ No newline at end of file diff --git a/networkx/utils/tests/test_contextmanager.py b/networkx/utils/tests/test_contextmanager.py index 29cc62a..6924683 100644 --- a/networkx/utils/tests/test_contextmanager.py +++ b/networkx/utils/tests/test_contextmanager.py @@ -1,22 +1,18 @@ -from __future__ import absolute_import - -from nose.tools import * - import networkx as nx def test_reversed(): G = nx.DiGraph() - G.add_edge('A', 'B') + G.add_edge("A", "B") # no exception with nx.utils.reversed(G): pass - assert_true('B' in G['A']) + assert "B" in G["A"] # exception try: with nx.utils.reversed(G): raise Exception except: - assert_true('B' in G['A']) + assert "B" in G["A"] diff --git a/networkx/utils/tests/test_decorators.py b/networkx/utils/tests/test_decorators.py index d052dd5..c14fa19 100644 --- a/networkx/utils/tests/test_decorators.py +++ b/networkx/utils/tests/test_decorators.py @@ -1,82 +1,97 @@ import tempfile import os +import pathlib +import random -from nose.tools import * -from nose import SkipTest +import pytest import networkx as nx from networkx.utils.decorators import open_file, not_implemented_for -from networkx.utils.decorators import nodes_or_number, preserve_random_state, \ - random_state +from networkx.utils.decorators import ( + preserve_random_state, + py_random_state, + np_random_state, + random_state, +) +from networkx.utils.misc import PythonRandomInterface def test_not_implemented_decorator(): - @not_implemented_for('directed') + @not_implemented_for("directed") def test1(G): pass + test1(nx.Graph()) -@raises(KeyError) def test_not_implemented_decorator_key(): - @not_implemented_for('foo') - def test1(G): - pass - test1(nx.Graph()) + with pytest.raises(KeyError): + + @not_implemented_for("foo") + def test1(G): + pass + + test1(nx.Graph()) -@raises(nx.NetworkXNotImplemented) def test_not_implemented_decorator_raise(): - @not_implemented_for('graph') - def test1(G): - pass - test1(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + + @not_implemented_for("graph") + def test1(G): + pass + + test1(nx.Graph()) -class TestOpenFileDecorator(object): - def setUp(self): - self.text = ['Blah... ', 'BLAH ', 'BLAH!!!!'] - self.fobj = tempfile.NamedTemporaryFile('wb+', delete=False) +class TestOpenFileDecorator: + def setup_method(self): + self.text = ["Blah... 
", "BLAH ", "BLAH!!!!"] + self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False) self.name = self.fobj.name + def teardown_method(self): + self.fobj.close() + os.unlink(self.name) + def write(self, path): for text in self.text: - path.write(text.encode('ascii')) + path.write(text.encode("ascii")) - @open_file(1, 'r') + @open_file(1, "r") def read(self, path): return path.readlines()[0] @staticmethod - @open_file(0, 'wb') + @open_file(0, "wb") def writer_arg0(path): - path.write('demo'.encode('ascii')) + path.write(b"demo") - @open_file(1, 'wb+') + @open_file(1, "wb+") def writer_arg1(self, path): self.write(path) - @open_file(2, 'wb') + @open_file(2, "wb") def writer_arg2default(self, x, path=None): if path is None: - with tempfile.NamedTemporaryFile('wb+') as fh: + with tempfile.NamedTemporaryFile("wb+") as fh: self.write(fh) else: self.write(path) - @open_file(4, 'wb') - def writer_arg4default(self, x, y, other='hello', path=None, **kwargs): + @open_file(4, "wb") + def writer_arg4default(self, x, y, other="hello", path=None, **kwargs): if path is None: - with tempfile.NamedTemporaryFile('wb+') as fh: + with tempfile.NamedTemporaryFile("wb+") as fh: self.write(fh) else: self.write(path) - @open_file('path', 'wb') + @open_file("path", "wb") def writer_kwarg(self, **kwargs): - path = kwargs.get('path', None) + path = kwargs.get("path", None) if path is None: - with tempfile.NamedTemporaryFile('wb+') as fh: + with tempfile.NamedTemporaryFile("wb+") as fh: self.write(fh) else: self.write(path) @@ -87,107 +102,199 @@ def test_writer_arg0_str(self): def test_writer_arg0_fobj(self): self.writer_arg0(self.fobj) + def test_writer_arg0_pathlib(self): + self.writer_arg0(pathlib.Path(self.name)) + def test_writer_arg1_str(self): self.writer_arg1(self.name) - assert_equal(self.read(self.name), ''.join(self.text)) + assert self.read(self.name) == "".join(self.text) def test_writer_arg1_fobj(self): self.writer_arg1(self.fobj) - assert_false(self.fobj.closed) + assert not self.fobj.closed self.fobj.close() - assert_equal(self.read(self.name), ''.join(self.text)) + assert self.read(self.name) == "".join(self.text) def test_writer_arg2default_str(self): self.writer_arg2default(0, path=None) self.writer_arg2default(0, path=self.name) - assert_equal(self.read(self.name), ''.join(self.text)) + assert self.read(self.name) == "".join(self.text) def test_writer_arg2default_fobj(self): self.writer_arg2default(0, path=self.fobj) - assert_false(self.fobj.closed) + assert not self.fobj.closed self.fobj.close() - assert_equal(self.read(self.name), ''.join(self.text)) + assert self.read(self.name) == "".join(self.text) def test_writer_arg2default_fobj_path_none(self): self.writer_arg2default(0, path=None) def test_writer_arg4default_fobj(self): - self.writer_arg4default(0, 1, dog='dog', other='other') - self.writer_arg4default(0, 1, dog='dog', other='other', path=self.name) - assert_equal(self.read(self.name), ''.join(self.text)) + self.writer_arg4default(0, 1, dog="dog", other="other") + self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name) + assert self.read(self.name) == "".join(self.text) def test_writer_kwarg_str(self): self.writer_kwarg(path=self.name) - assert_equal(self.read(self.name), ''.join(self.text)) + assert self.read(self.name) == "".join(self.text) def test_writer_kwarg_fobj(self): self.writer_kwarg(path=self.fobj) self.fobj.close() - assert_equal(self.read(self.name), ''.join(self.text)) + assert self.read(self.name) == "".join(self.text) def test_writer_kwarg_path_none(self): 
self.writer_kwarg(path=None) - def tearDown(self): - self.fobj.close() - os.unlink(self.name) - @preserve_random_state def test_preserve_random_state(): try: import numpy.random + r = numpy.random.random() except ImportError: return - assert(abs(r - 0.61879477158568) < 1e-16) + assert abs(r - 0.61879477158568) < 1e-16 -class TestRandomState(object): +class TestRandomState: @classmethod - def setUp(cls): + def setup_class(cls): global np - try: - import numpy as np - except ImportError: - raise SkipTest('NumPy not available.') + np = pytest.importorskip("numpy") @random_state(1) def instantiate_random_state(self, random_state): - assert_true(isinstance(random_state, np.random.RandomState)) - return random_state + assert isinstance(random_state, np.random.RandomState) + return random_state.random_sample() + + @np_random_state(1) + def instantiate_np_random_state(self, random_state): + assert isinstance(random_state, np.random.RandomState) + return random_state.random_sample() + + @py_random_state(1) + def instantiate_py_random_state(self, random_state): + assert isinstance(random_state, random.Random) or isinstance( + random_state, PythonRandomInterface + ) + return random_state.random() def test_random_state_None(self): - self.instantiate_random_state(random_state=None) + np.random.seed(42) + rv = np.random.random_sample() + np.random.seed(42) + assert rv == self.instantiate_random_state(None) + np.random.seed(42) + assert rv == self.instantiate_np_random_state(None) + + random.seed(42) + rv = random.random() + random.seed(42) + assert rv == self.instantiate_py_random_state(None) def test_random_state_np_random(self): - self.instantiate_random_state(random_state=np.random) + np.random.seed(42) + rv = np.random.random_sample() + np.random.seed(42) + assert rv == self.instantiate_random_state(np.random) + np.random.seed(42) + assert rv == self.instantiate_np_random_state(np.random) + np.random.seed(42) + assert rv == self.instantiate_py_random_state(np.random) def test_random_state_int(self): + np.random.seed(42) + np_rv = np.random.random_sample() + random.seed(42) + py_rv = random.random() + + np.random.seed(42) seed = 1 - random_state = self.instantiate_random_state(random_state=seed) - assert_true(np.all((np.random.RandomState(seed).rand(10), - random_state.rand(10)))) + rval = self.instantiate_random_state(seed) + rval_expected = np.random.RandomState(seed).rand() + assert rval, rval_expected + + rval = self.instantiate_np_random_state(seed) + rval_expected = np.random.RandomState(seed).rand() + assert rval, rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random_sample() + + random.seed(42) + rval = self.instantiate_py_random_state(seed) + rval_expected = random.Random(seed).random() + assert rval, rval_expected + # test that global seed wasn't changed in function + assert py_rv == random.random() def test_random_state_np_random_RandomState(self): + np.random.seed(42) + np_rv = np.random.random_sample() + + np.random.seed(42) seed = 1 rng = np.random.RandomState(seed) - random_state = self.instantiate_random_state(random_state=rng) - assert_true(np.all((np.random.RandomState(seed).rand(10), - random_state.rand(10)))) + rval = self.instantiate_random_state(rng) + rval_expected = np.random.RandomState(seed).rand() + assert rval, rval_expected + rval = self.instantiate_np_random_state(seed) + rval_expected = np.random.RandomState(seed).rand() + assert rval, rval_expected -@raises(nx.NetworkXError) -def test_string_arg_index(): - 
@random_state('a') - def make_random_state(rs): - pass - rstate = make_random_state(1) + rval = self.instantiate_py_random_state(seed) + rval_expected = np.random.RandomState(seed).rand() + assert rval, rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random_sample() + def test_random_state_py_random(self): + seed = 1 + rng = random.Random(seed) + rv = self.instantiate_py_random_state(rng) + assert rv, random.Random(seed).random() -@raises(nx.NetworkXError) -def test_invalid_arg_index(): - @random_state(2) - def make_random_state(rs): - pass - rstate = make_random_state(1) + pytest.raises(ValueError, self.instantiate_random_state, rng) + pytest.raises(ValueError, self.instantiate_np_random_state, rng) + + +def test_random_state_string_arg_index(): + with pytest.raises(nx.NetworkXError): + + @random_state("a") + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_py_random_state_string_arg_index(): + with pytest.raises(nx.NetworkXError): + + @py_random_state("a") + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_random_state_invalid_arg_index(): + with pytest.raises(nx.NetworkXError): + + @random_state(2) + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_py_random_state_invalid_arg_index(): + with pytest.raises(nx.NetworkXError): + + @py_random_state(2) + def make_random_state(rs): + pass + + rstate = make_random_state(1) diff --git a/networkx/utils/tests/test_heaps.py b/networkx/utils/tests/test_heaps.py index 4139694..2943388 100644 --- a/networkx/utils/tests/test_heaps.py +++ b/networkx/utils/tests/test_heaps.py @@ -1,10 +1,9 @@ -from nose.tools import * +import pytest import networkx as nx -from networkx.utils import * +from networkx.utils import BinaryHeap, PairingHeap -class X(object): - +class X: def __eq__(self, other): raise self is other @@ -12,16 +11,16 @@ def __ne__(self, other): raise self is not other def __lt__(self, other): - raise TypeError('cannot compare') + raise TypeError("cannot compare") def __le__(self, other): - raise TypeError('cannot compare') + raise TypeError("cannot compare") def __ge__(self, other): - raise TypeError('cannot compare') + raise TypeError("cannot compare") def __gt__(self, other): - raise TypeError('cannot compare') + raise TypeError("cannot compare") def __hash__(self): return hash(id(self)) @@ -31,60 +30,61 @@ def __hash__(self): data = [ # min should not invent an element. - ('min', nx.NetworkXError), + ("min", nx.NetworkXError), # Popping an empty heap should fail. - ('pop', nx.NetworkXError), + ("pop", nx.NetworkXError), # Getting nonexisting elements should return None. - ('get', 0, None), - ('get', x, None), - ('get', None, None), + ("get", 0, None), + ("get", x, None), + ("get", None, None), # Inserting a new key should succeed. - ('insert', x, 1, True), - ('get', x, 1), - ('min', (x, 1)), + ("insert", x, 1, True), + ("get", x, 1), + ("min", (x, 1)), # min should not pop the top element. - ('min', (x, 1)), + ("min", (x, 1)), # Inserting a new key of different type should succeed. - ('insert', 1, -2.0, True), + ("insert", 1, -2.0, True), # int and float values should interop. - ('min', (1, -2.0)), + ("min", (1, -2.0)), # pop removes minimum-valued element. 
- ('insert', 3, -10 ** 100, True), - ('insert', 4, 5, True), - ('pop', (3, -10 ** 100)), - ('pop', (1, -2.0)), + ("insert", 3, -(10 ** 100), True), + ("insert", 4, 5, True), + ("pop", (3, -(10 ** 100))), + ("pop", (1, -2.0)), # Decrease-insert should succeed. - ('insert', 4, -50, True), - ('insert', 4, -60, False, True), + ("insert", 4, -50, True), + ("insert", 4, -60, False, True), # Decrease-insert should not create duplicate keys. - ('pop', (4, -60)), - ('pop', (x, 1)), + ("pop", (4, -60)), + ("pop", (x, 1)), # Popping all elements should empty the heap. - ('min', nx.NetworkXError), - ('pop', nx.NetworkXError), + ("min", nx.NetworkXError), + ("pop", nx.NetworkXError), # Non-value-changing insert should fail. - ('insert', x, 0, True), - ('insert', x, 0, False, False), - ('min', (x, 0)), - ('insert', x, 0, True, False), - ('min', (x, 0)), + ("insert", x, 0, True), + ("insert", x, 0, False, False), + ("min", (x, 0)), + ("insert", x, 0, True, False), + ("min", (x, 0)), # Failed insert should not create duplicate keys. - ('pop', (x, 0)), - ('pop', nx.NetworkXError), + ("pop", (x, 0)), + ("pop", nx.NetworkXError), # Increase-insert should succeed when allowed. - ('insert', None, 0, True), - ('insert', 2, -1, True), - ('min', (2, -1)), - ('insert', 2, 1, True, False), - ('min', (None, 0)), + ("insert", None, 0, True), + ("insert", 2, -1, True), + ("min", (2, -1)), + ("insert", 2, 1, True, False), + ("min", (None, 0)), # Increase-insert should fail when disallowed. - ('insert', None, 2, False, False), - ('min', (None, 0)), + ("insert", None, 2, False, False), + ("min", (None, 0)), # Failed increase-insert should not create duplicate keys. - ('pop', (None, 0)), - ('pop', (2, 1)), - ('min', nx.NetworkXError), - ('pop', nx.NetworkXError)] + ("pop", (None, 0)), + ("pop", (2, 1)), + ("min", nx.NetworkXError), + ("pop", nx.NetworkXError), +] def _test_heap_class(cls, *args, **kwargs): @@ -92,34 +92,34 @@ def _test_heap_class(cls, *args, **kwargs): # Basic behavioral test for op in data: if op[-1] is not nx.NetworkXError: - assert_equal(op[-1], getattr(heap, op[0])(*op[1:-1])) + assert op[-1] == getattr(heap, op[0])(*op[1:-1]) else: - assert_raises(op[-1], getattr(heap, op[0]), *op[1:-1]) + pytest.raises(op[-1], getattr(heap, op[0]), *op[1:-1]) # Coverage test. 
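The op table above doubles as a behavioral specification for both heap classes. Roughly the same semantics, spelled out as direct calls against the BinaryHeap API exercised here (a sketch)::

    from networkx.utils import BinaryHeap

    heap = BinaryHeap()
    assert heap.insert("a", 5)      # new key: returns True
    assert heap.insert("a", 3)      # decrease-key: returns True
    assert not heap.insert("a", 9)  # increase is refused by default
    assert heap.min() == ("a", 3)   # min peeks without popping
    assert heap.pop() == ("a", 3)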
for i in range(99, -1, -1): - assert_true(heap.insert(i, i)) + assert heap.insert(i, i) for i in range(50): - assert_equal(heap.pop(), (i, i)) + assert heap.pop() == (i, i) for i in range(100): - assert_equal(heap.insert(i, i), i < 50) + assert heap.insert(i, i) == (i < 50) for i in range(100): - assert_false(heap.insert(i, i + 1)) + assert not heap.insert(i, i + 1) for i in range(50): - assert_equal(heap.pop(), (i, i)) + assert heap.pop() == (i, i) for i in range(100): - assert_equal(heap.insert(i, i + 1), i < 50) + assert heap.insert(i, i + 1) == (i < 50) for i in range(49): - assert_equal(heap.pop(), (i, i + 1)) - assert_equal(sorted([heap.pop(), heap.pop()]), [(49, 50), (50, 50)]) + assert heap.pop() == (i, i + 1) + assert sorted([heap.pop(), heap.pop()]) == [(49, 50), (50, 50)] for i in range(51, 100): - assert_false(heap.insert(i, i + 1, True)) + assert not heap.insert(i, i + 1, True) for i in range(51, 70): - assert_equal(heap.pop(), (i, i + 1)) + assert heap.pop() == (i, i + 1) for i in range(100): - assert_true(heap.insert(i, i)) + assert heap.insert(i, i) for i in range(100): - assert_equal(heap.pop(), (i, i)) - assert_raises(nx.NetworkXError, heap.pop) + assert heap.pop() == (i, i) + pytest.raises(nx.NetworkXError, heap.pop) def test_PairingHeap(): diff --git a/networkx/utils/tests/test_mapped_queue.py b/networkx/utils/tests/test_mapped_queue.py index cbb1dfd..78ea91e 100644 --- a/networkx/utils/tests/test_mapped_queue.py +++ b/networkx/utils/tests/test_mapped_queue.py @@ -1,36 +1,18 @@ -# -*- coding: utf-8 -*- -# -# priorityq: An object-oriented priority queue with updatable priorities. -# -# Copyright 2018 Edward L. Platt -# -# This file is part of NetworkX -# -# NetworkX is distributed under a BSD license; see LICENSE.txt for more -# information. -# -# Authors: -# Edward L. 
Platt - -from nose.tools import assert_equal -from nose.tools import raises - from networkx.utils.mapped_queue import MappedQueue -class TestMappedQueue(object): - +class TestMappedQueue: def setup(self): pass def _check_map(self, q): - d = dict((elt, pos) for pos, elt in enumerate(q.h)) - assert_equal(d, q.d) + d = {elt: pos for pos, elt in enumerate(q.h)} + assert d == q.d def _make_mapped_queue(self, h): q = MappedQueue() q.h = h - q.d = dict((elt, pos) for pos, elt in enumerate(h)) + q.d = {elt: pos for pos, elt in enumerate(h)} return q def test_heapify(self): @@ -48,14 +30,14 @@ def test_len(self): h = [5, 4, 3, 2, 1, 0] q = MappedQueue(h) self._check_map(q) - assert_equal(len(q), 6) + assert len(q) == 6 def test_siftup_leaf(self): h = [2] h_sifted = [2] q = self._make_mapped_queue(h) q._siftup(0) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftup_one_child(self): @@ -63,7 +45,7 @@ def test_siftup_one_child(self): h_sifted = [0, 2] q = self._make_mapped_queue(h) q._siftup(0) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftup_left_child(self): @@ -71,7 +53,7 @@ def test_siftup_left_child(self): h_sifted = [0, 2, 1] q = self._make_mapped_queue(h) q._siftup(0) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftup_right_child(self): @@ -79,7 +61,7 @@ def test_siftup_right_child(self): h_sifted = [0, 1, 2] q = self._make_mapped_queue(h) q._siftup(0) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftup_multiple(self): @@ -87,7 +69,7 @@ def test_siftup_multiple(self): h_sifted = [1, 3, 2, 4, 0, 5, 6] q = self._make_mapped_queue(h) q._siftup(0) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftdown_leaf(self): @@ -95,7 +77,7 @@ def test_siftdown_leaf(self): h_sifted = [2] q = self._make_mapped_queue(h) q._siftdown(0) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftdown_single(self): @@ -103,7 +85,7 @@ def test_siftdown_single(self): h_sifted = [0, 1] q = self._make_mapped_queue(h) q._siftdown(len(h) - 1) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_siftdown_multiple(self): @@ -111,7 +93,7 @@ def test_siftdown_multiple(self): h_sifted = [0, 1, 3, 2, 5, 6, 7, 4] q = self._make_mapped_queue(h) q._siftdown(len(h) - 1) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_push(self): @@ -120,7 +102,7 @@ def test_push(self): q = MappedQueue() for elt in to_push: q.push(elt) - assert_equal(q.h, h_sifted) + assert q.h == h_sifted self._check_map(q) def test_push_duplicate(self): @@ -129,11 +111,11 @@ def test_push_duplicate(self): q = MappedQueue() for elt in to_push: inserted = q.push(elt) - assert_equal(inserted, True) - assert_equal(q.h, h_sifted) + assert inserted + assert q.h == h_sifted self._check_map(q) inserted = q.push(1) - assert_equal(inserted, False) + assert not inserted def test_pop(self): h = [3, 4, 6, 0, 1, 2, 5] @@ -143,7 +125,7 @@ def test_pop(self): popped = [] for elt in sorted(h): popped.append(q.pop()) - assert_equal(popped, h_sorted) + assert popped == h_sorted self._check_map(q) def test_remove_leaf(self): @@ -151,25 +133,25 @@ def test_remove_leaf(self): h_removed = [0, 2, 1, 6, 4, 5] q = self._make_mapped_queue(h) removed = q.remove(3) - assert_equal(q.h, h_removed) + assert q.h == h_removed def test_remove_root(self): h = [0, 2, 1, 6, 3, 5, 4] h_removed = [1, 2, 4, 6, 3, 5] q = 
self._make_mapped_queue(h) removed = q.remove(0) - assert_equal(q.h, h_removed) + assert q.h == h_removed def test_update_leaf(self): h = [0, 20, 10, 60, 30, 50, 40] h_updated = [0, 15, 10, 60, 20, 50, 40] q = self._make_mapped_queue(h) removed = q.update(30, 15) - assert_equal(q.h, h_updated) + assert q.h == h_updated def test_update_root(self): h = [0, 20, 10, 60, 30, 50, 40] h_updated = [10, 20, 35, 60, 30, 50, 40] q = self._make_mapped_queue(h) removed = q.update(0, 35) - assert_equal(q.h, h_updated) + assert q.h == h_updated diff --git a/networkx/utils/tests/test_misc.py b/networkx/utils/tests/test_misc.py index 0bedc5d..3ecf2ef 100644 --- a/networkx/utils/tests/test_misc.py +++ b/networkx/utils/tests/test_misc.py @@ -1,35 +1,54 @@ -# -*- encoding: utf-8 -*- -from nose.tools import * -from nose import SkipTest +import pytest import networkx as nx -from networkx.utils import * +import random +from networkx.utils import ( + create_py_random_state, + create_random_state, + discrete_sequence, + dict_to_numpy_array, + dict_to_numpy_array1, + dict_to_numpy_array2, + is_string_like, + iterable, + groups, + make_list_of_ints, + make_str, + pairwise, + powerlaw_sequence, + PythonRandomInterface, + to_tuple, +) def test_is_string_like(): - assert_true(is_string_like("aaaa")) - assert_false(is_string_like(None)) - assert_false(is_string_like(123)) + assert is_string_like("aaaa") + assert not is_string_like(None) + assert not is_string_like(123) def test_iterable(): - assert_false(iterable(None)) - assert_false(iterable(10)) - assert_true(iterable([1, 2, 3])) - assert_true(iterable((1, 2, 3))) - assert_true(iterable({1: "A", 2: "X"})) - assert_true(iterable("ABC")) + assert not iterable(None) + assert not iterable(10) + assert iterable([1, 2, 3]) + assert iterable((1, 2, 3)) + assert iterable({1: "A", 2: "X"}) + assert iterable("ABC") def test_graph_iterable(): K = nx.complete_graph(10) - assert_true(iterable(K)) - assert_true(iterable(K.nodes())) - assert_true(iterable(K.edges())) + assert iterable(K) + assert iterable(K.nodes()) + assert iterable(K.edges()) -def test_is_list_of_ints(): - assert_true(is_list_of_ints([1, 2, 3, 42])) - assert_false(is_list_of_ints([1, 2, 3, "kermit"])) +def test_make_list_of_ints(): + mylist = [1, 2, 3.0, 42, -2] + assert make_list_of_ints(mylist) is mylist + assert make_list_of_ints(mylist) == mylist + assert type(make_list_of_ints(mylist)[2]) is int + pytest.raises(nx.NetworkXError, make_list_of_ints, [1, 2, 3, "kermit"]) + pytest.raises(nx.NetworkXError, make_list_of_ints, [1, 2, 3.1]) def test_random_number_distribution(): @@ -39,61 +58,51 @@ def test_random_number_distribution(): def test_make_str_with_bytes(): - import sys - PY2 = sys.version_info[0] == 2 - x = "qualité" y = make_str(x) - if PY2: - assert_true(isinstance(y, unicode)) - # Since file encoding is utf-8, the é will be two bytes. 
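For orientation, the MappedQueue operations tested above combine into a small priority-queue workflow; in this version the queue stores bare priorities rather than key/priority pairs (a sketch)::

    from networkx.utils.mapped_queue import MappedQueue

    q = MappedQueue([5, 3, 8])
    q.push(1)
    assert q.pop() == 1  # min-heap: smallest element comes out first
    q.update(8, 2)       # re-prioritize an existing element
    assert q.pop() == 2
    assert len(q) == 2   # 3 and 5 remain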
- assert_true(len(y) == 8) - else: - assert_true(isinstance(y, str)) - assert_true(len(y) == 7) + assert isinstance(y, str) + assert len(y) == 7 def test_make_str_with_unicode(): - import sys - PY2 = sys.version_info[0] == 2 - if PY2: - x = unicode("qualité", encoding='utf-8') - y = make_str(x) - assert_true(isinstance(y, unicode)) - assert_true(len(y) == 7) - else: - x = "qualité" - y = make_str(x) - assert_true(isinstance(y, str)) - assert_true(len(y) == 7) - - -class TestNumpyArray(object): + x = "qualité" + y = make_str(x) + assert isinstance(y, str) + assert len(y) == 7 + + +class TestNumpyArray: @classmethod - def setupClass(cls): + def setup_class(cls): global numpy global assert_allclose - try: - import numpy - from numpy.testing import assert_allclose - except ImportError: - raise SkipTest('NumPy not available.') + numpy = pytest.importorskip("numpy") + assert_allclose = numpy.testing.assert_allclose + + def test_numpy_to_list_of_ints(self): + a = numpy.array([1, 2, 3], dtype=numpy.int64) + b = numpy.array([1.0, 2, 3]) + c = numpy.array([1.1, 2, 3]) + assert type(make_list_of_ints(a)) == list + assert make_list_of_ints(b) == list(b) + B = make_list_of_ints(b) + assert type(B[0]) == int + pytest.raises(nx.NetworkXError, make_list_of_ints, c) def test_dict_to_numpy_array1(self): - d = {'a': 1, 'b': 2} - a = dict_to_numpy_array1(d, mapping={'a': 0, 'b': 1}) + d = {"a": 1, "b": 2} + a = dict_to_numpy_array1(d, mapping={"a": 0, "b": 1}) assert_allclose(a, numpy.array([1, 2])) - a = dict_to_numpy_array1(d, mapping={'b': 0, 'a': 1}) + a = dict_to_numpy_array1(d, mapping={"b": 0, "a": 1}) assert_allclose(a, numpy.array([2, 1])) a = dict_to_numpy_array1(d) assert_allclose(a.sum(), 3) def test_dict_to_numpy_array2(self): - d = {'a': {'a': 1, 'b': 2}, - 'b': {'a': 10, 'b': 20}} + d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}} - mapping = {'a': 1, 'b': 0} + mapping = {"a": 1, "b": 0} a = dict_to_numpy_array2(d, mapping=mapping) assert_allclose(a, numpy.array([[20, 10], [2, 1]])) @@ -101,14 +110,13 @@ def test_dict_to_numpy_array2(self): assert_allclose(a.sum(), 33) def test_dict_to_numpy_array_a(self): - d = {'a': {'a': 1, 'b': 2}, - 'b': {'a': 10, 'b': 20}} + d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}} - mapping = {'a': 0, 'b': 1} + mapping = {"a": 0, "b": 1} a = dict_to_numpy_array(d, mapping=mapping) assert_allclose(a, numpy.array([[1, 2], [10, 20]])) - mapping = {'a': 1, 'b': 0} + mapping = {"a": 1, "b": 0} a = dict_to_numpy_array(d, mapping=mapping) assert_allclose(a, numpy.array([[20, 10], [2, 1]])) @@ -116,9 +124,9 @@ def test_dict_to_numpy_array_a(self): assert_allclose(a.sum(), 33) def test_dict_to_numpy_array_b(self): - d = {'a': 1, 'b': 2} + d = {"a": 1, "b": 2} - mapping = {'a': 0, 'b': 1} + mapping = {"a": 0, "b": 1} a = dict_to_numpy_array(d, mapping=mapping) assert_allclose(a, numpy.array([1, 2])) @@ -130,52 +138,85 @@ def test_pairwise(): nodes = range(4) node_pairs = [(0, 1), (1, 2), (2, 3)] node_pairs_cycle = node_pairs + [(3, 0)] - assert_equal(list(pairwise(nodes)), node_pairs) - assert_equal(list(pairwise(iter(nodes))), node_pairs) - assert_equal(list(pairwise(nodes, cyclic=True)), node_pairs_cycle) + assert list(pairwise(nodes)) == node_pairs + assert list(pairwise(iter(nodes))) == node_pairs + assert list(pairwise(nodes, cyclic=True)) == node_pairs_cycle empty_iter = iter(()) - assert_equal(list(pairwise(empty_iter)), []) + assert list(pairwise(empty_iter)) == [] empty_iter = iter(()) - assert_equal(list(pairwise(empty_iter, cyclic=True)), []) + assert 
list(pairwise(empty_iter, cyclic=True)) == [] def test_groups(): - many_to_one = dict(zip('abcde', [0, 0, 1, 1, 2])) + many_to_one = dict(zip("abcde", [0, 0, 1, 1, 2])) actual = groups(many_to_one) - expected = {0: {'a', 'b'}, 1: {'c', 'd'}, 2: {'e'}} - assert_equal(actual, expected) - assert_equal({}, groups({})) + expected = {0: {"a", "b"}, 1: {"c", "d"}, 2: {"e"}} + assert actual == expected + assert {} == groups({}) def test_to_tuple(): a_list = [1, 2, [1, 3]] actual = to_tuple(a_list) expected = (1, 2, (1, 3)) - assert_equal(actual, expected) + assert actual == expected a_tuple = (1, 2) actual = to_tuple(a_tuple) expected = a_tuple - assert_equal(actual, expected) + assert actual == expected a_mix = (1, 2, [1, 3]) actual = to_tuple(a_mix) expected = (1, 2, (1, 3)) - assert_equal(actual, expected) + assert actual == expected def test_create_random_state(): - try: - import numpy as np - except ImportError: - raise SkipTest('numpy not available.') + np = pytest.importorskip("numpy") + rs = np.random.RandomState + + assert isinstance(create_random_state(1), rs) + assert isinstance(create_random_state(None), rs) + assert isinstance(create_random_state(np.random), rs) + assert isinstance(create_random_state(rs(1)), rs) + pytest.raises(ValueError, create_random_state, "a") + + assert np.all(rs(1).rand(10) == create_random_state(1).rand(10)) + + +def test_create_py_random_state(): + pyrs = random.Random + + assert isinstance(create_py_random_state(1), pyrs) + assert isinstance(create_py_random_state(None), pyrs) + assert isinstance(create_py_random_state(pyrs(1)), pyrs) + pytest.raises(ValueError, create_py_random_state, "a") + + np = pytest.importorskip("numpy") rs = np.random.RandomState + nprs = PythonRandomInterface + assert isinstance(create_py_random_state(np.random), nprs) + assert isinstance(create_py_random_state(rs(1)), nprs) + # test default rng input + assert isinstance(PythonRandomInterface(), nprs) - assert_true(isinstance(create_random_state(1), rs)) - assert_true(isinstance(create_random_state(None), rs)) - assert_true(isinstance(create_random_state(np.random), rs)) - assert_true(isinstance(create_random_state(rs(1)), rs)) - assert_raises(ValueError, create_random_state, 'a') - assert_true(np.all((rs(1).rand(10), create_random_state(1).rand(10)))) +def test_PythonRandomInterface(): + np = pytest.importorskip("numpy") + rs = np.random.RandomState + rng = PythonRandomInterface(rs(42)) + rs42 = rs(42) + + # make sure these functions are same as expected outcome + assert rng.randrange(3, 5) == rs42.randint(3, 5) + assert np.all(rng.choice([1, 2, 3]) == rs42.choice([1, 2, 3])) + assert rng.gauss(0, 1) == rs42.normal(0, 1) + assert rng.expovariate(1.5) == rs42.exponential(1 / 1.5) + assert np.all(rng.shuffle([1, 2, 3]) == rs42.shuffle([1, 2, 3])) + assert np.all( + rng.sample([1, 2, 3], 2) == rs42.choice([1, 2, 3], (2,), replace=False) + ) + assert rng.randint(3, 5) == rs42.randint(3, 6) + assert rng.random() == rs42.random_sample() diff --git a/networkx/utils/tests/test_random_sequence.py b/networkx/utils/tests/test_random_sequence.py index d70a0b0..6a565bc 100644 --- a/networkx/utils/tests/test_random_sequence.py +++ b/networkx/utils/tests/test_random_sequence.py @@ -1,31 +1,37 @@ -#!/usr/bin/env python -from nose.tools import * -from networkx.utils import powerlaw_sequence,\ - zipf_rv, random_weighted_sample,\ - weighted_choice -import networkx.utils +import pytest +from networkx.utils import ( + powerlaw_sequence, + zipf_rv, + random_weighted_sample, + weighted_choice, +) def 
test_degree_sequences():
+    seq = powerlaw_sequence(10, seed=1)
     seq = powerlaw_sequence(10)
-    assert_equal(len(seq), 10)
+    assert len(seq) == 10


 def test_zipf_rv():
+    r = zipf_rv(2.3, xmin=2, seed=1)
+    r = zipf_rv(2.3, 2, 1)
     r = zipf_rv(2.3)
-    assert_true(type(r), int)
-    assert_raises(ValueError, zipf_rv, 0.5)
-    assert_raises(ValueError, zipf_rv, 2, xmin=0)
+    assert isinstance(r, int)
+    pytest.raises(ValueError, zipf_rv, 0.5)
+    pytest.raises(ValueError, zipf_rv, 2, xmin=0)


 def test_random_weighted_sample():
-    mapping = {'a': 10, 'b': 20}
+    mapping = {"a": 10, "b": 20}
+    s = random_weighted_sample(mapping, 2, seed=1)
     s = random_weighted_sample(mapping, 2)
-    assert_equal(sorted(s), sorted(mapping.keys()))
-    assert_raises(ValueError, random_weighted_sample, mapping, 3)
+    assert sorted(s) == sorted(mapping.keys())
+    pytest.raises(ValueError, random_weighted_sample, mapping, 3)


 def test_random_weighted_choice():
-    mapping = {'a': 10, 'b': 0}
+    mapping = {"a": 10, "b": 0}
+    c = weighted_choice(mapping, seed=1)
     c = weighted_choice(mapping)
-    assert_equal(c, 'a')
+    assert c == "a"
diff --git a/networkx/utils/tests/test_rcm.py b/networkx/utils/tests/test_rcm.py
index 8841bee..b53cc8f 100644
--- a/networkx/utils/tests/test_rcm.py
+++ b/networkx/utils/tests/test_rcm.py
@@ -1,4 +1,3 @@
-from nose.tools import *
 from networkx.utils import reverse_cuthill_mckee_ordering
 import networkx as nx

@@ -6,36 +5,59 @@ def test_reverse_cuthill_mckee():
     # example graph from
     # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
-    G = nx.Graph([(0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
-                  (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7)])
+    G = nx.Graph(
+        [
+            (0, 3),
+            (0, 5),
+            (1, 2),
+            (1, 4),
+            (1, 6),
+            (1, 9),
+            (2, 3),
+            (2, 4),
+            (3, 5),
+            (3, 8),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (6, 7),
+        ]
+    )
     rcm = list(reverse_cuthill_mckee_ordering(G))
-    assert_true(rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9],
-                        [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]])
+    assert rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9], [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]]


 def test_rcm_alternate_heuristic():
     # example from
-    G = nx.Graph([(0, 0),
-                  (0, 4),
-                  (1, 1),
-                  (1, 2),
-                  (1, 5),
-                  (1, 7),
-                  (2, 2),
-                  (2, 4),
-                  (3, 3),
-                  (3, 6),
-                  (4, 4),
-                  (5, 5),
-                  (5, 7),
-                  (6, 6),
-                  (7, 7)])
+    G = nx.Graph(
+        [
+            (0, 0),
+            (0, 4),
+            (1, 1),
+            (1, 2),
+            (1, 5),
+            (1, 7),
+            (2, 2),
+            (2, 4),
+            (3, 3),
+            (3, 6),
+            (4, 4),
+            (5, 5),
+            (5, 7),
+            (6, 6),
+            (7, 7),
+        ]
+    )

-    answers = [[6, 3, 5, 7, 1, 2, 4, 0], [6, 3, 7, 5, 1, 2, 4, 0],
-               [7, 5, 1, 2, 4, 0, 6, 3]]
+    answers = [
+        [6, 3, 5, 7, 1, 2, 4, 0],
+        [6, 3, 7, 5, 1, 2, 4, 0],
+        [7, 5, 1, 2, 4, 0, 6, 3],
+    ]

     def smallest_degree(G):
         deg, node = min((d, n) for n, d in G.degree())
         return node
+
     rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
-    assert_true(rcm in answers)
+    assert rcm in answers
diff --git a/networkx/utils/tests/test_unionfind.py b/networkx/utils/tests/test_unionfind.py
index 9f3116a..75a8fae 100644
--- a/networkx/utils/tests/test_unionfind.py
+++ b/networkx/utils/tests/test_unionfind.py
@@ -1,5 +1,3 @@
-from nose.tools import *
-
 import networkx as nx


@@ -11,4 +9,34 @@ def test_unionfind():
     #
     # Now we just make sure that no exception is raised.
     x = nx.utils.UnionFind()
-    x.union(0, 'a')
+    x.union(0, "a")
+
+
+def test_subtree_union():
+    # See https://github.com/networkx/networkx/pull/3224
+    # (35db1b551ee65780794a357794f521d8768d5049).
+    # Test if subtree unions are handled correctly by to_sets().
+    uf = nx.utils.UnionFind()
+    uf.union(1, 2)
+    uf.union(3, 4)
+    uf.union(4, 5)
+    uf.union(1, 5)
+    assert list(uf.to_sets()) == [{1, 2, 3, 4, 5}]
+
+
+def test_unionfind_weights():
+    # Tests if weights are computed correctly with unions of many elements.
+    uf = nx.utils.UnionFind()
+    uf.union(1, 4, 7)
+    uf.union(2, 5, 8)
+    uf.union(3, 6, 9)
+    uf.union(1, 2, 3, 4, 5, 6, 7, 8, 9)
+    assert uf.weights[uf[1]] == 9
+
+
+def test_empty_union():
+    # Tests if a null-union does nothing.
+    uf = nx.utils.UnionFind((0, 1))
+    uf.union()
+    assert uf[0] == 0
+    assert uf[1] == 1
diff --git a/networkx/utils/union_find.py b/networkx/utils/union_find.py
index 04a8e47..fffb00c 100644
--- a/networkx/utils/union_find.py
+++ b/networkx/utils/union_find.py
@@ -1,10 +1,3 @@
-# Copyright 2016-2018 NetworkX developers.
-# Copyright (C) 2004-2018 by
-# Aric Hagberg
-# Dan Schult
-# Pieter Swart
-# All rights reserved.
-# BSD license.
 """
 Union-find data structure.
 """
@@ -82,24 +75,29 @@ def to_sets(self):

         For example::

-            >>> partition = UnionFind('xyz')
+            >>> partition = UnionFind("xyz")
             >>> sorted(map(sorted, partition.to_sets()))
             [['x'], ['y'], ['z']]
-            >>> partition.union('x', 'y')
+            >>> partition.union("x", "y")
             >>> sorted(map(sorted, partition.to_sets()))
             [['x', 'y'], ['z']]

         """
-        # TODO In Python 3.3+, this should be `yield from ...`.
-        for block in groups(self.parents).values():
-            yield block
+        # Ensure fully pruned paths
+        for x in self.parents.keys():
+            _ = self[x]  # Evaluated for side-effect only
+
+        yield from groups(self.parents).values()

     def union(self, *objects):
         """Find the sets containing the objects and merge them all."""
-        roots = [self[x] for x in objects]
         # Find the heaviest root according to its weight.
-        heaviest = max(roots, key=lambda r: self.weights[r])
+        roots = iter(
+            sorted(
+                {self[x] for x in objects}, key=lambda r: self.weights[r], reverse=True
+            )
+        )
+        try:
+            root = next(roots)
+        except StopIteration:
+            return
+
         for r in roots:
-            if r != heaviest:
-                self.weights[heaviest] += self.weights[r]
-                self.parents[r] = heaviest
+            self.weights[root] += self.weights[r]
+            self.parents[r] = root
diff --git a/requirements/README.md b/requirements/README.md
deleted file mode 100644
index 9c3714f..0000000
--- a/requirements/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# pip requirements files
-
-## Index
-
-- [`default.txt`](default.txt)
-  Default requirements
-- [`extras.txt`](extras.txt)
-  Optional requirements
-- [`test.txt`](test.txt)
-  Requirements for running test suite
-- [`doc.txt`](doc.txt)
-  Requirements for building the documentation (see `../doc/`)
-
-## Examples
-
-### Installing requirements
-
-```bash
-$ pip install -U -r requirements/default.txt
-```
-
-### Running the tests
-
-```bash
-$ pip install -U -r requirements/default.txt
-$ pip install -U -r requirements/test.txt
-```
diff --git a/requirements/default.txt b/requirements/default.txt
deleted file mode 100644
index d4d2edf..0000000
--- a/requirements/default.txt
+++ /dev/null
@@ -1 +0,0 @@
-decorator>=4.1.0
diff --git a/requirements/doc.txt b/requirements/doc.txt
deleted file mode 100644
index 47518e7..0000000
--- a/requirements/doc.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-sphinx>=1.6.3
-sphinx_rtd_theme>=0.2.4
-sphinx-gallery>=0.1.12
-pillow>=4.2.1
-nb2plots>=0.5.2
-texext>=0.5
diff --git a/requirements/extras.txt b/requirements/extras.txt
deleted file mode 100644
index 35e83bb..0000000
--- a/requirements/extras.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-numpy>=1.12.0
-scipy>=0.19.0
-pandas>=0.20.1
-matplotlib>=2.0.2
-pygraphviz>=1.3.1
-pydot>=1.2.3
-pyyaml>=3.12
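The reworked union() above merges every distinct root into the heaviest one in a single pass. In use (a sketch, with illustrative element names)::

    from networkx.utils import UnionFind

    uf = UnionFind()
    uf.union("a", "b")        # tree of weight 2
    uf.union("c", "d", "e")   # tree of weight 3
    uf.union("a", "c")        # the lighter root attaches under the heavier
    assert uf["a"] == uf["e"]             # all five share one representative
    assert uf.weights[uf["a"]] == 5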
-gdal==1.10.0 -lxml>=3.7.3 diff --git a/requirements/test.txt b/requirements/test.txt deleted file mode 100644 index cd1ab6e..0000000 --- a/requirements/test.txt +++ /dev/null @@ -1,4 +0,0 @@ -nose>=1.3.7 -nose-ignore-docstring>=0.2 -coverage>=4.4.1 -codecov>=2.0.9 diff --git a/setup.cfg b/setup.cfg index 3be5d08..8bfd5a1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,19 +1,4 @@ -[nosetests] -verbosity=0 -detailed-errors=1 -with-doctest=0 +[egg_info] +tag_build = +tag_date = 0 -# The default regex which discovers test modules catches networkx.testing -# and declares it as a test module, even though it is not a test module -# This causes a very small discrepancy between the coverage report as -# generated by nosetests, and the coverage report as generated by coverage. -# If we want them to agree, then we need to make sure that networkx.testing -# is not captured by the default match regex. -# -# Default is: (?:^|[\b_\./-])[Tt]est -# -match=(?:^|[\b_\./-])[Tt]est(?!ing) - - -[wheel] -universal = 1 diff --git a/setup.py b/setup.py index dfe9cf5..398b3d9 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- """ Setup script for networkx @@ -10,109 +8,149 @@ from glob import glob import os import sys -if os.path.exists('MANIFEST'): - os.remove('MANIFEST') + +if os.path.exists("MANIFEST"): + os.remove("MANIFEST") from setuptools import setup -if sys.argv[-1] == 'setup.py': +if sys.argv[-1] == "setup.py": print("To install, run 'python setup.py install'") print() -if sys.version_info[:2] < (2, 7): - print("NetworkX requires Python 2.7 or later (%d.%d detected)." % - sys.version_info[:2]) - sys.exit(-1) +if sys.version_info[:2] < (3, 6): + error = ( + "NetworkX 2.5+ requires Python 3.6 or later (%d.%d detected). \n" + "For Python 2.7, please install version 2.2 using: \n" + "$ pip install 'networkx==2.2'" % sys.version_info[:2] + ) + sys.stderr.write(error + "\n") + sys.exit(1) # Write the version information. 
-sys.path.insert(0, 'networkx') +sys.path.insert(0, "networkx") import release + version = release.write_versionfile() sys.path.pop(0) -packages = ["networkx", - "networkx.algorithms", - "networkx.algorithms.assortativity", - "networkx.algorithms.bipartite", - "networkx.algorithms.node_classification", - "networkx.algorithms.centrality", - "networkx.algorithms.community", - "networkx.algorithms.components", - "networkx.algorithms.connectivity", - "networkx.algorithms.coloring", - "networkx.algorithms.flow", - "networkx.algorithms.traversal", - "networkx.algorithms.isomorphism", - "networkx.algorithms.shortest_paths", - "networkx.algorithms.link_analysis", - "networkx.algorithms.operators", - "networkx.algorithms.approximation", - "networkx.algorithms.tree", - "networkx.classes", - "networkx.generators", - "networkx.drawing", - "networkx.linalg", - "networkx.readwrite", - "networkx.readwrite.json_graph", - "networkx.tests", - "networkx.testing", - "networkx.utils"] +packages = [ + "networkx", + "networkx.algorithms", + "networkx.algorithms.assortativity", + "networkx.algorithms.bipartite", + "networkx.algorithms.node_classification", + "networkx.algorithms.centrality", + "networkx.algorithms.community", + "networkx.algorithms.components", + "networkx.algorithms.connectivity", + "networkx.algorithms.coloring", + "networkx.algorithms.flow", + "networkx.algorithms.traversal", + "networkx.algorithms.isomorphism", + "networkx.algorithms.shortest_paths", + "networkx.algorithms.link_analysis", + "networkx.algorithms.operators", + "networkx.algorithms.approximation", + "networkx.algorithms.tree", + "networkx.classes", + "networkx.generators", + "networkx.drawing", + "networkx.linalg", + "networkx.readwrite", + "networkx.readwrite.json_graph", + "networkx.tests", + "networkx.testing", + "networkx.utils", +] -docdirbase = 'share/doc/networkx-%s' % version +docdirbase = "share/doc/networkx-%s" % version # add basic documentation data = [(docdirbase, glob("*.txt"))] # add examples -for d in ['advanced', - 'algorithms', - 'basic', - '3d_drawing', - 'drawing', - 'graph', - 'javascript', - 'jit', - 'pygraphviz', - 'subclass']: - dd = os.path.join(docdirbase, 'examples', d) - pp = os.path.join('examples', d) +for d in [ + ".", + "advanced", + "algorithms", + "basic", + "3d_drawing", + "drawing", + "graph", + "javascript", + "jit", + "pygraphviz", + "subclass", +]: + dd = os.path.join(docdirbase, "examples", d) + pp = os.path.join("examples", d) + data.append((dd, glob(os.path.join(pp, "*.txt")))) data.append((dd, glob(os.path.join(pp, "*.py")))) data.append((dd, glob(os.path.join(pp, "*.bz2")))) data.append((dd, glob(os.path.join(pp, "*.gz")))) data.append((dd, glob(os.path.join(pp, "*.mbox")))) data.append((dd, glob(os.path.join(pp, "*.edgelist")))) +# add js force examples +dd = os.path.join(docdirbase, "examples", "javascript/force") +pp = os.path.join("examples", "javascript/force") +data.append((dd, glob(os.path.join(pp, "*")))) # add the tests package_data = { - 'networkx': ['tests/*.py'], - 'networkx.algorithms': ['tests/*.py'], - 'networkx.algorithms.assortativity': ['tests/*.py'], - 'networkx.algorithms.bipartite': ['tests/*.py'], - 'networkx.algorithms.node_classification': ['tests/*.py'], - 'networkx.algorithms.centrality': ['tests/*.py'], - 'networkx.algorithms.community': ['tests/*.py'], - 'networkx.algorithms.components': ['tests/*.py'], - 'networkx.algorithms.connectivity': ['tests/*.py'], - 'networkx.algorithms.coloring': ['tests/*.py'], - 'networkx.algorithms.flow': ['tests/*.py', 
'tests/*.bz2'], - 'networkx.algorithms.isomorphism': ['tests/*.py', 'tests/*.*99'], - 'networkx.algorithms.link_analysis': ['tests/*.py'], - 'networkx.algorithms.approximation': ['tests/*.py'], - 'networkx.algorithms.operators': ['tests/*.py'], - 'networkx.algorithms.shortest_paths': ['tests/*.py'], - 'networkx.algorithms.traversal': ['tests/*.py'], - 'networkx.algorithms.tree': ['tests/*.py'], - 'networkx.classes': ['tests/*.py'], - 'networkx.generators': ['tests/*.py', 'atlas.dat.gz'], - 'networkx.drawing': ['tests/*.py'], - 'networkx.linalg': ['tests/*.py'], - 'networkx.readwrite': ['tests/*.py'], - 'networkx.readwrite.json_graph': ['tests/*.py'], - 'networkx.testing': ['tests/*.py'], - 'networkx.utils': ['tests/*.py'] + "networkx": ["tests/*.py"], + "networkx.algorithms": ["tests/*.py"], + "networkx.algorithms.assortativity": ["tests/*.py"], + "networkx.algorithms.bipartite": ["tests/*.py"], + "networkx.algorithms.node_classification": ["tests/*.py"], + "networkx.algorithms.centrality": ["tests/*.py"], + "networkx.algorithms.community": ["tests/*.py"], + "networkx.algorithms.components": ["tests/*.py"], + "networkx.algorithms.connectivity": ["tests/*.py"], + "networkx.algorithms.coloring": ["tests/*.py"], + "networkx.algorithms.flow": ["tests/*.py", "tests/*.bz2"], + "networkx.algorithms.isomorphism": ["tests/*.py", "tests/*.*99"], + "networkx.algorithms.link_analysis": ["tests/*.py"], + "networkx.algorithms.approximation": ["tests/*.py"], + "networkx.algorithms.operators": ["tests/*.py"], + "networkx.algorithms.shortest_paths": ["tests/*.py"], + "networkx.algorithms.traversal": ["tests/*.py"], + "networkx.algorithms.tree": ["tests/*.py"], + "networkx.classes": ["tests/*.py"], + "networkx.generators": ["tests/*.py", "atlas.dat.gz"], + "networkx.drawing": ["tests/*.py"], + "networkx.linalg": ["tests/*.py"], + "networkx.readwrite": ["tests/*.py"], + "networkx.readwrite.json_graph": ["tests/*.py"], + "networkx.testing": ["tests/*.py"], + "networkx.utils": ["tests/*.py"], +} + +install_requires = ["decorator>=4.3.0"] +extras_require = { + "all": [ + "numpy", + "scipy", + "pandas", + "matplotlib", + "pygraphviz", + "pydot", + "pyyaml", + "lxml", + "pytest", + ], + "gdal": ["gdal"], + "lxml": ["lxml"], + "matplotlib": ["matplotlib"], + "pytest": ["pytest"], + "numpy": ["numpy"], + "pandas": ["pandas"], + "pydot": ["pydot"], + "pygraphviz": ["pygraphviz"], + "pyyaml": ["pyyaml"], + "scipy": ["scipy"], } -install_requires = ['decorator>=4.1.0'] -extras_require = {'all': ['numpy', 'scipy', 'pandas', 'matplotlib', - 'pygraphviz', 'pydot', 'pyyaml', 'gdal', 'lxml']} +with open("README.rst", "r") as fh: + long_description = fh.read() if __name__ == "__main__": @@ -121,22 +159,20 @@ version=version, maintainer=release.maintainer, maintainer_email=release.maintainer_email, - author=release.authors['Hagberg'][0], - author_email=release.authors['Hagberg'][1], + author=release.authors["Hagberg"][0], + author_email=release.authors["Hagberg"][1], description=release.description, keywords=release.keywords, - long_description=release.long_description, - license=release.license, + long_description=long_description, platforms=release.platforms, url=release.url, - download_url=release.download_url, + project_urls=release.project_urls, classifiers=release.classifiers, packages=packages, data_files=data, package_data=package_data, install_requires=install_requires, extras_require=extras_require, - test_suite='nose.collector', - tests_require=['nose>=0.10.1'], - zip_safe=False + python_requires=">=3.6", + 
zip_safe=False, ) diff --git a/tools/appveyor/install.ps1 b/tools/appveyor/install.ps1 deleted file mode 100755 index fde367b..0000000 --- a/tools/appveyor/install.ps1 +++ /dev/null @@ -1,101 +0,0 @@ -# Sample script to install Python and pip under Windows -# Authors: Olivier Grisel and Kyle Kastner -# License: BSD 3 clause - -$BASE_URL = "https://www.python.org/ftp/python/" -$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" -$GET_PIP_PATH = "C:\get-pip.py" - - -function DownloadPython ($python_version, $platform_suffix) { - $webclient = New-Object System.Net.WebClient - $filename = "python-" + $python_version + $platform_suffix + ".msi" - $url = $BASE_URL + $python_version + "/" + $filename - - $basedir = $pwd.Path + "\" - $filepath = $basedir + $filename - if (Test-Path $filename) { - Write-Host "Reusing" $filepath - return $filepath - } - - # Download and retry up to 3 times in case of network transient errors. - Write-Host "Downloading" $filename "from" $url - $retry_attempts = 2 - for($i=0; $i -lt $retry_attempts; $i++){ - try { - $webclient.DownloadFile($url, $filepath) - break - } - Catch [Exception] { - Start-Sleep 1 - } - } - if (Test-Path $filepath) { - Write-Host "File saved at" $filepath - } else { - # Retry once to get the error message if any at the last try - $webclient.DownloadFile($url, $filepath) - } - return $filepath -} - - -function InstallPython ($python_version, $architecture, $python_home) { - Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home - if (Test-Path $python_home) { - Write-Host $python_home "already exists, skipping." - return $false - } - if ($architecture -eq "32") { - $platform_suffix = "" - } else { - $platform_suffix = ".amd64" - } - $msipath = DownloadPython $python_version $platform_suffix - Write-Host "Installing" $msipath "to" $python_home - $install_log = $python_home + ".log" - $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home" - $uninstall_args = "/qn /x $msipath" - RunCommand "msiexec.exe" $install_args - if (-not(Test-Path $python_home)) { - Write-Host "Python seems to be installed else-where, reinstalling." - RunCommand "msiexec.exe" $uninstall_args - RunCommand "msiexec.exe" $install_args - } - if (Test-Path $python_home) { - Write-Host "Python $python_version ($architecture) installation complete" - } else { - Write-Host "Failed to install Python in $python_home" - Get-Content -Path $install_log - Exit 1 - } -} - -function RunCommand ($command, $command_args) { - Write-Host $command $command_args - Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru -} - - -function InstallPip ($python_home) { - $pip_path = $python_home + "\Scripts\pip.exe" - $python_path = $python_home + "\python.exe" - if (-not(Test-Path $pip_path)) { - Write-Host "Installing pip..." - $webclient = New-Object System.Net.WebClient - $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH) - Write-Host "Executing:" $python_path $GET_PIP_PATH - Start-Process -FilePath "$python_path" -ArgumentList "$GET_PIP_PATH" -Wait -Passthru - } else { - Write-Host "pip already installed." 
- } -} - - -function main () { - InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON - InstallPip $env:PYTHON -} - -main diff --git a/tools/appveyor/run_with_env.cmd b/tools/appveyor/run_with_env.cmd deleted file mode 100755 index 0c70d63..0000000 --- a/tools/appveyor/run_with_env.cmd +++ /dev/null @@ -1,47 +0,0 @@ -:: To build extensions for 64 bit Python 3, we need to configure environment -:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: -:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) -:: -:: To build extensions for 64 bit Python 2, we need to configure environment -:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: -:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) -:: -:: 32 bit builds do not require specific environment configurations. -:: -:: Note: this script needs to be run with the /E:ON and /V:ON flags for the -:: cmd interpreter, at least for (SDK v7.0) -:: -:: More details at: -:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows -:: http://stackoverflow.com/a/13751649/163740 -:: -:: Author: Olivier Grisel -:: License: BSD 3 clause -@ECHO OFF - -SET COMMAND_TO_RUN=%* -SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows - -SET MAJOR_PYTHON_VERSION="%PYTHON_VERSION:~0,1%" -IF %MAJOR_PYTHON_VERSION% == "2" ( - SET WINDOWS_SDK_VERSION="v7.0" -) ELSE IF %MAJOR_PYTHON_VERSION% == "3" ( - SET WINDOWS_SDK_VERSION="v7.1" -) ELSE ( - ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" - EXIT 1 -) - -IF "%PYTHON_ARCH%"=="64" ( - ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture - SET DISTUTILS_USE_SDK=1 - SET MSSdk=1 - "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% - "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release - ECHO Executing: %COMMAND_TO_RUN% - call %COMMAND_TO_RUN% || EXIT 1 -) ELSE ( - ECHO Using default MSVC build environment for 32 bit architecture - ECHO Executing: %COMMAND_TO_RUN% - call %COMMAND_TO_RUN% || EXIT 1 -) diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py deleted file mode 100644 index b4d782f..0000000 --- a/tools/gitwash_dumper.py +++ /dev/null @@ -1,235 +0,0 @@ -#!/usr/bin/env python -''' Checkout gitwash repo into directory and do search replace on name ''' - -from __future__ import (absolute_import, division, print_function) - -import os -from os.path import join as pjoin -import shutil -import sys -import re -import glob -import fnmatch -import tempfile -from subprocess import call -from optparse import OptionParser - -verbose = False - - -def clone_repo(url, branch): - cwd = os.getcwd() - tmpdir = tempfile.mkdtemp() - try: - cmd = 'git clone %s %s' % (url, tmpdir) - call(cmd, shell=True) - os.chdir(tmpdir) - cmd = 'git checkout %s' % branch - call(cmd, shell=True) - except: - shutil.rmtree(tmpdir) - raise - finally: - os.chdir(cwd) - return tmpdir - - -def cp_files(in_path, globs, out_path): - try: - os.makedirs(out_path) - except OSError: - pass - out_fnames = [] - for in_glob in globs: - in_glob_path = pjoin(in_path, in_glob) - for in_fname in glob.glob(in_glob_path): - out_fname = in_fname.replace(in_path, out_path) - pth, _ = os.path.split(out_fname) - if not os.path.isdir(pth): - os.makedirs(pth) - shutil.copyfile(in_fname, out_fname) - out_fnames.append(out_fname) - return out_fnames - - -def filename_search_replace(sr_pairs, filename, backup=False): - ''' Search and replace for 
expressions in files - - ''' - with open(filename, 'rt') as in_fh: - in_txt = in_fh.read(-1) - out_txt = in_txt[:] - for in_exp, out_exp in sr_pairs: - in_exp = re.compile(in_exp) - out_txt = in_exp.sub(out_exp, out_txt) - if in_txt == out_txt: - return False - with open(filename, 'wt') as out_fh: - out_fh.write(out_txt) - if backup: - with open(filename + '.bak', 'wt') as bak_fh: - bak_fh.write(in_txt) - return True - - -def copy_replace(replace_pairs, - repo_path, - out_path, - cp_globs=('*',), - rep_globs=('*',), - renames = ()): - out_fnames = cp_files(repo_path, cp_globs, out_path) - renames = [(re.compile(in_exp), out_exp) for in_exp, out_exp in renames] - fnames = [] - for rep_glob in rep_globs: - fnames += fnmatch.filter(out_fnames, rep_glob) - if verbose: - print('\n'.join(fnames)) - for fname in fnames: - filename_search_replace(replace_pairs, fname, False) - for in_exp, out_exp in renames: - new_fname, n = in_exp.subn(out_exp, fname) - if n: - os.rename(fname, new_fname) - break - - -def make_link_targets(proj_name, - user_name, - repo_name, - known_link_fname, - out_link_fname, - url=None, - ml_url=None): - """ Check and make link targets - - If url is None or ml_url is None, check if there are links present for these - in `known_link_fname`. If not, raise error. The check is: - - Look for a target `proj_name`. - Look for a target `proj_name` + ' mailing list' - - Also, look for a target `proj_name` + 'github'. If this exists, don't write - this target into the new file below. - - If we are writing any of the url, ml_url, or github address, then write new - file with these links, of form: - - .. _`proj_name` - .. _`proj_name`: url - .. _`proj_name` mailing list: url - """ - with open(known_link_fname, 'rt') as link_fh: - link_contents = link_fh.readlines() - have_url = not url is None - have_ml_url = not ml_url is None - have_gh_url = None - for line in link_contents: - if not have_url: - match = re.match(r'..\s+_`%s`:\s+' % proj_name, line) - if match: - have_url = True - if not have_ml_url: - match = re.match(r'..\s+_`%s mailing list`:\s+' % proj_name, line) - if match: - have_ml_url = True - if not have_gh_url: - match = re.match(r'..\s+_`%s github`:\s+' % proj_name, line) - if match: - have_gh_url = True - if not have_url or not have_ml_url: - raise RuntimeError('Need command line or known project ' - 'and / or mailing list URLs') - lines = [] - if not url is None: - lines.append('.. _`%s`: %s\n' % (proj_name, url)) - if not have_gh_url: - gh_url = 'https://github.com/%s/%s\n' % (user_name, repo_name) - lines.append('.. _`%s github`: %s\n' % (proj_name, gh_url)) - if not ml_url is None: - lines.append('.. _`%s mailing list`: %s\n' % (proj_name, ml_url)) - if len(lines) == 0: - # Nothing to do - return - # A neat little header line - lines = ['.. %s\n' % proj_name] + lines - with open(out_link_fname, 'wt') as out_links: - out_links.writelines(lines) - - -USAGE = ''' - -If not set with options, the repository name is the same as the - -If not set with options, the main github user is the same as the -repository name.''' - - -GITWASH_CENTRAL = 'git://github.com/matthew-brett/gitwash.git' -GITWASH_BRANCH = 'master' - - -def main(): - parser = OptionParser() - parser.set_usage(parser.get_usage().strip() + USAGE) - parser.add_option("--repo-name", dest="repo_name", - help="repository name - e.g. 
nitime", - metavar="REPO_NAME") - parser.add_option("--github-user", dest="main_gh_user", - help="github username for main repo - e.g fperez", - metavar="MAIN_GH_USER") - parser.add_option("--gitwash-url", dest="gitwash_url", - help="URL to gitwash repository - default %s" - % GITWASH_CENTRAL, - default=GITWASH_CENTRAL, - metavar="GITWASH_URL") - parser.add_option("--gitwash-branch", dest="gitwash_branch", - help="branch in gitwash repository - default %s" - % GITWASH_BRANCH, - default=GITWASH_BRANCH, - metavar="GITWASH_BRANCH") - parser.add_option("--source-suffix", dest="source_suffix", - help="suffix of ReST source files - default '.rst'", - default='.rst', - metavar="SOURCE_SUFFIX") - parser.add_option("--project-url", dest="project_url", - help="URL for project web pages", - default=None, - metavar="PROJECT_URL") - parser.add_option("--project-ml-url", dest="project_ml_url", - help="URL for project mailing list", - default=None, - metavar="PROJECT_ML_URL") - (options, args) = parser.parse_args() - if len(args) < 2: - parser.print_help() - sys.exit() - out_path, project_name = args - if options.repo_name is None: - options.repo_name = project_name - if options.main_gh_user is None: - options.main_gh_user = options.repo_name - repo_path = clone_repo(options.gitwash_url, options.gitwash_branch) - try: - copy_replace((('PROJECTNAME', project_name), - ('REPONAME', options.repo_name), - ('MAIN_GH_USER', options.main_gh_user)), - repo_path, - out_path, - cp_globs=(pjoin('gitwash', '*'),), - rep_globs=('*.rst',), - renames=(('\.rst$', options.source_suffix),)) - make_link_targets(project_name, - options.main_gh_user, - options.repo_name, - pjoin(out_path, 'gitwash', 'known_projects.inc'), - pjoin(out_path, 'gitwash', 'this_project.inc'), - options.project_url, - options.project_ml_url) - finally: - shutil.rmtree(repo_path) - - -if __name__ == '__main__': - main() diff --git a/tools/travis/before_install.sh b/tools/travis/before_install.sh deleted file mode 100755 index a03aba8..0000000 --- a/tools/travis/before_install.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -set -e - -section () { - echo -en "travis_fold:start:$1\r" -} - -section_end () { - echo -en "travis_fold:end:$1\r" -} - -export -f section -export -f section_end - -if [[ "${MINIMUM_REQUIREMENTS}" == 1 ]]; then - sed -i 's/>=/==/g' requirements/default.txt - sed -i 's/>=/==/g' requirements/extras.txt - sed -i 's/>=/==/g' requirements/test.txt - sed -i 's/>=/==/g' requirements/doc.txt -fi - -set +e diff --git a/tools/travis/build_docs.sh b/tools/travis/build_docs.sh deleted file mode 100755 index eb48990..0000000 --- a/tools/travis/build_docs.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e - -pip install --retries 3 -q -r requirements/doc.txt -export SPHINXCACHE=$HOME/.cache/sphinx -cd doc -make html -make latexpdf -cd .. 
- -set +e diff --git a/tools/travis/deploy-key.enc b/tools/travis/deploy-key.enc deleted file mode 100644 index 6b6d64d..0000000 --- a/tools/travis/deploy-key.enc +++ /dev/null @@ -1,4 +0,0 @@ -_'NXNg ƅՄ}Lb˶=>ɵf\Zc{ ǪNұ$RKcä5#c#0`K.QqD'Uڈ" _rQIRnhM ׀}0Qs\:i)&)$f(o).# C -4 I>ݗڭ$V2uoD0`$* - Fs!}_D,X0VH, -EH80.Y<~m&ba >]gDŽlUXZI؈\nH^s&H, : f@hR2h63 \ No newline at end of file diff --git a/tools/travis/deploy_docs.sh b/tools/travis/deploy_docs.sh deleted file mode 100755 index 9da4e9f..0000000 --- a/tools/travis/deploy_docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env bash - -set -e - -section "Deploy docs" -if [[ $TRAVIS_PULL_REQUEST == false && $TRAVIS_BRANCH == "master" && $BUILD_DOCS == 1 && $DEPLOY_DOCS == 1 ]] -then - # "A deploy key is an SSH key that is stored on your server and grants access to a single GitHub repository. - # This key is attached directly to the repository instead of to a personal user account." - # -- https://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys - # - # $ ssh-keygen -t ed25519 -C "Networkx Travis Bot" -f deploy-key - # Your identification has been saved in deploy-key. - # Your public key has been saved in deploy-key.pub. - # - # Add the deploy-key.pub contents to your repo's settings under Settings -> Deploy Keys. - # Encrypt the private deploy-key for Travis-CI and commit it to the repo - # - # $ gem install travis - # $ travis login - # $ travis encrypt-file deploy-key - # storing result as deploy-key.enc - # - # The ``travis encrypt-file deploy-key`` command provides the ``openssl`` command below. - - # Decrypt the deploy-key with the Travis-CI key - openssl aes-256-cbc -K $encrypted_64abb7a9cf51_key -iv $encrypted_64abb7a9cf51_iv -in tools/travis/deploy-key.enc -out deploy-key -d - chmod 600 deploy-key - eval `ssh-agent -s` - ssh-add deploy-key - - # Push the latest docs to the networkx/documentation repo (gh-pages branch) - GH_REF=git@github.com:networkx/documentation.git - echo "-- pushing docs --" - ( - git config --global user.email "travis@travis-ci.com" - git config --global user.name "NetworkX Travis Bot" - - cd doc - git clone --quiet --branch=gh-pages --depth=1 ${GH_REF} ghpages_build - cd ghpages_build - - # Overwrite previous commit - git rm -r latest - cp -a ../build/html latest - cp -a ../build/latex/networkx_reference.pdf latest/_downloads/. - git add latest - git commit -m "Deploy GitHub Pages" - - git push --force --quiet "${GH_REF}" gh-pages > /dev/null 2>&1 - cd ../.. 
- ) -else - echo "-- will only push docs from master --" -fi -section_end "Deploy docs" - -set +e diff --git a/tools/travis/linux_install.sh b/tools/travis/linux_install.sh deleted file mode 100755 index f3e41fe..0000000 --- a/tools/travis/linux_install.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash -set -e - -# create new empty venv -virtualenv -p python ~/venv -source ~/venv/bin/activate - -if [[ "${OPTIONAL_DEPS}" == 1 ]]; then - - # needed to build Python binding for GDAL - export CPLUS_INCLUDE_PATH=/usr/include/gdal - export C_INCLUDE_PATH=/usr/include/gdal - - # needed for view_graphviz and default_opener - DIR=~/.local/share/applications - mkdir -p $DIR - FILE=$DIR/png.desktop - cat <$FILE -[Desktop Entry] -Name=png -MimeType=image/png; -Exec=/usr/bin/file -Type=Application -Terminal=true -NoDisplay=true -EOF - - xdg-mime default png.desktop image/png - -fi - -set +e diff --git a/tools/travis/osx_install.sh b/tools/travis/osx_install.sh deleted file mode 100755 index 04df32a..0000000 --- a/tools/travis/osx_install.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash -set -ex - -# set up Miniconda on OSX -if [[ "${OSX_PKG_ENV}" == miniconda ]]; then - wget https://repo.continuum.io/miniconda/Miniconda3-4.3.21-MacOSX-x86_64.sh -O miniconda.sh - bash miniconda.sh -b -p $HOME/miniconda - export PATH="$HOME/miniconda/bin:$PATH" - hash -r - conda config --set always_yes yes --set changeps1 no - conda update -q conda - # Useful for debugging any issues with conda - conda info -a - - conda create -q -n testenv python=$TRAVIS_PYTHON_VERSION decorator - source activate testenv -else - # set up Python and virtualenv on OSX - git clone https://github.com/matthew-brett/multibuild - source multibuild/osx_utils.sh - get_macpython_environment $TRAVIS_PYTHON_VERSION venv -fi - -if [[ "${OPTIONAL_DEPS}" == 1 ]]; then - if [[ "${OSX_PKG_ENV}" == miniconda ]]; then - conda install graphviz=2.38 - export PKG_CONFIG_PATH=/Users/travis/miniconda/envs/testenv/lib/pkgconfig - else - brew install graphviz - fi - dot -V - sed -i "" 's/^gdal.*/gdal==1.11.2/' requirements/extras.txt -fi - -set +ex diff --git a/tools/travis/script.sh b/tools/travis/script.sh deleted file mode 100755 index 108ffe8..0000000 --- a/tools/travis/script.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash - -set -e - -section "Script section" - -export NX_SOURCE=$PWD -export NX_INSTALL=$(pip show networkx | grep Location | awk '{print $2"/networkx"}') - -# nose 1.3.0 does not tell coverage to only cover the requested -# package (except during the report). So to restrict coverage, we must -# inform coverage through the .coveragerc file. -cp .coveragerc $NX_INSTALL -cp setup.cfg $NX_INSTALL - -# Move to new directory so that networkx is not imported from repository. -# Why? Because we want the tests to make sure that NetworkX was installed -# correctly. Example: setup.py might not have included some submodules. -# Testing from the git repository cannot catch a mistake like that. -# -# Export current directory for logs. -cd $NX_INSTALL -printenv PWD - -# Run nosetests. -if [[ "${REPORT_COVERAGE}" == 1 ]]; then - nosetests --verbosity=2 --with-ignore-docstrings --with-coverage --cover-package=networkx - cp -a .coverage $NX_SOURCE -else - nosetests --verbosity=2 --with-ignore-docstrings -fi - -cd $NX_SOURCE - -section_end "Script section" - -set +e From 76cc2e475d7c128a1d855917535def3fbfdb6d5c Mon Sep 17 00:00:00 2001 From: Brad Cowie Date: Wed, 1 Dec 2021 16:20:33 +1300 Subject: [PATCH 2/2] Migrate to github actions. 
--- .github/workflows/release-debian.yml | 45 ++++++++++++++++++++++++++++ .github/workflows/test-debian.yml | 30 +++++++++++++++++++ 2 files changed, 75 insertions(+) create mode 100644 .github/workflows/release-debian.yml create mode 100644 .github/workflows/test-debian.yml diff --git a/.github/workflows/release-debian.yml b/.github/workflows/release-debian.yml new file mode 100644 index 0000000..503318a --- /dev/null +++ b/.github/workflows/release-debian.yml @@ -0,0 +1,45 @@ +name: Build debian package + +on: + release: + types: + - published + +env: + DEBIAN_FRONTEND: noninteractive + +jobs: + debian-package: + name: "Build debian package" + runs-on: ubuntu-latest + environment: + name: "release" + container: + image: "debian:latest" + steps: + - name: Install dependencies + run: | + apt-get update + apt-get -y upgrade + apt-get -y install devscripts dpkg-dev debhelper equivs + - name: Checkout repo + uses: actions/checkout@v2 + - name: Bump version + run: | + export DEBEMAIL='maintainers@faucet.nz' + export DEBFULLNAME='Faucet Maintainers' + debchange --newversion ${{ github.event.release.tag_name }} -b "New upstream release" + - name: Build package + run: | + mk-build-deps -i -r -t 'apt-get -f -y --force-yes' + dpkg-buildpackage -b -us -uc -rfakeroot + - name: Store package + run: | + mkdir -p packages/all + cp ../*.deb packages/all + - name: Publish package on packagecloud + uses: faucetsdn/action-packagecloud-upload-debian-packages@v1 + with: + path: packages/ + repo: faucetsdn/faucet-test + token: ${{ secrets.PACKAGECLOUD_TOKEN }} diff --git a/.github/workflows/test-debian.yml b/.github/workflows/test-debian.yml new file mode 100644 index 0000000..4b4570f --- /dev/null +++ b/.github/workflows/test-debian.yml @@ -0,0 +1,30 @@ +name: Test building debian package + +on: pull_request + +env: + DEBIAN_FRONTEND: noninteractive + +jobs: + debian-package: + name: "Build debian package" + runs-on: ubuntu-latest + container: + image: "debian:latest" + steps: + - name: Install dependencies + run: | + apt-get update + apt-get -y upgrade + apt-get -y install devscripts dpkg-dev debhelper equivs + - name: Checkout repo + uses: actions/checkout@v2 + - name: Bump version + run: | + export DEBEMAIL='maintainers@faucet.nz' + export DEBFULLNAME='Faucet Maintainers' + debchange -b "Test release" + - name: Build package + run: | + mk-build-deps -i -r -t 'apt-get -f -y --force-yes' + dpkg-buildpackage -b -us -uc -rfakeroot
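Once the resulting package is installed, a quick smoke test along these lines (illustrative only, not part of the patch) confirms that the import works and that seeded generators reproduce, which is the behavior the migrated test suite leans on::

    import networkx as nx

    print(nx.__version__)  # expect 2.5 after this import
    G = nx.gnp_random_graph(10, 0.4, seed=42)
    H = nx.gnp_random_graph(10, 0.4, seed=42)
    assert sorted(G.edges()) == sorted(H.edges())  # seeded runs match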