diff --git a/.circleci/config.yml b/.circleci/config.yml index b7dfef39c00..b9277b86471 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -44,6 +44,8 @@ jobs: pip install git+https://github.com/rapidsai/cugraph.git#subdirectory=python/nx-cugraph --no-deps # Development version of GraphBLAS backend pip install git+https://github.com/python-graphblas/graphblas-algorithms.git@main --no-deps + # Development version of nx-parallel backend + pip install git+https://github.com/networkx/nx-parallel.git@main --no-deps pip list - save_cache: diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 64b36ad820c..0dd46935381 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -12,3 +12,4 @@ be23fa0e422b51f4526828cb19b8105c89e5dcbb 5c0b11afb4c0882a070d522ef3fa41482ba935d3 5fcf01b9a43a097c4f579486023d1279b2b88619 7297ae8a37dd3356b64d383cb0c55735a6364bcc +3c0f096f66ab352cfaf8dbe2d5fc5731cbbc4338 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 76c0f80b78a..ec68dc5ee50 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,6 +4,10 @@ updates: directory: "/" schedule: interval: "monthly" + groups: + actions: + patterns: + - "*" labels: - "type: Maintenance" - package-ecosystem: "pip" diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 00000000000..4614bc8141c --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,117 @@ +### Inspired from https://github.com/scikit-image/scikit-image/blob/main/.github/workflows/benchmarks.yml + +name: Benchmark PR +on: + pull_request: + types: [labeled, synchronize] + +jobs: + ## This code below is to make sure a new commit on a PR with the label + ## retriggers the benchmark, otherwise the label needs to be removed and + ## applied again to run the benchmark. 
+ check: + runs-on: ubuntu-latest + outputs: + result: ${{ steps.check.outputs.result }} + steps: + - uses: actions/checkout@v4 + + - uses: actions/github-script@v7 + name: Check for benchmark label and new commit + id: check + with: + script: | + const { owner, repo, number: pull_number } = context.issue; + + // Check for label + const { data: pullRequest } = await github.rest.pulls.get({ owner, repo, pull_number }); + if (!pullRequest.labels.some(({ name }) => name === 'run:benchmark')) { + return false; + } + + // Check for repo name and organization name + const { data: repository } = await github.rest.repos.get({ owner, repo }); + return repository.name === 'networkx' && repository.owner.login === 'networkx'; + benchmark: + needs: check + if: ${{ needs.check.outputs.result == 'true' }} + # if: contains(github.event.pull_request.labels.*.name, 'run:benchmark') || ${{ github.event.label.name == 'run:benchmark' }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + + - name: Setup some dependencies + shell: bash -l {0} + run: | + sudo apt-get update -y && sudo apt-get install -y ccache + # Make gcc/gxx symlinks first in path + sudo /usr/sbin/update-ccache-symlinks + echo "/usr/lib/ccache" >> $GITHUB_PATH + + - name: "Prepare ccache" + id: prepare-ccache + shell: bash -l {0} + run: | + echo "key=benchmark-$RUNNER_OS" >> $GITHUB_OUTPUT + echo "timestamp=$(date +%Y%m%d-%H%M%S)" >> $GITHUB_OUTPUT + ccache -p + ccache -z + + - name: "Restore ccache" + uses: actions/cache@v3 + with: + path: .ccache + key: ccache-${{ secrets.CACHE_VERSION }}-${{ steps.prepare-ccache.outputs.key }}-${{ steps.prepare-ccache.outputs.timestamp }} + restore-keys: | + ccache-${{ secrets.CACHE_VERSION }}-${{ steps.prepare-ccache.outputs.key }}- + + - name: Install packages + run: | + pip install --upgrade pip + pip install -r requirements/default.txt + pip install . 
+ pip list + + - name: Run benchmarks + shell: bash -l {0} + id: benchmark + env: + ASV_FACTOR: 1.5 + ASV_SKIP_SLOW: 1 + run: | + set -x + python -m pip install asv virtualenv packaging + cd benchmarks/ + + # ID this runner + python -m asv machine --yes --conf asv.conf.json + + echo "Baseline: ${{ github.event.pull_request.base.sha }} (${{ github.event.pull_request.base.label }})" + + echo "Contender: ${GITHUB_SHA} (${{ github.event.pull_request.head.label }})" + + # Run benchmarks for current commit against base + ASV_OPTIONS="--split --show-stderr --factor $ASV_FACTOR --conf asv.conf.json" + python -m asv continuous $ASV_OPTIONS ${{ github.event.pull_request.base.sha }} ${GITHUB_SHA} \ + | sed "/Traceback \|failed$\|PERFORMANCE DECREASED/ s/^/::error::/" \ + | tee benchmarks.log + + # Report and export results for subsequent steps + if grep "Traceback \|failed\|PERFORMANCE DECREASED" benchmarks.log > /dev/null ; then + exit 1 + fi + + - name: "Check ccache performance" + shell: bash -l {0} + run: ccache -s + if: always() + + - uses: actions/upload-artifact@v4 + if: always() + with: + name: asv-benchmark-results-${{ runner.os }} + path: benchmarks.log diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 5b75cfbcfdc..a7ffdea017b 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index d26df0bed6e..a45e6255bd2 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.11" @@ -38,6 +38,8 @@ jobs: pip install 
git+https://github.com/rapidsai/cugraph.git#subdirectory=python/nx-cugraph --no-deps # Development version of GraphBLAS backend pip install git+https://github.com/python-graphblas/graphblas-algorithms.git@main --no-deps + # Development version of nx-parallel backend + pip install git+https://github.com/networkx/nx-parallel.git@main --no-deps pip list # To set up a cross-repository deploy key: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 7f43b045f92..2634e5c1258 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index a025509cb0a..156841681a5 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/nightly-release-test.yml b/.github/workflows/nightly-release-test.yml index f9f1a8b7f25..94d9c38e689 100644 --- a/.github/workflows/nightly-release-test.yml +++ b/.github/workflows/nightly-release-test.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 43084700c25..ee854c0045f 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 
"3.10" - name: Install Python requirements @@ -27,7 +27,7 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/networkx-*.whl - name: Upload nighlty wheel - uses: scientific-python/upload-nightly-action@5fb764c5bce1ac2297084c0f7161b1919f17c74f # 0.2.0 + uses: scientific-python/upload-nightly-action@b67d7fcc0396e1128a474d1ab2b48aa94680f9fc # 0.5.0 with: anaconda_nightly_upload_token: ${{ secrets.ANACONDA_NIGHTLY }} artifacts_path: dist/ diff --git a/.github/workflows/pytest-randomly.yml b/.github/workflows/pytest-randomly.yml index 162ad4189a2..683e630ad88 100644 --- a/.github/workflows/pytest-randomly.yml +++ b/.github/workflows/pytest-randomly.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index accb03ae86c..91724b5b8e4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 name: Install Python with: python-version: "3.11" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 80d23aad3be..5d6b0113aff 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -40,7 +40,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -66,7 +66,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 
with: python-version: ${{ matrix.python-version }} @@ -90,7 +90,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -150,7 +150,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d27674aced6..a0423daf37a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,26 +2,19 @@ # pre-commit install repos: - - repo: https://github.com/psf/black - rev: 23.9.1 - hooks: - - id: black - - repo: https://github.com/adamchainz/blacken-docs - rev: 1.16.0 - hooks: - - id: blacken-docs - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.3 + rev: v3.1.0 hooks: - id: prettier files: \.(html|md|toml|yml|yaml) args: [--prose-wrap=preserve] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.291 + rev: v0.1.8 hooks: - id: ruff args: - --fix + - id: ruff-format - repo: local hooks: - id: generate_requirements.py diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7acff214fa6..623af3ee730 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -78,8 +78,8 @@ Development Workflow # Test your installation pytest --pyargs networkx - * Finally, we recommend you use a pre-commit hook, which runs black when - you type ``git commit``:: + * Finally, we recommend you install pre-commit which checks + that your code matches formatting guidelines:: pre-commit install diff --git a/LICENSE.txt b/LICENSE.txt index 42b6f17a65e..100b4bffb00 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -2,7 +2,7 @@ NetworkX is distributed with the 3-clause BSD license. 
:: - Copyright (C) 2004-2023, NetworkX Developers + Copyright (C) 2004-2024, NetworkX Developers Aric Hagberg Dan Schult Pieter Swart diff --git a/README.rst b/README.rst index 11bd027f974..9e4be00835b 100644 --- a/README.rst +++ b/README.rst @@ -67,7 +67,7 @@ License Released under the 3-Clause BSD license (see `LICENSE.txt`):: - Copyright (C) 2004-2023 NetworkX Developers + Copyright (C) 2004-2024 NetworkX Developers Aric Hagberg Dan Schult Pieter Swart diff --git a/benchmarks/benchmarks/benchmark_neighbors.py b/benchmarks/benchmarks/benchmark_neighbors.py new file mode 100644 index 00000000000..e821f3cad95 --- /dev/null +++ b/benchmarks/benchmarks/benchmark_neighbors.py @@ -0,0 +1,51 @@ +import networkx as nx + + +# NOTE: explicit set construction in benchmarks is required for meaningful +# comparisons due to change in return type from generator -> set. See gh-7244. +class NonNeighbors: + param_names = ["num_nodes"] + params = [10, 100, 1000] + + def setup(self, num_nodes): + self.star_graph = nx.star_graph(num_nodes) + self.complete_graph = nx.complete_graph(num_nodes) + self.path_graph = nx.path_graph(num_nodes) + + def time_star_center(self, num_nodes): + set(nx.non_neighbors(self.star_graph, 0)) + + def time_star_rim(self, num_nodes): + set(nx.non_neighbors(self.star_graph, 5)) + + def time_complete(self, num_nodes): + set(nx.non_neighbors(self.complete_graph, 0)) + + def time_path_first(self, num_nodes): + set(nx.non_neighbors(self.path_graph, 0)) + + def time_path_last(self, num_nodes): + set(nx.non_neighbors(self.path_graph, num_nodes - 1)) + + def time_path_center(self, num_nodes): + set(nx.non_neighbors(self.path_graph, num_nodes // 2)) + + +# NOTE: explicit set construction in benchmarks is required for meaningful +# comparisons due to change in return type from generator -> set. See gh-7244. 
+class CommonNeighbors: + param_names = ["num_nodes"] + params = [10, 100, 1000] + + def setup(self, num_nodes): + self.star_graph = nx.star_graph(num_nodes) + self.complete_graph = nx.complete_graph(num_nodes) + + def time_star_center_rim(self, num_nodes): + set(nx.common_neighbors(self.star_graph, 0, num_nodes // 2)) + + def time_star_rim_rim(self, num_nodes): + set(nx.common_neighbors(self.star_graph, 4, 5)) + + def time_complete(self, num_nodes): + set(nx.common_neighbors(self.complete_graph, 0, num_nodes // 2)) diff --git a/doc/conf.py b/doc/conf.py index 1d8ea398768..cdd04b9fe52 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -22,10 +22,10 @@ "sphinx.ext.todo", "sphinx.ext.viewcode", "sphinx_gallery.gen_gallery", - "nb2plots", "texext", "numpydoc", "matplotlib.sphinxext.plot_directive", + "myst_nb", ] # https://github.com/sphinx-gallery/sphinx-gallery @@ -54,6 +54,7 @@ "image_scrapers": ("matplotlib",), "matplotlib_animations": True, "plot_gallery": "True", + "reference_url": {"sphinx_gallery": None}, } # Add pygraphviz png scraper, if available try: @@ -77,8 +78,9 @@ # The encoding of source files. source_encoding = "utf-8" -# Do not include release announcement template -exclude_patterns = ["release/release_template.rst"] +# Items to exclude during source collection, including release announcement +# template, build outputs, and READMEs (markdown only) +exclude_patterns = ["release/release_template.rst", "build/*", "README.md"] # General substitutions. project = "NetworkX" @@ -248,14 +250,19 @@ plot_pre_code = """ import networkx as nx +import numpy as np +np.random.seed(42) """ -plot_formats = [("png", 100), "pdf"] +plot_formats = [("png", 100)] def setup(app): app.add_css_file("custom.css") app.add_js_file("copybutton.js") + # Workaround to prevent duplicate file warnings from sphinx w/ myst-nb. 
+ # See executablebooks/MyST-NB#363 + app.registry.source_suffix.pop(".ipynb") # Monkeypatch numpydoc to show "Backends" section diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst index a2c6700cec0..a54295865ca 100644 --- a/doc/developer/deprecations.rst +++ b/doc/developer/deprecations.rst @@ -43,13 +43,8 @@ Make sure to review ``networkx/conftest.py`` after removing deprecated code. Version 3.3 ~~~~~~~~~~~ -* Remove pydot functionality ``drawing/nx_pydot.py``, if pydot is still not being maintained. See #5723 * Remove the ``forest_str`` function from ``readwrite/text.py``. Replace existing usages with ``write_network_text``. -* Change ``single_target_shortest_path_length`` in ``algorithms/shortest_path/unweighted.py`` - to return a dict. See #6527 -* Change ``shortest_path`` in ``algorithms/shortest_path/generic.py`` - to return a iterator. See #6527 Version 3.4 ~~~~~~~~~~~ @@ -68,4 +63,13 @@ Version 3.5 ~~~~~~~~~~~ * Remove ``all_triplets`` from ``algorithms/triads.py`` * Remove ``random_triad`` from ``algorithms/triad.py``. +* Remove ``d_separated`` from ``algorithms/d_separation.py``. +* Remove ``minimal_d_separator`` from ``algorithms/d_separation.py``. * Add `not_implemented_for("multigraph”)` decorator to ``k_core``, ``k_shell``, ``k_crust`` and ``k_corona`` functions. +* Change ``single_target_shortest_path_length`` in ``algorithms/shortest_path/unweighted.py`` + to return a dict. See #6527 +* Change ``shortest_path`` in ``algorithms/shortest_path/generic.py`` + to return a iterator. See #6527 +* Remove ``total_spanning_tree_weight`` from ``linalg/laplacianmatrix.py`` +* Remove ``create`` keyword argument from ``nonisomorphic_trees`` in + ``generators/nonisomorphic_trees``. 
diff --git a/doc/developer/nxeps/nxep-0003.rst b/doc/developer/nxeps/nxep-0003.rst index f3971ef3bbf..94916e117f5 100644 --- a/doc/developer/nxeps/nxep-0003.rst +++ b/doc/developer/nxeps/nxep-0003.rst @@ -290,7 +290,7 @@ Related Work This proposal is based on ideas and discussions from #3036 and #1393. -This proposal does not delve into backends using the `_dispatch` functionality +This proposal does not delve into backends using the `_dispatchable` functionality and whether we should be providing or allowing control over the builder functions for backend libraries. This is a potentially helpful discussion but is beyond the scope of this NXEP. diff --git a/doc/developer/projects.rst b/doc/developer/projects.rst index 623a9986b8b..fbd3b4e2c51 100644 --- a/doc/developer/projects.rst +++ b/doc/developer/projects.rst @@ -89,7 +89,7 @@ Centrality Atlas of network centrality measures. Centrality is a central concept in network science and has many applications across domains. NetworkX provides many functions for measuring - various types of :doc:`network centrality`. + various types of :doc:`network centrality`. The individual centrality functions are typically well-described by their docstrings (though there's always room for improvement!); however, there currently is no big-picture overview of centrality. diff --git a/doc/reference/algorithms/assortativity.rst b/doc/reference/algorithms/assortativity.rst index 8ec61677661..a6afe231e6a 100644 --- a/doc/reference/algorithms/assortativity.rst +++ b/doc/reference/algorithms/assortativity.rst @@ -6,6 +6,8 @@ Assortativity .. autosummary:: :toctree: generated/ +.. _networkx.algorithms.assortativity.correlation: + Assortativity ------------- .. autosummary:: @@ -16,6 +18,8 @@ Assortativity numeric_assortativity_coefficient degree_pearson_correlation_coefficient +.. _networkx.algorithms.assortativity.neighbor_degree: + Average neighbor degree ----------------------- .. 
autosummary:: @@ -23,6 +27,7 @@ Average neighbor degree average_neighbor_degree +.. _networkx.algorithms.assortativity.connectivity: Average degree connectivity --------------------------- @@ -31,6 +36,7 @@ Average degree connectivity average_degree_connectivity +.. _networkx.algorithms.assortativity.mixing: Mixing ------ @@ -43,6 +49,8 @@ Mixing degree_mixing_dict mixing_dict +.. _networkx.algorithms.assortativity.pairs: + Pairs ----- .. autosummary:: diff --git a/doc/reference/algorithms/broadcasting.rst b/doc/reference/algorithms/broadcasting.rst new file mode 100644 index 00000000000..3c2164982ff --- /dev/null +++ b/doc/reference/algorithms/broadcasting.rst @@ -0,0 +1,10 @@ +************ +Broadcasting +************ + +.. automodule:: networkx.algorithms.broadcasting +.. autosummary:: + :toctree: generated/ + + tree_broadcast_center + tree_broadcast_time diff --git a/doc/reference/algorithms/centrality.rst b/doc/reference/algorithms/centrality.rst index b7d4001746f..b0ea8437530 100644 --- a/doc/reference/algorithms/centrality.rst +++ b/doc/reference/algorithms/centrality.rst @@ -4,6 +4,8 @@ Centrality .. automodule:: networkx.algorithms.centrality +.. _networkx.algorithms.centrality.degree_alg: + Degree ------ .. autosummary:: @@ -13,6 +15,9 @@ Degree in_degree_centrality out_degree_centrality +.. _networkx.algorithms.centrality.eigenvector: +.. _networkx.algorithms.centrality.katz: + Eigenvector ----------- .. autosummary:: @@ -23,6 +28,8 @@ Eigenvector katz_centrality katz_centrality_numpy +.. _networkx.algorithms.centrality.closeness: + Closeness --------- .. autosummary:: @@ -31,6 +38,8 @@ Closeness closeness_centrality incremental_closeness_centrality +.. _networkx.algorithms.centrality.current_flow_closeness: + Current Flow Closeness ---------------------- .. autosummary:: @@ -39,6 +48,9 @@ Current Flow Closeness current_flow_closeness_centrality information_centrality +.. _networkx.algorithms.centrality.betweenness: +.. 
_networkx.algorithms.centrality.betweenness_subset: + (Shortest Path) Betweenness --------------------------- .. autosummary:: @@ -49,6 +61,8 @@ Current Flow Closeness edge_betweenness_centrality edge_betweenness_centrality_subset +.. _networkx.algorithms.centrality.current_flow_betweenness: +.. _networkx.algorithms.centrality.current_flow_betweenness_subset: Current Flow Betweenness ------------------------ @@ -68,6 +82,8 @@ Communicability Betweenness communicability_betweenness_centrality +.. _networkx.algorithms.centrality.group: + Group Centrality ---------------- .. autosummary:: @@ -80,6 +96,8 @@ Group Centrality group_out_degree_centrality prominent_group +.. _networkx.algorithms.centrality.load: + Load ---- .. autosummary:: @@ -88,6 +106,8 @@ Load load_centrality edge_load_centrality +.. _networkx.algorithms.centrality.subgraph_alg: + Subgraph -------- .. autosummary:: @@ -97,6 +117,8 @@ Subgraph subgraph_centrality_exp estrada_index +.. _networkx.algorithms.centrality.harmonic: + Harmonic Centrality ------------------- .. autosummary:: @@ -111,6 +133,8 @@ Dispersion dispersion +.. _networkx.algorithms.centrality.reaching: + Reaching -------- .. autosummary:: @@ -119,6 +143,8 @@ Reaching local_reaching_centrality global_reaching_centrality +.. _networkx.algorithms.centrality.percolation: + Percolation ----------- .. autosummary:: @@ -126,6 +152,8 @@ Percolation percolation_centrality +.. _networkx.algorithms.centrality.second_order: + Second Order Centrality ----------------------- .. autosummary:: @@ -133,6 +161,8 @@ Second Order Centrality second_order_centrality +.. _networkx.algorithms.centrality.trophic: + Trophic ------- .. autosummary:: @@ -142,6 +172,8 @@ Trophic trophic_differences trophic_incoherence_parameter +.. _networkx.algorithms.centrality.voterank_alg: + VoteRank -------- .. autosummary:: @@ -149,6 +181,8 @@ VoteRank voterank +.. _networkx.algorithms.centrality.laplacian: + Laplacian --------- .. 
autosummary:: diff --git a/doc/reference/algorithms/coloring.rst b/doc/reference/algorithms/coloring.rst index a0e68ce769f..5c21770d528 100644 --- a/doc/reference/algorithms/coloring.rst +++ b/doc/reference/algorithms/coloring.rst @@ -1,3 +1,6 @@ +.. _networkx.algorithms.coloring.greedy_coloring: +.. _networkx.algorithms.coloring.equitable_coloring: + ******** Coloring ******** diff --git a/doc/reference/algorithms/community.rst b/doc/reference/algorithms/community.rst index 55f7ad61ab3..0c62bf5bb13 100644 --- a/doc/reference/algorithms/community.rst +++ b/doc/reference/algorithms/community.rst @@ -14,6 +14,15 @@ Bipartitions kernighan_lin_bisection +Divisive Communities +-------------------- +.. automodule:: networkx.algorithms.community.divisive +.. autosummary:: + :toctree: generated/ + + edge_betweenness_partition + edge_current_flow_betweenness_partition + K-Clique -------- .. automodule:: networkx.algorithms.community.kclique @@ -47,6 +56,7 @@ Label propagation asyn_lpa_communities label_propagation_communities + fast_label_propagation_communities Louvain Community Detection --------------------------- diff --git a/doc/reference/algorithms/component.rst b/doc/reference/algorithms/component.rst index 23aa4c5bcf2..a0b25d1de6e 100644 --- a/doc/reference/algorithms/component.rst +++ b/doc/reference/algorithms/component.rst @@ -3,6 +3,8 @@ Components ********** .. automodule:: networkx.algorithms.components +.. _networkx.algorithms.components.connected: + Connectivity ------------ .. autosummary:: @@ -13,6 +15,8 @@ Connectivity connected_components node_connected_component +.. _networkx.algorithms.components.strongly_connected: + Strong connectivity ------------------- .. autosummary:: @@ -25,6 +29,8 @@ Strong connectivity kosaraju_strongly_connected_components condensation +.. _networkx.algorithms.components.weakly_connected: + Weak connectivity ----------------- .. 
autosummary:: @@ -34,6 +40,8 @@ Weak connectivity number_weakly_connected_components weakly_connected_components +.. _networkx.algorithms.components.attracting: + Attracting components --------------------- .. autosummary:: @@ -43,6 +51,8 @@ Attracting components number_attracting_components attracting_components +.. _networkx.algorithms.components.biconnected: + Biconnected components ---------------------- .. autosummary:: @@ -53,6 +63,8 @@ Biconnected components biconnected_component_edges articulation_points +.. _networkx.algorithms.components.semiconnected: + Semiconnectedness ----------------- .. autosummary:: diff --git a/doc/reference/algorithms/d_separation.rst b/doc/reference/algorithms/d_separation.rst index 053471b575b..09d34965e6a 100644 --- a/doc/reference/algorithms/d_separation.rst +++ b/doc/reference/algorithms/d_separation.rst @@ -6,6 +6,6 @@ D-Separation .. autosummary:: :toctree: generated/ - d_separated + is_d_separator is_minimal_d_separator - minimal_d_separator + find_minimal_d_separator diff --git a/doc/reference/algorithms/dag.rst b/doc/reference/algorithms/dag.rst index f1cadf8afef..8fb911788f3 100644 --- a/doc/reference/algorithms/dag.rst +++ b/doc/reference/algorithms/dag.rst @@ -21,3 +21,4 @@ Directed Acyclic Graphs dag_longest_path dag_longest_path_length dag_to_branching + compute_v_structures diff --git a/doc/reference/algorithms/distance_measures.rst b/doc/reference/algorithms/distance_measures.rst index cd54a4afa49..172e5b6ddb8 100644 --- a/doc/reference/algorithms/distance_measures.rst +++ b/doc/reference/algorithms/distance_measures.rst @@ -10,6 +10,7 @@ Distance Measures center diameter eccentricity + effective_graph_resistance kemeny_constant periphery radius diff --git a/doc/reference/algorithms/flow.rst b/doc/reference/algorithms/flow.rst index 0888f052c7e..8fe2936fee7 100644 --- a/doc/reference/algorithms/flow.rst +++ b/doc/reference/algorithms/flow.rst @@ -4,6 +4,7 @@ Flows .. automodule:: networkx.algorithms.flow +.. 
_networkx.algorithms.flow.maxflow: Maximum Flow ------------ @@ -15,6 +16,7 @@ Maximum Flow minimum_cut minimum_cut_value +.. _networkx.algorithms.flow.edmondskarp: Edmonds-Karp ------------ @@ -23,6 +25,7 @@ Edmonds-Karp edmonds_karp +.. _networkx.algorithms.flow.shortestaugmentingpath: Shortest Augmenting Path ------------------------ @@ -31,6 +34,7 @@ Shortest Augmenting Path shortest_augmenting_path +.. _networkx.algorithms.flow.preflowpush: Preflow-Push ------------ @@ -39,6 +43,7 @@ Preflow-Push preflow_push +.. _networkx.algorithms.flow.dinitz_alg: Dinitz ------ @@ -47,6 +52,7 @@ Dinitz dinitz +.. _networkx.algorithms.flow.boykovkolmogorov: Boykov-Kolmogorov ----------------- @@ -55,6 +61,7 @@ Boykov-Kolmogorov boykov_kolmogorov +.. _networkx.algorithms.flow.gomory_hu: Gomory-Hu Tree -------------- @@ -63,6 +70,7 @@ Gomory-Hu Tree gomory_hu_tree +.. _networkx.algorithms.flow.utils: Utils ----- @@ -71,6 +79,8 @@ Utils build_residual_network +.. _networkx.algorithms.flow.mincost: +.. _networkx.algorithms.flow.networksimplex: Network Simplex --------------- @@ -83,6 +93,7 @@ Network Simplex cost_of_flow max_flow_min_cost +.. _networkx.algorithms.flow.capacityscaling: Capacity Scaling Minimum Cost Flow ---------------------------------- diff --git a/doc/reference/algorithms/index.rst b/doc/reference/algorithms/index.rst index f8c5aff485d..2dd8d8f054a 100644 --- a/doc/reference/algorithms/index.rst +++ b/doc/reference/algorithms/index.rst @@ -15,6 +15,7 @@ Algorithms bipartite boundary bridges + broadcasting centrality chains chordal diff --git a/doc/reference/algorithms/isomorphism.rst b/doc/reference/algorithms/isomorphism.rst index 1d64bd0e7f8..2010831c514 100644 --- a/doc/reference/algorithms/isomorphism.rst +++ b/doc/reference/algorithms/isomorphism.rst @@ -1,4 +1,5 @@ .. _isomorphism: +.. 
_networkx.algorithms.isomorphism.isomorph: *********** Isomorphism diff --git a/doc/reference/algorithms/isomorphism.vf2.rst b/doc/reference/algorithms/isomorphism.vf2.rst index a5c2ad4206d..95b62de661a 100644 --- a/doc/reference/algorithms/isomorphism.vf2.rst +++ b/doc/reference/algorithms/isomorphism.vf2.rst @@ -17,8 +17,10 @@ Graph Matcher GraphMatcher.initialize GraphMatcher.is_isomorphic GraphMatcher.subgraph_is_isomorphic + GraphMatcher.subgraph_is_monomorphic GraphMatcher.isomorphisms_iter GraphMatcher.subgraph_isomorphisms_iter + GraphMatcher.subgraph_monomorphisms_iter GraphMatcher.candidate_pairs_iter GraphMatcher.match GraphMatcher.semantic_feasibility @@ -36,8 +38,10 @@ DiGraph Matcher DiGraphMatcher.initialize DiGraphMatcher.is_isomorphic DiGraphMatcher.subgraph_is_isomorphic + DiGraphMatcher.subgraph_is_monomorphic DiGraphMatcher.isomorphisms_iter DiGraphMatcher.subgraph_isomorphisms_iter + DiGraphMatcher.subgraph_monomorphisms_iter DiGraphMatcher.candidate_pairs_iter DiGraphMatcher.match DiGraphMatcher.semantic_feasibility diff --git a/doc/reference/algorithms/minors.rst b/doc/reference/algorithms/minors.rst index 77cf7385e96..cfe6677d3b5 100644 --- a/doc/reference/algorithms/minors.rst +++ b/doc/reference/algorithms/minors.rst @@ -1,3 +1,5 @@ +.. 
_networkx.algorithms.minors.contraction: + ****** Minors ****** diff --git a/doc/reference/algorithms/operators.rst b/doc/reference/algorithms/operators.rst index 13632e270e6..c97c96c3c10 100644 --- a/doc/reference/algorithms/operators.rst +++ b/doc/reference/algorithms/operators.rst @@ -44,3 +44,4 @@ Operators tensor_product power corona_product + modular_product diff --git a/doc/reference/algorithms/tree.rst b/doc/reference/algorithms/tree.rst index 4c7c3e6f585..363d1e9665b 100644 --- a/doc/reference/algorithms/tree.rst +++ b/doc/reference/algorithms/tree.rst @@ -64,6 +64,7 @@ Spanning Trees minimum_spanning_edges maximum_spanning_edges SpanningTreeIterator + number_of_spanning_trees Decomposition ------------- diff --git a/doc/reference/algorithms/wiener.rst b/doc/reference/algorithms/wiener.rst index c0ac446adb8..acd070a99aa 100644 --- a/doc/reference/algorithms/wiener.rst +++ b/doc/reference/algorithms/wiener.rst @@ -1,5 +1,5 @@ ************ -Wiener index +Wiener Index ************ .. 
automodule:: networkx.algorithms.wiener @@ -7,3 +7,5 @@ Wiener index :toctree: generated/ wiener_index + schultz_index + gutman_index diff --git a/doc/reference/drawing.rst b/doc/reference/drawing.rst index daf094998dd..63853e0abe0 100644 --- a/doc/reference/drawing.rst +++ b/doc/reference/drawing.rst @@ -84,6 +84,7 @@ Graph Layout :toctree: generated/ bipartite_layout + bfs_layout circular_layout kamada_kawai_layout planar_layout diff --git a/doc/reference/generators.rst b/doc/reference/generators.rst index 14fbb594847..d3b980fec92 100644 --- a/doc/reference/generators.rst +++ b/doc/reference/generators.rst @@ -34,6 +34,7 @@ Classic dorogovtsev_goltsev_mendes_graph empty_graph full_rary_tree + kneser_graph ladder_graph lollipop_graph null_graph diff --git a/doc/reference/introduction.md b/doc/reference/introduction.md new file mode 100644 index 00000000000..a00cfe52c9b --- /dev/null +++ b/doc/reference/introduction.md @@ -0,0 +1,342 @@ +--- +jupytext: + text_representation: + extension: .md + format_name: myst + format_version: 0.13 + jupytext_version: 1.13.1 +kernelspec: + display_name: Python 3 (ipykernel) + language: python + name: python3 +--- + +# Introduction + +```{currentmodule} networkx + +``` + +The structure of NetworkX can be seen by the organization of its source code. +The package provides classes for graph objects, generators to create standard +graphs, IO routines for reading in existing datasets, algorithms to analyze +the resulting networks and some basic drawing tools. + +Most of the NetworkX API is provided by functions which take a graph object +as an argument. Methods of the graph object are limited to basic manipulation +and reporting. This provides modularity of code and documentation. +It also makes it easier for newcomers to learn about the package in stages. 
+The source code for each module is meant to be easy to read and reading +this Python code is actually a good way to learn more about network algorithms, +but we have put a lot of effort into making the documentation sufficient and friendly. +If you have suggestions or questions please contact us by joining the +[NetworkX Google group](http://groups.google.com/group/networkx-discuss). + +Classes are named using `CamelCase` (capital letters at the start of each word). +Functions, methods and variable names are `lower_case_underscore` (lowercase with +an underscore representing a space between words). + +## NetworkX Basics + +After starting Python, import the networkx module with (the recommended way) + +```{code-cell} +import networkx as nx +``` + +To save repetition, in the documentation we assume that +NetworkX has been imported this way. + +If importing networkx fails, it means that Python cannot find the installed +module. Check your installation and your `PYTHONPATH`. + +The following basic graph types are provided as Python classes: + +{class}`Graph` + +- This class implements an undirected graph. It ignores + multiple edges between two nodes. It does allow self-loop + edges between a node and itself. + +{class}`DiGraph` + +- Directed graphs, that is, graphs with directed edges. + Provides operations common to directed graphs, + (a subclass of Graph). + +{class}`MultiGraph` + +- A flexible graph class that allows multiple undirected edges between + pairs of nodes. The additional flexibility leads to some degradation + in performance, though usually not significant. + +{class}`MultiDiGraph` + +- A directed version of a MultiGraph. + +Empty graph-like objects are created with + +```{code-cell} +G = nx.Graph() +G = nx.DiGraph() +G = nx.MultiGraph() +G = nx.MultiDiGraph() +``` + +All graph classes allow any {term}`hashable` object as a node. +Hashable objects include strings, tuples, integers, and more. 
+Arbitrary edge attributes such as weights and labels +can be associated with an edge. + +The graph internal data structures are based on an +adjacency list representation and implemented using +Python {term}`dictionary` datastructures. +The graph adjacency structure is +implemented as a Python dictionary of +dictionaries; the outer dictionary is keyed by nodes to values that are +themselves dictionaries keyed by neighboring node to the +edge attributes associated with that edge. This "dict-of-dicts" structure +allows fast addition, deletion, and lookup of nodes and neighbors in +large graphs. The underlying datastructure is accessed directly +by methods (the programming interface "API") in the class definitions. +All functions, on the other hand, manipulate graph-like objects +solely via those API methods and not by acting directly on the datastructure. +This design allows for possible replacement of the 'dicts-of-dicts'-based +datastructure with an alternative datastructure that implements the +same methods. + +## Graphs + +The first choice to be made when using NetworkX is what type of graph +object to use. A graph (network) is a collection of nodes together +with a collection of edges that are pairs of nodes. Attributes are +often associated with nodes and/or edges. NetworkX graph objects come in +different flavors depending on two main properties of the network: + +> - Directed: Are the edges **directed**? Does the order of the edge +> pairs \$(u, v)\$ matter? A directed graph is specified by the "Di" +> prefix in the class name, e.g. {class}`DiGraph`. We make this distinction +> because many classical graph properties are defined differently for +> directed graphs. +> - Multi-edges: Are multiple edges allowed between each pair of nodes? +> As you might imagine, multiple edges requires a different data +> structure, though clever users could design edge data attributes to +> support this functionality. 
We provide a standard data structure +> and interface for this type of graph using the prefix "Multi", +> e.g., {class}`MultiGraph`. + +The basic graph classes are named: +{doc}`Graph `, +{doc}`DiGraph`, +{doc}`MultiGraph `, and +{doc}`MultiDiGraph ` + +### Nodes and Edges + +The next choice you have to make when specifying a graph is what kinds +of nodes and edges to use. + +If the topology of the network is all you +care about then using integers or strings as the nodes makes sense and +you need not worry about edge data. If you have a data structure +already in place to describe nodes you can simply use that structure +as your nodes provided it is {term}`hashable`. If it is not hashable you can +use a unique identifier to represent the node and assign the data +as a {term}`node attribute`. + +Edges often have data associated with them. Arbitrary data +can be associated with edges as an {term}`edge attribute`. +If the data is numeric and the intent is to represent +a _weighted_ graph then use the 'weight' keyword for the attribute. +Some of the graph algorithms, such as +Dijkstra's shortest path algorithm, use this attribute +name by default to get the weight for each edge. + +Attributes can be assigned to an edge by using keyword/value +pairs when adding edges. You can use any keyword +to name your attribute and can then query the edge +data using that attribute keyword. + +Once you've decided how to encode the nodes and edges, and whether you have +an undirected/directed graph with or without multiedges you are ready to build +your network. + +## Graph Creation + +NetworkX graph objects can be created in one of three ways: + +- Graph generators---standard algorithms to create network topologies. +- Importing data from preexisting (usually file) sources. +- Adding edges and nodes explicitly. + +Explicit addition and removal of nodes/edges is the easiest to describe. +Each graph object supplies methods to manipulate the graph. 
For example, + +```{code-cell} +G = nx.Graph() +G.add_edge(1, 2) # default edge data=1 +G.add_edge(2, 3, weight=0.9) # specify edge data +``` + +Edge attributes can be anything: + +```{code-cell} +import math +G.add_edge('y', 'x', function=math.cos) +G.add_node(math.cos) # any hashable can be a node +``` + +You can add many edges at one time: + +```{code-cell} +elist = [(1, 2), (2, 3), (1, 4), (4, 2)] +G.add_edges_from(elist) +elist = [('a', 'b', 5.0), ('b', 'c', 3.0), ('a', 'c', 1.0), ('c', 'd', 7.3)] +G.add_weighted_edges_from(elist) +``` + +See the {doc}`/tutorial` for more examples. + +Some basic graph operations such as union and intersection +are described in the {ref}`operators module ` documentation. + +Graph generators such as {func}`~generators.random_graphs.binomial_graph` +and {func}`~generators.random_graphs.erdos_renyi_graph` are +provided in the {ref}`graph generators ` subpackage. + +For importing network data from formats such as GML, GraphML, edge list text files +see the {ref}`reading and writing graphs ` subpackage. + +## Graph Reporting + +Class views provide basic reporting of nodes, neighbors, edges and degree. +These views provide iteration over the properties as well as membership +queries and data attribute lookup. The views refer to the graph data structure +so changes to the graph are reflected in the views. This is analogous to +dictionary views in Python 3. If you want to change the graph while iterating +you will need to use e.g. `for e in list(G.edges):`. The views provide +set-like operations, e.g. union and intersection, as well as dict-like +lookup and iteration of the data attributes using `G.edges[u, v]['color']` +and `for e, datadict in G.edges.items():`. Methods `G.edges.items()` and +`G.edges.values()` are familiar from python dicts. In addition `G.edges.data()` +provides specific attribute iteration e.g. `for e, e_color in G.edges.data('color'):`. + +The basic graph relationship of an edge can be obtained in two ways. 
+One can look for neighbors of a node or one can look for edges. +We jokingly refer to people who focus on nodes/neighbors as node-centric +and people who focus on edges as edge-centric. The designers of NetworkX +tend to be node-centric and view edges as a relationship between nodes. +You can see this by our choice of lookup notation like `G[u]` providing neighbors +(adjacency) while edge lookup is `G.edges[u, v]`. +Most data structures for sparse graphs are essentially adjacency lists and so +fit this perspective. In the end, of course, it doesn't really matter which way +you examine the graph. `G.edges` removes duplicate representations of undirected +edges while neighbor reporting across all nodes will naturally report both directions. + +Any properties that are more complicated than edges, neighbors and degree are +provided by functions. For example `nx.triangles(G, n)` gives the number of triangles +which include node n as a vertex. These functions are grouped in the code and +documentation under the term {ref}`algorithms`. + +## Algorithms + +A number of graph algorithms are provided with NetworkX. +These include shortest path, and breadth first search +(see {ref}`traversal`), +clustering and isomorphism algorithms and others. There are +many that we have not developed yet too. If you implement a +graph algorithm that might be useful for others please let +us know through the +[NetworkX Google group](http://groups.google.com/group/networkx-discuss) +or the GitHub [Developer Zone](https://github.com/networkx/networkx). + +As an example here is code to use Dijkstra's algorithm to +find the shortest weighted path: + +```{code-cell} +G = nx.Graph() +e = [('a', 'b', 0.3), ('b', 'c', 0.9), ('a', 'c', 0.5), ('c', 'd', 1.2)] +G.add_weighted_edges_from(e) +print(nx.dijkstra_path(G, 'a', 'd')) +``` + +## Drawing + +While NetworkX is not designed as a network drawing tool, we provide +a simple interface to drawing packages and some simple layout algorithms. 
+We interface to the excellent Graphviz layout tools like dot and neato +with the (suggested) pygraphviz package or the pydot interface. +Drawing can be done using external programs or the Matplotlib Python +package. Interactive GUI interfaces are possible, though not provided. +The drawing tools are provided in the module {ref}`drawing `. + +The basic drawing functions essentially place the nodes on a scatterplot +using the positions you provide via a dictionary or the positions are +computed with a layout function. The edges are lines between those dots. + +```{code-cell} +import matplotlib.pyplot as plt +G = nx.cubical_graph() +subax1 = plt.subplot(121) +nx.draw(G) # default spring_layout +subax2 = plt.subplot(122) +nx.draw(G, pos=nx.circular_layout(G), node_color='r', edge_color='b') +``` + +See the {doc}`examples ` for more ideas. + +## Data Structure + +NetworkX uses a "dictionary of dictionaries of dictionaries" as the +basic network data structure. This allows fast lookup with reasonable +storage for large sparse networks. The keys are nodes so `G[u]` returns +an adjacency dictionary keyed by neighbor to the edge attribute +dictionary. A view of the adjacency data structure is provided +by the dict-like object `G.adj` as e.g. `for node, nbrsdict in G.adj.items():`. +The expression `G[u][v]` returns the edge attribute dictionary itself. +A dictionary of lists would have also been possible, but not allow +fast edge detection nor convenient storage of edge data. + +Advantages of dict-of-dicts-of-dicts data structure: + +- Find edges and remove edges with two dictionary look-ups. +- Prefer to "lists" because of fast lookup with sparse storage. +- Prefer to "sets" since data can be attached to edge. +- `G[u][v]` returns the edge attribute dictionary. +- `n in G` tests if node `n` is in graph `G`. +- `for n in G:` iterates through the graph. +- `for nbr in G[n]:` iterates through neighbors. 
+ +As an example, here is a representation of an undirected graph with the +edges \$(A, B)\$ and \$(B, C)\$. + +```{code-cell} +G = nx.Graph() +G.add_edge('A', 'B') +G.add_edge('B', 'C') +print(G.adj) +``` + +The data structure gets morphed slightly for each base graph class. +For DiGraph two dict-of-dicts-of-dicts structures are provided, one +for successors (`G.succ`) and one for predecessors (`G.pred`). +For MultiGraph/MultiDiGraph we use a dict-of-dicts-of-dicts-of-dicts [^turtles] +where the third dictionary is keyed by an edge key identifier to the fourth +dictionary which contains the edge attributes for that edge between +the two nodes. + +Graphs provide two interfaces to the edge data attributes: adjacency +and edges. So `G[u][v]['width']` is the same as `G.edges[u, v]['width']`. + +```{code-cell} +G = nx.Graph() +G.add_edge(1, 2, color='red', weight=0.84, size=300) +print(G[1][2]['size']) +print(G.edges[1, 2]['color']) +``` + +```{rubric} Footnotes + +``` + +[^turtles]: "It's dictionaries all the way down." diff --git a/doc/reference/introduction.rst b/doc/reference/introduction.rst deleted file mode 100644 index 70fc3f5c758..00000000000 --- a/doc/reference/introduction.rst +++ /dev/null @@ -1,344 +0,0 @@ -Introduction -============ - -.. currentmodule:: networkx - -The structure of NetworkX can be seen by the organization of its source code. -The package provides classes for graph objects, generators to create standard -graphs, IO routines for reading in existing datasets, algorithms to analyze -the resulting networks and some basic drawing tools. - -Most of the NetworkX API is provided by functions which take a graph object -as an argument. Methods of the graph object are limited to basic manipulation -and reporting. This provides modularity of code and documentation. -It also makes it easier for newcomers to learn about the package in stages. 
-The source code for each module is meant to be easy to read and reading -this Python code is actually a good way to learn more about network algorithms, -but we have put a lot of effort into making the documentation sufficient and friendly. -If you have suggestions or questions please contact us by joining the -`NetworkX Google group `_. - -Classes are named using ``CamelCase`` (capital letters at the start of each word). -functions, methods and variable names are ``lower_case_underscore`` (lowercase with -an underscore representing a space between words). - - -NetworkX Basics ---------------- - -After starting Python, import the networkx module with (the recommended way) - -.. nbplot:: - - >>> import networkx as nx - -To save repetition, in the documentation we assume that -NetworkX has been imported this way. - -If importing networkx fails, it means that Python cannot find the installed -module. Check your installation and your ``PYTHONPATH``. - -The following basic graph types are provided as Python classes: - -:class:`Graph` - This class implements an undirected graph. It ignores - multiple edges between two nodes. It does allow self-loop - edges between a node and itself. - -:class:`DiGraph` - Directed graphs, that is, graphs with directed edges. - Provides operations common to directed graphs, - (a subclass of Graph). - -:class:`MultiGraph` - A flexible graph class that allows multiple undirected edges between - pairs of nodes. The additional flexibility leads to some degradation - in performance, though usually not significant. - -:class:`MultiDiGraph` - A directed version of a MultiGraph. - -Empty graph-like objects are created with - -.. nbplot:: - - >>> G = nx.Graph() - >>> G = nx.DiGraph() - >>> G = nx.MultiGraph() - >>> G = nx.MultiDiGraph() - -All graph classes allow any :term:`hashable` object as a node. -Hashable objects include strings, tuples, integers, and more. 
-Arbitrary edge attributes such as weights and labels -can be associated with an edge. - - -The graph internal data structures are based on an -adjacency list representation and implemented using -Python :term:`dictionary` datastructures. -The graph adjacency structure is -implemented as a Python dictionary of -dictionaries; the outer dictionary is keyed by nodes to values that are -themselves dictionaries keyed by neighboring node to the -edge attributes associated with that edge. This "dict-of-dicts" structure -allows fast addition, deletion, and lookup of nodes and neighbors in -large graphs. The underlying datastructure is accessed directly -by methods (the programming interface "API") in the class definitions. -All functions, on the other hand, manipulate graph-like objects -solely via those API methods and not by acting directly on the datastructure. -This design allows for possible replacement of the 'dicts-of-dicts'-based -datastructure with an alternative datastructure that implements the -same methods. - - -Graphs ------- - -The first choice to be made when using NetworkX is what type of graph -object to use. A graph (network) is a collection of nodes together -with a collection of edges that are pairs of nodes. Attributes are -often associated with nodes and/or edges. NetworkX graph objects come in -different flavors depending on two main properties of the network: - - - Directed: Are the edges **directed**? Does the order of the edge - pairs $(u, v)$ matter? A directed graph is specified by the "Di" - prefix in the class name, e.g. `DiGraph()`. We make this distinction - because many classical graph properties are defined differently for - directed graphs. - - - Multi-edges: Are multiple edges allowed between each pair of nodes? - As you might imagine, multiple edges requires a different data - structure, though clever users could design edge data attributes to - support this functionality. 
We provide a standard data structure - and interface for this type of graph using the prefix "Multi", - e.g., `MultiGraph()`. - -The basic graph classes are named: -:doc:`Graph `, -:doc:`DiGraph`, -:doc:`MultiGraph `, and -:doc:`MultiDiGraph ` - - -Nodes and Edges -^^^^^^^^^^^^^^^ - -The next choice you have to make when specifying a graph is what kinds -of nodes and edges to use. - -If the topology of the network is all you -care about then using integers or strings as the nodes makes sense and -you need not worry about edge data. If you have a data structure -already in place to describe nodes you can simply use that structure -as your nodes provided it is :term:`hashable`. If it is not hashable you can -use a unique identifier to represent the node and assign the data -as a :term:`node attribute`. - -Edges often have data associated with them. Arbitrary data -can be associated with edges as an :term:`edge attribute`. -If the data is numeric and the intent is to represent -a *weighted* graph then use the 'weight' keyword for the attribute. -Some of the graph algorithms, such as -Dijkstra's shortest path algorithm, use this attribute -name by default to get the weight for each edge. - -Attributes can be assigned to an edge by using keyword/value -pairs when adding edges. You can use any keyword -to name your attribute and can then query the edge -data using that attribute keyword. - -Once you've decided how to encode the nodes and edges, and whether you have -an undirected/directed graph with or without multiedges you are ready to build -your network. - -Graph Creation --------------- - -NetworkX graph objects can be created in one of three ways: - -- Graph generators---standard algorithms to create network topologies. -- Importing data from preexisting (usually file) sources. -- Adding edges and nodes explicitly. - -Explicit addition and removal of nodes/edges is the easiest to describe. -Each graph object supplies methods to manipulate the graph. 
For example, - -.. nbplot:: - - >>> import networkx as nx - >>> G = nx.Graph() - >>> G.add_edge(1, 2) # default edge data=1 - >>> G.add_edge(2, 3, weight=0.9) # specify edge data - -Edge attributes can be anything: - -.. nbplot:: - - >>> import math - >>> G.add_edge('y', 'x', function=math.cos) - >>> G.add_node(math.cos) # any hashable can be a node - -You can add many edges at one time: - -.. nbplot:: - - >>> elist = [(1, 2), (2, 3), (1, 4), (4, 2)] - >>> G.add_edges_from(elist) - >>> elist = [('a', 'b', 5.0), ('b', 'c', 3.0), ('a', 'c', 1.0), ('c', 'd', 7.3)] - >>> G.add_weighted_edges_from(elist) - -See the :doc:`/tutorial` for more examples. - -Some basic graph operations such as union and intersection -are described in the :ref:`operators module ` documentation. - -Graph generators such as :func:`~generators.random_graphs.binomial_graph` -and :func:`~generators.random_graphs.erdos_renyi_graph` are -provided in the :ref:`graph generators ` subpackage. - -For importing network data from formats such as GML, GraphML, edge list text files -see the :ref:`reading and writing graphs ` subpackage. - - -Graph Reporting ---------------- - -Class views provide basic reporting of nodes, neighbors, edges and degree. -These views provide iteration over the properties as well as membership -queries and data attribute lookup. The views refer to the graph data structure -so changes to the graph are reflected in the views. This is analogous to -dictionary views in Python 3. If you want to change the graph while iterating -you will need to use e.g. ``for e in list(G.edges):``. The views provide -set-like operations, e.g. union and intersection, as well as dict-like -lookup and iteration of the data attributes using ``G.edges[u, v]['color']`` -and ``for e, datadict in G.edges.items():``. Methods ``G.edges.items()`` and -``G.edges.values()`` are familiar from python dicts. In addition ``G.edges.data()`` -provides specific attribute iteration e.g. 
``for e, e_color in G.edges.data('color'):``. - -The basic graph relationship of an edge can be obtained in two ways. -One can look for neighbors of a node or one can look for edges. -We jokingly refer to people who focus on nodes/neighbors as node-centric -and people who focus on edges as edge-centric. The designers of NetworkX -tend to be node-centric and view edges as a relationship between nodes. -You can see this by our choice of lookup notation like ``G[u]`` providing neighbors -(adjacency) while edge lookup is ``G.edges[u, v]``. -Most data structures for sparse graphs are essentially adjacency lists and so -fit this perspective. In the end, of course, it doesn't really matter which way -you examine the graph. ``G.edges`` removes duplicate representations of undirected -edges while neighbor reporting across all nodes will naturally report both directions. - -Any properties that are more complicated than edges, neighbors and degree are -provided by functions. For example ``nx.triangles(G, n)`` gives the number of triangles -which include node n as a vertex. These functions are grouped in the code and -documentation under the term :ref:`algorithms`. - - -Algorithms ----------- - -A number of graph algorithms are provided with NetworkX. -These include shortest path, and breadth first search -(see :ref:`traversal`), -clustering and isomorphism algorithms and others. There are -many that we have not developed yet too. If you implement a -graph algorithm that might be useful for others please let -us know through the -`NetworkX Google group `_ -or the GitHub `Developer Zone `_. - -As an example here is code to use Dijkstra's algorithm to -find the shortest weighted path: - -.. 
nbplot:: - - >>> G = nx.Graph() - >>> e = [('a', 'b', 0.3), ('b', 'c', 0.9), ('a', 'c', 0.5), ('c', 'd', 1.2)] - >>> G.add_weighted_edges_from(e) - >>> print(nx.dijkstra_path(G, 'a', 'd')) - ['a', 'c', 'd'] - -Drawing -------- - -While NetworkX is not designed as a network drawing tool, we provide -a simple interface to drawing packages and some simple layout algorithms. -We interface to the excellent Graphviz layout tools like dot and neato -with the (suggested) pygraphviz package or the pydot interface. -Drawing can be done using external programs or the Matplotlib Python -package. Interactive GUI interfaces are possible, though not provided. -The drawing tools are provided in the module :ref:`drawing `. - -The basic drawing functions essentially place the nodes on a scatterplot -using the positions you provide via a dictionary or the positions are -computed with a layout function. The edges are lines between those dots. - -.. nbplot:: - - >>> import matplotlib.pyplot as plt - >>> G = nx.cubical_graph() - >>> subax1 = plt.subplot(121) - >>> nx.draw(G) # default spring_layout - >>> subax2 = plt.subplot(122) - >>> nx.draw(G, pos=nx.circular_layout(G), node_color='r', edge_color='b') - -See the :doc:`examples ` for more ideas. - -Data Structure --------------- - -NetworkX uses a "dictionary of dictionaries of dictionaries" as the -basic network data structure. This allows fast lookup with reasonable -storage for large sparse networks. The keys are nodes so ``G[u]`` returns -an adjacency dictionary keyed by neighbor to the edge attribute -dictionary. A view of the adjacency data structure is provided -by the dict-like object ``G.adj`` as e.g. ``for node, nbrsdict in G.adj.items():``. -The expression ``G[u][v]`` returns the edge attribute dictionary itself. -A dictionary of lists would have also been possible, but not allow -fast edge detection nor convenient storage of edge data. 
- -Advantages of dict-of-dicts-of-dicts data structure: - - - Find edges and remove edges with two dictionary look-ups. - - Prefer to "lists" because of fast lookup with sparse storage. - - Prefer to "sets" since data can be attached to edge. - - ``G[u][v]`` returns the edge attribute dictionary. - - ``n in G`` tests if node ``n`` is in graph ``G``. - - ``for n in G:`` iterates through the graph. - - ``for nbr in G[n]:`` iterates through neighbors. - -As an example, here is a representation of an undirected graph with the -edges $(A, B)$ and $(B, C)$. - -.. nbplot:: - - >>> G = nx.Graph() - >>> G.add_edge('A', 'B') - >>> G.add_edge('B', 'C') - >>> print(G.adj) - {'A': {'B': {}}, 'B': {'A': {}, 'C': {}}, 'C': {'B': {}}} - -The data structure gets morphed slightly for each base graph class. -For DiGraph two dict-of-dicts-of-dicts structures are provided, one -for successors (``G.succ``) and one for predecessors (``G.pred``). -For MultiGraph/MultiDiGraph we use a dict-of-dicts-of-dicts-of-dicts [#turtles]_ -where the third dictionary is keyed by an edge key identifier to the fourth -dictionary which contains the edge attributes for that edge between -the two nodes. - -Graphs provide two interfaces to the edge data attributes: adjacency -and edges. So ``G[u][v]['width']`` is the same as ``G.edges[u, v]['width']``. - -.. nbplot:: - - >>> G = nx.Graph() - >>> G.add_edge(1, 2, color='red', weight=0.84, size=300) - >>> print(G[1][2]['size']) - 300 - >>> print(G.edges[1, 2]['color']) - red - -.. code-links:: - -.. rubric:: Footnotes - -.. [#turtles] "It's dictionaries all the way down." diff --git a/doc/reference/randomness.rst b/doc/reference/randomness.rst index 4ca72802583..dc9db60a994 100644 --- a/doc/reference/randomness.rst +++ b/doc/reference/randomness.rst @@ -27,7 +27,7 @@ RNGs, you may find yourself using another package that uses the other. Setting the state of the two global RNGs is as simple setting the seed of each RNG to an arbitrary integer: -.. nbplot:: +.. 
code-block:: >>> import random >>> random.seed(246) # or any integer @@ -41,7 +41,7 @@ to functions that use an RNG. This argument is called `seed`, but determines more than the seed of the RNG. It tells the function which RNG package to use, and whether to use a global or local RNG. -.. nbplot:: +.. code-block:: >>> from networkx import path_graph, random_layout >>> G = path_graph(9) diff --git a/doc/reference/readwrite/json_graph.rst b/doc/reference/readwrite/json_graph.rst index 53368770ed4..2fed0804948 100644 --- a/doc/reference/readwrite/json_graph.rst +++ b/doc/reference/readwrite/json_graph.rst @@ -1,3 +1,8 @@ +.. _networkx.readwrite.json_graph.adjacency: +.. _networkx.readwrite.json_graph.cytoscape: +.. _networkx.readwrite.json_graph.node_link: +.. _networkx.readwrite.json_graph.tree: + JSON ==== .. automodule:: networkx.readwrite.json_graph diff --git a/doc/reference/utils.rst b/doc/reference/utils.rst index 21ae064076e..acd9c92584e 100644 --- a/doc/reference/utils.rst +++ b/doc/reference/utils.rst @@ -85,4 +85,4 @@ Backends .. autosummary:: :toctree: generated/ - _dispatch + _dispatchable diff --git a/doc/tutorial.md b/doc/tutorial.md new file mode 100644 index 00000000000..f5ef8714187 --- /dev/null +++ b/doc/tutorial.md @@ -0,0 +1,658 @@ +--- +jupytext: + text_representation: + extension: .md + format_name: myst + format_version: 0.13 + jupytext_version: 1.13.1 +kernelspec: + display_name: Python 3 (ipykernel) + language: python + name: python3 +--- + +# Tutorial + +```{currentmodule} networkx + +``` + +This guide can help you start working with NetworkX. + +## Creating a graph + +Create an empty graph with no nodes and no edges. + +```{code-cell} +import networkx as nx +G = nx.Graph() +``` + +By definition, a {class}`Graph` is a collection of nodes (vertices) along with +identified pairs of nodes (called edges, links, etc). 
In NetworkX, nodes can +be any {py:term}`hashable` object e.g., a text string, an image, an XML object, +another Graph, a customized node object, etc. + +```{note} +Python's `None` object is not allowed to be used as a node. It +determines whether optional function arguments have been assigned in many +functions. +``` + +## Nodes + +The graph `G` can be grown in several ways. NetworkX includes many +{doc}`graph generator functions ` and +{doc}`facilities to read and write graphs in many formats `. +To get started though we'll look at simple manipulations. You can add one node +at a time, + +```{code-cell} +G.add_node(1) +``` + +or add nodes from any {py:term}`iterable` container, such as a list + +```{code-cell} +G.add_nodes_from([2, 3]) +``` + +You can also add nodes along with node +attributes if your container yields 2-tuples of the form +`(node, node_attribute_dict)`: + +```{code-cell} +G.add_nodes_from([(4, {"color": "red"}), (5, {"color": "green"})]) +``` + +Node attributes are discussed further {ref}`below `. + +Nodes from one graph can be incorporated into another: + +```{code-cell} +H = nx.path_graph(10) +G.add_nodes_from(H) +``` + +`G` now contains the nodes of `H` as nodes of `G`. +In contrast, you could use the graph `H` as a node in `G`. + +```{code-cell} +G.add_node(H) +``` + +The graph `G` now contains `H` as a node. This flexibility is very powerful as +it allows graphs of graphs, graphs of files, graphs of functions and much more. +It is worth thinking about how to structure your application so that the nodes +are useful entities. Of course you can always use a unique identifier in `G` +and have a separate dictionary keyed by identifier to the node information if +you prefer. + +```{note} +You should not change the node object if the hash depends on its contents. 
+``` + +## Edges + +`G` can also be grown by adding one edge at a time, + +```{code-cell} +G.add_edge(1, 2) +e = (2, 3) +G.add_edge(*e) # unpack edge tuple* +``` + +by adding a list of edges, + +```{code-cell} +G.add_edges_from([(1, 2), (1, 3)]) +``` + +or by adding any {term}`ebunch` of edges. An _ebunch_ is any iterable +container of edge-tuples. An edge-tuple can be a 2-tuple of nodes or a 3-tuple +with 2 nodes followed by an edge attribute dictionary, e.g., +`(2, 3, {'weight': 3.1415})`. Edge attributes are discussed further +{ref}`below `. + +```{code-cell} +G.add_edges_from(H.edges) +``` + +There are no complaints when adding existing nodes or edges. For example, +after removing all nodes and edges, + +```{code-cell} +G.clear() +``` + +we add new nodes/edges and NetworkX quietly ignores any that are +already present. + +```{code-cell} +G.add_edges_from([(1, 2), (1, 3)]) +G.add_node(1) +G.add_edge(1, 2) +G.add_node("spam") # adds node "spam" +G.add_nodes_from("spam") # adds 4 nodes: 's', 'p', 'a', 'm' +G.add_edge(3, 'm') +``` + +At this stage the graph `G` consists of 8 nodes and 3 edges, as can be seen by: + +```{code-cell} +G.number_of_nodes() +``` + +```{code-cell} +G.number_of_edges() +``` + +```{note} +The order of adjacency reporting (e.g., {meth}`G.adj `, +{meth}`G.successors `, +{meth}`G.predecessors `) is the order of +edge addition. However, the order of G.edges is the order of the adjacencies +which includes both the order of the nodes and each +node's adjacencies. See example below: +``` + +```{code-cell} +DG = nx.DiGraph() +DG.add_edge(2, 1) # adds the nodes in order 2, 1 +DG.add_edge(1, 3) +DG.add_edge(2, 4) +DG.add_edge(1, 2) +assert list(DG.successors(2)) == [1, 4] +assert list(DG.edges) == [(2, 1), (2, 4), (1, 3), (1, 2)] +``` + +## Examining elements of a graph + +We can examine the nodes and edges. Four basic graph properties facilitate +reporting: `G.nodes`, `G.edges`, `G.adj` and `G.degree`. 
These +are set-like views of the nodes, edges, neighbors (adjacencies), and degrees +of nodes in a graph. They offer a continually updated read-only view into +the graph structure. They are also dict-like in that you can look up node +and edge data attributes via the views and iterate with data attributes +using methods `.items()`, `.data()`. +If you want a specific container type instead of a view, you can specify one. +Here we use lists, though sets, dicts, tuples and other containers may be +better in other contexts. + +```{code-cell} +list(G.nodes) +``` + +```{code-cell} +list(G.edges) +``` + +```{code-cell} +list(G.adj[1]) # or list(G.neighbors(1)) +``` + +```{code-cell} +G.degree[1] # the number of edges incident to 1 +``` + +One can specify to report the edges and degree from a subset of all nodes +using an {term}`nbunch`. An _nbunch_ is any of: `None` (meaning all nodes), +a node, or an iterable container of nodes that is not itself a node in the +graph. + +```{code-cell} +G.edges([2, 'm']) +``` + +```{code-cell} +G.degree([2, 3]) +``` + +## Removing elements from a graph + +One can remove nodes and edges from the graph in a similar fashion to adding. +Use methods +{meth}`Graph.remove_node`, +{meth}`Graph.remove_nodes_from`, +{meth}`Graph.remove_edge` +and +{meth}`Graph.remove_edges_from`, e.g. + +```{code-cell} +G.remove_node(2) +G.remove_nodes_from("spam") +list(G.nodes) +``` + +```{code-cell} +G.remove_edge(1, 3) +list(G) +``` + +## Using the graph constructors + +Graph objects do not have to be built up incrementally - data specifying +graph structure can be passed directly to the constructors of the various +graph classes. +When creating a graph structure by instantiating one of the graph +classes you can specify data in several formats. 
+ +```{code-cell} +G.add_edge(1, 2) +H = nx.DiGraph(G) # create a DiGraph using the connections from G +list(H.edges()) +``` + +```{code-cell} +edgelist = [(0, 1), (1, 2), (2, 3)] +H = nx.Graph(edgelist) # create a graph from an edge list +list(H.edges()) +``` + +```{code-cell} +adjacency_dict = {0: (1, 2), 1: (0, 2), 2: (0, 1)} +H = nx.Graph(adjacency_dict) # create a Graph dict mapping nodes to nbrs +list(H.edges()) +``` + +## What to use as nodes and edges + +You might notice that nodes and edges are not specified as NetworkX +objects. This leaves you free to use meaningful items as nodes and +edges. The most common choices are numbers or strings, but a node can +be any hashable object (except `None`), and an edge can be associated +with any object `x` using `G.add_edge(n1, n2, object=x)`. + +As an example, `n1` and `n2` could be protein objects from the RCSB Protein +Data Bank, and `x` could refer to an XML record of publications detailing +experimental observations of their interaction. + +We have found this power quite useful, but its abuse +can lead to surprising behavior unless one is familiar with Python. +If in doubt, consider using {func}`~relabel.convert_node_labels_to_integers` to obtain +a more traditional graph with integer labels. + +## Accessing edges and neighbors + +In addition to the views {attr}`Graph.edges`, and {attr}`Graph.adj`, +access to edges and neighbors is possible using subscript notation. + +```{code-cell} +G = nx.Graph([(1, 2, {"color": "yellow"})]) +G[1] # same as G.adj[1] +``` + +```{code-cell} +G[1][2] +``` + +```{code-cell} +G.edges[1, 2] +``` + +You can get/set the attributes of an edge using subscript notation +if the edge already exists. + +```{code-cell} +G.add_edge(1, 3) +G[1][3]['color'] = "blue" +G.edges[1, 2]['color'] = "red" +G.edges[1, 2] +``` + +Fast examination of all (node, adjacency) pairs is achieved using +`G.adjacency()`, or `G.adj.items()`. 
+Note that for undirected graphs, adjacency iteration sees each edge twice. + +```{code-cell} +FG = nx.Graph() +FG.add_weighted_edges_from([(1, 2, 0.125), (1, 3, 0.75), (2, 4, 1.2), (3, 4, 0.375)]) +for n, nbrs in FG.adj.items(): + for nbr, eattr in nbrs.items(): + wt = eattr['weight'] + if wt < 0.5: print(f"({n}, {nbr}, {wt:.3})") +``` + +Convenient access to all edges is achieved with the edges property. + +```{code-cell} +for (u, v, wt) in FG.edges.data('weight'): + if wt < 0.5: + print(f"({u}, {v}, {wt:.3})") +``` + +(attributes)= + +## Adding attributes to graphs, nodes, and edges + +Attributes such as weights, labels, colors, or whatever Python object you like, +can be attached to graphs, nodes, or edges. + +Each graph, node, and edge can hold key/value attribute pairs in an associated +attribute dictionary (the keys must be hashable). By default these are empty, +but attributes can be added or changed using `add_edge`, `add_node` or direct +manipulation of the attribute dictionaries named `G.graph`, `G.nodes`, and +`G.edges` for a graph `G`. + +### Graph attributes + +Assign graph attributes when creating a new graph + +```{code-cell} +G = nx.Graph(day="Friday") +G.graph +``` + +Or you can modify attributes later + +```{code-cell} +G.graph['day'] = "Monday" +G.graph +``` + +### Node attributes + +Add node attributes using `add_node()`, `add_nodes_from()`, or `G.nodes` + +```{code-cell} +G.add_node(1, time='5pm') +G.add_nodes_from([3], time='2pm') +G.nodes[1] +``` + +```{code-cell} +G.nodes[1]['room'] = 714 +G.nodes.data() +``` + +Note that adding a node to `G.nodes` does not add it to the graph, use +`G.add_node()` to add new nodes. Similarly for edges. + +### Edge Attributes + +Add/change edge attributes using `add_edge()`, `add_edges_from()`, +or subscript notation. 
+ +```{code-cell} +G.add_edge(1, 2, weight=4.7 ) +G.add_edges_from([(3, 4), (4, 5)], color='red') +G.add_edges_from([(1, 2, {'color': 'blue'}), (2, 3, {'weight': 8})]) +G[1][2]['weight'] = 4.7 +G.edges[3, 4]['weight'] = 4.2 +``` + +The special attribute `weight` should be numeric as it is used by +algorithms requiring weighted edges. + +## Directed graphs + +The {class}`DiGraph` class provides additional methods and properties specific +to directed edges, e.g., +{attr}`DiGraph.out_edges`, {attr}`DiGraph.in_degree`, +{meth}`DiGraph.predecessors`, {meth}`DiGraph.successors` etc. +To allow algorithms to work with both classes easily, the directed versions of +{meth}`neighbors ` is equivalent to +{meth}`successors ` while {attr}`DiGraph.degree` reports the sum +of {attr}`DiGraph.in_degree` and {attr}`DiGraph.out_degree` even though that may +feel inconsistent at times. + +```{code-cell} +DG = nx.DiGraph() +DG.add_weighted_edges_from([(1, 2, 0.5), (3, 1, 0.75)]) +DG.out_degree(1, weight='weight') +``` + +```{code-cell} +DG.degree(1, weight='weight') +``` + +```{code-cell} +list(DG.successors(1)) +``` + +```{code-cell} +list(DG.neighbors(1)) +``` + +Some algorithms work only for directed graphs and others are not well +defined for directed graphs. Indeed the tendency to lump directed +and undirected graphs together is dangerous. If you want to treat +a directed graph as undirected for some measurement you should probably +convert it using {meth}`Graph.to_undirected` or with + +```{code-cell} +H = nx.Graph(G) # create an undirected graph H from a directed graph G +``` + +## Multigraphs + +NetworkX provides classes for graphs which allow multiple edges +between any pair of nodes. The {class}`MultiGraph` and +{class}`MultiDiGraph` +classes allow you to add the same edge twice, possibly with different +edge data. This can be powerful for some applications, but many +algorithms are not well defined on such graphs. 
+Where results are well defined, +e.g., {meth}`MultiGraph.degree` we provide the function. Otherwise you +should convert to a standard graph in a way that makes the measurement +well defined. + +```{code-cell} +MG = nx.MultiGraph() +MG.add_weighted_edges_from([(1, 2, 0.5), (1, 2, 0.75), (2, 3, 0.5)]) +dict(MG.degree(weight='weight')) +``` + +```{code-cell} +GG = nx.Graph() +for n, nbrs in MG.adjacency(): + for nbr, edict in nbrs.items(): + minvalue = min([d['weight'] for d in edict.values()]) + GG.add_edge(n, nbr, weight = minvalue) + +nx.shortest_path(GG, 1, 3) +``` + +## Graph generators and graph operations + +In addition to constructing graphs node-by-node or edge-by-edge, they +can also be generated by + +### 1. Applying classic graph operations, such as: + +```{eval-rst} +.. autosummary:: + + ~networkx.classes.function.subgraph + ~networkx.algorithms.operators.binary.union + ~networkx.algorithms.operators.binary.disjoint_union + ~networkx.algorithms.operators.product.cartesian_product + ~networkx.algorithms.operators.binary.compose + ~networkx.algorithms.operators.unary.complement + ~networkx.classes.function.create_empty_copy + ~networkx.classes.function.to_undirected + ~networkx.classes.function.to_directed +``` + +### 2. Using a call to one of the classic small graphs, e.g., + +```{eval-rst} +.. autosummary:: + + ~networkx.generators.small.petersen_graph + ~networkx.generators.small.tutte_graph + ~networkx.generators.small.sedgewick_maze_graph + ~networkx.generators.small.tetrahedral_graph +``` + +### 3. Using a (constructive) generator for a classic graph, e.g., + +```{eval-rst} +.. 
autosummary:: + + ~networkx.generators.classic.complete_graph + ~networkx.algorithms.bipartite.generators.complete_bipartite_graph + ~networkx.generators.classic.barbell_graph + ~networkx.generators.classic.lollipop_graph +``` + +like so: + +```{code-cell} +K_5 = nx.complete_graph(5) +K_3_5 = nx.complete_bipartite_graph(3, 5) +barbell = nx.barbell_graph(10, 10) +lollipop = nx.lollipop_graph(10, 20) +``` + +### 4. Using a stochastic graph generator, e.g, + +```{eval-rst} +.. autosummary:: + + ~networkx.generators.random_graphs.erdos_renyi_graph + ~networkx.generators.random_graphs.watts_strogatz_graph + ~networkx.generators.random_graphs.barabasi_albert_graph + ~networkx.generators.random_graphs.random_lobster +``` + +like so: + +```{code-cell} +er = nx.erdos_renyi_graph(100, 0.15) +ws = nx.watts_strogatz_graph(30, 3, 0.1) +ba = nx.barabasi_albert_graph(100, 5) +red = nx.random_lobster(100, 0.9, 0.9) +``` + +### 5. Reading a graph stored in a file using common graph formats + +NetworkX supports many popular formats, such as edge lists, adjacency lists, +GML, GraphML, LEDA and others. + +```{code-cell} +nx.write_gml(red, "path.to.file") +mygraph = nx.read_gml("path.to.file") +``` + +For details on graph formats see {doc}`/reference/readwrite/index` +and for graph generator functions see {doc}`/reference/generators` + +## Analyzing graphs + +The structure of `G` can be analyzed using various graph-theoretic +functions such as: + +```{code-cell} +G = nx.Graph() +G.add_edges_from([(1, 2), (1, 3)]) +G.add_node("spam") # adds node "spam" +list(nx.connected_components(G)) +``` + +```{code-cell} +sorted(d for n, d in G.degree()) +``` + +```{code-cell} +nx.clustering(G) +``` + +Some functions with large output iterate over (node, value) 2-tuples. +These are easily stored in a `dict` structure if you desire. + +```{code-cell} +sp = dict(nx.all_pairs_shortest_path(G)) +sp[3] +``` + +See {doc}`/reference/algorithms/index` for details on graph algorithms +supported. 
+ +## Drawing graphs + +NetworkX is not primarily a graph drawing package but basic drawing with +Matplotlib as well as an interface to use the open source Graphviz software +package are included. These are part of the {doc}`networkx.drawing ` +module and will be imported if possible. + +First import Matplotlib's plot interface (pylab works too) + +```{code-cell} +import matplotlib.pyplot as plt +``` + +To test if the import of `~networkx.drawing.nx_pylab` was successful draw `G` +using one of + +```{code-cell} +G = nx.petersen_graph() +subax1 = plt.subplot(121) +nx.draw(G, with_labels=True, font_weight='bold') +subax2 = plt.subplot(122) +nx.draw_shell(G, nlist=[range(5, 10), range(5)], with_labels=True, font_weight='bold') +``` + +when drawing to an interactive display. Note that you may need to issue a +Matplotlib + +```python +plt.show() +``` + +command if you are not using matplotlib in interactive mode. + +```{code-cell} +options = { + 'node_color': 'black', + 'node_size': 100, + 'width': 3, +} +subax1 = plt.subplot(221) +nx.draw_random(G, **options) +subax2 = plt.subplot(222) +nx.draw_circular(G, **options) +subax3 = plt.subplot(223) +nx.draw_spectral(G, **options) +subax4 = plt.subplot(224) +nx.draw_shell(G, nlist=[range(5,10), range(5)], **options) +``` + +You can find additional options via {func}`~drawing.nx_pylab.draw_networkx` and +layouts via the {mod}`layout module`. +You can use multiple shells with {func}`~drawing.nx_pylab.draw_shell`. + +```{code-cell} +G = nx.dodecahedral_graph() +shells = [[2, 3, 4, 5, 6], [8, 1, 0, 19, 18, 17, 16, 15, 14, 7], [9, 10, 11, 12, 13]] +nx.draw_shell(G, nlist=shells, **options) +``` + +To save drawings to a file, use, for example + +```pycon +>>> nx.draw(G) +>>> plt.savefig("path.png") +``` + +This function writes to the file `path.png` in the local directory. 
If Graphviz and +PyGraphviz or pydot, are available on your system, you can also use +`networkx.drawing.nx_agraph.graphviz_layout` or +`networkx.drawing.nx_pydot.graphviz_layout` to get the node positions, or write +the graph in dot format for further processing. + +```pycon +>>> from networkx.drawing.nx_pydot import write_dot +>>> pos = nx.nx_agraph.graphviz_layout(G) +>>> nx.draw(G, pos=pos) +>>> write_dot(G, 'file.dot') +``` + +See {doc}`/reference/drawing` for additional details. + +## NX-Guides + +If you are interested in learning more about NetworkX, graph theory and network analysis +then you should check out {doc}`nx-guides `. There you can find tutorials, +real-world applications and in-depth examinations of graphs and network algorithms. +All the material is official and was developed and curated by the NetworkX community. diff --git a/doc/tutorial.rst b/doc/tutorial.rst deleted file mode 100644 index 43f54b69fd0..00000000000 --- a/doc/tutorial.rst +++ /dev/null @@ -1,635 +0,0 @@ -Tutorial -======== - -.. currentmodule:: networkx - -This guide can help you start working with NetworkX. - -Creating a graph ----------------- - -Create an empty graph with no nodes and no edges. - -.. nbplot:: - - >>> import networkx as nx - >>> G = nx.Graph() - -By definition, a :class:`Graph` is a collection of nodes (vertices) along with -identified pairs of nodes (called edges, links, etc). In NetworkX, nodes can -be any :py:term:`hashable` object e.g., a text string, an image, an XML object, -another Graph, a customized node object, etc. - -.. note:: Python's ``None`` object is not allowed to be used as a node. It - determines whether optional function arguments have been assigned in many - functions. - -Nodes ------ - -The graph ``G`` can be grown in several ways. NetworkX includes many -:doc:`graph generator functions ` and -:doc:`facilities to read and write graphs in many formats `. -To get started though we'll look at simple manipulations. 
You can add one node -at a time, - -.. nbplot:: - - >>> G.add_node(1) - -or add nodes from any :py:term:`iterable` container, such as a list - -.. nbplot:: - - >>> G.add_nodes_from([2, 3]) - -You can also add nodes along with node -attributes if your container yields 2-tuples of the form -``(node, node_attribute_dict)``:: - - >>> G.add_nodes_from([ - ... (4, {"color": "red"}), - ... (5, {"color": "green"}), - ... ]) - -Node attributes are discussed further :ref:`below `. - -Nodes from one graph can be incorporated into another: - -.. nbplot:: - - >>> H = nx.path_graph(10) - >>> G.add_nodes_from(H) - -``G`` now contains the nodes of ``H`` as nodes of ``G``. -In contrast, you could use the graph ``H`` as a node in ``G``. - -.. nbplot:: - - >>> G.add_node(H) - -The graph ``G`` now contains ``H`` as a node. This flexibility is very powerful as -it allows graphs of graphs, graphs of files, graphs of functions and much more. -It is worth thinking about how to structure your application so that the nodes -are useful entities. Of course you can always use a unique identifier in ``G`` -and have a separate dictionary keyed by identifier to the node information if -you prefer. - -.. note:: You should not change the node object if the hash depends - on its contents. - -Edges ------ - -``G`` can also be grown by adding one edge at a time, - -.. nbplot:: - - >>> G.add_edge(1, 2) - >>> e = (2, 3) - >>> G.add_edge(*e) # unpack edge tuple* - -by adding a list of edges, - -.. nbplot:: - - >>> G.add_edges_from([(1, 2), (1, 3)]) - -or by adding any :term:`ebunch` of edges. An *ebunch* is any iterable -container of edge-tuples. An edge-tuple can be a 2-tuple of nodes or a 3-tuple -with 2 nodes followed by an edge attribute dictionary, e.g., -``(2, 3, {'weight': 3.1415})``. Edge attributes are discussed further -:ref:`below `. - -.. nbplot:: - - >>> G.add_edges_from(H.edges) - -There are no complaints when adding existing nodes or edges. 
For example, -after removing all nodes and edges, - -.. nbplot:: - - >>> G.clear() - -we add new nodes/edges and NetworkX quietly ignores any that are -already present. - -.. nbplot:: - - >>> G.add_edges_from([(1, 2), (1, 3)]) - >>> G.add_node(1) - >>> G.add_edge(1, 2) - >>> G.add_node("spam") # adds node "spam" - >>> G.add_nodes_from("spam") # adds 4 nodes: 's', 'p', 'a', 'm' - >>> G.add_edge(3, 'm') - -At this stage the graph ``G`` consists of 8 nodes and 3 edges, as can be seen by: - -.. nbplot:: - - >>> G.number_of_nodes() - 8 - >>> G.number_of_edges() - 3 - -.. note:: - - The order of adjacency reporting (e.g., :meth:`G.adj `, - :meth:`G.successors `, - :meth:`G.predecessors `) is the order of - edge addition. However, the order of G.edges is the order of the adjacencies - which includes both the order of the nodes and each - node's adjacencies. See example below: - -.. nbplot:: - - >>> DG = nx.DiGraph() - >>> DG.add_edge(2, 1) # adds the nodes in order 2, 1 - >>> DG.add_edge(1, 3) - >>> DG.add_edge(2, 4) - >>> DG.add_edge(1, 2) - >>> assert list(DG.successors(2)) == [1, 4] - >>> assert list(DG.edges) == [(2, 1), (2, 4), (1, 3), (1, 2)] - -Examining elements of a graph ------------------------------ - -We can examine the nodes and edges. Four basic graph properties facilitate -reporting: ``G.nodes``, ``G.edges``, ``G.adj`` and ``G.degree``. These -are set-like views of the nodes, edges, neighbors (adjacencies), and degrees -of nodes in a graph. They offer a continually updated read-only view into -the graph structure. They are also dict-like in that you can look up node -and edge data attributes via the views and iterate with data attributes -using methods ``.items()``, ``.data()``. -If you want a specific container type instead of a view, you can specify one. -Here we use lists, though sets, dicts, tuples and other containers may be -better in other contexts. - -.. 
nbplot:: - - >>> list(G.nodes) - [1, 2, 3, 'spam', 's', 'p', 'a', 'm'] - >>> list(G.edges) - [(1, 2), (1, 3), (3, 'm')] - >>> list(G.adj[1]) # or list(G.neighbors(1)) - [2, 3] - >>> G.degree[1] # the number of edges incident to 1 - 2 - -One can specify to report the edges and degree from a subset of all nodes -using an :term:`nbunch`. An *nbunch* is any of: ``None`` (meaning all nodes), -a node, or an iterable container of nodes that is not itself a node in the -graph. - -.. nbplot:: - - >>> G.edges([2, 'm']) - EdgeDataView([(2, 1), ('m', 3)]) - >>> G.degree([2, 3]) - DegreeView({2: 1, 3: 2}) - -Removing elements from a graph ------------------------------- - -One can remove nodes and edges from the graph in a similar fashion to adding. -Use methods -:meth:`Graph.remove_node`, -:meth:`Graph.remove_nodes_from`, -:meth:`Graph.remove_edge` -and -:meth:`Graph.remove_edges_from`, e.g. - -.. nbplot:: - - >>> G.remove_node(2) - >>> G.remove_nodes_from("spam") - >>> list(G.nodes) - [1, 3, 'spam'] - >>> G.remove_edge(1, 3) - -Using the graph constructors ----------------------------- - -Graph objects do not have to be built up incrementally - data specifying -graph structure can be passed directly to the constructors of the various -graph classes. -When creating a graph structure by instantiating one of the graph -classes you can specify data in several formats. - -.. 
nbplot:: - - >>> G.add_edge(1, 2) - >>> H = nx.DiGraph(G) # create a DiGraph using the connections from G - >>> list(H.edges()) - [(1, 2), (2, 1)] - >>> edgelist = [(0, 1), (1, 2), (2, 3)] - >>> H = nx.Graph(edgelist) # create a graph from an edge list - >>> list(H.edges()) - [(0, 1), (1, 2), (2, 3)] - >>> adjacency_dict = {0: (1, 2), 1: (0, 2), 2: (0, 1)} - >>> H = nx.Graph(adjacency_dict) # create a Graph dict mapping nodes to nbrs - >>> list(H.edges()) - [(0, 1), (0, 2), (1, 2)] - -What to use as nodes and edges ------------------------------- - -You might notice that nodes and edges are not specified as NetworkX -objects. This leaves you free to use meaningful items as nodes and -edges. The most common choices are numbers or strings, but a node can -be any hashable object (except ``None``), and an edge can be associated -with any object ``x`` using ``G.add_edge(n1, n2, object=x)``. - -As an example, ``n1`` and ``n2`` could be protein objects from the RCSB Protein -Data Bank, and ``x`` could refer to an XML record of publications detailing -experimental observations of their interaction. - -We have found this power quite useful, but its abuse -can lead to surprising behavior unless one is familiar with Python. -If in doubt, consider using :func:`~relabel.convert_node_labels_to_integers` to obtain -a more traditional graph with integer labels. - -Accessing edges and neighbors ------------------------------ - -In addition to the views :attr:`Graph.edges`, and :attr:`Graph.adj`, -access to edges and neighbors is possible using subscript notation. - -.. nbplot:: - - >>> G = nx.Graph([(1, 2, {"color": "yellow"})]) - >>> G[1] # same as G.adj[1] - AtlasView({2: {'color': 'yellow'}}) - >>> G[1][2] - {'color': 'yellow'} - >>> G.edges[1, 2] - {'color': 'yellow'} - -You can get/set the attributes of an edge using subscript notation -if the edge already exists. - -.. 
nbplot:: - - >>> G.add_edge(1, 3) - >>> G[1][3]['color'] = "blue" - >>> G.edges[1, 2]['color'] = "red" - >>> G.edges[1, 2] - {'color': 'red'} - -Fast examination of all (node, adjacency) pairs is achieved using -``G.adjacency()``, or ``G.adj.items()``. -Note that for undirected graphs, adjacency iteration sees each edge twice. - -.. nbplot:: - - >>> FG = nx.Graph() - >>> FG.add_weighted_edges_from([(1, 2, 0.125), (1, 3, 0.75), (2, 4, 1.2), (3, 4, 0.375)]) - >>> for n, nbrs in FG.adj.items(): - ... for nbr, eattr in nbrs.items(): - ... wt = eattr['weight'] - ... if wt < 0.5: print(f"({n}, {nbr}, {wt:.3})") - (1, 2, 0.125) - (2, 1, 0.125) - (3, 4, 0.375) - (4, 3, 0.375) - -Convenient access to all edges is achieved with the edges property. - -.. nbplot:: - - >>> for (u, v, wt) in FG.edges.data('weight'): - ... if wt < 0.5: - ... print(f"({u}, {v}, {wt:.3})") - (1, 2, 0.125) - (3, 4, 0.375) - -.. _attributes: - -Adding attributes to graphs, nodes, and edges ---------------------------------------------- - -Attributes such as weights, labels, colors, or whatever Python object you like, -can be attached to graphs, nodes, or edges. - -Each graph, node, and edge can hold key/value attribute pairs in an associated -attribute dictionary (the keys must be hashable). By default these are empty, -but attributes can be added or changed using ``add_edge``, ``add_node`` or direct -manipulation of the attribute dictionaries named ``G.graph``, ``G.nodes``, and -``G.edges`` for a graph ``G``. - -Graph attributes -~~~~~~~~~~~~~~~~ - -Assign graph attributes when creating a new graph - -.. nbplot:: - - >>> G = nx.Graph(day="Friday") - >>> G.graph - {'day': 'Friday'} - -Or you can modify attributes later - -.. nbplot:: - - >>> G.graph['day'] = "Monday" - >>> G.graph - {'day': 'Monday'} - -Node attributes -~~~~~~~~~~~~~~~ - -Add node attributes using ``add_node()``, ``add_nodes_from()``, or ``G.nodes`` - -.. 
nbplot:: - - >>> G.add_node(1, time='5pm') - >>> G.add_nodes_from([3], time='2pm') - >>> G.nodes[1] - {'time': '5pm'} - >>> G.nodes[1]['room'] = 714 - >>> G.nodes.data() - NodeDataView({1: {'time': '5pm', 'room': 714}, 3: {'time': '2pm'}}) - -Note that adding a node to ``G.nodes`` does not add it to the graph, use -``G.add_node()`` to add new nodes. Similarly for edges. - -Edge Attributes -~~~~~~~~~~~~~~~ - -Add/change edge attributes using ``add_edge()``, ``add_edges_from()``, -or subscript notation. - -.. nbplot:: - - >>> G.add_edge(1, 2, weight=4.7 ) - >>> G.add_edges_from([(3, 4), (4, 5)], color='red') - >>> G.add_edges_from([(1, 2, {'color': 'blue'}), (2, 3, {'weight': 8})]) - >>> G[1][2]['weight'] = 4.7 - >>> G.edges[3, 4]['weight'] = 4.2 - -The special attribute ``weight`` should be numeric as it is used by -algorithms requiring weighted edges. - -Directed graphs ---------------- - -The :class:`DiGraph` class provides additional methods and properties specific -to directed edges, e.g., -:attr:`DiGraph.out_edges`, :attr:`DiGraph.in_degree`, -`DiGraph.predecessors`, `DiGraph.successors` etc. -To allow algorithms to work with both classes easily, the directed versions of -:meth:`neighbors ` is equivalent to -`successors ` while `~DiGraph.degree` reports the sum -of `~DiGraph.in_degree` and `~DiGraph.out_degree` even though that may feel inconsistent at times. - -.. nbplot:: - - >>> DG = nx.DiGraph() - >>> DG.add_weighted_edges_from([(1, 2, 0.5), (3, 1, 0.75)]) - >>> DG.out_degree(1, weight='weight') - 0.5 - >>> DG.degree(1, weight='weight') - 1.25 - >>> list(DG.successors(1)) - [2] - >>> list(DG.neighbors(1)) - [2] - -Some algorithms work only for directed graphs and others are not well -defined for directed graphs. Indeed the tendency to lump directed -and undirected graphs together is dangerous. If you want to treat -a directed graph as undirected for some measurement you should probably -convert it using :meth:`Graph.to_undirected` or with - -.. 
nbplot:: - - >>> H = nx.Graph(G) # create an undirected graph H from a directed graph G - -Multigraphs ------------ - -NetworkX provides classes for graphs which allow multiple edges -between any pair of nodes. The :class:`MultiGraph` and -:class:`MultiDiGraph` -classes allow you to add the same edge twice, possibly with different -edge data. This can be powerful for some applications, but many -algorithms are not well defined on such graphs. -Where results are well defined, -e.g., :meth:`MultiGraph.degree` we provide the function. Otherwise you -should convert to a standard graph in a way that makes the measurement -well defined. - -.. nbplot:: - - >>> MG = nx.MultiGraph() - >>> MG.add_weighted_edges_from([(1, 2, 0.5), (1, 2, 0.75), (2, 3, 0.5)]) - >>> dict(MG.degree(weight='weight')) - {1: 1.25, 2: 1.75, 3: 0.5} - >>> GG = nx.Graph() - >>> for n, nbrs in MG.adjacency(): - ... for nbr, edict in nbrs.items(): - ... minvalue = min([d['weight'] for d in edict.values()]) - ... GG.add_edge(n, nbr, weight = minvalue) - ... - >>> nx.shortest_path(GG, 1, 3) - [1, 2, 3] - -Graph generators and graph operations -------------------------------------- - -In addition to constructing graphs node-by-node or edge-by-edge, they -can also be generated by - -1. Applying classic graph operations, such as: -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. autosummary:: - - ~networkx.classes.function.subgraph - ~networkx.algorithms.operators.binary.union - ~networkx.algorithms.operators.binary.disjoint_union - ~networkx.algorithms.operators.product.cartesian_product - ~networkx.algorithms.operators.binary.compose - ~networkx.algorithms.operators.unary.complement - ~networkx.classes.function.create_empty_copy - ~networkx.classes.function.to_undirected - ~networkx.classes.function.to_directed - -2. Using a call to one of the classic small graphs, e.g., -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. 
autosummary:: - - ~networkx.generators.small.petersen_graph - ~networkx.generators.small.tutte_graph - ~networkx.generators.small.sedgewick_maze_graph - ~networkx.generators.small.tetrahedral_graph - -3. Using a (constructive) generator for a classic graph, e.g., -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. autosummary:: - - ~networkx.generators.classic.complete_graph - ~networkx.algorithms.bipartite.generators.complete_bipartite_graph - ~networkx.generators.classic.barbell_graph - ~networkx.generators.classic.lollipop_graph - -like so: - -.. nbplot:: - - >>> K_5 = nx.complete_graph(5) - >>> K_3_5 = nx.complete_bipartite_graph(3, 5) - >>> barbell = nx.barbell_graph(10, 10) - >>> lollipop = nx.lollipop_graph(10, 20) - -4. Using a stochastic graph generator, e.g, -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. autosummary:: - - ~networkx.generators.random_graphs.erdos_renyi_graph - ~networkx.generators.random_graphs.watts_strogatz_graph - ~networkx.generators.random_graphs.barabasi_albert_graph - ~networkx.generators.random_graphs.random_lobster - -like so: - -.. nbplot:: - - >>> er = nx.erdos_renyi_graph(100, 0.15) - >>> ws = nx.watts_strogatz_graph(30, 3, 0.1) - >>> ba = nx.barabasi_albert_graph(100, 5) - >>> red = nx.random_lobster(100, 0.9, 0.9) - -5. Reading a graph stored in a file using common graph formats -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -NetworkX supports many popular formats, such as edge lists, adjacency lists, -GML, GraphML, LEDA and others. - -.. nbplot:: - - >>> nx.write_gml(red, "path.to.file") - >>> mygraph = nx.read_gml("path.to.file") - -For details on graph formats see :doc:`/reference/readwrite/index` -and for graph generator functions see :doc:`/reference/generators` - -Analyzing graphs ----------------- - -The structure of ``G`` can be analyzed using various graph-theoretic -functions such as: - -.. 
nbplot:: - - >>> G = nx.Graph() - >>> G.add_edges_from([(1, 2), (1, 3)]) - >>> G.add_node("spam") # adds node "spam" - >>> list(nx.connected_components(G)) - [{1, 2, 3}, {'spam'}] - >>> sorted(d for n, d in G.degree()) - [0, 1, 1, 2] - >>> nx.clustering(G) - {1: 0, 2: 0, 3: 0, 'spam': 0} - -Some functions with large output iterate over (node, value) 2-tuples. -These are easily stored in a `dict` structure if you desire. - -.. nbplot:: - - >>> sp = dict(nx.all_pairs_shortest_path(G)) - >>> sp[3] - {3: [3], 1: [3, 1], 2: [3, 1, 2]} - -See :doc:`/reference/algorithms/index` for details on graph algorithms -supported. - -Drawing graphs --------------- - -NetworkX is not primarily a graph drawing package but basic drawing with -Matplotlib as well as an interface to use the open source Graphviz software -package are included. These are part of the :doc:`networkx.drawing ` -module and will be imported if possible. - -First import Matplotlib's plot interface (pylab works too) - -.. nbplot:: - - >>> import matplotlib.pyplot as plt - -To test if the import of `~networkx.drawing.nx_pylab` was successful draw ``G`` -using one of - -.. nbplot:: - - >>> G = nx.petersen_graph() - >>> subax1 = plt.subplot(121) - >>> nx.draw(G, with_labels=True, font_weight='bold') - >>> subax2 = plt.subplot(122) - >>> nx.draw_shell(G, nlist=[range(5, 10), range(5)], with_labels=True, font_weight='bold') - -when drawing to an interactive display. Note that you may need to issue a -Matplotlib - ->>> plt.show() # doctest: +SKIP - -command if you are not using matplotlib in interactive mode. - -.. nbplot:: - - >>> options = { - ... 'node_color': 'black', - ... 'node_size': 100, - ... 'width': 3, - ... 
} - >>> subax1 = plt.subplot(221) - >>> nx.draw_random(G, **options) - >>> subax2 = plt.subplot(222) - >>> nx.draw_circular(G, **options) - >>> subax3 = plt.subplot(223) - >>> nx.draw_spectral(G, **options) - >>> subax4 = plt.subplot(224) - >>> nx.draw_shell(G, nlist=[range(5,10), range(5)], **options) - -You can find additional options via :func:`~drawing.nx_pylab.draw_networkx` and -layouts via the :mod:`layout module`. -You can use multiple shells with :func:`~drawing.nx_pylab.draw_shell`. - -.. nbplot:: - - >>> G = nx.dodecahedral_graph() - >>> shells = [[2, 3, 4, 5, 6], [8, 1, 0, 19, 18, 17, 16, 15, 14, 7], [9, 10, 11, 12, 13]] - >>> nx.draw_shell(G, nlist=shells, **options) - -To save drawings to a file, use, for example - ->>> nx.draw(G) ->>> plt.savefig("path.png") - -This function writes to the file ``path.png`` in the local directory. If Graphviz and -PyGraphviz or pydot, are available on your system, you can also use -`networkx.drawing.nx_agraph.graphviz_layout` or -`networkx.drawing.nx_pydot.graphviz_layout` to get the node positions, or write -the graph in dot format for further processing. - ->>> from networkx.drawing.nx_pydot import write_dot ->>> pos = nx.nx_agraph.graphviz_layout(G) ->>> nx.draw(G, pos=pos) ->>> write_dot(G, 'file.dot') - -See :doc:`/reference/drawing` for additional details. - -.. code-links:: - -NX-Guides ---------- -If you are interested in learning more about NetworkX, graph theory and network analysis -then you should check out :doc:`nx-guides `. There you can find tutorials, -real-world applications and in-depth examinations of graphs and network algorithms. -All the material is official and was developed and curated by the NetworkX community. 
diff --git a/examples/3d_drawing/plot_3d_rotation_animation.py b/examples/3d_drawing/plot_3d_rotation_animation.py index 2849b378f0a..9382fd5bf1f 100644 --- a/examples/3d_drawing/plot_3d_rotation_animation.py +++ b/examples/3d_drawing/plot_3d_rotation_animation.py @@ -1,6 +1,6 @@ """ ========================================= -Animations of 3D rotation and random walk. +Animations of 3D rotation and random walk ========================================= Examples of 3D plots of a graph in the 3D spectral layout with animation. Following diff --git a/examples/algorithms/plot_cycle_detection.py b/examples/algorithms/plot_cycle_detection.py new file mode 100644 index 00000000000..cd22f750736 --- /dev/null +++ b/examples/algorithms/plot_cycle_detection.py @@ -0,0 +1,29 @@ +""" +=============== +Cycle Detection +=============== + +This example demonstrates the use of ``nx.find_cycle`` to find a single, +arbitrary cycle in a graph. + +Other functions like ``nx.simple_cycles`` and ``nx.cycle_basis`` can be used to +find all cycles or a cycle basis. 
+""" +import networkx as nx +import matplotlib.pyplot as plt + +# Create a simple directed graph with a cycle +G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 2), (3, 5), (3, 2), (1, 5)]) + +# Draw the graph +pos = nx.spring_layout(G, seed=8020) +nx.draw(G, pos, with_labels=True) + +# The `orientation` parameter can be used to determine how directed edges are +# treated and the reporting of edge direction in the cycle +cycle = nx.find_cycle(G, orientation="original") +print(cycle) + +# Highlight the cycle in red +nx.draw_networkx_edges(G, pos, edgelist=cycle, edge_color="r", width=2) +plt.show() diff --git a/examples/algorithms/plot_greedy_coloring.py b/examples/algorithms/plot_greedy_coloring.py index 93a7f4d2974..85ec0a07346 100644 --- a/examples/algorithms/plot_greedy_coloring.py +++ b/examples/algorithms/plot_greedy_coloring.py @@ -3,7 +3,7 @@ Greedy Coloring =============== -We attempt to color a graph using as few colors as possible, where no neighbours +We attempt to color a graph using as few colors as possible, where no neighbors of a node can have same color as the node itself. 
""" import numpy as np diff --git a/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py b/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py index f2787a2b4c8..197b86d1496 100644 --- a/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py +++ b/examples/algorithms/plot_image_segmentation_spectral_graph_partiion.py @@ -1,12 +1,16 @@ """ -===================================================== +================================================== Image Segmentation via Spectral Graph Partitioning -===================================================== -Example of partitioning a undirected graph obtained by `k-neighbors` +================================================== + +Example of partitioning a undirected graph obtained by ``k-neighbors`` from an RGB image into two subgraphs using spectral clustering illustrated by 3D plots of the original labeled data points in RGB 3D space vs the bi-partition marking performed by graph partitioning via spectral clustering. -All 3D plots and animations use the 3D spectral layout. +All 3D plots use the 3D spectral layout. + +See :ref:`sphx_glr_auto_examples_3d_drawing` for recipes to create 3D animations +from these visualizations. """ import numpy as np import networkx as nx @@ -15,7 +19,7 @@ from matplotlib.lines import Line2D from sklearn.cluster import SpectralClustering -# sphinx_gallery_thumbnail_number = 4 +# sphinx_gallery_thumbnail_number = 3 ############################################################################### # Create an example 3D dataset "The Rings". @@ -129,40 +133,6 @@ def _scatter_plot(ax, X, array_of_markers, axis_plot=True): plt.show() -############################################################################### -# Generate the rotating animation of the clustered data. -# ------------------------------------------------------ -# The data points are marked according to clustering and rotated -# in the 3D animation. 
- - -def _init(): - ax.clear() - _scatter_plot(ax, X, array_of_markers) - ax.grid(False) - ax.set_axis_off() - ax.view_init(elev=6.0, azim=-22.0) - - -def _frame_update(index): - ax.view_init(6.0 + index * 0.2, -22.0 + index * 0.5) - - -fig = plt.figure(layout="tight") -ax = fig.add_subplot(111, projection="3d") -ax.grid(False) -ax.set_axis_off() -ani = animation.FuncAnimation( - fig, - _frame_update, - init_func=_init, - interval=50, - cache_frame_data=False, - frames=100, -) - -plt.show() - ############################################################################### # Generate the plots of the graph. @@ -217,29 +187,3 @@ def _3d_graph_plot(ax): _3d_graph_plot(ax1) plt.tight_layout() plt.show() - -############################################################################### -# Generate the rotating 3D animation of the graph. -# ------------------------------------------------ -# The nodes of the graph are marked according to clustering. -# The graph is rotated in the 3D animation. - - -def _frame_update(index): - ax.view_init(100.0 + index * 0.7, -100.0 + index * 0.5) - - -fig = plt.figure(layout="tight") -ax = fig.add_subplot(111, projection="3d") -ax.grid(False) -ax.set_axis_off() -_3d_graph_plot(ax) -ani = animation.FuncAnimation( - fig, - _frame_update, - interval=50, - cache_frame_data=False, - frames=100, -) - -plt.show() diff --git a/examples/drawing/plot_multigraphs.py b/examples/drawing/plot_multigraphs.py new file mode 100644 index 00000000000..31886de568a --- /dev/null +++ b/examples/drawing/plot_multigraphs.py @@ -0,0 +1,70 @@ +""" +====================================== +Plotting MultiDiGraph Edges and Labels +====================================== + +This example shows how to plot edges and labels for a MultiDiGraph class object. +The same applies for DiGraph and MultiGraph class objects. + +4 Graphs are created, each with different number of edges between 2 nodes. +The final graph contains 4 edges in every node pair and 2 self loops per node. 
+ +MultiGraph can have unlimited multi-edges that can be drawn +with different angles and theoretically node labels can remain visible. + +Multi-self-loops can be drawn in 4 directions of the node. +The subsequent loops will result in overlaps. +""" +import itertools as it +import numpy as np +import networkx as nx +import matplotlib.pyplot as plt + + +def draw_labeled_multigraph(G, attr_name, ax=None): + """ + Length of connectionstyle must be at least that of a maximum number of edges + between pair of nodes. This number is maximum one-sided connections + for directed graph and maximum total connections for undirected graph. + """ + # Works with arc3 and angle3 connectionstyles + connectionstyle = [f"arc3,rad={r}" for r in it.accumulate([0.15] * 4)] + # connectionstyle = [f"angle3,angleA={r}" for r in it.accumulate([30] * 4)] + + pos = nx.shell_layout(G) + nx.draw_networkx_nodes(G, pos, ax=ax) + nx.draw_networkx_labels(G, pos, font_size=20, ax=ax) + nx.draw_networkx_edges( + G, pos, edge_color="grey", connectionstyle=connectionstyle, ax=ax + ) + + labels = { + tuple(edge): f"{attr_name}={attrs[attr_name]}" + for *edge, attrs in G.edges(keys=True, data=True) + } + nx.draw_networkx_edge_labels( + G, + pos, + labels, + connectionstyle=connectionstyle, + label_pos=0.3, + font_color="blue", + bbox={"alpha": 0}, + ax=ax, + ) + + +nodes = "ABC" +prod = list(it.product(nodes, repeat=2)) +pair_dict = {f"Product x {i}": prod * i for i in range(1, 5)} + + +fig, axes = plt.subplots(2, 2) +for (name, pairs), ax in zip(pair_dict.items(), np.ravel(axes)): + G = nx.MultiDiGraph() + for i, (u, v) in enumerate(pairs): + G.add_edge(u, v, w=round(i / 3, 2)) + draw_labeled_multigraph(G, "w", ax) + ax.set_title(name) +fig.tight_layout() +plt.show() diff --git a/examples/geospatial/plot_delaunay.py b/examples/geospatial/plot_delaunay.py index edafe0635e1..799381fff74 100644 --- a/examples/geospatial/plot_delaunay.py +++ b/examples/geospatial/plot_delaunay.py @@ -58,7 +58,11 @@ # Now, 
we can plot with a nice basemap. ax = cells.plot(facecolor="lightblue", alpha=0.50, edgecolor="cornsilk", linewidth=2) -add_basemap(ax) +try: # Try-except for issues with timeout/parsing failures in CI + add_basemap(ax) +except: + pass + ax.axis("off") nx.draw( delaunay_graph, diff --git a/examples/geospatial/plot_lines.py b/examples/geospatial/plot_lines.py index 616db374369..3de10223b3f 100644 --- a/examples/geospatial/plot_lines.py +++ b/examples/geospatial/plot_lines.py @@ -76,7 +76,10 @@ for i, facet in enumerate(ax): facet.set_title(("Streets", "Graph")[i]) facet.axis("off") - add_basemap(facet) + try: # For issues with downloading/parsing in CI + add_basemap(facet) + except: + pass nx.draw( G_primal, {n: [n[0], n[1]] for n in list(G_primal.nodes)}, ax=ax[1], node_size=50 ) @@ -92,7 +95,10 @@ for i, facet in enumerate(ax): facet.set_title(("Streets", "Graph")[i]) facet.axis("off") - add_basemap(facet) + try: # For issues with downloading/parsing in CI + add_basemap(facet) + except: + pass nx.draw(G_dual, {n: [n[0], n[1]] for n in list(G_dual.nodes)}, ax=ax[1], node_size=50) plt.show() diff --git a/examples/geospatial/plot_points.py b/examples/geospatial/plot_points.py index 7d9d99df826..7517069cafa 100644 --- a/examples/geospatial/plot_points.py +++ b/examples/geospatial/plot_points.py @@ -51,7 +51,10 @@ f, ax = plt.subplots(1, 2, figsize=(8, 4)) for i, facet in enumerate(ax): cases.plot(marker=".", color="orangered", ax=facet) - add_basemap(facet) + try: # For issues with downloading/parsing basemaps in CI + add_basemap(facet) + except: + pass facet.set_title(("KNN-3", "50-meter Distance Band")[i]) facet.axis("off") nx.draw(knn_graph, positions, ax=ax[0], node_size=5, node_color="b") diff --git a/networkx/__init__.py b/networkx/__init__.py index 34bb47f8cde..72d2ef6562f 100644 --- a/networkx/__init__.py +++ b/networkx/__init__.py @@ -17,7 +17,7 @@ from networkx.exception import * from networkx import utils -from networkx.utils.backends import _dispatch +from 
networkx.utils.backends import _dispatchable from networkx import classes from networkx.classes import filters diff --git a/networkx/algorithms/__init__.py b/networkx/algorithms/__init__.py index db6d6cebb19..56bfb14afdf 100644 --- a/networkx/algorithms/__init__.py +++ b/networkx/algorithms/__init__.py @@ -1,6 +1,7 @@ from networkx.algorithms.assortativity import * from networkx.algorithms.asteroidal import * from networkx.algorithms.boundary import * +from networkx.algorithms.broadcasting import * from networkx.algorithms.bridges import * from networkx.algorithms.chains import * from networkx.algorithms.centrality import * @@ -37,6 +38,7 @@ from networkx.algorithms.operators import * from networkx.algorithms.planarity import * from networkx.algorithms.planar_drawing import * +from networkx.algorithms.polynomials import * from networkx.algorithms.reciprocity import * from networkx.algorithms.regular import * from networkx.algorithms.richclub import * @@ -57,7 +59,6 @@ from networkx.algorithms.voronoi import * from networkx.algorithms.walks import * from networkx.algorithms.wiener import * -from networkx.algorithms.polynomials import * # Make certain subpackages available to the user as direct imports from # the `networkx` namespace. diff --git a/networkx/algorithms/approximation/clique.py b/networkx/algorithms/approximation/clique.py index 4a3d8beba61..56443068633 100644 --- a/networkx/algorithms/approximation/clique.py +++ b/networkx/algorithms/approximation/clique.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def maximum_independent_set(G): """Returns an approximate maximum independent set. 
@@ -70,7 +70,7 @@ def maximum_independent_set(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def max_clique(G): r"""Find the Maximum Clique @@ -129,7 +129,7 @@ def max_clique(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def clique_removal(G): r"""Repeatedly remove cliques from the graph. @@ -182,7 +182,7 @@ def clique_removal(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def large_clique_size(G): """Find the size of a large clique in a graph. diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py index e15ac68460b..f95c86d2a91 100644 --- a/networkx/algorithms/approximation/clustering_coefficient.py +++ b/networkx/algorithms/approximation/clustering_coefficient.py @@ -6,7 +6,7 @@ @not_implemented_for("directed") @py_random_state(2) -@nx._dispatch(name="approximate_average_clustering") +@nx._dispatchable(name="approximate_average_clustering") def average_clustering(G, trials=1000, seed=None): r"""Estimates the average clustering coefficient of G. diff --git a/networkx/algorithms/approximation/connectivity.py b/networkx/algorithms/approximation/connectivity.py index bc5e7125937..a2214ed128b 100644 --- a/networkx/algorithms/approximation/connectivity.py +++ b/networkx/algorithms/approximation/connectivity.py @@ -12,7 +12,7 @@ ] -@nx._dispatch(name="approximate_local_node_connectivity") +@nx._dispatchable(name="approximate_local_node_connectivity") def local_node_connectivity(G, source, target, cutoff=None): """Compute node connectivity between source and target. 
@@ -108,7 +108,7 @@ def local_node_connectivity(G, source, target, cutoff=None): return K -@nx._dispatch(name="approximate_node_connectivity") +@nx._dispatchable(name="approximate_node_connectivity") def node_connectivity(G, s=None, t=None): r"""Returns an approximation for node connectivity for a graph or digraph G. @@ -214,7 +214,7 @@ def neighbors(v): return K -@nx._dispatch(name="approximate_all_pairs_node_connectivity") +@nx._dispatchable(name="approximate_all_pairs_node_connectivity") def all_pairs_node_connectivity(G, nbunch=None, cutoff=None): """Compute node connectivity between all pairs of nodes. diff --git a/networkx/algorithms/approximation/distance_measures.py b/networkx/algorithms/approximation/distance_measures.py index 9b817b3317c..a6fece661b1 100644 --- a/networkx/algorithms/approximation/distance_measures.py +++ b/networkx/algorithms/approximation/distance_measures.py @@ -7,7 +7,7 @@ @py_random_state(1) -@nx._dispatch(name="approximate_diameter") +@nx._dispatchable(name="approximate_diameter") def diameter(G, seed=None): """Returns a lower bound on the diameter of the graph G. diff --git a/networkx/algorithms/approximation/dominating_set.py b/networkx/algorithms/approximation/dominating_set.py index 97edb172f94..691564cf9dc 100644 --- a/networkx/algorithms/approximation/dominating_set.py +++ b/networkx/algorithms/approximation/dominating_set.py @@ -20,7 +20,7 @@ # TODO Why doesn't this algorithm work for directed graphs? @not_implemented_for("directed") -@nx._dispatch(node_attrs="weight") +@nx._dispatchable(node_attrs="weight") def min_weighted_dominating_set(G, weight=None): r"""Returns a dominating set that approximates the minimum weight node dominating set. @@ -101,7 +101,7 @@ def _cost(node_and_neighborhood): return dom_set -@nx._dispatch +@nx._dispatchable def min_edge_dominating_set(G): r"""Returns minimum cardinality edge dominating set. 
diff --git a/networkx/algorithms/approximation/kcomponents.py b/networkx/algorithms/approximation/kcomponents.py index a5df6cc686c..b540bd5f4a6 100644 --- a/networkx/algorithms/approximation/kcomponents.py +++ b/networkx/algorithms/approximation/kcomponents.py @@ -14,7 +14,7 @@ @not_implemented_for("directed") -@nx._dispatch(name="approximate_k_components") +@nx._dispatchable(name="approximate_k_components") def k_components(G, min_density=0.95): r"""Returns the approximate k-component structure of a graph G. diff --git a/networkx/algorithms/approximation/matching.py b/networkx/algorithms/approximation/matching.py index 8f1c3501666..3a7c8a39b2e 100644 --- a/networkx/algorithms/approximation/matching.py +++ b/networkx/algorithms/approximation/matching.py @@ -13,7 +13,7 @@ __all__ = ["min_maximal_matching"] -@nx._dispatch +@nx._dispatchable def min_maximal_matching(G): r"""Returns the minimum maximal matching of G. That is, out of all maximal matchings of the graph G, the smallest is returned. diff --git a/networkx/algorithms/approximation/maxcut.py b/networkx/algorithms/approximation/maxcut.py index ec62b346bb4..0c30d224d2f 100644 --- a/networkx/algorithms/approximation/maxcut.py +++ b/networkx/algorithms/approximation/maxcut.py @@ -4,9 +4,10 @@ __all__ = ["randomized_partitioning", "one_exchange"] -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @py_random_state(1) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def randomized_partitioning(G, seed=None, p=0.5, weight=None): """Compute a random partitioning of the graph nodes and its cut value. 
@@ -49,9 +50,10 @@ def _swap_node_partition(cut, node): return cut - {node} if node in cut else cut.union({node}) -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @py_random_state(2) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def one_exchange(G, initial_cut=None, seed=None, weight=None): """Compute a partitioning of the graphs nodes and the corresponding cut value. diff --git a/networkx/algorithms/approximation/ramsey.py b/networkx/algorithms/approximation/ramsey.py index 6f45c4f4971..5cb9fda0449 100644 --- a/networkx/algorithms/approximation/ramsey.py +++ b/networkx/algorithms/approximation/ramsey.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def ramsey_R2(G): r"""Compute the largest clique and largest independent set in `G`. diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index 50aea045fea..af5916442b9 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -7,7 +7,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def metric_closure(G, weight="weight"): """Return the metric closure of a graph. @@ -126,7 +126,7 @@ def _remove_nonterminal_leaves(G, terminals): @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def steiner_tree(G, terminal_nodes, weight="weight", method=None): r"""Return an approximation to the minimum Steiner tree of a graph. 
diff --git a/networkx/algorithms/approximation/tests/test_maxcut.py b/networkx/algorithms/approximation/tests/test_maxcut.py index 39291fbf14d..ef0424401e4 100644 --- a/networkx/algorithms/approximation/tests/test_maxcut.py +++ b/networkx/algorithms/approximation/tests/test_maxcut.py @@ -1,9 +1,21 @@ import random +import pytest + import networkx as nx from networkx.algorithms.approximation import maxcut +@pytest.mark.parametrize( + "f", (nx.approximation.randomized_partitioning, nx.approximation.one_exchange) +) +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(f, graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + f(G) + + def _is_valid_cut(G, set1, set2): union = set1.union(set2) assert union == set(G.nodes) diff --git a/networkx/algorithms/approximation/tests/test_traveling_salesman.py b/networkx/algorithms/approximation/tests/test_traveling_salesman.py index ccb553e1cc7..539b243549a 100644 --- a/networkx/algorithms/approximation/tests/test_traveling_salesman.py +++ b/networkx/algorithms/approximation/tests/test_traveling_salesman.py @@ -756,9 +756,15 @@ def fixed_asadpour(G, weight): # the shortest path between those vertices, allowing vertices to appear more # than once. # - # However, we are using a fixed random number generator so we know what the - # expected tour is. - expected_tours = [[1, 4, 5, 0, 2, 3, 2, 1], [3, 2, 0, 1, 4, 5, 3]] + # Even though we are using a fixed seed, multiple tours have been known to + # be returned. The first two are from the original development of this test, + # and the third one from issue #5913 on GitHub. If other tours are returned, + # add it to the list of expected tours. 
+ expected_tours = [ + [1, 4, 5, 0, 2, 3, 2, 1], + [3, 2, 0, 1, 4, 5, 3], + [3, 2, 1, 0, 5, 4, 3], + ] assert tour in expected_tours diff --git a/networkx/algorithms/approximation/traveling_salesman.py b/networkx/algorithms/approximation/traveling_salesman.py index 2164ea25db6..7501daf41f5 100644 --- a/networkx/algorithms/approximation/traveling_salesman.py +++ b/networkx/algorithms/approximation/traveling_salesman.py @@ -124,7 +124,7 @@ def move_one_node(soln, seed): @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def christofides(G, weight="weight", tree=None): """Approximate a solution of the traveling salesman problem @@ -197,7 +197,7 @@ def _shortcutting(circuit): return nodes -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def traveling_salesman_problem(G, weight="weight", nodes=None, cycle=True, method=None): """Find the shortest path in `G` connecting specified nodes @@ -340,7 +340,7 @@ def traveling_salesman_problem(G, weight="weight", nodes=None, cycle=True, metho @not_implemented_for("undirected") @py_random_state(2) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def asadpour_atsp(G, weight="weight", seed=None, source=None): """ Returns an approximate solution to the traveling salesman problem. @@ -407,8 +407,10 @@ def asadpour_atsp(G, weight="weight", seed=None, source=None): >>> import networkx as nx >>> import networkx.algorithms.approximation as approx >>> G = nx.complete_graph(3, create_using=nx.DiGraph) - >>> nx.set_edge_attributes(G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight") - >>> tour = approx.asadpour_atsp(G,source=0) + >>> nx.set_edge_attributes( + ... G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight" + ... 
) + >>> tour = approx.asadpour_atsp(G, source=0) >>> tour [0, 2, 1, 0] """ @@ -490,7 +492,7 @@ def asadpour_atsp(G, weight="weight", seed=None, source=None): return _shortcutting(circuit) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def held_karp_ascent(G, weight="weight"): """ Minimizes the Held-Karp relaxation of the TSP for `G` @@ -681,7 +683,9 @@ def direction_of_ascent(): a_eq[n_count][arb_count] = deg - 2 n_count -= 1 a_eq[len(G)][arb_count] = 1 - program_result = optimize.linprog(c, A_eq=a_eq, b_eq=b_eq) + program_result = optimize.linprog( + c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm" + ) # If the constants exist, then the direction of ascent doesn't if program_result.success: # There is no direction of ascent @@ -801,7 +805,7 @@ def find_epsilon(k, d): return next(k_max.__iter__()).size(weight), z_star -@nx._dispatch +@nx._dispatchable def spanning_tree_distribution(G, z): """ Find the asadpour exponential distribution of spanning trees. @@ -912,7 +916,7 @@ def q(e): return gamma -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def greedy_tsp(G, weight="weight", source=None): """Return a low cost cycle starting at `source` and its cost. @@ -951,11 +955,22 @@ def greedy_tsp(G, weight="weight", source=None): -------- >>> from networkx.algorithms import approximation as approx >>> G = nx.DiGraph() - >>> G.add_weighted_edges_from({ - ... ("A", "B", 3), ("A", "C", 17), ("A", "D", 14), ("B", "A", 3), - ... ("B", "C", 12), ("B", "D", 16), ("C", "A", 13),("C", "B", 12), - ... ("C", "D", 4), ("D", "A", 14), ("D", "B", 15), ("D", "C", 2) - ... }) + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... 
) >>> cycle = approx.greedy_tsp(G, source="D") >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) >>> cycle @@ -1005,7 +1020,7 @@ def greedy_tsp(G, weight="weight", source=None): @py_random_state(9) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def simulated_annealing_tsp( G, init_cycle, @@ -1114,11 +1129,22 @@ def simulated_annealing_tsp( -------- >>> from networkx.algorithms import approximation as approx >>> G = nx.DiGraph() - >>> G.add_weighted_edges_from({ - ... ("A", "B", 3), ("A", "C", 17), ("A", "D", 14), ("B", "A", 3), - ... ("B", "C", 12), ("B", "D", 16), ("C", "A", 13),("C", "B", 12), - ... ("C", "D", 4), ("D", "A", 14), ("D", "B", 15), ("D", "C", 2) - ... }) + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... ) >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D") >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) >>> cycle @@ -1224,7 +1250,7 @@ def simulated_annealing_tsp( @py_random_state(9) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def threshold_accepting_tsp( G, init_cycle, @@ -1331,11 +1357,22 @@ def threshold_accepting_tsp( -------- >>> from networkx.algorithms import approximation as approx >>> G = nx.DiGraph() - >>> G.add_weighted_edges_from({ - ... ("A", "B", 3), ("A", "C", 17), ("A", "D", 14), ("B", "A", 3), - ... ("B", "C", 12), ("B", "D", 16), ("C", "A", 13),("C", "B", 12), - ... ("C", "D", 4), ("D", "A", 14), ("D", "B", 15), ("D", "C", 2) - ... }) + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... 
("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... ) >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D") >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) >>> cycle diff --git a/networkx/algorithms/approximation/treewidth.py b/networkx/algorithms/approximation/treewidth.py index ce673b6eda4..31d73f63682 100644 --- a/networkx/algorithms/approximation/treewidth.py +++ b/networkx/algorithms/approximation/treewidth.py @@ -41,7 +41,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def treewidth_min_degree(G): """Returns a treewidth decomposition using the Minimum Degree heuristic. @@ -65,12 +65,12 @@ def treewidth_min_degree(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def treewidth_min_fill_in(G): """Returns a treewidth decomposition using the Minimum Fill-in heuristic. The heuristic chooses a node from the graph, where the number of edges - added turning the neighbourhood of the chosen node into clique is as + added turning the neighborhood of the chosen node into clique is as small as possible. Parameters @@ -89,7 +89,7 @@ class MinDegreeHeuristic: """Implements the Minimum Degree heuristic. The heuristic chooses the nodes according to their degree - (number of neighbours), i.e., first the node with the lowest degree is + (number of neighbors), i.e., first the node with the lowest degree is chosen, then the graph is updated and the corresponding node is removed. Next, a new node with the lowest degree is chosen, and so on. """ @@ -136,7 +136,7 @@ def min_fill_in_heuristic(graph): """Implements the Minimum Degree heuristic. 
Returns the node from the graph, where the number of edges added when - turning the neighbourhood of the chosen node into clique is as small as + turning the neighborhood of the chosen node into clique is as small as possible. This algorithm chooses the nodes using the Minimum Fill-In heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses additional constant memory.""" @@ -177,7 +177,7 @@ def min_fill_in_heuristic(graph): return min_fill_in_node -@nx._dispatch +@nx._dispatchable(returns_graph=True) def treewidth_decomp(G, heuristic=min_fill_in_heuristic): """Returns a treewidth decomposition using the passed heuristic. @@ -201,7 +201,7 @@ def treewidth_decomp(G, heuristic=min_fill_in_heuristic): # get first node from heuristic elim_node = heuristic(graph) while elim_node is not None: - # connect all neighbours with each other + # connect all neighbors with each other nbrs = graph[elim_node] for u, v in itertools.permutations(nbrs, 2): if v not in graph[u]: diff --git a/networkx/algorithms/approximation/vertex_cover.py b/networkx/algorithms/approximation/vertex_cover.py index dbd7a123d02..c71399ebcc9 100644 --- a/networkx/algorithms/approximation/vertex_cover.py +++ b/networkx/algorithms/approximation/vertex_cover.py @@ -12,7 +12,7 @@ __all__ = ["min_weighted_vertex_cover"] -@nx._dispatch(node_attrs="weight") +@nx._dispatchable(node_attrs="weight") def min_weighted_vertex_cover(G, weight=None): r"""Returns an approximate minimum weighted vertex cover. 
diff --git a/networkx/algorithms/assortativity/connectivity.py b/networkx/algorithms/assortativity/connectivity.py index bd433ded595..c3fde0da68a 100644 --- a/networkx/algorithms/assortativity/connectivity.py +++ b/networkx/algorithms/assortativity/connectivity.py @@ -5,7 +5,7 @@ __all__ = ["average_degree_connectivity"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_degree_connectivity( G, source="in+out", target="in+out", nodes=None, weight=None ): diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py index 35ea78d6d52..170d219a5d4 100644 --- a/networkx/algorithms/assortativity/correlation.py +++ b/networkx/algorithms/assortativity/correlation.py @@ -15,7 +15,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. @@ -94,13 +94,13 @@ def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None else: degrees = {d for _, d in G.degree(nodes, weight=weight)} - mapping = {d: i for i, d, in enumerate(degrees)} + mapping = {d: i for i, d in enumerate(degrees)} M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping) return _numeric_ac(M, mapping=mapping) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. 
@@ -156,10 +156,10 @@ def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, node xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) x, y = zip(*xy) - return sp.stats.pearsonr(x, y)[0] + return float(sp.stats.pearsonr(x, y)[0]) -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def attribute_assortativity_coefficient(G, attribute, nodes=None): """Compute assortativity for node attributes. @@ -206,7 +206,7 @@ def attribute_assortativity_coefficient(G, attribute, nodes=None): return attribute_ac(M) -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def numeric_assortativity_coefficient(G, attribute, nodes=None): """Compute assortativity for numerical node attributes. @@ -251,7 +251,7 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None): if nodes is None: nodes = G.nodes vals = {G.nodes[n][attribute] for n in nodes} - mapping = {d: i for i, d, in enumerate(vals)} + mapping = {d: i for i, d in enumerate(vals)} M = attribute_mixing_matrix(G, attribute, nodes, mapping) return _numeric_ac(M, mapping) @@ -280,7 +280,7 @@ def attribute_ac(M): s = (M @ M).sum() t = M.trace() r = (t - s) / (1 - s) - return r + return float(r) def _numeric_ac(M, mapping): @@ -299,4 +299,4 @@ def _numeric_ac(M, mapping): varb = (b[idx] * y**2).sum() - ((b[idx] * y).sum()) ** 2 xy = np.outer(x, y) ab = np.outer(a[idx], b[idx]) - return (xy * (M - ab)).sum() / np.sqrt(vara * varb) + return float((xy * (M - ab)).sum() / np.sqrt(vara * varb)) diff --git a/networkx/algorithms/assortativity/mixing.py b/networkx/algorithms/assortativity/mixing.py index 66b98797e69..7958661e3f4 100644 --- a/networkx/algorithms/assortativity/mixing.py +++ b/networkx/algorithms/assortativity/mixing.py @@ -14,7 +14,7 @@ ] -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): """Returns dictionary representation of 
mixing matrix for attribute. @@ -53,7 +53,7 @@ def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): return mixing_dict(xy_iter, normalized=normalized) -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True): """Returns mixing matrix for attribute. @@ -98,12 +98,12 @@ def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=T Examples -------- >>> G = nx.path_graph(3) - >>> gender = {0: 'male', 1: 'female', 2: 'female'} - >>> nx.set_node_attributes(G, gender, 'gender') - >>> mapping = {'male': 0, 'female': 1} - >>> mix_mat = nx.attribute_mixing_matrix(G, 'gender', mapping=mapping) + >>> gender = {0: "male", 1: "female", 2: "female"} + >>> nx.set_node_attributes(G, gender, "gender") + >>> mapping = {"male": 0, "female": 1} + >>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping) >>> # mixing from male nodes to female nodes - >>> mix_mat[mapping['male'], mapping['female']] + >>> mix_mat[mapping["male"], mapping["female"]] 0.25 """ d = attribute_mixing_dict(G, attribute, nodes) @@ -113,7 +113,7 @@ def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=T return a -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False): """Returns dictionary representation of mixing matrix for degree. 
@@ -145,7 +145,7 @@ def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=F return mixing_dict(xy_iter, normalized=normalized) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def degree_mixing_matrix( G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None ): @@ -201,7 +201,7 @@ def degree_mixing_matrix( have that degree, use `mapping` as follows, >>> max_degree = max(deg for n, deg in G.degree) - >>> mapping = {x: x for x in range(max_degree + 1)} # identity mapping + >>> mapping = {x: x for x in range(max_degree + 1)} # identity mapping >>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping) >>> mix_mat[3, 1] # mixing from node degree 3 to node degree 1 0.5 diff --git a/networkx/algorithms/assortativity/neighbor_degree.py b/networkx/algorithms/assortativity/neighbor_degree.py index a8980da766f..6488d041a8b 100644 --- a/networkx/algorithms/assortativity/neighbor_degree.py +++ b/networkx/algorithms/assortativity/neighbor_degree.py @@ -3,7 +3,7 @@ __all__ = ["average_neighbor_degree"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None): r"""Returns the average degree of the neighborhood of each node. diff --git a/networkx/algorithms/assortativity/pairs.py b/networkx/algorithms/assortativity/pairs.py index a3580d40324..5a1d6f8e1df 100644 --- a/networkx/algorithms/assortativity/pairs.py +++ b/networkx/algorithms/assortativity/pairs.py @@ -4,7 +4,7 @@ __all__ = ["node_attribute_xy", "node_degree_xy"] -@nx._dispatch(node_attrs="attribute") +@nx._dispatchable(node_attrs="attribute") def node_attribute_xy(G, attribute, nodes=None): """Returns iterator of node-attribute pairs for all edges in G. 
@@ -59,7 +59,7 @@ def node_attribute_xy(G, attribute, nodes=None): yield (uattr, vattr) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def node_degree_xy(G, x="out", y="in", weight=None, nodes=None): """Generate node degree-degree pairs for edges in G. diff --git a/networkx/algorithms/asteroidal.py b/networkx/algorithms/asteroidal.py index 65355fe6253..41e91390dff 100644 --- a/networkx/algorithms/asteroidal.py +++ b/networkx/algorithms/asteroidal.py @@ -18,7 +18,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def find_asteroidal_triple(G): r"""Find an asteroidal triple in the given graph. @@ -91,7 +91,7 @@ def find_asteroidal_triple(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_at_free(G): """Check if a graph is AT-free. @@ -125,7 +125,7 @@ def is_at_free(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def create_component_structure(G): r"""Create component structure for G. diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py index 8b9120e27aa..d0a63a10fd1 100644 --- a/networkx/algorithms/bipartite/basic.py +++ b/networkx/algorithms/bipartite/basic.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def color(G): """Returns a two-coloring of the graph. @@ -83,7 +83,7 @@ def neighbors(v): return color -@nx._dispatch +@nx._dispatchable def is_bipartite(G): """Returns True if graph G is bipartite, False if not. @@ -109,7 +109,7 @@ def is_bipartite(G): return False -@nx._dispatch +@nx._dispatchable def is_bipartite_node_set(G, nodes): """Returns True if nodes and G/nodes are a bipartition of G. @@ -154,7 +154,7 @@ def is_bipartite_node_set(G, nodes): return True -@nx._dispatch +@nx._dispatchable def sets(G, top_nodes=None): """Returns bipartite node sets of graph G. 
@@ -221,7 +221,7 @@ def sets(G, top_nodes=None): return (X, Y) -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B") def density(B, nodes): """Returns density of bipartite graph B. @@ -274,7 +274,7 @@ def density(B, nodes): return d -@nx._dispatch(graphs="B", edge_attrs="weight") +@nx._dispatchable(graphs="B", edge_attrs="weight") def degrees(B, nodes, weight=None): """Returns the degrees of the two node sets in the bipartite graph B. diff --git a/networkx/algorithms/bipartite/centrality.py b/networkx/algorithms/bipartite/centrality.py index a904da3528f..42d7270ee7d 100644 --- a/networkx/algorithms/bipartite/centrality.py +++ b/networkx/algorithms/bipartite/centrality.py @@ -3,7 +3,7 @@ __all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"] -@nx._dispatch(name="bipartite_degree_centrality") +@nx._dispatchable(name="bipartite_degree_centrality") def degree_centrality(G, nodes): r"""Compute the degree centrality for nodes in a bipartite network. @@ -78,7 +78,7 @@ def degree_centrality(G, nodes): return centrality -@nx._dispatch(name="bipartite_betweenness_centrality") +@nx._dispatchable(name="bipartite_betweenness_centrality") def betweenness_centrality(G, nodes): r"""Compute betweenness centrality for nodes in a bipartite network. @@ -182,7 +182,7 @@ def betweenness_centrality(G, nodes): return betweenness -@nx._dispatch(name="bipartite_closeness_centrality") +@nx._dispatchable(name="bipartite_closeness_centrality") def closeness_centrality(G, nodes, normalized=True): r"""Compute the closeness centrality for nodes in a bipartite network. 
diff --git a/networkx/algorithms/bipartite/cluster.py b/networkx/algorithms/bipartite/cluster.py index f10d7efd117..d9611527759 100644 --- a/networkx/algorithms/bipartite/cluster.py +++ b/networkx/algorithms/bipartite/cluster.py @@ -29,7 +29,7 @@ def cc_min(nu, nv): modes = {"dot": cc_dot, "min": cc_min, "max": cc_max} -@nx._dispatch +@nx._dispatchable def latapy_clustering(G, nodes=None, mode="dot"): r"""Compute a bipartite clustering coefficient for nodes. @@ -134,7 +134,7 @@ def latapy_clustering(G, nodes=None, mode="dot"): clustering = latapy_clustering -@nx._dispatch(name="bipartite_average_clustering") +@nx._dispatchable(name="bipartite_average_clustering") def average_clustering(G, nodes=None, mode="dot"): r"""Compute the average bipartite clustering coefficient. @@ -211,7 +211,7 @@ def average_clustering(G, nodes=None, mode="dot"): return sum(ccs[v] for v in nodes) / len(nodes) -@nx._dispatch +@nx._dispatchable def robins_alexander_clustering(G): r"""Compute the bipartite clustering of G. diff --git a/networkx/algorithms/bipartite/covering.py b/networkx/algorithms/bipartite/covering.py index 8669b4b1681..720c63ac40c 100644 --- a/networkx/algorithms/bipartite/covering.py +++ b/networkx/algorithms/bipartite/covering.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(name="bipartite_min_edge_cover") +@nx._dispatchable(name="bipartite_min_edge_cover") def min_edge_cover(G, matching_algorithm=None): """Returns a set of edges which constitutes the minimum edge cover of the graph. 
diff --git a/networkx/algorithms/bipartite/edgelist.py b/networkx/algorithms/bipartite/edgelist.py index 90c449a3a14..70631ea0e09 100644 --- a/networkx/algorithms/bipartite/edgelist.py +++ b/networkx/algorithms/bipartite/edgelist.py @@ -146,7 +146,7 @@ def generate_edgelist(G, delimiter=" ", data=True): yield delimiter.join(map(str, edge)) -@nx._dispatch(name="bipartite_parse_edgelist", graphs=None) +@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True) def parse_edgelist( lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True ): @@ -268,7 +268,7 @@ def parse_edgelist( @open_file(0, mode="rb") -@nx._dispatch(name="bipartite_read_edgelist", graphs=None) +@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True) def read_edgelist( path, comments="#", diff --git a/networkx/algorithms/bipartite/extendability.py b/networkx/algorithms/bipartite/extendability.py index 10dd5473b5c..0764997ad00 100644 --- a/networkx/algorithms/bipartite/extendability.py +++ b/networkx/algorithms/bipartite/extendability.py @@ -10,6 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") +@nx._dispatchable def maximal_extendability(G): """Computes the extendability of a graph. 
@@ -97,7 +98,7 @@ def maximal_extendability(G): # For node-pairs between V & U, keep min of max number of node-disjoint paths # Variable $k$ stands for the extendability of graph G - k = float("Inf") + k = float("inf") for u in U: for v in V: num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v)) diff --git a/networkx/algorithms/bipartite/generators.py b/networkx/algorithms/bipartite/generators.py index 9c8bfc0ef29..de6f0797239 100644 --- a/networkx/algorithms/bipartite/generators.py +++ b/networkx/algorithms/bipartite/generators.py @@ -20,7 +20,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) @nodes_or_number([0, 1]) def complete_bipartite_graph(n1, n2, create_using=None): """Returns the complete bipartite graph `K_{n_1,n_2}`. @@ -67,7 +67,7 @@ def complete_bipartite_graph(n1, n2, create_using=None): @py_random_state(3) -@nx._dispatch(name="bipartite_configuration_model", graphs=None) +@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True) def configuration_model(aseq, bseq, create_using=None, seed=None): """Returns a random bipartite graph from two given degree sequences. @@ -138,7 +138,7 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): return G -@nx._dispatch(name="bipartite_havel_hakimi_graph", graphs=None) +@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True) def havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. @@ -213,7 +213,7 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. 
@@ -287,7 +287,7 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): return G -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): """Returns a bipartite graph from two given degree sequences using an alternating Havel-Hakimi style construction. @@ -366,7 +366,7 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def preferential_attachment_graph(aseq, p, create_using=None, seed=None): """Create a bipartite graph with a preferential attachment model from a given single degree sequence. @@ -438,7 +438,7 @@ def preferential_attachment_graph(aseq, p, create_using=None, seed=None): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_graph(n, m, p, seed=None, directed=False): """Returns a bipartite random graph. @@ -525,7 +525,7 @@ def random_graph(n, m, p, seed=None, directed=False): @py_random_state(3) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def gnmk_random_graph(n, m, k, seed=None, directed=False): """Returns a random bipartite graph G_{n,m,k}. diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py index 17d55614bcd..48149ab9e31 100644 --- a/networkx/algorithms/bipartite/matching.py +++ b/networkx/algorithms/bipartite/matching.py @@ -54,7 +54,7 @@ INFINITY = float("inf") -@nx._dispatch +@nx._dispatchable def hopcroft_karp_matching(G, top_nodes=None): """Returns the maximum cardinality matching of the bipartite graph `G`. @@ -181,7 +181,7 @@ def depth_first_search(v): return dict(itertools.chain(leftmatches.items(), rightmatches.items())) -@nx._dispatch +@nx._dispatchable def eppstein_matching(G, top_nodes=None): """Returns the maximum cardinality matching of the bipartite graph `G`. 
@@ -420,7 +420,7 @@ def _connected_by_alternating_paths(G, matching, targets): } -@nx._dispatch +@nx._dispatchable def to_vertex_cover(G, matching, top_nodes=None): """Returns the minimum vertex cover corresponding to the given maximum matching of the bipartite graph `G`. @@ -501,7 +501,7 @@ def to_vertex_cover(G, matching, top_nodes=None): maximum_matching = hopcroft_karp_matching -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def minimum_weight_full_matching(G, top_nodes=None, weight="weight"): r"""Returns a minimum weight full matching of the bipartite graph `G`. diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py index e5679677574..462ef8a1311 100644 --- a/networkx/algorithms/bipartite/matrix.py +++ b/networkx/algorithms/bipartite/matrix.py @@ -11,7 +11,7 @@ __all__ = ["biadjacency_matrix", "from_biadjacency_matrix"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def biadjacency_matrix( G, row_order, column_order=None, dtype=None, weight="weight", format="csr" ): @@ -110,7 +110,7 @@ def biadjacency_matrix( raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"): r"""Creates a new bipartite graph from a biadjacency matrix given as a SciPy sparse array. 
diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py index 57f960e13b3..1eb71fa528f 100644 --- a/networkx/algorithms/bipartite/projection.py +++ b/networkx/algorithms/bipartite/projection.py @@ -12,7 +12,9 @@ ] -@nx._dispatch(graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True) +@nx._dispatchable( + graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True +) def projected_graph(B, nodes, multigraph=False): r"""Returns the projection of B onto one of its node sets. @@ -117,7 +119,7 @@ def projected_graph(B, nodes, multigraph=False): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B", returns_graph=True) def weighted_projected_graph(B, nodes, ratio=False): r"""Returns a weighted projection of B onto one of its node sets. @@ -218,7 +220,7 @@ def weighted_projected_graph(B, nodes, ratio=False): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B", returns_graph=True) def collaboration_weighted_projected_graph(B, nodes): r"""Newman's weighted projection of B onto one of its node sets. @@ -263,7 +265,6 @@ def collaboration_weighted_projected_graph(B, nodes): [0, 2, 4, 5] >>> for edge in sorted(G.edges(data=True)): ... print(edge) - ... (0, 2, {'weight': 0.5}) (0, 5, {'weight': 0.5}) (2, 4, {'weight': 1.0}) @@ -313,7 +314,7 @@ def collaboration_weighted_projected_graph(B, nodes): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B") +@nx._dispatchable(graphs="B", returns_graph=True) def overlap_weighted_projected_graph(B, nodes, jaccard=True): r"""Overlap weighted projection of B onto one of its node sets. 
@@ -413,7 +414,7 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True): @not_implemented_for("multigraph") -@nx._dispatch(graphs="B", preserve_all_attrs=True) +@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True) def generic_weighted_projected_graph(B, nodes, weight_function=None): r"""Weighted projection of B with a user-specified weight function. @@ -451,22 +452,18 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): ... unbrs = set(G[u]) ... vnbrs = set(G[v]) ... return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) - ... >>> def my_weight(G, u, v, weight="weight"): ... w = 0 ... for nbr in set(G[u]) & set(G[v]): ... w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1) ... return w - ... >>> # A complete bipartite graph with 4 nodes and 4 edges >>> B = nx.complete_bipartite_graph(2, 2) >>> # Add some arbitrary weight to the edges >>> for i, (u, v) in enumerate(B.edges()): ... B.edges[u, v]["weight"] = i + 1 - ... >>> for edge in B.edges(data=True): ... print(edge) - ... (0, 2, {'weight': 1}) (0, 3, {'weight': 2}) (1, 2, {'weight': 3}) @@ -476,14 +473,10 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): >>> print(list(G.edges(data=True))) [(0, 1, {'weight': 2})] >>> # To specify a custom weight function use the weight_function parameter - >>> G = bipartite.generic_weighted_projected_graph( - ... B, [0, 1], weight_function=jaccard - ... ) + >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=jaccard) >>> print(list(G.edges(data=True))) [(0, 1, {'weight': 1.0})] - >>> G = bipartite.generic_weighted_projected_graph( - ... B, [0, 1], weight_function=my_weight - ... 
) + >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=my_weight) >>> print(list(G.edges(data=True))) [(0, 1, {'weight': 10})] diff --git a/networkx/algorithms/bipartite/redundancy.py b/networkx/algorithms/bipartite/redundancy.py index 04b3ae9ca75..7a44d212896 100644 --- a/networkx/algorithms/bipartite/redundancy.py +++ b/networkx/algorithms/bipartite/redundancy.py @@ -7,7 +7,7 @@ __all__ = ["node_redundancy"] -@nx._dispatch +@nx._dispatchable def node_redundancy(G, nodes=None): r"""Computes the node redundancy coefficients for the nodes in the bipartite graph `G`. diff --git a/networkx/algorithms/bipartite/spectral.py b/networkx/algorithms/bipartite/spectral.py index f4b414243ac..61a56dd2c0e 100644 --- a/networkx/algorithms/bipartite/spectral.py +++ b/networkx/algorithms/bipartite/spectral.py @@ -6,7 +6,7 @@ __all__ = ["spectral_bipartivity"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def spectral_bipartivity(G, nodes=None, weight="weight"): """Returns the spectral bipartivity. @@ -57,12 +57,12 @@ def spectral_bipartivity(G, nodes=None, weight="weight"): coshA = 0.5 * (expA + expmA) if nodes is None: # return single number for entire graph - return coshA.diagonal().sum() / expA.diagonal().sum() + return float(coshA.diagonal().sum() / expA.diagonal().sum()) else: # contribution for individual nodes index = dict(zip(nodelist, range(len(nodelist)))) sb = {} for n in nodes: i = index[n] - sb[n] = coshA[i, i] / expA[i, i] + sb[n] = coshA.item(i, i) / expA.item(i, i) return sb diff --git a/networkx/algorithms/bipartite/tests/test_edgelist.py b/networkx/algorithms/bipartite/tests/test_edgelist.py index b388465ef4b..74035b35e9c 100644 --- a/networkx/algorithms/bipartite/tests/test_edgelist.py +++ b/networkx/algorithms/bipartite/tests/test_edgelist.py @@ -2,8 +2,6 @@ Unit tests for bipartite edgelists. 
""" import io -import os -import tempfile import pytest @@ -101,51 +99,47 @@ def test_write_edgelist_4(self): fh.seek(0) assert fh.read() == b"1 2 2.0\n3 2 3.0\n" - def test_unicode(self): + def test_unicode(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) G.add_node("Radiohead", bipartite=1) - fd, fname = tempfile.mkstemp() + + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_latin1_issue(self): + def test_latin1_issue(self, tmp_path): G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) G.add_node("Radiohead", bipartite=1) - fd, fname = tempfile.mkstemp() - pytest.raises( - UnicodeEncodeError, bipartite.write_edgelist, G, fname, encoding="latin-1" - ) - os.close(fd) - os.unlink(fname) - def test_latin1(self): + fname = tmp_path / "edgelist.txt" + with pytest.raises(UnicodeEncodeError): + bipartite.write_edgelist(G, fname, encoding="latin-1") + + def test_latin1(self, tmp_path): G = nx.Graph() name1 = "Bj" + chr(246) + "rk" name2 = chr(220) + "ber" G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) G.add_node("Radiohead", bipartite=1) - fd, fname = tempfile.mkstemp() + + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname, encoding="latin-1") H = bipartite.read_edgelist(fname, encoding="latin-1") assert graphs_equal(G, H) - os.close(fd) - os.unlink(fname) - def test_edgelist_graph(self): + def test_edgelist_graph(self, tmp_path): G = self.G - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) H2 = bipartite.read_edgelist(fname) @@ -153,32 +147,26 @@ def 
test_edgelist_graph(self): G.remove_node("g") # isolated nodes are not written in edgelist assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_integers(self): + def test_edgelist_integers(self, tmp_path): G = nx.convert_node_labels_to_integers(self.G) - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname, nodetype=int) # isolated nodes are not written in edgelist G.remove_nodes_from(list(nx.isolates(G))) assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) - def test_edgelist_multigraph(self): + def test_edgelist_multigraph(self, tmp_path): G = self.MG - (fd, fname) = tempfile.mkstemp() + fname = tmp_path / "edgelist.txt" bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) assert H is not H2 # they should be different graphs assert nodes_equal(list(H), list(G)) assert edges_equal(list(H.edges()), list(G.edges())) - os.close(fd) - os.unlink(fname) def test_empty_digraph(self): with pytest.raises(nx.NetworkXNotImplemented): diff --git a/networkx/algorithms/bipartite/tests/test_extendability.py b/networkx/algorithms/bipartite/tests/test_extendability.py index d7ae34e4c1b..17b7124341b 100644 --- a/networkx/algorithms/bipartite/tests/test_extendability.py +++ b/networkx/algorithms/bipartite/tests/test_extendability.py @@ -29,6 +29,14 @@ def test_no_perfect_matching_raises(): nx.bipartite.maximal_extendability(G) +def test_residual_graph_not_strongly_connected_raises(): + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + with pytest.raises( + nx.NetworkXError, match="The residual graph of G is not strongly connected" + ): + nx.bipartite.maximal_extendability(G) + + def test_ladder_graph_is_1(): G 
= nx.ladder_graph(3) assert nx.bipartite.maximal_extendability(G) == 1 diff --git a/networkx/algorithms/bipartite/tests/test_matrix.py b/networkx/algorithms/bipartite/tests/test_matrix.py index 393b71e7ca2..53d83115118 100644 --- a/networkx/algorithms/bipartite/tests/test_matrix.py +++ b/networkx/algorithms/bipartite/tests/test_matrix.py @@ -39,6 +39,11 @@ def test_biadjacency_matrix_order(self): M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") assert M[1, 2] == 2 + def test_biadjacency_matrix_empty_graph(self): + G = nx.empty_graph(2) + M = nx.bipartite.biadjacency_matrix(G, [0]) + assert np.array_equal(M.toarray(), np.array([[0]])) + def test_null_graph(self): with pytest.raises(nx.NetworkXError): bipartite.biadjacency_matrix(nx.Graph(), []) diff --git a/networkx/algorithms/boundary.py b/networkx/algorithms/boundary.py index ea97cee6efb..fef9ba22369 100644 --- a/networkx/algorithms/boundary.py +++ b/networkx/algorithms/boundary.py @@ -15,7 +15,7 @@ __all__ = ["edge_boundary", "node_boundary"] -@nx._dispatch(edge_attrs={"data": "default"}, preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data") def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): """Returns the edge boundary of `nbunch1`. @@ -106,7 +106,7 @@ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None ) -@nx._dispatch +@nx._dispatchable def node_boundary(G, nbunch1, nbunch2=None): """Returns the node boundary of `nbunch1`. diff --git a/networkx/algorithms/bridges.py b/networkx/algorithms/bridges.py index 106120e2fde..e076a256cb8 100644 --- a/networkx/algorithms/bridges.py +++ b/networkx/algorithms/bridges.py @@ -8,7 +8,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def bridges(G, root=None): """Generate all bridges in a graph. 
@@ -81,7 +81,7 @@ def bridges(G, root=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def has_bridges(G, root=None): """Decide whether a graph has any bridges. @@ -142,7 +142,7 @@ def has_bridges(G, root=None): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def local_bridges(G, with_span=True, weight=None): """Iterate over local bridges of `G` optionally computing the span diff --git a/networkx/algorithms/broadcasting.py b/networkx/algorithms/broadcasting.py new file mode 100644 index 00000000000..094ac5e23b3 --- /dev/null +++ b/networkx/algorithms/broadcasting.py @@ -0,0 +1,153 @@ +"""Routines to calculate the broadcast time of certain graphs. + +Broadcasting is an information dissemination problem in which a node in a graph, +called the originator, must distribute a message to all other nodes by placing +a series of calls along the edges of the graph. Once informed, other nodes aid +the originator in distributing the message. + +The broadcasting must be completed as quickly as possible subject to the +following constraints: +- Each call requires one unit of time. +- A node can only participate in one call per unit of time. +- Each call only involves two adjacent nodes: a sender and a receiver. 
+""" + +import networkx as nx +from networkx import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = [ + "tree_broadcast_center", + "tree_broadcast_time", +] + + +def _get_max_broadcast_value(G, U, v, values): + adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True) + return max(values[u] + i for i, u in enumerate(adj, start=1)) + + +def _get_broadcast_centers(G, v, values, target): + adj = sorted(G.neighbors(v), key=values.get, reverse=True) + j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target) + return set([v] + adj[:j]) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def tree_broadcast_center(G): + """Return the Broadcast Center of the tree `G`. + + The broadcast center of a graph G denotes the set of nodes having + minimum broadcast time [1]_. This is a linear algorithm for determining + the broadcast center of a tree with ``N`` nodes, as a by-product it also + determines the broadcast time from the broadcast center. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + + Returns + ------- + BC : (int, set) tuple + minimum broadcast number of the tree, set of broadcast centers + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T, + Information dissemination in trees. SIAM J.Comput. 
10(4), 692–701 (1981) + """ + # Assert that the graph G is a tree + if not nx.is_tree(G): + NetworkXError("Input graph is not a tree") + # step 0 + if G.number_of_nodes() == 2: + return 1, set(G.nodes()) + if G.number_of_nodes() == 1: + return 0, set(G.nodes()) + + # step 1 + U = {node for node, deg in G.degree if deg == 1} + values = {n: 0 for n in U} + T = G.copy() + T.remove_nodes_from(U) + + # step 2 + W = {node for node, deg in T.degree if deg == 1} + values.update((w, G.degree[w] - 1) for w in W) + + # step 3 + while T.number_of_nodes() >= 2: + # step 4 + w = min(W, key=lambda n: values[n]) + v = next(T.neighbors(w)) + + # step 5 + U.add(w) + W.remove(w) + T.remove_node(w) + + # step 6 + if T.degree(v) == 1: + # update t(v) + values.update({v: _get_max_broadcast_value(G, U, v, values)}) + W.add(v) + + # step 7 + v = nx.utils.arbitrary_element(T) + b_T = _get_max_broadcast_value(G, U, v, values) + return b_T, _get_broadcast_centers(G, v, values, b_T) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def tree_broadcast_time(G, node=None): + """Return the Broadcast Time of the tree `G`. + + The minimum broadcast time of a node is defined as the minimum amount + of time required to complete broadcasting starting from the + originator. The broadcast time of a graph is the maximum over + all nodes of the minimum broadcast time from that node [1]_. + This function returns the minimum broadcast time of `node`. + If `node` is None the broadcast time for the graph is returned. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + node: int, optional + index of starting node. If `None`, the algorithm returns the broadcast + time of the tree. + + Returns + ------- + BT : int + Broadcast Time of a node in a tree + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Harutyunyan, H. A. and Li, Z. 
+ "A Simple Construction of Broadcast Graphs." + In Computing and Combinatorics. COCOON 2019 + (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019. + """ + b_T, b_C = tree_broadcast_center(G) + if node is not None: + return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C) + dist_from_center = dict.fromkeys(G, len(G)) + for u in b_C: + for v, dist in nx.shortest_path_length(G, u).items(): + if dist < dist_from_center[v]: + dist_from_center[v] = dist + return b_T + max(dist_from_center.values()) diff --git a/networkx/algorithms/centrality/betweenness.py b/networkx/algorithms/centrality/betweenness.py index b4b1f3963b0..4f44fb19ba0 100644 --- a/networkx/algorithms/centrality/betweenness.py +++ b/networkx/algorithms/centrality/betweenness.py @@ -12,7 +12,7 @@ @py_random_state(5) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def betweenness_centrality( G, k=None, normalized=True, weight=None, endpoints=False, seed=None ): @@ -154,7 +154,7 @@ def betweenness_centrality( @py_random_state(4) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None): r"""Compute betweenness centrality for edges. diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py index e6c1acdf4ff..7f9967e964c 100644 --- a/networkx/algorithms/centrality/betweenness_subset.py +++ b/networkx/algorithms/centrality/betweenness_subset.py @@ -16,7 +16,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None): r"""Compute betweenness centrality for a subset of nodes. 
@@ -114,7 +114,7 @@ def betweenness_centrality_subset(G, sources, targets, normalized=False, weight= return b -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_betweenness_centrality_subset( G, sources, targets, normalized=False, weight=None ): diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py index 6a95ac14ef8..1c1722d4ed4 100644 --- a/networkx/algorithms/centrality/closeness.py +++ b/networkx/algorithms/centrality/closeness.py @@ -10,7 +10,7 @@ __all__ = ["closeness_centrality", "incremental_closeness_centrality"] -@nx._dispatch(edge_attrs="distance") +@nx._dispatchable(edge_attrs="distance") def closeness_centrality(G, u=None, distance=None, wf_improved=True): r"""Compute closeness centrality for nodes. @@ -137,7 +137,7 @@ def closeness_centrality(G, u=None, distance=None, wf_improved=True): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable(mutates_input=True) def incremental_closeness_centrality( G, edge, prev_cc=None, insertion=True, wf_improved=True ): diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py index ea1b2c8f2f4..b79a4c801e8 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness.py +++ b/networkx/algorithms/centrality/current_flow_betweenness.py @@ -21,7 +21,7 @@ @not_implemented_for("directed") @py_random_state(7) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def approximate_current_flow_betweenness_centrality( G, normalized=True, @@ -134,7 +134,7 @@ def approximate_current_flow_betweenness_centrality( continue for nbr in H[v]: w = H[v][nbr].get(weight, 1.0) - betweenness[v] += w * np.abs(p[v] - p[nbr]) * cstar2k + betweenness[v] += float(w * np.abs(p[v] - p[nbr]) * cstar2k) if normalized: factor = 1.0 else: @@ -144,7 +144,7 @@ def approximate_current_flow_betweenness_centrality( @not_implemented_for("directed") 
-@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def current_flow_betweenness_centrality( G, normalized=True, weight=None, dtype=float, solver="full" ): @@ -220,28 +220,26 @@ def current_flow_betweenness_centrality( """ if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) - betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): - pos = dict(zip(row.argsort()[::-1], range(n))) - for i in range(n): - betweenness[s] += (i - pos[i]) * row[i] - betweenness[t] += (n - i - 1 - pos[i]) * row[i] + pos = dict(zip(row.argsort()[::-1], range(N))) + for i in range(N): + betweenness[s] += (i - pos[i]) * row.item(i) + betweenness[t] += (N - i - 1 - pos[i]) * row.item(i) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 - for v in H: - betweenness[v] = float((betweenness[v] - v) * 2.0 / nb) - return {ordering[k]: v for k, v in betweenness.items()} + return {ordering[n]: (b - n) * 2.0 / nb for n, b in betweenness.items()} @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_current_flow_betweenness_centrality( G, normalized=True, weight=None, dtype=float, solver="full" ): @@ -323,21 +321,21 @@ def edge_current_flow_betweenness_centrality( """ if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # 
make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) edges = (tuple(sorted((u, v))) for u, v in H.edges()) betweenness = dict.fromkeys(edges, 0.0) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): - pos = dict(zip(row.argsort()[::-1], range(1, n + 1))) - for i in range(n): - betweenness[e] += (i + 1 - pos[i]) * row[i] - betweenness[e] += (n - i - pos[i]) * row[i] + pos = dict(zip(row.argsort()[::-1], range(1, N + 1))) + for i in range(N): + betweenness[e] += (i + 1 - pos[i]) * row.item(i) + betweenness[e] += (N - i - pos[i]) * row.item(i) betweenness[e] /= nb - return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()} + return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/networkx/algorithms/centrality/current_flow_betweenness_subset.py index debfca27f55..c6790b218e9 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness_subset.py +++ b/networkx/algorithms/centrality/current_flow_betweenness_subset.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def current_flow_betweenness_centrality_subset( G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" ): @@ -96,31 +96,31 @@ def current_flow_betweenness_centrality_subset( if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really 
wanted to - mapping = dict(zip(ordering, range(n))) + mapping = dict(zip(ordering, range(N))) H = nx.relabel_nodes(G, mapping) - betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i = mapping[ss] for tt in targets: j = mapping[tt] - betweenness[s] += 0.5 * np.abs(row[i] - row[j]) - betweenness[t] += 0.5 * np.abs(row[i] - row[j]) + betweenness[s] += 0.5 * abs(row.item(i) - row.item(j)) + betweenness[t] += 0.5 * abs(row.item(i) - row.item(j)) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 - for v in H: - betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n) - return {ordering[k]: v for k, v in betweenness.items()} + for node in H: + betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N) + return {ordering[node]: value for node, value in betweenness.items()} @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_current_flow_betweenness_centrality_subset( G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" ): @@ -204,16 +204,16 @@ def edge_current_flow_betweenness_centrality_subset( if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - mapping = dict(zip(ordering, range(n))) + mapping = dict(zip(ordering, range(N))) H = nx.relabel_nodes(G, mapping) edges = (tuple(sorted((u, v))) for u, v in H.edges()) betweenness = dict.fromkeys(edges, 0.0) if normalized: - nb = (n - 1.0) * (n - 2.0) # normalization factor + nb = (N - 1.0) * (N - 2.0) # normalization factor else: nb = 2.0 for row, (e) in 
flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): @@ -221,6 +221,6 @@ def edge_current_flow_betweenness_centrality_subset( i = mapping[ss] for tt in targets: j = mapping[tt] - betweenness[e] += 0.5 * np.abs(row[i] - row[j]) + betweenness[e] += 0.5 * abs(row.item(i) - row.item(j)) betweenness[e] /= nb - return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()} + return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py index daefbae902b..92c892f7449 100644 --- a/networkx/algorithms/centrality/current_flow_closeness.py +++ b/networkx/algorithms/centrality/current_flow_closeness.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"): """Compute current-flow closeness centrality for nodes. 
@@ -74,24 +74,22 @@ def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"): "lu": SuperLUInverseLaplacian, "cg": CGInverseLaplacian, } - n = G.number_of_nodes() + N = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to - H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) - betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H - n = H.number_of_nodes() - L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc") + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H + N = H.number_of_nodes() + L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc") L = L.astype(dtype) C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver for v in H: col = C2.get_row(v) for w in H: - betweenness[v] += col[v] - 2 * col[w] - betweenness[w] += col[v] - for v in H: - betweenness[v] = 1 / (betweenness[v]) - return {ordering[k]: v for k, v in betweenness.items()} + betweenness[v] += col.item(v) - 2 * col.item(w) + betweenness[w] += col.item(v) + return {ordering[node]: 1 / value for node, value in betweenness.items()} information_centrality = current_flow_closeness_centrality diff --git a/networkx/algorithms/centrality/degree_alg.py b/networkx/algorithms/centrality/degree_alg.py index 2631730dbc0..ea53f41ea3e 100644 --- a/networkx/algorithms/centrality/degree_alg.py +++ b/networkx/algorithms/centrality/degree_alg.py @@ -5,7 +5,7 @@ __all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"] -@nx._dispatch +@nx._dispatchable def degree_centrality(G): """Compute the degree centrality for nodes. @@ -50,7 +50,7 @@ def degree_centrality(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def in_degree_centrality(G): """Compute the in-degree centrality for nodes. 
@@ -100,7 +100,7 @@ def in_degree_centrality(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def out_degree_centrality(G): """Compute the out-degree centrality for nodes. diff --git a/networkx/algorithms/centrality/dispersion.py b/networkx/algorithms/centrality/dispersion.py index a551c387d88..a3fa68583a9 100644 --- a/networkx/algorithms/centrality/dispersion.py +++ b/networkx/algorithms/centrality/dispersion.py @@ -5,7 +5,7 @@ __all__ = ["dispersion"] -@nx._dispatch +@nx._dispatchable def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): r"""Calculate dispersion between `u` and `v` in `G`. diff --git a/networkx/algorithms/centrality/eigenvector.py b/networkx/algorithms/centrality/eigenvector.py index 267e7b51027..ed57b2aeb32 100644 --- a/networkx/algorithms/centrality/eigenvector.py +++ b/networkx/algorithms/centrality/eigenvector.py @@ -8,7 +8,7 @@ @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None): r"""Compute the eigenvector centrality for the graph G. @@ -193,7 +193,7 @@ def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None raise nx.PowerIterationFailedConvergence(max_iter) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): r"""Compute the eigenvector centrality for the graph G. 
@@ -338,4 +338,4 @@ def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): ) largest = eigenvector.flatten().real norm = np.sign(largest.sum()) * sp.linalg.norm(largest) - return dict(zip(G, largest / norm)) + return dict(zip(G, (largest / norm).tolist())) diff --git a/networkx/algorithms/centrality/flow_matrix.py b/networkx/algorithms/centrality/flow_matrix.py index e9cd7e26016..3874f6b2ffe 100644 --- a/networkx/algorithms/centrality/flow_matrix.py +++ b/networkx/algorithms/centrality/flow_matrix.py @@ -3,7 +3,7 @@ import networkx as nx -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def flow_matrix_row(G, weight=None, dtype=float, solver="lu"): # Generate a row of the current-flow matrix import numpy as np diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py index 8207a71a5ae..5819c357d03 100644 --- a/networkx/algorithms/centrality/group.py +++ b/networkx/algorithms/centrality/group.py @@ -19,7 +19,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False): r"""Compute the group betweenness centrality for a group of nodes. 
@@ -236,7 +236,7 @@ def _group_preprocessing(G, set_v, weight): return PB, sigma, D -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def prominent_group( G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False ): @@ -498,15 +498,15 @@ def _heuristic(k, root, DF_tree, D, nodes, greedy): / root_node["sigma"][added_node][y] ) DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy) - DF_tree.nodes[node_p]["betweenness"][x][y] = ( + DF_tree.nodes[node_p]["betweenness"].loc[y, x] = ( root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy ) if y != added_node: - DF_tree.nodes[node_p]["betweenness"][x][y] -= ( + DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= ( root_node["betweenness"][x][added_node] * dxyv ) if x != added_node: - DF_tree.nodes[node_p]["betweenness"][x][y] -= ( + DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= ( root_node["betweenness"][added_node][y] * dvxy ) @@ -543,7 +543,7 @@ def _heuristic(k, root, DF_tree, D, nodes, greedy): return node_p, node_m, DF_tree -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def group_closeness_centrality(G, S, weight=None): r"""Compute the group closeness centrality for a group of nodes. @@ -640,7 +640,7 @@ def group_closeness_centrality(G, S, weight=None): return closeness -@nx._dispatch +@nx._dispatchable def group_degree_centrality(G, S): """Compute the group degree centrality for a group of nodes. @@ -692,7 +692,7 @@ def group_degree_centrality(G, S): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def group_in_degree_centrality(G, S): """Compute the group in-degree centrality for a group of nodes. @@ -739,7 +739,7 @@ def group_in_degree_centrality(G, S): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def group_out_degree_centrality(G, S): """Compute the group out-degree centrality for a group of nodes. 
diff --git a/networkx/algorithms/centrality/harmonic.py b/networkx/algorithms/centrality/harmonic.py index 86b5020f96c..9cd9f7f0839 100644 --- a/networkx/algorithms/centrality/harmonic.py +++ b/networkx/algorithms/centrality/harmonic.py @@ -6,7 +6,7 @@ __all__ = ["harmonic_centrality"] -@nx._dispatch(edge_attrs="distance") +@nx._dispatchable(edge_attrs="distance") def harmonic_centrality(G, nbunch=None, distance=None, sources=None): r"""Compute harmonic centrality for nodes. diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py index 543fd879b97..d85ffd2dcd2 100644 --- a/networkx/algorithms/centrality/katz.py +++ b/networkx/algorithms/centrality/katz.py @@ -8,7 +8,7 @@ @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def katz_centrality( G, alpha=0.1, @@ -194,7 +194,7 @@ def katz_centrality( @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): r"""Compute the Katz centrality for the graph G. 
@@ -325,6 +325,6 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): n = A.shape[0] centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze() - # Normalize: rely on truediv to cast to float + # Normalize: rely on truediv to cast to float, then tolist to make Python numbers norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1 - return dict(zip(nodelist, centrality / norm)) + return dict(zip(nodelist, (centrality / norm).tolist())) diff --git a/networkx/algorithms/centrality/laplacian.py b/networkx/algorithms/centrality/laplacian.py index e0a9a6d5172..66207ed2189 100644 --- a/networkx/algorithms/centrality/laplacian.py +++ b/networkx/algorithms/centrality/laplacian.py @@ -6,7 +6,7 @@ __all__ = ["laplacian_centrality"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def laplacian_centrality( G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95 ): @@ -50,8 +50,11 @@ def laplacian_centrality( walk_type : string or None, optional (default=None) Optional parameter `walk_type` used when calling :func:`directed_laplacian_matrix `. - If None, the transition matrix is selected depending on the properties - of the graph. Otherwise can be `random`, `lazy`, or `pagerank`. + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. 
alpha : real (default = 0.95) Optional parameter `alpha` used when calling @@ -141,6 +144,6 @@ def laplacian_centrality( if normalized: lapl_cent = lapl_cent / full_energy - laplace_centralities_dict[node] = lapl_cent + laplace_centralities_dict[node] = float(lapl_cent) return laplace_centralities_dict diff --git a/networkx/algorithms/centrality/load.py b/networkx/algorithms/centrality/load.py index 9a81cc43282..50bc6210b31 100644 --- a/networkx/algorithms/centrality/load.py +++ b/networkx/algorithms/centrality/load.py @@ -6,7 +6,7 @@ __all__ = ["load_centrality", "edge_load_centrality"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None): """Compute load centrality for nodes. @@ -136,7 +136,7 @@ def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None): load_centrality = newman_betweenness_centrality -@nx._dispatch +@nx._dispatchable def edge_load_centrality(G, cutoff=False): """Compute edge load. diff --git a/networkx/algorithms/centrality/percolation.py b/networkx/algorithms/centrality/percolation.py index cc5d5ce6d7d..0d4c87132b4 100644 --- a/networkx/algorithms/centrality/percolation.py +++ b/networkx/algorithms/centrality/percolation.py @@ -11,7 +11,7 @@ __all__ = ["percolation_centrality"] -@nx._dispatch(node_attrs="attribute", edge_attrs="weight") +@nx._dispatchable(node_attrs="attribute", edge_attrs="weight") def percolation_centrality(G, attribute="percolation", states=None, weight=None): r"""Compute the percolation centrality for nodes. 
diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py index 7b9eac564ac..63ecc216eae 100644 --- a/networkx/algorithms/centrality/reaching.py +++ b/networkx/algorithms/centrality/reaching.py @@ -31,7 +31,7 @@ def _average_weight(G, path, weight=None): return total_weight / path_length -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def global_reaching_centrality(G, weight=None, normalized=True): """Returns the global reaching centrality of a directed graph. @@ -112,14 +112,14 @@ def as_distance(u, v, d): # TODO This can be trivially parallelized. lrc = [ centrality(G, node, paths=paths, weight=weight, normalized=normalized) - for node, paths in shortest_paths.items() + for node, paths in shortest_paths ] max_lrc = max(lrc) return sum(max_lrc - c for c in lrc) / (len(G) - 1) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True): """Returns the local reaching centrality of a node in a directed graph. diff --git a/networkx/algorithms/centrality/second_order.py b/networkx/algorithms/centrality/second_order.py index 4bdb1f52141..35583cd63e5 100644 --- a/networkx/algorithms/centrality/second_order.py +++ b/networkx/algorithms/centrality/second_order.py @@ -39,7 +39,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def second_order_centrality(G, weight="weight"): """Compute the second order centrality for nodes of G. 
@@ -134,5 +134,8 @@ def _Qj(P, j): ) # eq 3 return dict( - zip(G.nodes, [np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1)) for i in range(n)]) + zip( + G.nodes, + (float(np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1))) for i in range(n)), + ) ) # eq 6 diff --git a/networkx/algorithms/centrality/subgraph_alg.py b/networkx/algorithms/centrality/subgraph_alg.py index c615b489201..29a284c547c 100644 --- a/networkx/algorithms/centrality/subgraph_alg.py +++ b/networkx/algorithms/centrality/subgraph_alg.py @@ -14,7 +14,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def subgraph_centrality_exp(G): r"""Returns the subgraph centrality for each node of G. @@ -98,7 +98,7 @@ def subgraph_centrality_exp(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def subgraph_centrality(G): r"""Returns subgraph centrality for each node in G. @@ -189,7 +189,7 @@ def subgraph_centrality(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def communicability_betweenness_centrality(G): r"""Returns subgraph communicability for all pairs of nodes in G. @@ -278,7 +278,7 @@ def communicability_betweenness_centrality(G): B[i, :] = 0 B[:, i] = 0 B -= np.diag(np.diag(B)) - cbc[v] = B.sum() + cbc[v] = float(B.sum()) # put row and col back A[i, :] = row A[:, i] = col @@ -286,12 +286,11 @@ def communicability_betweenness_centrality(G): order = len(cbc) if order > 2: scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0)) - for v in cbc: - cbc[v] *= scale + cbc = {node: value * scale for node, value in cbc.items()} return cbc -@nx._dispatch +@nx._dispatchable def estrada_index(G): r"""Returns the Estrada index of a the graph G. 
diff --git a/networkx/algorithms/centrality/trophic.py b/networkx/algorithms/centrality/trophic.py index cfc7ea4f206..6d1ba960ba9 100644 --- a/networkx/algorithms/centrality/trophic.py +++ b/networkx/algorithms/centrality/trophic.py @@ -6,7 +6,7 @@ @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def trophic_levels(G, weight="weight"): r"""Compute the trophic levels of nodes. @@ -76,13 +76,13 @@ def trophic_levels(G, weight="weight"): # all other nodes have levels as calculated nonzero_node_ids = (node_id for node_id, degree in G.in_degree if degree != 0) for i, node_id in enumerate(nonzero_node_ids): - levels[node_id] = y[i] + levels[node_id] = y.item(i) return levels @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def trophic_differences(G, weight="weight"): r"""Compute the trophic differences of the edges of a directed graph. @@ -117,7 +117,7 @@ def trophic_differences(G, weight="weight"): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def trophic_incoherence_parameter(G, weight="weight", cannibalism=False): r"""Compute the trophic incoherence parameter of a graph. 
@@ -159,4 +159,4 @@ def trophic_incoherence_parameter(G, weight="weight", cannibalism=False): # Avoid copy otherwise G_2 = G diffs = trophic_differences(G_2, weight=weight) - return np.std(list(diffs.values())) + return float(np.std(list(diffs.values()))) diff --git a/networkx/algorithms/centrality/voterank_alg.py b/networkx/algorithms/centrality/voterank_alg.py index f9cf43c7813..063dfdd64b3 100644 --- a/networkx/algorithms/centrality/voterank_alg.py +++ b/networkx/algorithms/centrality/voterank_alg.py @@ -4,12 +4,12 @@ __all__ = ["voterank"] -@nx._dispatch +@nx._dispatchable def voterank(G, number_of_nodes=None): """Select a list of influential nodes in a graph using VoteRank algorithm VoteRank [1]_ computes a ranking of the nodes in a graph G based on a - voting scheme. With VoteRank, all nodes vote for each of its in-neighbours + voting scheme. With VoteRank, all nodes vote for each of its in-neighbors and the node with the highest votes is elected iteratively. The voting ability of out-neighbors of elected nodes is decreased in subsequent turns. diff --git a/networkx/algorithms/chains.py b/networkx/algorithms/chains.py index 289bc1c3dd4..ae342d9c866 100644 --- a/networkx/algorithms/chains.py +++ b/networkx/algorithms/chains.py @@ -8,7 +8,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def chain_decomposition(G, root=None): """Returns the chain decomposition of a graph. diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py index 2aa56798317..6bd3ccd2ea3 100644 --- a/networkx/algorithms/chordal.py +++ b/networkx/algorithms/chordal.py @@ -28,7 +28,7 @@ class NetworkXTreewidthBoundExceeded(nx.NetworkXException): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_chordal(G): """Checks whether G is a chordal graph. 
@@ -88,7 +88,7 @@ def is_chordal(G): return len(_find_chordality_breaker(G)) == 0 -@nx._dispatch +@nx._dispatchable def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): """Returns the set of induced nodes in the path from s to t. @@ -168,7 +168,7 @@ def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): return induced_nodes -@nx._dispatch +@nx._dispatchable def chordal_graph_cliques(G): """Returns all maximal cliques of a chordal graph. @@ -241,7 +241,7 @@ def chordal_graph_cliques(G): yield frozenset(clique_wanna_be) -@nx._dispatch +@nx._dispatchable def chordal_graph_treewidth(G): """Returns the treewidth of the chordal graph G. @@ -339,6 +339,8 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): It ignores any self loops. """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") unnumbered = set(G) if s is None: s = arbitrary_element(G) @@ -367,7 +369,7 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def complete_to_chordal_graph(G): """Return a copy of G completed to a chordal graph diff --git a/networkx/algorithms/clique.py b/networkx/algorithms/clique.py index 7fd7e81665a..5f959dd4658 100644 --- a/networkx/algorithms/clique.py +++ b/networkx/algorithms/clique.py @@ -26,7 +26,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def enumerate_all_cliques(G): """Returns all cliques in an undirected graph. @@ -98,7 +98,7 @@ def enumerate_all_cliques(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def find_cliques(G, nodes=None): """Returns all maximal cliques in an undirected graph. @@ -294,7 +294,7 @@ def find_cliques(G, nodes=None): # TODO Should this also be not implemented for directed graphs? -@nx._dispatch +@nx._dispatchable def find_cliques_recursive(G, nodes=None): """Returns all maximal cliques in a graph. 
@@ -412,7 +412,7 @@ def expand(subg, cand): return expand(subg_init, cand_init) -@nx._dispatch +@nx._dispatchable(returns_graph=True) def make_max_clique_graph(G, create_using=None): """Returns the maximal clique graph of the given graph. @@ -437,8 +437,9 @@ def make_max_clique_graph(G, create_using=None): This function behaves like the following code:: import networkx as nx + G = nx.make_clique_bipartite(G) - cliques = [v for v in G.nodes() if G.nodes[v]['bipartite'] == 0] + cliques = [v for v in G.nodes() if G.nodes[v]["bipartite"] == 0] G = nx.bipartite.projected_graph(G, cliques) G = nx.relabel_nodes(G, {-v: v - 1 for v in G}) @@ -459,7 +460,7 @@ def make_max_clique_graph(G, create_using=None): return B -@nx._dispatch +@nx._dispatchable(returns_graph=True) def make_clique_bipartite(G, fpos=None, create_using=None, name=None): """Returns the bipartite clique graph corresponding to `G`. @@ -508,7 +509,7 @@ def make_clique_bipartite(G, fpos=None, create_using=None, name=None): return B -@nx._dispatch +@nx._dispatchable def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False): """Returns the size of the largest maximal clique containing each given node. @@ -698,7 +699,7 @@ def find_max_weight_clique(self): @not_implemented_for("directed") -@nx._dispatch(node_attrs="weight") +@nx._dispatchable(node_attrs="weight") def max_weight_clique(G, weight="weight"): """Find a maximum weight clique in G. diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 0500852a7ce..6c91ad28135 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -17,7 +17,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def triangles(G, nodes=None): """Compute the number of triangles. 
@@ -67,20 +67,18 @@ def triangles(G, nodes=None): # dict used to avoid visiting the same nodes twice # this allows calculating/counting each triangle only once - later_neighbors = {} + later_nbrs = {} # iterate over the nodes in a graph for node, neighbors in G.adjacency(): - later_neighbors[node] = { - n for n in neighbors if n not in later_neighbors and n != node - } + later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node} # instantiate Counter for each node to include isolated nodes # add 1 to the count if a nodes neighbor's neighbor is also a neighbor triangle_counts = Counter(dict.fromkeys(G, 0)) - for node1, neighbors in later_neighbors.items(): + for node1, neighbors in later_nbrs.items(): for node2 in neighbors: - third_nodes = neighbors & later_neighbors[node2] + third_nodes = neighbors & later_nbrs[node2] m = len(third_nodes) triangle_counts[node1] += m triangle_counts[node2] += m @@ -145,10 +143,10 @@ def wt(u, v): # Only compute the edge weight once, before the inner inner # loop. 
wij = wt(i, j) - weighted_triangles += sum( - np.cbrt([(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs]) - ) - yield (i, len(inbrs), 2 * weighted_triangles) + weighted_triangles += np.cbrt( + [(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs] + ).sum() + yield (i, len(inbrs), 2 * float(weighted_triangles)) @not_implemented_for("multigraph") @@ -215,41 +213,41 @@ def wt(u, v): for j in ipreds: jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]) - ) - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]) - ) + directed_triangles += np.cbrt( + [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs] + ).sum() for j in isuccs: jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]) - ) - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]) - ) - directed_triangles += sum( - np.cbrt([(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]) - ) + directed_triangles += np.cbrt( + [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs] + 
).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs] + ).sum() dtotal = len(ipreds) + len(isuccs) dbidirectional = len(ipreds & isuccs) - yield (i, dtotal, dbidirectional, directed_triangles) + yield (i, dtotal, dbidirectional, float(directed_triangles)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_clustering(G, nodes=None, weight=None, count_zeros=True): r"""Compute the average clustering coefficient for the graph G. @@ -309,7 +307,7 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True): return sum(c) / len(c) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def clustering(G, nodes=None, weight=None): r"""Compute the clustering coefficient for nodes. @@ -424,7 +422,7 @@ def clustering(G, nodes=None, weight=None): return clusterc -@nx._dispatch +@nx._dispatchable def transitivity(G): r"""Compute graph transitivity, the fraction of all possible triangles present in G. @@ -447,6 +445,10 @@ def transitivity(G): out : float Transitivity + Notes + ----- + Self loops are ignored. + Examples -------- >>> G = nx.complete_graph(5) @@ -463,7 +465,7 @@ def transitivity(G): return 0 if triangles == 0 else triangles / contri -@nx._dispatch +@nx._dispatchable def square_clustering(G, nodes=None): r"""Compute the squares clustering coefficient for nodes. @@ -542,7 +544,7 @@ def square_clustering(G, nodes=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def generalized_degree(G, nodes=None): r"""Compute the generalized degree for nodes. @@ -583,6 +585,8 @@ def generalized_degree(G, nodes=None): Notes ----- + Self loops are ignored. + In a network of N nodes, the highest triangle multiplicity an edge can have is N-2. 
diff --git a/networkx/algorithms/coloring/equitable_coloring.py b/networkx/algorithms/coloring/equitable_coloring.py index af1fb5a7e7c..e464a074470 100644 --- a/networkx/algorithms/coloring/equitable_coloring.py +++ b/networkx/algorithms/coloring/equitable_coloring.py @@ -9,14 +9,14 @@ __all__ = ["equitable_color"] -@nx._dispatch +@nx._dispatchable def is_coloring(G, coloring): """Determine if the coloring is a valid coloring for the graph G.""" # Verify that the coloring is valid. return all(coloring[s] != coloring[d] for s, d in G.edges) -@nx._dispatch +@nx._dispatchable def is_equitable(G, coloring, num_colors=None): """Determines if the coloring is valid and equitable for the graph G.""" @@ -112,7 +112,7 @@ def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L): X = Y -@nx._dispatch +@nx._dispatchable(mutates_input=True) def pad_graph(G, num_colors): """Add a disconnected complete clique K_p such that the number of nodes in the graph becomes a multiple of `num_colors`. @@ -386,7 +386,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): break -@nx._dispatch +@nx._dispatchable def equitable_color(G, num_colors): """Provides an equitable coloring for nodes of `G`. diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py index bbb3ba03cef..61bc953673f 100644 --- a/networkx/algorithms/coloring/greedy_coloring.py +++ b/networkx/algorithms/coloring/greedy_coloring.py @@ -20,7 +20,6 @@ ] -@nx._dispatch def strategy_largest_first(G, colors): """Returns a list of the nodes of ``G`` in decreasing order by degree. @@ -32,7 +31,6 @@ def strategy_largest_first(G, colors): @py_random_state(2) -@nx._dispatch def strategy_random_sequential(G, colors, seed=None): """Returns a random permutation of the nodes of ``G`` as a list. 
@@ -47,7 +45,6 @@ def strategy_random_sequential(G, colors, seed=None): return nodes -@nx._dispatch def strategy_smallest_last(G, colors): """Returns a deque of the nodes of ``G``, "smallest" last. @@ -121,7 +118,6 @@ def _maximal_independent_set(G): return result -@nx._dispatch def strategy_independent_set(G, colors): """Uses a greedy independent set removal strategy to determine the colors. @@ -146,7 +142,6 @@ def strategy_independent_set(G, colors): yield from nodes -@nx._dispatch def strategy_connected_sequential_bfs(G, colors): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first traversal. @@ -160,7 +155,6 @@ def strategy_connected_sequential_bfs(G, colors): return strategy_connected_sequential(G, colors, "bfs") -@nx._dispatch def strategy_connected_sequential_dfs(G, colors): """Returns an iterable over nodes in ``G`` in the order given by a depth-first traversal. @@ -174,7 +168,6 @@ def strategy_connected_sequential_dfs(G, colors): return strategy_connected_sequential(G, colors, "dfs") -@nx._dispatch def strategy_connected_sequential(G, colors, traversal="bfs"): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first or depth-first traversal. @@ -207,7 +200,6 @@ def strategy_connected_sequential(G, colors, traversal="bfs"): yield end -@nx._dispatch def strategy_saturation_largest_first(G, colors): """Iterates over all the nodes of ``G`` in "saturation order" (also known as "DSATUR"). @@ -269,12 +261,12 @@ def strategy_saturation_largest_first(G, colors): } -@nx._dispatch +@nx._dispatchable def greedy_color(G, strategy="largest_first", interchange=False): """Color a graph using various strategies of greedy graph coloring. Attempts to color a graph using as few colors as possible, where no - neighbours of a node can have same color as the node itself. The + neighbors of a node can have same color as the node itself. The given strategy determines the order in which nodes are colored. 
The strategies are described in [1]_, and smallest-last is based on @@ -371,11 +363,11 @@ def greedy_color(G, strategy="largest_first", interchange=False): if interchange: return _greedy_coloring_with_interchange(G, nodes) for u in nodes: - # Set to keep track of colors of neighbours - neighbour_colors = {colors[v] for v in G[u] if v in colors} + # Set to keep track of colors of neighbors + nbr_colors = {colors[v] for v in G[u] if v in colors} # Find the first unused color. for color in itertools.count(): - if color not in neighbour_colors: + if color not in nbr_colors: break # Assign the new color to the current node. colors[u] = color diff --git a/networkx/algorithms/coloring/tests/test_coloring.py b/networkx/algorithms/coloring/tests/test_coloring.py index a2a4e39589e..dd2842d548a 100644 --- a/networkx/algorithms/coloring/tests/test_coloring.py +++ b/networkx/algorithms/coloring/tests/test_coloring.py @@ -446,13 +446,13 @@ def color_remaining_nodes( ) for u in node_iterator: - # Set to keep track of colors of neighbours - neighbour_colors = { + # Set to keep track of colors of neighbors + nbr_colors = { aux_colored_nodes[v] for v in G[u] if v in aux_colored_nodes } # Find the first unused color. for color in itertools.count(): - if color not in neighbour_colors: + if color not in nbr_colors: break aux_colored_nodes[u] = color color_assignments.append((u, color)) diff --git a/networkx/algorithms/communicability_alg.py b/networkx/algorithms/communicability_alg.py index c9144a7b84f..07316dc3ae2 100644 --- a/networkx/algorithms/communicability_alg.py +++ b/networkx/algorithms/communicability_alg.py @@ -9,7 +9,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def communicability(G): r"""Returns communicability between all pairs of nodes in G. 
@@ -91,7 +91,7 @@ def communicability(G): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def communicability_exp(G): r"""Returns communicability between all pairs of nodes in G. diff --git a/networkx/algorithms/community/__init__.py b/networkx/algorithms/community/__init__.py index fa782201ddc..40549aff238 100644 --- a/networkx/algorithms/community/__init__.py +++ b/networkx/algorithms/community/__init__.py @@ -14,6 +14,7 @@ """ from networkx.algorithms.community.asyn_fluid import * from networkx.algorithms.community.centrality import * +from networkx.algorithms.community.divisive import * from networkx.algorithms.community.kclique import * from networkx.algorithms.community.kernighan_lin import * from networkx.algorithms.community.label_propagation import * diff --git a/networkx/algorithms/community/asyn_fluid.py b/networkx/algorithms/community/asyn_fluid.py index 1a0029ae7ff..fea72c1bfdb 100644 --- a/networkx/algorithms/community/asyn_fluid.py +++ b/networkx/algorithms/community/asyn_fluid.py @@ -10,9 +10,10 @@ __all__ = ["asyn_fluidc"] -@not_implemented_for("directed", "multigraph") +@not_implemented_for("directed") +@not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def asyn_fluidc(G, k, max_iter=100, seed=None): """Returns communities in `G` as detected by Fluid Communities algorithm. @@ -24,7 +25,7 @@ def asyn_fluidc(G, k, max_iter=100, seed=None): The algorithm proceeds as follows. First each of the initial k communities is initialized in a random vertex in the graph. Then the algorithm iterates over all vertices in a random order, updating the community of each vertex - based on its own community and the communities of its neighbours. This + based on its own community and the communities of its neighbors. This process is performed several times until convergence. 
At all times, each community has a total density of 1, which is equally distributed among the vertices it contains. If a vertex changes of @@ -102,7 +103,7 @@ def asyn_fluidc(G, k, max_iter=100, seed=None): com_counter.update({communities[vertex]: density[communities[vertex]]}) except KeyError: pass - # Gather neighbour vertex communities + # Gather neighbor vertex communities for v in G[vertex]: try: com_counter.update({communities[v]: density[communities[v]]}) diff --git a/networkx/algorithms/community/centrality.py b/networkx/algorithms/community/centrality.py index efdc98460e4..43281701d2b 100644 --- a/networkx/algorithms/community/centrality.py +++ b/networkx/algorithms/community/centrality.py @@ -5,7 +5,7 @@ __all__ = ["girvan_newman"] -@nx._dispatch(preserve_edge_attrs="most_valuable_edge") +@nx._dispatchable(preserve_edge_attrs="most_valuable_edge") def girvan_newman(G, most_valuable_edge=None): """Finds communities in a graph using the Girvan–Newman method. diff --git a/networkx/algorithms/community/community_utils.py b/networkx/algorithms/community/community_utils.py index 5e4727eec42..b57cd9881cb 100644 --- a/networkx/algorithms/community/community_utils.py +++ b/networkx/algorithms/community/community_utils.py @@ -4,7 +4,7 @@ __all__ = ["is_partition"] -@nx._dispatch +@nx._dispatchable def is_partition(G, communities): """Returns *True* if `communities` is a partition of the nodes of `G`. diff --git a/networkx/algorithms/community/divisive.py b/networkx/algorithms/community/divisive.py new file mode 100644 index 00000000000..1fc39594693 --- /dev/null +++ b/networkx/algorithms/community/divisive.py @@ -0,0 +1,196 @@ +import functools + +import networkx as nx + +__all__ = [ + "edge_betweenness_partition", + "edge_current_flow_betweenness_partition", +] + + +@nx._dispatchable(edge_attrs="weight") +def edge_betweenness_partition(G, number_of_sets, *, weight=None): + """Partition created by iteratively removing the highest edge betweenness edge. 
+ + This algorithm works by calculating the edge betweenness for all + edges and removing the edge with the highest value. It is then + determined whether the graph has been broken into at least + `number_of_sets` connected components. + If not the process is repeated. + + Parameters + ---------- + G : NetworkX Graph, DiGraph or MultiGraph + Graph to be partitioned + + number_of_sets : int + Number of sets in the desired partition of the graph + + weight : key, optional, default=None + The key to use if using weights for edge betweenness calculation + + Returns + ------- + C : list of sets + Partition of the nodes of G + + Raises + ------ + NetworkXError + If number_of_sets is <= 0 or if number_of_sets > len(G) + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> part = nx.community.edge_betweenness_partition(G, 2) + >>> {0, 1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 16, 17, 19, 21} in part + True + >>> {2, 8, 9, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part + True + + See Also + -------- + edge_current_flow_betweenness_partition + + Notes + ----- + This algorithm is fairly slow, as both the calculation of connected + components and edge betweenness relies on all pairs shortest + path algorithms. They could potentially be combined to cut down + on overall computation time. + + References + ---------- + .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports + Volume 486, Issue 3-5 p. 
75-174 + http://arxiv.org/abs/0906.0612 + """ + if number_of_sets <= 0: + raise nx.NetworkXError("number_of_sets must be >0") + if number_of_sets == 1: + return [set(G)] + if number_of_sets == len(G): + return [{n} for n in G] + if number_of_sets > len(G): + raise nx.NetworkXError("number_of_sets must be <= len(G)") + + H = G.copy() + partition = list(nx.connected_components(H)) + while len(partition) < number_of_sets: + ranking = nx.edge_betweenness_centrality(H, weight=weight) + edge = max(ranking, key=ranking.get) + H.remove_edge(*edge) + partition = list(nx.connected_components(H)) + return partition + + +@nx._dispatchable(edge_attrs="weight") +def edge_current_flow_betweenness_partition(G, number_of_sets, *, weight=None): + """Partition created by removing the highest edge current flow betweenness edge. + + This algorithm works by calculating the edge current flow + betweenness for all edges and removing the edge with the + highest value. It is then determined whether the graph has + been broken into at least `number_of_sets` connected + components. If not the process is repeated. 
+ + Parameters + ---------- + G : NetworkX Graph, DiGraph or MultiGraph + Graph to be partitioned + + number_of_sets : int + Number of sets in the desired partition of the graph + + weight : key, optional (default=None) + The edge attribute key to use as weights for + edge current flow betweenness calculations + + Returns + ------- + C : list of sets + Partition of G + + Raises + ------ + NetworkXError + If number_of_sets is <= 0 or number_of_sets > len(G) + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> part = nx.community.edge_current_flow_betweenness_partition(G, 2) + >>> {0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21} in part + True + >>> {8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part + True + + + See Also + -------- + edge_betweenness_partition + + Notes + ----- + This algorithm is extremely slow, as the recalculation of the edge + current flow betweenness is extremely slow. + + References + ---------- + .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports + Volume 486, Issue 3-5 p. 
75-174 + http://arxiv.org/abs/0906.0612 + """ + if number_of_sets <= 0: + raise nx.NetworkXError("number_of_sets must be >0") + elif number_of_sets == 1: + return [set(G)] + elif number_of_sets == len(G): + return [{n} for n in G] + elif number_of_sets > len(G): + raise nx.NetworkXError("number_of_sets must be <= len(G)") + + rank = functools.partial( + nx.edge_current_flow_betweenness_centrality, normalized=False, weight=weight + ) + + # current flow requires a connected network so we track the components explicitly + H = G.copy() + partition = list(nx.connected_components(H)) + if len(partition) > 1: + Hcc_subgraphs = [H.subgraph(cc).copy() for cc in partition] + else: + Hcc_subgraphs = [H] + + ranking = {} + for Hcc in Hcc_subgraphs: + ranking.update(rank(Hcc)) + + while len(partition) < number_of_sets: + edge = max(ranking, key=ranking.get) + for cc, Hcc in zip(partition, Hcc_subgraphs): + if edge[0] in cc: + Hcc.remove_edge(*edge) + del ranking[edge] + splitcc_list = list(nx.connected_components(Hcc)) + if len(splitcc_list) > 1: + # there are 2 connected components. split off smaller one + cc_new = min(splitcc_list, key=len) + Hcc_new = Hcc.subgraph(cc_new).copy() + # update edge rankings for Hcc_new + newranks = rank(Hcc_new) + for e, r in newranks.items(): + ranking[e if e in ranking else e[::-1]] = r + # append new cc and Hcc to their lists. 
+ partition.append(cc_new) + Hcc_subgraphs.append(Hcc_new) + + # leave existing cc and Hcc in their lists, but shrink them + Hcc.remove_nodes_from(cc_new) + cc.difference_update(cc_new) + # update edge rankings for Hcc whether it was split or not + newranks = rank(Hcc) + for e, r in newranks.items(): + ranking[e if e in ranking else e[::-1]] = r + break + return partition diff --git a/networkx/algorithms/community/kclique.py b/networkx/algorithms/community/kclique.py index 60433669cee..c7249104204 100644 --- a/networkx/algorithms/community/kclique.py +++ b/networkx/algorithms/community/kclique.py @@ -5,7 +5,7 @@ __all__ = ["k_clique_communities"] -@nx._dispatch +@nx._dispatchable def k_clique_communities(G, k, cliques=None): """Find k-clique communities in graph using the percolation method. diff --git a/networkx/algorithms/community/kernighan_lin.py b/networkx/algorithms/community/kernighan_lin.py index a18c7779b5b..f6397d82be6 100644 --- a/networkx/algorithms/community/kernighan_lin.py +++ b/networkx/algorithms/community/kernighan_lin.py @@ -42,7 +42,7 @@ def _update_costs(costs_x, x): @not_implemented_for("directed") @py_random_state(4) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None): """Partition a graph into two blocks using the Kernighan–Lin algorithm. diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py index c10938d627c..8690855766b 100644 --- a/networkx/algorithms/community/label_propagation.py +++ b/networkx/algorithms/community/label_propagation.py @@ -14,7 +14,7 @@ @py_random_state("seed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def fast_label_propagation_communities(G, *, weight=None, seed=None): """Returns communities in `G` as detected by fast label propagation. 
@@ -35,21 +35,21 @@ def fast_label_propagation_communities(G, *, weight=None, seed=None): Parameters ---------- G : Graph, DiGraph, MultiGraph, or MultiDiGraph - Any NetworkX graph. + Any NetworkX graph. weight : string, or None (default) - The edge attribute representing a non-negative weight of an edge. If None, - each edge is assumed to have weight one. The weight of an edge is used in - determining the frequency with which a label appears among the neighbors of - a node (edge with weight `w` is equivalent to `w` unweighted edges). + The edge attribute representing a non-negative weight of an edge. If None, + each edge is assumed to have weight one. The weight of an edge is used in + determining the frequency with which a label appears among the neighbors of + a node (edge with weight `w` is equivalent to `w` unweighted edges). seed : integer, random_state, or None (default) - Indicator of random number generation state. See :ref:`Randomness`. + Indicator of random number generation state. See :ref:`Randomness`. Returns ------- communities : iterable - Iterable of communities given as sets of nodes. + Iterable of communities given as sets of nodes. Notes ----- @@ -59,8 +59,8 @@ def fast_label_propagation_communities(G, *, weight=None, seed=None): References ---------- .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by - fast label propagation." Scientific Reports 13 (2023): 2701. - https://doi.org/10.1038/s41598-023-29610-z + fast label propagation." Scientific Reports 13 (2023): 2701. + https://doi.org/10.1038/s41598-023-29610-z """ # Queue of nodes to be processed. @@ -137,7 +137,7 @@ def _fast_label_count(G, comms, node, weight=None): @py_random_state(2) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def asyn_lpa_communities(G, weight=None, seed=None): """Returns communities in `G` as detected by asynchronous label propagation. 
@@ -233,7 +233,7 @@ def asyn_lpa_communities(G, weight=None, seed=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def label_propagation_communities(G): """Generates community sets determined by label propagation @@ -316,7 +316,7 @@ def _most_frequent_labels(node, labeling, G): # accordingly, hence the immediate if statement. return {labeling[node]} - # Compute the frequencies of all neighbours of node + # Compute the frequencies of all neighbors of node freqs = Counter(labeling[q] for q in G[node]) max_freq = max(freqs.values()) return {label for label, freq in freqs.items() if freq == max_freq} diff --git a/networkx/algorithms/community/louvain.py b/networkx/algorithms/community/louvain.py index 772f4d79d69..959c93a5104 100644 --- a/networkx/algorithms/community/louvain.py +++ b/networkx/algorithms/community/louvain.py @@ -1,6 +1,7 @@ """Function for detecting communities based on Louvain Community Detection Algorithm""" +import itertools from collections import defaultdict, deque import networkx as nx @@ -11,9 +12,9 @@ @py_random_state("seed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def louvain_communities( - G, weight="weight", resolution=1, threshold=0.0000001, seed=None + G, weight="weight", resolution=1, threshold=0.0000001, max_level=None, seed=None ): r"""Find the best partition of a graph using the Louvain Community Detection Algorithm. @@ -56,7 +57,7 @@ def louvain_communities( increased modularity. The above two phases are executed until no modularity gain is achieved (or is less than - the `threshold`). + the `threshold`, or until `max_levels` is reached). Be careful with self-loops in the input graph. These are treated as previously reduced communities -- as if the process had been started @@ -79,6 +80,10 @@ def louvain_communities( Modularity gain threshold for each level. 
If the gain of modularity between 2 levels of the algorithm is less than the given threshold then the algorithm stops and returns the resulting communities. + max_level : int or None, optional (default=None) + The maximum number of levels (steps of the algorithm) to compute. + Must be a positive integer or None. If None, then there is no max + level and the threshold parameter determines the stopping condition. seed : integer, random_state, or None (default) Indicator of random number generation state. See :ref:`Randomness`. @@ -115,13 +120,17 @@ def louvain_communities( louvain_partitions """ - d = louvain_partitions(G, weight, resolution, threshold, seed) - q = deque(d, maxlen=1) - return q.pop() + partitions = louvain_partitions(G, weight, resolution, threshold, seed) + if max_level is not None: + if max_level <= 0: + raise ValueError("max_level argument must be a positive integer or None") + partitions = itertools.islice(partitions, max_level) + final_partition = deque(partitions, maxlen=1) + return final_partition.pop() @py_random_state("seed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def louvain_partitions( G, weight="weight", resolution=1, threshold=0.0000001, seed=None ): @@ -240,7 +249,7 @@ def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None): out_degrees = dict(G.out_degree(weight="weight")) Stot_in = list(in_degrees.values()) Stot_out = list(out_degrees.values()) - # Calculate weights for both in and out neighbours without considering self-loops + # Calculate weights for both in and out neighbors without considering self-loops nbrs = {} for u in G: nbrs[u] = defaultdict(float) @@ -327,7 +336,7 @@ def _neighbor_weights(nbrs, node2com): Parameters ---------- nbrs : dictionary - Dictionary with nodes' neighbours as keys and their edge weight as value. + Dictionary with nodes' neighbors as keys and their edge weight as value. 
node2com : dictionary Dictionary with all graph's nodes as keys and their community index as value. diff --git a/networkx/algorithms/community/lukes.py b/networkx/algorithms/community/lukes.py index 600a4db63d6..389fb51ca63 100644 --- a/networkx/algorithms/community/lukes.py +++ b/networkx/algorithms/community/lukes.py @@ -25,7 +25,7 @@ def _split_n_from(n, min_size_of_first_part): yield p1, n - p1 -@nx._dispatch(node_attrs="node_weight", edge_attrs="edge_weight") +@nx._dispatchable(node_attrs="node_weight", edge_attrs="edge_weight") def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None): """Optimal partitioning of a weighted tree using the Lukes algorithm. diff --git a/networkx/algorithms/community/modularity_max.py b/networkx/algorithms/community/modularity_max.py index d76116f7b2b..f465e01c6b2 100644 --- a/networkx/algorithms/community/modularity_max.py +++ b/networkx/algorithms/community/modularity_max.py @@ -223,7 +223,7 @@ def _greedy_modularity_communities_generator(G, weight=None, resolution=1): yield communities.values() -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def greedy_modularity_communities( G, weight=None, @@ -356,7 +356,7 @@ def greedy_modularity_communities( @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def naive_greedy_modularity_communities(G, resolution=1, weight=None): r"""Find communities in G using greedy modularity maximization. 
diff --git a/networkx/algorithms/community/quality.py b/networkx/algorithms/community/quality.py index ab86b09113a..f09a6d454af 100644 --- a/networkx/algorithms/community/quality.py +++ b/networkx/algorithms/community/quality.py @@ -58,7 +58,7 @@ def _require_partition(G, partition): require_partition = argmap(_require_partition, (0, 1)) -@nx._dispatch +@nx._dispatchable def intra_community_edges(G, partition): """Returns the number of intra-community edges for a partition of `G`. @@ -76,7 +76,7 @@ def intra_community_edges(G, partition): return sum(G.subgraph(block).size() for block in partition) -@nx._dispatch +@nx._dispatchable def inter_community_edges(G, partition): """Returns the number of inter-community edges for a partition of `G`. according to the given @@ -108,7 +108,7 @@ def inter_community_edges(G, partition): return nx.quotient_graph(G, partition, create_using=MG).size() -@nx._dispatch +@nx._dispatchable def inter_community_non_edges(G, partition): """Returns the number of inter-community non-edges according to the given partition of the nodes of `G`. @@ -141,7 +141,7 @@ def inter_community_non_edges(G, partition): return inter_community_edges(nx.complement(G), partition) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def modularity(G, communities, weight="weight", resolution=1): r"""Returns the modularity of the given partition of the graph. @@ -256,7 +256,7 @@ def community_contribution(community): @require_partition -@nx._dispatch +@nx._dispatchable def partition_quality(G, partition): """Returns the coverage and performance of a partition of G. 
diff --git a/networkx/algorithms/community/tests/test_asyn_fluid.py b/networkx/algorithms/community/tests/test_asyn_fluid.py index cd108dda369..6c023be773d 100644 --- a/networkx/algorithms/community/tests/test_asyn_fluid.py +++ b/networkx/algorithms/community/tests/test_asyn_fluid.py @@ -5,6 +5,13 @@ from networkx.algorithms.community import asyn_fluidc +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.community.asyn_fluidc(G, 1) + + def test_exceptions(): test = Graph() test.add_node("a") diff --git a/networkx/algorithms/community/tests/test_divisive.py b/networkx/algorithms/community/tests/test_divisive.py new file mode 100644 index 00000000000..874e8c1677b --- /dev/null +++ b/networkx/algorithms/community/tests/test_divisive.py @@ -0,0 +1,106 @@ +import pytest + +import networkx as nx + + +def test_edge_betweenness_partition(): + G = nx.barbell_graph(3, 0) + C = nx.community.edge_betweenness_partition(G, 2) + answer = [{0, 1, 2}, {3, 4, 5}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + G = nx.barbell_graph(3, 1) + C = nx.community.edge_betweenness_partition(G, 3) + answer = [{0, 1, 2}, {4, 5, 6}, {3}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_betweenness_partition(G, 7) + answer = [{n} for n in G] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_betweenness_partition(G, 1) + assert C == [set(G)] + + C = nx.community.edge_betweenness_partition(G, 1, weight="weight") + assert C == [set(G)] + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, 0) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, -1) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, 10) + + +def 
test_edge_current_flow_betweenness_partition(): + pytest.importorskip("numpy") + + G = nx.barbell_graph(3, 0) + C = nx.community.edge_current_flow_betweenness_partition(G, 2) + answer = [{0, 1, 2}, {3, 4, 5}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + G = nx.barbell_graph(3, 1) + C = nx.community.edge_current_flow_betweenness_partition(G, 2) + answers = [[{0, 1, 2, 3}, {4, 5, 6}], [{0, 1, 2}, {3, 4, 5, 6}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) + + C = nx.community.edge_current_flow_betweenness_partition(G, 3) + answer = [{0, 1, 2}, {4, 5, 6}, {3}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 4) + answers = [[{1, 2}, {4, 5, 6}, {3}, {0}], [{0, 1, 2}, {5, 6}, {3}, {4}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) + + C = nx.community.edge_current_flow_betweenness_partition(G, 5) + answer = [{1, 2}, {5, 6}, {3}, {0}, {4}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 6) + answers = [[{2}, {5, 6}, {3}, {0}, {4}, {1}], [{1, 2}, {6}, {3}, {0}, {4}, {5}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) + + C = nx.community.edge_current_flow_betweenness_partition(G, 7) + answer = [{n} for n in G] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 1) + assert C == [set(G)] + + C = nx.community.edge_current_flow_betweenness_partition(G, 1, weight="weight") + assert C == [set(G)] + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, 0) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, -1) + + with pytest.raises(nx.NetworkXError): + 
nx.community.edge_current_flow_betweenness_partition(G, 10) + + N = 10 + G = nx.empty_graph(N) + for i in range(2, N - 1): + C = nx.community.edge_current_flow_betweenness_partition(G, i) + assert C == [{n} for n in G] diff --git a/networkx/algorithms/community/tests/test_label_propagation.py b/networkx/algorithms/community/tests/test_label_propagation.py index 9a0b3d89b6e..4be72dbf272 100644 --- a/networkx/algorithms/community/tests/test_label_propagation.py +++ b/networkx/algorithms/community/tests/test_label_propagation.py @@ -163,7 +163,7 @@ class TestFastLabelPropagationCommunities: N = 100 # number of nodes K = 15 # average node degree - def _check_communities(self, G, truth, weight=None, seed=None): + def _check_communities(self, G, truth, weight=None, seed=42): C = nx.community.fast_label_propagation_communities(G, weight=weight, seed=seed) assert {frozenset(c) for c in C} == truth @@ -193,7 +193,7 @@ def test_bipartite_graph(self): self._check_communities(G, truth) def test_random_graph(self): - G = nx.gnm_random_graph(self.N, self.N * self.K // 2) + G = nx.gnm_random_graph(self.N, self.N * self.K // 2, seed=42) truth = {frozenset(G)} self._check_communities(G, truth) @@ -203,14 +203,13 @@ def test_disjoin_cliques(self): self._check_communities(G, truth) def test_ring_of_cliques(self): - G = nx.ring_of_cliques(self.N, self.K) - truth = { - frozenset([self.K * i + k for k in range(self.K)]) for i in range(self.N) - } + N, K = self.N, self.K + G = nx.ring_of_cliques(N, K) + truth = {frozenset([K * i + k for k in range(K)]) for i in range(N)} self._check_communities(G, truth) def test_larger_graph(self): - G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K) + G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K, seed=42) nx.community.fast_label_propagation_communities(G) def test_graph_type(self): @@ -238,3 +237,5 @@ def test_seed_argument(self): C = nx.community.fast_label_propagation_communities(G, seed=2023) truth = {frozenset(c) for c in C} 
self._check_communities(G, truth, seed=2023) + # smoke test that seed=None works + C = nx.community.fast_label_propagation_communities(G, seed=None) diff --git a/networkx/algorithms/community/tests/test_louvain.py b/networkx/algorithms/community/tests/test_louvain.py index 60b95d37783..b47fb74c57e 100644 --- a/networkx/algorithms/community/tests/test_louvain.py +++ b/networkx/algorithms/community/tests/test_louvain.py @@ -1,3 +1,5 @@ +import pytest + import networkx as nx @@ -242,3 +244,21 @@ def test_empty_graph(): G.add_nodes_from(range(5)) expected = [{0}, {1}, {2}, {3}, {4}] assert nx.community.louvain_communities(G) == expected + + +def test_max_level(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + parts_iter = nx.community.louvain_partitions(G, seed=42) + for max_level, expected in enumerate(parts_iter, 1): + partition = nx.community.louvain_communities(G, max_level=max_level, seed=42) + assert partition == expected + assert max_level > 1 # Ensure we are actually testing max_level + # max_level is an upper limit; it's okay if we stop before it's hit. + partition = nx.community.louvain_communities(G, max_level=max_level + 1, seed=42) + assert partition == expected + with pytest.raises( + ValueError, match="max_level argument must be a positive integer" + ): + nx.community.louvain_communities(G, max_level=0) diff --git a/networkx/algorithms/components/attracting.py b/networkx/algorithms/components/attracting.py index 1cc2e15615c..305c696353b 100644 --- a/networkx/algorithms/components/attracting.py +++ b/networkx/algorithms/components/attracting.py @@ -10,7 +10,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def attracting_components(G): """Generates the attracting components in `G`. 
@@ -54,7 +54,7 @@ def attracting_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def number_attracting_components(G): """Returns the number of attracting components in `G`. @@ -83,7 +83,7 @@ def number_attracting_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_attracting_component(G): """Returns True if `G` consists of a single attracting component. diff --git a/networkx/algorithms/components/biconnected.py b/networkx/algorithms/components/biconnected.py index 632b2d598d7..0d2f06975f8 100644 --- a/networkx/algorithms/components/biconnected.py +++ b/networkx/algorithms/components/biconnected.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_biconnected(G): """Returns True if the graph is biconnected, False otherwise. @@ -94,7 +94,7 @@ def is_biconnected(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def biconnected_component_edges(G): """Returns a generator of lists of edges, one list for each biconnected component of the input graph. @@ -167,7 +167,7 @@ def biconnected_component_edges(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def biconnected_components(G): """Returns a generator of sets of nodes, one set for each biconnected component of the graph @@ -260,7 +260,7 @@ def biconnected_components(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def articulation_points(G): """Yield the articulation points, or cut vertices, of a graph. diff --git a/networkx/algorithms/components/connected.py b/networkx/algorithms/components/connected.py index 8bc10980fe2..ad3e0155a7f 100644 --- a/networkx/algorithms/components/connected.py +++ b/networkx/algorithms/components/connected.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def connected_components(G): """Generate connected components. 
@@ -69,7 +69,7 @@ def connected_components(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def number_connected_components(G): """Returns the number of connected components. @@ -109,7 +109,7 @@ def number_connected_components(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_connected(G): """Returns True if the graph is connected, False otherwise. @@ -155,7 +155,7 @@ def is_connected(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def node_connected_component(G, n): """Returns the set of nodes in the component of graph containing node n. diff --git a/networkx/algorithms/components/semiconnected.py b/networkx/algorithms/components/semiconnected.py index 24a89f34d44..13cfa988a0b 100644 --- a/networkx/algorithms/components/semiconnected.py +++ b/networkx/algorithms/components/semiconnected.py @@ -6,7 +6,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_semiconnected(G): r"""Returns True if the graph is semiconnected, False otherwise. diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py index 5bf5b994766..febd1b9b541 100644 --- a/networkx/algorithms/components/strongly_connected.py +++ b/networkx/algorithms/components/strongly_connected.py @@ -13,7 +13,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def strongly_connected_components(G): """Generate nodes in strongly connected components of graph. @@ -39,10 +39,7 @@ def strongly_connected_components(G): >>> G = nx.cycle_graph(4, create_using=nx.DiGraph()) >>> nx.add_cycle(G, [10, 11, 12]) - >>> [ - ... len(c) - ... for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True) - ... 
] + >>> [len(c) for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True)] [4, 3] If you only want the largest component, it's more efficient to @@ -112,7 +109,7 @@ def strongly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def kosaraju_strongly_connected_components(G, source=None): """Generate nodes in strongly connected components of graph. @@ -174,7 +171,7 @@ def kosaraju_strongly_connected_components(G, source=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def strongly_connected_components_recursive(G): """Generate nodes in strongly connected components of graph. @@ -256,7 +253,7 @@ def strongly_connected_components_recursive(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def number_strongly_connected_components(G): """Returns number of strongly connected components in graph. @@ -277,7 +274,9 @@ def number_strongly_connected_components(G): Examples -------- - >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0), (2, 3), (4, 5), (3, 4), (5, 6), (6, 3), (6, 7)]) + >>> G = nx.DiGraph( + ... [(0, 1), (1, 2), (2, 0), (2, 3), (4, 5), (3, 4), (5, 6), (6, 3), (6, 7)] + ... ) >>> nx.number_strongly_connected_components(G) 3 @@ -295,7 +294,7 @@ def number_strongly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_strongly_connected(G): """Test directed graph for strong connectivity. @@ -347,7 +346,7 @@ def is_strongly_connected(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def condensation(G, scc=None): """Returns the condensation of G. @@ -391,7 +390,7 @@ def condensation(G, scc=None): >>> H = nx.condensation(G) >>> H.nodes.data() NodeDataView({0: {'members': {0, 1, 2, 3}}, 1: {'members': {4, 5, 6, 7}}}) - >>> H.graph['mapping'] + >>> H.graph["mapping"] {0: 0, 1: 0, 2: 0, 3: 0, 4: 1, 5: 1, 6: 1, 7: 1} Contracting a complete graph into one single SCC. 
diff --git a/networkx/algorithms/components/tests/test_connected.py b/networkx/algorithms/components/tests/test_connected.py index 4c9b8d28fd5..cd08640b387 100644 --- a/networkx/algorithms/components/tests/test_connected.py +++ b/networkx/algorithms/components/tests/test_connected.py @@ -61,7 +61,7 @@ def setup_class(cls): C = [] cls.gc.append((G, C)) - # This additionally tests the @nx._dispatch mechanism, treating + # This additionally tests the @nx._dispatchable mechanism, treating # nx.connected_components as if it were a re-implementation from another package @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert]) def test_connected_components(self, wrapper): diff --git a/networkx/algorithms/components/tests/test_strongly_connected.py b/networkx/algorithms/components/tests/test_strongly_connected.py index f1c773026a6..21d9e671898 100644 --- a/networkx/algorithms/components/tests/test_strongly_connected.py +++ b/networkx/algorithms/components/tests/test_strongly_connected.py @@ -183,12 +183,8 @@ def test_connected_raise(self): with pytest.raises(NetworkXNotImplemented): next(nx.kosaraju_strongly_connected_components(G)) with pytest.raises(NetworkXNotImplemented): - with pytest.deprecated_call(): - next(nx.strongly_connected_components_recursive(G)) + next(nx.strongly_connected_components_recursive(G)) pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G) - pytest.raises( - nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() - ) pytest.raises(NetworkXNotImplemented, nx.condensation, G) strong_cc_methods = ( diff --git a/networkx/algorithms/components/tests/test_weakly_connected.py b/networkx/algorithms/components/tests/test_weakly_connected.py index e313263668c..f014478930f 100644 --- a/networkx/algorithms/components/tests/test_weakly_connected.py +++ b/networkx/algorithms/components/tests/test_weakly_connected.py @@ -88,3 +88,9 @@ def test_connected_mutability(self): assert len(seen & component) == 0 
seen.update(component) component.clear() + + +def test_is_weakly_connected_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"): + nx.is_weakly_connected(G) diff --git a/networkx/algorithms/components/weakly_connected.py b/networkx/algorithms/components/weakly_connected.py index c8dc2350ef1..499c2ba742c 100644 --- a/networkx/algorithms/components/weakly_connected.py +++ b/networkx/algorithms/components/weakly_connected.py @@ -10,7 +10,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def weakly_connected_components(G): """Generate weakly connected components of G. @@ -36,10 +36,7 @@ def weakly_connected_components(G): >>> G = nx.path_graph(4, create_using=nx.DiGraph()) >>> nx.add_path(G, [10, 11, 12]) - >>> [ - ... len(c) - ... for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True) - ... ] + >>> [len(c) for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True)] [4, 3] If you only want the largest component, it's more efficient to @@ -66,7 +63,7 @@ def weakly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def number_weakly_connected_components(G): """Returns the number of weakly connected components in G. @@ -106,7 +103,7 @@ def number_weakly_connected_components(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_weakly_connected(G): """Test directed graph for weak connectivity. 
diff --git a/networkx/algorithms/connectivity/connectivity.py b/networkx/algorithms/connectivity/connectivity.py index cbb34152bba..ea96dbf2447 100644 --- a/networkx/algorithms/connectivity/connectivity.py +++ b/networkx/algorithms/connectivity/connectivity.py @@ -31,7 +31,7 @@ ] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"auxiliary", "residual"}, @@ -118,7 +118,6 @@ def local_node_connectivity( >>> # You also have to explicitly import the function for >>> # building the auxiliary digraph from the connectivity package >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity - ... >>> H = build_auxiliary_node_connectivity(G) >>> # And the function for building the residual network from the >>> # flow package @@ -131,7 +130,6 @@ def local_node_connectivity( >>> for u, v in itertools.combinations(G, 2): ... k = local_node_connectivity(G, u, v, auxiliary=H, residual=R) ... result[u][v] = k - ... >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2)) True @@ -214,7 +212,7 @@ def local_node_connectivity( return nx.maximum_flow_value(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) -@nx._dispatch +@nx._dispatchable def node_connectivity(G, s=None, t=None, flow_func=None): r"""Returns node connectivity for a graph or digraph G. @@ -355,7 +353,7 @@ def neighbors(v): return K -@nx._dispatch +@nx._dispatchable def average_node_connectivity(G, flow_func=None): r"""Returns the average connectivity of a graph G. @@ -424,7 +422,7 @@ def average_node_connectivity(G, flow_func=None): return num / den -@nx._dispatch +@nx._dispatchable def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): """Compute node connectivity between all pairs of nodes of G. 
@@ -492,7 +490,7 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): return all_pairs -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, @@ -658,7 +656,7 @@ def local_edge_connectivity( return nx.maximum_flow_value(H, s, t, **kwargs) -@nx._dispatch +@nx._dispatchable def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): r"""Returns the edge connectivity of the graph or digraph G. diff --git a/networkx/algorithms/connectivity/cuts.py b/networkx/algorithms/connectivity/cuts.py index d5883ba8f8e..e51c6843eb3 100644 --- a/networkx/algorithms/connectivity/cuts.py +++ b/networkx/algorithms/connectivity/cuts.py @@ -21,7 +21,7 @@ ] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={ "auxiliary": {"capacity": float("inf")}, @@ -161,7 +161,7 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): return cutset -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 4, "residual?": 5}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_node_attrs={"auxiliary": {"id": None}}, @@ -305,7 +305,7 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): return node_cut - {s, t} -@nx._dispatch +@nx._dispatchable def minimum_node_cut(G, s=None, t=None, flow_func=None): r"""Returns a set of nodes of minimum cardinality that disconnects G. @@ -451,7 +451,7 @@ def neighbors(v): return min_cut -@nx._dispatch +@nx._dispatchable def minimum_edge_cut(G, s=None, t=None, flow_func=None): r"""Returns a set of edges of minimum cardinality that disconnects G. 
diff --git a/networkx/algorithms/connectivity/disjoint_paths.py b/networkx/algorithms/connectivity/disjoint_paths.py index 3fe450517d5..b80aa9c7fb4 100644 --- a/networkx/algorithms/connectivity/disjoint_paths.py +++ b/networkx/algorithms/connectivity/disjoint_paths.py @@ -19,7 +19,7 @@ __all__ = ["edge_disjoint_paths", "node_disjoint_paths"] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 5, "residual?": 6}, preserve_edge_attrs={ "auxiliary": {"capacity": float("inf")}, @@ -234,7 +234,7 @@ def edge_disjoint_paths( paths_found += 1 -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "auxiliary?": 5, "residual?": 6}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_node_attrs={"auxiliary": {"id": None}}, diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py index c1215509e88..d095ed51917 100644 --- a/networkx/algorithms/connectivity/edge_augmentation.py +++ b/networkx/algorithms/connectivity/edge_augmentation.py @@ -24,7 +24,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_k_edge_connected(G, k): """Tests to see if a graph is k-edge-connected. @@ -75,7 +75,7 @@ def is_k_edge_connected(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_locally_k_edge_connected(G, s, t, k): """Tests to see if an edge in a graph is locally k-edge-connected. @@ -133,7 +133,7 @@ def is_locally_k_edge_connected(G, s, t, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): """Finds set of edges to k-edge-connect G. 
@@ -284,7 +284,7 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): raise -@nx._dispatch +@nx._dispatchable def partial_k_edge_augmentation(G, k, avail, weight=None): """Finds augmentation that k-edge-connects as much of the graph as possible. @@ -387,7 +387,7 @@ def _edges_between_disjoint(H, only1, only2): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def one_edge_augmentation(G, avail=None, weight=None, partial=False): """Finds minimum weight set of edges to connect G. @@ -442,7 +442,7 @@ def one_edge_augmentation(G, avail=None, weight=None, partial=False): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def bridge_augmentation(G, avail=None, weight=None): """Finds the a set of edges that bridge connects G. @@ -578,7 +578,7 @@ def _lightest_meta_edges(mapping, avail_uv, avail_w): yield MetaEdge((mu, mv), (u, v), w) -@nx._dispatch +@nx._dispatchable def unconstrained_one_edge_augmentation(G): """Finds the smallest set of edges to connect G. @@ -621,7 +621,7 @@ def unconstrained_one_edge_augmentation(G): yield (inverse[mu][0], inverse[mv][0]) -@nx._dispatch +@nx._dispatchable def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): """Finds the minimum weight set of edges to connect G if one exists. @@ -690,7 +690,7 @@ def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): yield edge -@nx._dispatch +@nx._dispatchable def unconstrained_bridge_augmentation(G): """Finds an optimal 2-edge-augmentation of G using the fewest edges. @@ -845,7 +845,7 @@ def unconstrained_bridge_augmentation(G): break -@nx._dispatch +@nx._dispatchable def weighted_bridge_augmentation(G, avail, weight=None): """Finds an approximate min-weight 2-edge-augmentation of G. 
@@ -1040,7 +1040,7 @@ def _minimum_rooted_branching(D, root): return A -@nx._dispatch +@nx._dispatchable(returns_graph=True) def collapse(G, grouped_nodes): """Collapses each group of nodes into a single node. @@ -1112,7 +1112,7 @@ def collapse(G, grouped_nodes): return C -@nx._dispatch +@nx._dispatchable def complement_edges(G): """Returns only the edges in the complement of G @@ -1158,7 +1158,7 @@ def _compat_shuffle(rng, input): @not_implemented_for("multigraph") @not_implemented_for("directed") @py_random_state(4) -@nx._dispatch +@nx._dispatchable def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): """Greedy algorithm for finding a k-edge-augmentation diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py index e602c33aaee..e071f4d3df8 100644 --- a/networkx/algorithms/connectivity/edge_kcomponents.py +++ b/networkx/algorithms/connectivity/edge_kcomponents.py @@ -23,7 +23,7 @@ @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def k_edge_components(G, k): """Generates nodes in each maximal k-edge-connected component in G. @@ -107,7 +107,7 @@ def k_edge_components(G, k): @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def k_edge_subgraphs(G, k): """Generates nodes in each maximal k-edge-connected subgraph in G. @@ -196,7 +196,7 @@ def _k_edge_subgraphs_nodes(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def bridge_components(G): """Finds all bridge-connected components G. @@ -503,19 +503,26 @@ def _high_degree_components(G, k): yield from nx.connected_components(H) -@nx._dispatch +@nx._dispatchable(returns_graph=True) def general_k_edge_subgraphs(G, k): - """General algorithm to find all maximal k-edge-connected subgraphs in G. + """General algorithm to find all maximal k-edge-connected subgraphs in `G`. 
- Returns - ------- - k_edge_subgraphs : a generator of nx.Graphs that are k-edge-subgraphs - Each k-edge-subgraph is a maximal set of nodes that defines a subgraph - of G that is k-edge-connected. + Parameters + ---------- + G : nx.Graph + Graph in which all maximal k-edge-connected subgraphs will be found. + + k : int + + Yields + ------ + k_edge_subgraphs : Graph instances that are k-edge-subgraphs + Each k-edge-subgraph contains a maximal set of nodes that defines a + subgraph of `G` that is k-edge-connected. Notes ----- - Implementation of the basic algorithm from _[1]. The basic idea is to find + Implementation of the basic algorithm from [1]_. The basic idea is to find a global minimum cut of the graph. If the cut value is at least k, then the graph is a k-edge-connected subgraph and can be added to the results. Otherwise, the cut is used to split the graph in two and the procedure is @@ -524,7 +531,7 @@ def general_k_edge_subgraphs(G, k): a single node or a subgraph of G that is k-edge-connected. This implementation contains optimizations for reducing the number of calls - to max-flow, but there are other optimizations in _[1] that could be + to max-flow, but there are other optimizations in [1]_ that could be implemented. References @@ -547,7 +554,7 @@ def general_k_edge_subgraphs(G, k): ... (14, 101, 24), ... ] >>> G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) - >>> sorted(map(len, k_edge_subgraphs(G, k=3))) + >>> sorted(len(k_sg) for k_sg in k_edge_subgraphs(G, k=3)) [1, 1, 1, 4, 4] """ if k < 1: diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py index 19a6e486b84..50d5c8f4190 100644 --- a/networkx/algorithms/connectivity/kcomponents.py +++ b/networkx/algorithms/connectivity/kcomponents.py @@ -17,7 +17,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def k_components(G, flow_func=None): r"""Returns the k-component structure of a graph G. 
diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py index bc04ed18589..53f8d3b8f6e 100644 --- a/networkx/algorithms/connectivity/kcutsets.py +++ b/networkx/algorithms/connectivity/kcutsets.py @@ -21,7 +21,7 @@ __all__ = ["all_node_cuts"] -@nx._dispatch +@nx._dispatchable def all_node_cuts(G, k=None, flow_func=None): r"""Returns all minimum k cutsets of an undirected graph G. @@ -93,10 +93,11 @@ def all_node_cuts(G, k=None, flow_func=None): # Address some corner cases first. # For complete Graphs + if nx.density(G) == 1: - for cut_set in combinations(G, len(G) - 1): - yield set(cut_set) + yield from () return + # Initialize data structures. # Keep track of the cuts already computed so we do not repeat them. seen = [] @@ -130,7 +131,7 @@ def all_node_cuts(G, k=None, flow_func=None): for x in X: # step 3: Compute local connectivity flow of x with all other # non adjacent nodes in G - non_adjacent = set(G) - X - set(G[x]) + non_adjacent = set(G) - {x} - set(G[x]) for v in non_adjacent: # step 4: compute maximum flow in an Even-Tarjan reduction H of G # and step 5: build the associated residual network R diff --git a/networkx/algorithms/connectivity/stoerwagner.py b/networkx/algorithms/connectivity/stoerwagner.py index dc95877e221..cd9d5acaf17 100644 --- a/networkx/algorithms/connectivity/stoerwagner.py +++ b/networkx/algorithms/connectivity/stoerwagner.py @@ -12,7 +12,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def stoer_wagner(G, weight="weight", heap=BinaryHeap): r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm. 
diff --git a/networkx/algorithms/connectivity/tests/test_kcutsets.py b/networkx/algorithms/connectivity/tests/test_kcutsets.py index d5b3b089e23..4b4b5494a87 100644 --- a/networkx/algorithms/connectivity/tests/test_kcutsets.py +++ b/networkx/algorithms/connectivity/tests/test_kcutsets.py @@ -259,8 +259,15 @@ def test_cycle_graph(): def test_complete_graph(): G = nx.complete_graph(5) - solution = [{0, 1, 2, 3}, {0, 1, 2, 4}, {0, 1, 3, 4}, {0, 2, 3, 4}, {1, 2, 3, 4}] - cuts = list(nx.all_node_cuts(G)) - assert len(solution) == len(cuts) - for cut in cuts: - assert cut in solution + assert nx.node_connectivity(G) == 4 + assert list(nx.all_node_cuts(G)) == [] + + +def test_all_node_cuts_simple_case(): + G = nx.complete_graph(5) + G.remove_edges_from([(0, 1), (3, 4)]) + expected = [{0, 1, 2}, {2, 3, 4}] + actual = list(nx.all_node_cuts(G)) + assert len(actual) == len(expected) + for cut in actual: + assert cut in expected diff --git a/networkx/algorithms/connectivity/utils.py b/networkx/algorithms/connectivity/utils.py index bf6860a208f..a4d822ae523 100644 --- a/networkx/algorithms/connectivity/utils.py +++ b/networkx/algorithms/connectivity/utils.py @@ -6,7 +6,7 @@ __all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"] -@nx._dispatch +@nx._dispatchable(returns_graph=True) def build_auxiliary_node_connectivity(G): r"""Creates a directed graph D from an undirected graph G to compute flow based node connectivity. 
@@ -59,7 +59,7 @@ def build_auxiliary_node_connectivity(G): return H -@nx._dispatch +@nx._dispatchable(returns_graph=True) def build_auxiliary_edge_connectivity(G): """Auxiliary digraph for computing flow based edge connectivity diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 2a766ead754..511e6d9d0ba 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -42,7 +42,7 @@ @nx.utils.not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def core_number(G): """Returns the core number for each node. @@ -148,7 +148,7 @@ def _core_subgraph(G, k_filter, k=None, core=None): return G.subgraph(nodes).copy() -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_core(G, k=None, core_number=None): """Returns the k-core of G. @@ -224,7 +224,7 @@ def k_filter(v, k, c): return _core_subgraph(G, k_filter, k, core_number) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_shell(G, k=None, core_number=None): """Returns the k-shell of G. @@ -306,7 +306,7 @@ def k_filter(v, k, c): return _core_subgraph(G, k_filter, k, core_number) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_crust(G, k=None, core_number=None): """Returns the k-crust of G. @@ -389,12 +389,12 @@ def k_crust(G, k=None, core_number=None): return G.subgraph(nodes).copy() -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_corona(G, k, core_number=None): """Returns the k-corona of G. The k-corona is the subgraph of nodes in the k-core which have - exactly k neighbours in the k-core. + exactly k neighbors in the k-core. .. deprecated:: 3.3 `k_corona` will not accept `MultiGraph` objects in version 3.5. 
@@ -468,7 +468,7 @@ def func(v, k, c): @nx.utils.not_implemented_for("directed") @nx.utils.not_implemented_for("multigraph") -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def k_truss(G, k): """Returns the k-truss of `G`. @@ -550,7 +550,7 @@ def k_truss(G, k): @nx.utils.not_implemented_for("multigraph") @nx.utils.not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def onion_layers(G): """Returns the layer of each vertex in an onion decomposition of the graph. diff --git a/networkx/algorithms/covering.py b/networkx/algorithms/covering.py index b31521f4297..bed482bc4b3 100644 --- a/networkx/algorithms/covering.py +++ b/networkx/algorithms/covering.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def min_edge_cover(G, matching_algorithm=None): """Returns the min cardinality edge cover of the graph as a set of edges. @@ -106,7 +106,7 @@ def min_edge_cover(G, matching_algorithm=None): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_edge_cover(G, cover): """Decides whether a set of edges is a valid edge cover of the graph. diff --git a/networkx/algorithms/cuts.py b/networkx/algorithms/cuts.py index ce455eb47c8..d7d54e7bb0d 100644 --- a/networkx/algorithms/cuts.py +++ b/networkx/algorithms/cuts.py @@ -21,7 +21,7 @@ # TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION! -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def cut_size(G, S, T=None, weight=None): """Returns the size of the cut between two sets of nodes. @@ -84,7 +84,7 @@ def cut_size(G, S, T=None, weight=None): return sum(weight for u, v, weight in edges) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def volume(G, S, weight=None): """Returns the volume of a set of nodes. 
@@ -127,7 +127,7 @@ def volume(G, S, weight=None): return sum(d for v, d in degree(S, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def normalized_cut_size(G, S, T=None, weight=None): """Returns the normalized size of the cut between two sets of nodes. @@ -180,7 +180,7 @@ def normalized_cut_size(G, S, T=None, weight=None): return num_cut_edges * ((1 / volume_S) + (1 / volume_T)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def conductance(G, S, T=None, weight=None): """Returns the conductance of two sets of nodes. @@ -228,7 +228,7 @@ def conductance(G, S, T=None, weight=None): return num_cut_edges / min(volume_S, volume_T) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def edge_expansion(G, S, T=None, weight=None): """Returns the edge expansion between two node sets. @@ -275,7 +275,7 @@ def edge_expansion(G, S, T=None, weight=None): return num_cut_edges / min(len(S), len(T)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def mixing_expansion(G, S, T=None, weight=None): """Returns the mixing expansion between two node sets. @@ -323,7 +323,7 @@ def mixing_expansion(G, S, T=None, weight=None): # TODO What is the generalization to two arguments, S and T? Does the # denominator become `min(len(S), len(T))`? -@nx._dispatch +@nx._dispatchable def node_expansion(G, S): """Returns the node expansion of the set `S`. @@ -363,7 +363,7 @@ def node_expansion(G, S): # TODO What is the generalization to two arguments, S and T? Does the # denominator become `min(len(S), len(T))`? -@nx._dispatch +@nx._dispatchable def boundary_expansion(G, S): """Returns the boundary expansion of the set `S`. 
diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py index 9149e9eb10d..576c8b81f41 100644 --- a/networkx/algorithms/cycles.py +++ b/networkx/algorithms/cycles.py @@ -24,7 +24,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def cycle_basis(G, root=None): """Returns a list of cycles which form a basis for cycles of G. @@ -66,6 +66,7 @@ def cycle_basis(G, root=None): See Also -------- simple_cycles + minimum_cycle_basis """ gnodes = dict.fromkeys(G) # set-like object that maintains node order cycles = [] @@ -101,7 +102,7 @@ def cycle_basis(G, root=None): return cycles -@nx._dispatch +@nx._dispatchable def simple_cycles(G, length_bound=None): """Find simple cycles (elementary circuits) of a graph. @@ -474,7 +475,7 @@ def _bounded_cycle_search(G, path, length_bound): B[w].add(v) -@nx._dispatch +@nx._dispatchable def chordless_cycles(G, length_bound=None): """Find simple chordless cycles of a graph. @@ -763,7 +764,7 @@ def _chordless_cycle_search(F, B, path, length_bound): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def recursive_simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. @@ -873,7 +874,7 @@ def circuit(thisnode, startnode, component): return result -@nx._dispatch +@nx._dispatchable def find_cycle(G, source=None, orientation=None): """Returns a cycle found via depth-first traversal. @@ -1035,7 +1036,7 @@ def tailhead(edge): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def minimum_cycle_basis(G, weight=None): """Returns a minimum weight cycle basis for G @@ -1165,7 +1166,7 @@ def _min_cycle(G, orth, weight): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def girth(G): """Returns the girth of the graph. @@ -1202,7 +1203,7 @@ def girth(G): References ---------- - .. 
[1] https://en.wikipedia.org/wiki/Girth_(graph_theory) + .. [1] `Wikipedia: Girth `_ """ girth = depth_limit = inf diff --git a/networkx/algorithms/d_separation.py b/networkx/algorithms/d_separation.py index 4322b095822..a688eca4081 100644 --- a/networkx/algorithms/d_separation.py +++ b/networkx/algorithms/d_separation.py @@ -11,88 +11,183 @@ algorithm presented in [2]_. Refer to [3]_, [4]_ for a couple of alternative algorithms. -Here, we provide a brief overview of d-separation and related concepts that -are relevant for understanding it: - -Blocking paths --------------- +The functional interface in NetworkX consists of three functions: -Before we overview, we introduce the following terminology to describe paths: +- `find_minimal_d_separator` returns a minimal d-separator set ``z``. + That is, removing any node or nodes from it makes it no longer a d-separator. +- `is_d_separator` checks if a given set is a d-separator. +- `is_minimal_d_separator` checks if a given set is a minimal d-separator. -- "open" path: A path between two nodes that can be traversed -- "blocked" path: A path between two nodes that cannot be traversed +D-separators +------------ -A **collider** is a triplet of nodes along a path that is like the following: -``... u -> c <- v ...``), where 'c' is a common successor of ``u`` and ``v``. A path -through a collider is considered "blocked". When -a node that is a collider, or a descendant of a collider is included in -the d-separating set, then the path through that collider node is "open". If the -path through the collider node is open, then we will call this node an open collider. +Here, we provide a brief overview of d-separation and related concepts that +are relevant for understanding it: -The d-separation set blocks the paths between ``u`` and ``v``. If you include colliders, -or their descendant nodes in the d-separation set, then those colliders will open up, -enabling a path to be traversed if it is not blocked some other way. 
+The ideas of d-separation and d-connection relate to paths being open or blocked.
+
+- A "path" is a sequence of nodes connected in order by edges. Unlike for most
+  graph theory analysis, the direction of the edges is ignored. Thus the path
+  can be thought of as a traditional path on the undirected version of the graph.
+- A "candidate d-separator" ``z`` is a set of nodes being considered as
+  possibly blocking all paths between two prescribed sets ``x`` and ``y`` of nodes.
+  We refer to each node in the candidate d-separator as "known".
+- A "collider" node on a path is a node that is a successor of its two neighbor
+  nodes on the path. That is, ``c`` is a collider if the edge directions
+  along the path look like ``... u -> c <- v ...``.
+- If a collider node or any of its descendants are "known", the collider
+  is called an "open collider". Otherwise it is a "blocking collider".
+- Any path can be "blocked" in two ways. If the path contains a "known" node
+  that is not a collider, the path is blocked. Also, if the path contains a
+  collider that is not a "known" node, the path is blocked.
+- A path is "open" if it is not blocked. That is, it is open if every node is
+  either an open collider or not a "known". Said another way, every
+  "known" in the path is a collider and every collider is open (has a
+  "known" as an inclusive descendant). The concept of "open path" is meant to
+  demonstrate a probabilistic conditional dependence between two nodes given
+  prescribed knowledge ("known" nodes).
+- Two sets ``x`` and ``y`` of nodes are "d-separated" by a set of nodes ``z``
+  if all paths between nodes in ``x`` and nodes in ``y`` are blocked. That is,
+  if there are no open paths from any node in ``x`` to any node in ``y``.
+  Such a set ``z`` is a "d-separator" of ``x`` and ``y``.
+- A "minimal d-separator" is a d-separator ``z`` for which no node or subset
+  of nodes can be removed with it still being a d-separator.
+ +The d-separator blocks some paths between ``x`` and ``y`` but opens others. +Nodes in the d-separator block paths if the nodes are not colliders. +But if a collider or its descendant nodes are in the d-separation set, the +colliders are open, allowing a path through that collider. Illustration of D-separation with examples ------------------------------------------ -For a pair of two nodes, ``u`` and ``v``, all paths are considered open if -there is a path between ``u`` and ``v`` that is not blocked. That means, there is an open -path between ``u`` and ``v`` that does not encounter a collider, or a variable in the -d-separating set. +A pair of two nodes, ``u`` and ``v``, are d-connected if there is a path +from ``u`` to ``v`` that is not blocked. That means, there is an open +path from ``u`` to ``v``. For example, if the d-separating set is the empty set, then the following paths are -unblocked between ``u`` and ``v``: +open between ``u`` and ``v``: -- u <- z -> v -- u -> w -> ... -> z -> v +- u <- n -> v +- u -> w -> ... -> n -> v -If for example, 'z' is in the d-separating set, then 'z' blocks those paths -between ``u`` and ``v``. +If on the other hand, ``n`` is in the d-separating set, then ``n`` blocks +those paths between ``u`` and ``v``. -Colliders block a path by default if they and their descendants are not included -in the d-separating set. An example of a path that is blocked when the d-separating -set is empty is: +Colliders block a path if they and their descendants are not included +in the d-separating set. An example of a path that is blocked when the +d-separating set is empty is: -- u -> w -> ... -> z <- v +- u -> w -> ... -> n <- v -because 'z' is a collider in this path and 'z' is not in the d-separating set. However, -if 'z' or a descendant of 'z' is included in the d-separating set, then the path through -the collider at 'z' (... -> z <- ...) is now "open". +The node ``n`` is a collider in this path and is not in the d-separating set. 
+So ``n`` blocks this path. However, if ``n`` or a descendant of ``n`` is +included in the d-separating set, then the path through the collider +at ``n`` (... -> n <- ...) is "open". -D-separation is concerned with blocking all paths between u and v. Therefore, a -d-separating set between ``u`` and ``v`` is one where all paths are blocked. +D-separation is concerned with blocking all paths between nodes from ``x`` to ``y``. +A d-separating set between ``x`` and ``y`` is one where all paths are blocked. D-separation and its applications in probability ------------------------------------------------ -D-separation is commonly used in probabilistic graphical models. D-separation +D-separation is commonly used in probabilistic causal-graph models. D-separation connects the idea of probabilistic "dependence" with separation in a graph. If -one assumes the causal Markov condition [5]_, then d-separation implies conditional -independence in probability distributions. +one assumes the causal Markov condition [5]_, (every node is conditionally +independent of its non-descendants, given its parents) then d-separation implies +conditional independence in probability distributions. +Symmetrically, d-connection implies dependence. + +The intuition is as follows. The edges on a causal graph indicate which nodes +influence the outcome of other nodes directly. An edge from u to v +implies that the outcome of event ``u`` influences the probabilities for +the outcome of event ``v``. Certainly knowing ``u`` changes predictions for ``v``. +But also knowing ``v`` changes predictions for ``u``. The outcomes are dependent. +Furthermore, an edge from ``v`` to ``w`` would mean that ``w`` and ``v`` are dependent +and thus that ``u`` could indirectly influence ``w``. + +Without any knowledge about the system (candidate d-separating set is empty) +a causal graph ``u -> v -> w`` allows all three nodes to be dependent. 
But
+if we know the outcome of ``v``, the conditional probabilities of outcomes for
+``u`` and ``w`` are independent of each other. That is, once we know the outcome
+for ``v``, the probabilities for ``w`` do not depend on the outcome for ``u``.
+This is the idea behind ``v`` blocking the path if it is "known" (in the candidate
+d-separating set).
+
+The same argument works whether the direction of the edges are both
+left-going and when both arrows head out from the middle. Having a "known"
+node on a path blocks the collider-free path because those relationships
+make the conditional probabilities independent.
+
+The direction of the causal edges does impact dependence precisely in the
+case of a collider e.g. ``u -> v <- w``. In that situation, both ``u`` and ``w``
+influence ``v``. But they do not directly influence each other. So without any
+knowledge of any outcomes, ``u`` and ``w`` are independent. That is the idea behind
+colliders blocking the path. But, if ``v`` is known, the conditional probabilities
+of ``u`` and ``w`` can be dependent. This is the heart of Berkson's Paradox [6]_.
+For example, suppose ``u`` and ``w`` are boolean events (they either happen or do not)
+and ``v`` represents the outcome "at least one of ``u`` and ``w`` occur". Then knowing
+``v`` is true makes the conditional probabilities of ``u`` and ``w`` dependent.
+Essentially, knowing that at least one of them is true raises the probability of
+each. But further knowledge that ``w`` is true (or false) changes the conditional
+probability of ``u`` to either the original value or 1. So the conditional
+probability of ``u`` depends on the outcome of ``w`` even though there is no
+causal relationship between them. When a collider is known, dependence can
+occur across paths through that collider. This is the reason open colliders
+do not block paths.
+
+Furthermore, even if ``v`` is not "known", if one of its descendants is "known"
+we can use that information to know more about ``v`` which again makes
+``u`` and ``w`` potentially dependent. Suppose the chance of ``n`` occurring
+is much higher when ``v`` occurs ("at least one of ``u`` and ``w`` occur").
+Then if we know ``n`` occurred, it is more likely that ``v`` occurred and that
+makes the chance of ``u`` and ``w`` dependent. This is the idea behind why
+a collider does not block a path if any descendant of the collider is "known".
+
+When two sets of nodes ``x`` and ``y`` are d-separated by a set ``z``,
+it means that given the outcomes of the nodes in ``z``, the probabilities
+of outcomes of the nodes in ``x`` are independent of the outcomes of the
+nodes in ``y`` and vice versa.
 
 Examples
 --------
-
->>>
->>> # HMM graph with five states and observation nodes
-... g = nx.DiGraph()
->>> g.add_edges_from(
+A Hidden Markov Model with 5 observed states and 5 hidden states
+where the hidden states have causal relationships resulting in
+a path results in the following causal network. We check that
+early states along the path are separated from late state in
+the path by the d-separator of the middle hidden state.
+Thus if we condition on the middle hidden state, the early
+state probabilities are independent of the late state outcomes.
+
+>>> G = nx.DiGraph()
+>>> G.add_edges_from(
 ...     [
-...         ("S1", "S2"),
-...         ("S2", "S3"),
-...         ("S3", "S4"),
-...         ("S4", "S5"),
-...         ("S1", "O1"),
-...         ("S2", "O2"),
-...         ("S3", "O3"),
-...         ("S4", "O4"),
-...         ("S5", "O5"),
+...         ("H1", "H2"),
+...         ("H2", "H3"),
+...         ("H3", "H4"),
+...         ("H4", "H5"),
+...         ("H1", "O1"),
+...         ("H2", "O2"),
+...         ("H3", "O3"),
+...         ("H4", "O4"),
+...         ("H5", "O5"),
 ...     ]
 ... )
->>>
->>> # states/obs before 'S3' are d-separated from states/obs after 'S3'
-...
nx.d_separated(g, {"S1", "S2", "O1", "O2"}, {"S4", "S5", "O4", "O5"}, {"S3"}) +>>> x, y, z = ({"H1", "O1"}, {"H5", "O5"}, {"H3"}) +>>> nx.is_d_separator(G, x, y, z) +True +>>> nx.is_minimal_d_separator(G, x, y, z) +True +>>> nx.is_minimal_d_separator(G, x, y, z | {"O3"}) +False +>>> z = nx.find_minimal_d_separator(G, x | y, {"O2", "O3", "O4"}) +>>> z == {"H2", "H4"} +True + +If no minimal_d_separator exists, `None` is returned + +>>> other_z = nx.find_minimal_d_separator(G, x | y, {"H2", "H3"}) +>>> other_z is None True @@ -101,142 +196,192 @@ .. [1] Pearl, J. (2009). Causality. Cambridge: Cambridge University Press. -.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks. +.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks. Cambridge: Cambridge University Press. -.. [3] Shachter, R. D. (1998). - Bayes-ball: rational pastime (for determining irrelevance and requisite - information in belief networks and influence diagrams). - In , Proceedings of the Fourteenth Conference on Uncertainty in Artificial - Intelligence (pp. 480–487). - San Francisco, CA, USA: Morgan Kaufmann Publishers Inc. +.. [3] Shachter, Ross D. "Bayes-ball: The rational pastime (for + determining irrelevance and requisite information in belief networks + and influence diagrams)." In Proceedings of the Fourteenth Conference + on Uncertainty in Artificial Intelligence (UAI), (pp. 480–487). 1998. .. [4] Koller, D., & Friedman, N. (2009). Probabilistic graphical models: principles and techniques. The MIT Press. .. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition +.. 
[6] https://en.wikipedia.org/wiki/Berkson%27s_paradox + """ from collections import deque +from itertools import chain import networkx as nx from networkx.utils import UnionFind, not_implemented_for -__all__ = ["d_separated", "minimal_d_separator", "is_minimal_d_separator"] +__all__ = [ + "is_d_separator", + "is_minimal_d_separator", + "find_minimal_d_separator", + "d_separated", + "minimal_d_separator", +] @not_implemented_for("undirected") -@nx._dispatch -def d_separated(G, x, y, z): - """ - Return whether node sets ``x`` and ``y`` are d-separated by ``z``. +@nx._dispatchable +def is_d_separator(G, x, y, z): + """Return whether node sets `x` and `y` are d-separated by `z`. Parameters ---------- - G : graph + G : nx.DiGraph A NetworkX DAG. - x : set - First set of nodes in ``G``. + x : node or set of nodes + First node or set of nodes in `G`. - y : set - Second set of nodes in ``G``. + y : node or set of nodes + Second node or set of nodes in `G`. - z : set - Set of conditioning nodes in ``G``. Can be empty set. + z : node or set of nodes + Potential separator (set of conditioning nodes in `G`). Can be empty set. Returns ------- b : bool - A boolean that is true if ``x`` is d-separated from ``y`` given ``z`` in ``G``. + A boolean that is true if `x` is d-separated from `y` given `z` in `G`. Raises ------ NetworkXError - The *d-separation* test is commonly used with directed - graphical models which are acyclic. Accordingly, the algorithm - raises a :exc:`NetworkXError` if the input graph is not a DAG. + The *d-separation* test is commonly used on disjoint sets of + nodes in acyclic directed graphs. Accordingly, the algorithm + raises a :exc:`NetworkXError` if the node sets are not + disjoint or if the input graph is not a DAG. NodeNotFound If any of the input nodes are not found in the graph, - a :exc:`NodeNotFound` exception is raised. 
+ a :exc:`NodeNotFound` exception is raised Notes ----- A d-separating set in a DAG is a set of nodes that blocks all paths between the two sets. Nodes in `z` block a path if they are part of the path and are not a collider, - or a descendant of a collider. A collider structure along a path + or a descendant of a collider. Also colliders that are not in `z` + block a path. A collider structure along a path is ``... -> c <- ...`` where ``c`` is the collider node. https://en.wikipedia.org/wiki/Bayesian_network#d-separation """ + try: + x = {x} if x in G else x + y = {y} if y in G else y + z = {z} if z in G else z + + intersection = x & y or x & z or y & z + if intersection: + raise nx.NetworkXError( + f"The sets are not disjoint, with intersection {intersection}" + ) + + set_v = x | y | z + if set_v - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound("One of x, y, or z is not a node or a set of nodes in G") if not nx.is_directed_acyclic_graph(G): raise nx.NetworkXError("graph should be directed acyclic") - union_xyz = x.union(y).union(z) - - if any(n not in G.nodes for n in union_xyz): - raise nx.NodeNotFound("one or more specified nodes not found in the graph") - - G_copy = G.copy() - - # transform the graph by removing leaves that are not in x | y | z - # until no more leaves can be removed. - leaves = deque([n for n in G_copy.nodes if G_copy.out_degree[n] == 0]) - while len(leaves) > 0: - leaf = leaves.popleft() - if leaf not in union_xyz: - for p in G_copy.predecessors(leaf): - if G_copy.out_degree[p] == 1: - leaves.append(p) - G_copy.remove_node(leaf) - - # transform the graph by removing outgoing edges from the - # conditioning set. - edges_to_remove = list(G_copy.out_edges(z)) - G_copy.remove_edges_from(edges_to_remove) - - # use disjoint-set data structure to check if any node in `x` - # occurs in the same weakly connected component as a node in `y`. 
- disjoint_set = UnionFind(G_copy.nodes()) - for component in nx.weakly_connected_components(G_copy): - disjoint_set.union(*component) - disjoint_set.union(*x) - disjoint_set.union(*y) - - if x and y and disjoint_set[next(iter(x))] == disjoint_set[next(iter(y))]: - return False - else: - return True + # contains -> and <-> edges from starting node T + forward_deque = deque([]) + forward_visited = set() + + # contains <- and - edges from starting node T + backward_deque = deque(x) + backward_visited = set() + + ancestors_or_z = set().union(*[nx.ancestors(G, node) for node in x]) | z | x + + while forward_deque or backward_deque: + if backward_deque: + node = backward_deque.popleft() + backward_visited.add(node) + if node in y: + return False + if node in z: + continue + + # add <- edges to backward deque + backward_deque.extend(G.pred[node].keys() - backward_visited) + # add -> edges to forward deque + forward_deque.extend(G.succ[node].keys() - forward_visited) + + if forward_deque: + node = forward_deque.popleft() + forward_visited.add(node) + if node in y: + return False + + # Consider if -> node <- is opened due to ancestor of node in z + if node in ancestors_or_z: + # add <- edges to backward deque + backward_deque.extend(G.pred[node].keys() - backward_visited) + if node not in z: + # add -> edges to forward deque + forward_deque.extend(G.succ[node].keys() - forward_visited) + + return True @not_implemented_for("undirected") -@nx._dispatch -def minimal_d_separator(G, u, v): - """Compute a minimal d-separating set between 'u' and 'v'. +@nx._dispatchable +def find_minimal_d_separator(G, x, y, *, included=None, restricted=None): + """Returns a minimal d-separating set between `x` and `y` if possible - A d-separating set in a DAG is a set of nodes that blocks all paths - between the two nodes, 'u' and 'v'. This function - constructs a d-separating set that is "minimal", meaning it is the smallest - d-separating set for 'u' and 'v'. This is not necessarily - unique. 
For more details, see Notes. + A d-separating set in a DAG is a set of nodes that blocks all + paths between the two sets of nodes, `x` and `y`. This function + constructs a d-separating set that is "minimal", meaning no nodes can + be removed without it losing the d-separating property for `x` and `y`. + If no d-separating sets exist for `x` and `y`, this returns `None`. + + In a DAG there may be more than one minimal d-separator between two + sets of nodes. Minimal d-separators are not always unique. This function + returns one minimal d-separator, or `None` if no d-separator exists. + + Uses the algorithm presented in [1]_. The complexity of the algorithm + is :math:`O(m)`, where :math:`m` stands for the number of edges in + the subgraph of G consisting of only the ancestors of `x` and `y`. + For full details, see [1]_. Parameters ---------- G : graph A networkx DAG. - u : node - A node in the graph, G. - v : node - A node in the graph, G. + x : set | node + A node or set of nodes in the graph. + y : set | node + A node or set of nodes in the graph. + included : set | node | None + A node or set of nodes which must be included in the found separating set, + default is None, which means the empty set. + restricted : set | node | None + Restricted node or set of nodes to consider. Only these nodes can be in + the found separating set, default is None meaning all nodes in ``G``. + + Returns + ------- + z : set | None + The minimal d-separating set, if at least one d-separating set exists, + otherwise None. Raises ------ NetworkXError - Raises a :exc:`NetworkXError` if the input graph is not a DAG. + Raises a :exc:`NetworkXError` if the input graph is not a DAG + or if node sets `x`, `y`, and `included` are not disjoint. NodeNotFound If any of the input nodes are not found in the graph, @@ -244,89 +389,98 @@ def minimal_d_separator(G, u, v): References ---------- - .. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators. 
- - Notes - ----- - This function only finds ``a`` minimal d-separator. It does not guarantee - uniqueness, since in a DAG there may be more than one minimal d-separator - between two nodes. Moreover, this only checks for minimal separators - between two nodes, not two sets. Finding minimal d-separators between - two sets of nodes is not supported. - - Uses the algorithm presented in [1]_. The complexity of the algorithm - is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the - number of edges in the moralized graph of the sub-graph consisting - of only the ancestors of 'u' and 'v'. For full details, see [1]_. - - The algorithm works by constructing the moral graph consisting of just - the ancestors of `u` and `v`. Then it constructs a candidate for - a separating set ``Z'`` from the predecessors of `u` and `v`. - Then BFS is run starting from `u` and marking nodes - found from ``Z'`` and calling those nodes ``Z''``. - Then BFS is run again starting from `v` and marking nodes if they are - present in ``Z''``. Those marked nodes are the returned minimal - d-separating set. - - https://en.wikipedia.org/wiki/Bayesian_network#d-separation + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. 
""" if not nx.is_directed_acyclic_graph(G): raise nx.NetworkXError("graph should be directed acyclic") - union_uv = {u, v} + try: + x = {x} if x in G else x + y = {y} if y in G else y - if any(n not in G.nodes for n in union_uv): - raise nx.NodeNotFound("one or more specified nodes not found in the graph") + if included is None: + included = set() + elif included in G: + included = {included} - # first construct the set of ancestors of X and Y - x_anc = nx.ancestors(G, u) - y_anc = nx.ancestors(G, v) - D_anc_xy = x_anc.union(y_anc) - D_anc_xy.update((u, v)) + if restricted is None: + restricted = set(G) + elif restricted in G: + restricted = {restricted} - # second, construct the moralization of the subgraph of Anc(X,Y) - moral_G = nx.moral_graph(G.subgraph(D_anc_xy)) + set_y = x | y | included | restricted + if set_y - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound( + "One of x, y, included or restricted is not a node or set of nodes in G" + ) - # find a separating set Z' in moral_G - Z_prime = set(G.predecessors(u)).union(set(G.predecessors(v))) + if not included <= restricted: + raise nx.NetworkXError( + f"Included nodes {included} must be in restricted nodes {restricted}" + ) - # perform BFS on the graph from 'x' to mark - Z_dprime = _bfs_with_marks(moral_G, u, Z_prime) - Z = _bfs_with_marks(moral_G, v, Z_dprime) - return Z + intersection = x & y or x & included or y & included + if intersection: + raise nx.NetworkXError( + f"The sets x, y, included are not disjoint. 
Overlap: {intersection}" + ) + + nodeset = x | y | included + ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, node) for node in nodeset]) + + z_init = restricted & (ancestors_x_y_included - (x | y)) + + x_closure = _reachable(G, x, ancestors_x_y_included, z_init) + if x_closure & y: + return None + + z_updated = z_init & (x_closure | included) + y_closure = _reachable(G, y, ancestors_x_y_included, z_updated) + return z_updated & (y_closure | included) @not_implemented_for("undirected") -@nx._dispatch -def is_minimal_d_separator(G, u, v, z): - """Determine if a d-separating set is minimal. +@nx._dispatchable +def is_minimal_d_separator(G, x, y, z, *, included=None, restricted=None): + """Determine if `z` is a minimal d-separator for `x` and `y`. - A d-separating set, `z`, in a DAG is a set of nodes that blocks - all paths between the two nodes, `u` and `v`. This function - verifies that a set is "minimal", meaning there is no smaller - d-separating set between the two nodes. + A d-separator, `z`, in a DAG is a set of nodes that blocks + all paths from nodes in set `x` to nodes in set `y`. + A minimal d-separator is a d-separator `z` such that removing + any subset of nodes makes it no longer a d-separator. - Note: This function checks whether `z` is a d-separator AND is minimal. - One can use the function `d_separated` to only check if `z` is a d-separator. - See examples below. + Note: This function checks whether `z` is a d-separator AND is + minimal. One can use the function `is_d_separator` to only check if + `z` is a d-separator. See examples below. Parameters ---------- G : nx.DiGraph - The graph. - u : node - A node in the graph. - v : node - A node in the graph. - z : Set of nodes - The set of nodes to check if it is a minimal d-separating set. - The function :func:`d_separated` is called inside this function + A NetworkX DAG. + x : node | set + A node or set of nodes in the graph. + y : node | set + A node or set of nodes in the graph. 
+ z : node | set + The node or set of nodes to check if it is a minimal d-separating set. + The function :func:`is_d_separator` is called inside this function to verify that `z` is in fact a d-separator. + included : set | node | None + A node or set of nodes which must be included in the found separating set, + default is ``None``, which means the empty set. + restricted : set | node | None + Restricted node or set of nodes to consider. Only these nodes can be in + the found separating set, default is ``None`` meaning all nodes in ``G``. Returns ------- bool - Whether or not the set `z` is a d-separator and is also minimal. + Whether or not the set `z` is a minimal d-separator subject to + `restricted` nodes and `included` node constraints. Examples -------- @@ -338,7 +492,7 @@ def is_minimal_d_separator(G, u, v, z): >>> nx.is_minimal_d_separator(G, 0, 2, {1, 3, 4}) False >>> # alternatively, if we only want to check that {1, 3, 4} is a d-separator - >>> nx.d_separated(G, {0}, {4}, {1, 3, 4}) + >>> nx.is_d_separator(G, 0, 2, {1, 3, 4}) True Raises @@ -352,106 +506,217 @@ def is_minimal_d_separator(G, u, v, z): References ---------- - .. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators. + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. Notes ----- - This function only works on verifying a d-separating set is minimal - between two nodes. To verify that a d-separating set is minimal between - two sets of nodes is not supported. - - Uses algorithm 2 presented in [1]_. The complexity of the algorithm - is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the - number of edges in the moralized graph of the sub-graph consisting - of only the ancestors of ``u`` and ``v``. - - The algorithm works by constructing the moral graph consisting of just - the ancestors of `u` and `v`. 
First, it performs BFS on the moral graph - starting from `u` and marking any nodes it encounters that are part of - the separating set, `z`. If a node is marked, then it does not continue - along that path. In the second stage, BFS with markings is repeated on the - moral graph starting from `v`. If at any stage, any node in `z` is - not marked, then `z` is considered not minimal. If the end of the algorithm - is reached, then `z` is minimal. + This function works on verifying that a set is minimal and + d-separating between two nodes. Uses criterion (a), (b), (c) on + page 4 of [1]_. a) closure(`x`) and `y` are disjoint. b) `z` contains + all nodes from `included` and is contained in the `restricted` + nodes and in the union of ancestors of `x`, `y`, and `included`. + c) the nodes in `z` not in `included` are contained in both + closure(x) and closure(y). The closure of a set is the set of nodes + connected to the set by a directed path in G. + + The complexity is :math:`O(m)`, where :math:`m` stands for the + number of edges in the subgraph of G consisting of only the + ancestors of `x` and `y`. For full details, see [1]_. 
- - https://en.wikipedia.org/wiki/Bayesian_network#d-separation """ - if not nx.d_separated(G, {u}, {v}, z): - return False - - x_anc = nx.ancestors(G, u) - y_anc = nx.ancestors(G, v) - xy_anc = x_anc.union(y_anc) + if not nx.is_directed_acyclic_graph(G): + raise nx.NetworkXError("graph should be directed acyclic") - # if Z contains any node which is not in ancestors of X or Y - # then it is definitely not minimal - if any(node not in xy_anc for node in z): + try: + x = {x} if x in G else x + y = {y} if y in G else y + z = {z} if z in G else z + + if included is None: + included = set() + elif included in G: + included = {included} + + if restricted is None: + restricted = set(G) + elif restricted in G: + restricted = {restricted} + + set_y = x | y | included | restricted + if set_y - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound( + "One of x, y, z, included or restricted is not a node or set of nodes in G" + ) + + if not included <= z: + raise nx.NetworkXError( + f"Included nodes {included} must be in proposed separating set z {x}" + ) + if not z <= restricted: + raise nx.NetworkXError( + f"Separating set {z} must be contained in restricted set {restricted}" + ) + + intersection = x.intersection(y) or x.intersection(z) or y.intersection(z) + if intersection: + raise nx.NetworkXError( + f"The sets are not disjoint, with intersection {intersection}" + ) + + nodeset = x | y | included + ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, n) for n in nodeset]) + + # criterion (a) -- check that z is actually a separator + x_closure = _reachable(G, x, ancestors_x_y_included, z) + if x_closure & y: return False - D_anc_xy = x_anc.union(y_anc) - D_anc_xy.update((u, v)) - - # second, construct the moralization of the subgraph - moral_G = nx.moral_graph(G.subgraph(D_anc_xy)) - - # start BFS from X - marks = _bfs_with_marks(moral_G, u, z) - - # if not all the Z is marked, then the set is 
not minimal - if any(node not in marks for node in z): + # criterion (b) -- basic constraint; included and restricted already checked above + if not (z <= ancestors_x_y_included): return False - # similarly, start BFS from Y and check the marks - marks = _bfs_with_marks(moral_G, v, z) - # if not all the Z is marked, then the set is not minimal - if any(node not in marks for node in z): + # criterion (c) -- check that z is minimal + y_closure = _reachable(G, y, ancestors_x_y_included, z) + if not ((z - included) <= (x_closure & y_closure)): return False - return True -@not_implemented_for("directed") -def _bfs_with_marks(G, start_node, check_set): - """Breadth-first-search with markings. +@not_implemented_for("undirected") +def _reachable(G, x, a, z): + """Modified Bayes-Ball algorithm for finding d-connected nodes. - Performs BFS starting from ``start_node`` and whenever a node - inside ``check_set`` is met, it is "marked". Once a node is marked, - BFS does not continue along that path. The resulting marked nodes - are returned. + Find all nodes in `a` that are d-connected to those in `x` by + those in `z`. This is an implementation of the function + `REACHABLE` in [1]_ (which is itself a modification of the + Bayes-Ball algorithm [2]_) when restricted to DAGs. Parameters ---------- - G : nx.Graph - An undirected graph. - start_node : node - The start of the BFS. - check_set : set - The set of nodes to check against. + G : nx.DiGraph + A NetworkX DAG. + x : node | set + A node in the DAG, or a set of nodes. + a : node | set + A (set of) node(s) in the DAG containing the ancestors of `x`. + z : node | set + The node or set of nodes conditioned on when checking d-connectedness. Returns ------- - marked : set - A set of nodes that were marked. + w : set + The closure of `x` in `a` with respect to d-connectedness + given `z`. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. 
"Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. + + .. [2] Shachter, Ross D. "Bayes-ball: The rational pastime + (for determining irrelevance and requisite information in + belief networks and influence diagrams)." In Proceedings of the + Fourteenth Conference on Uncertainty in Artificial Intelligence + (UAI), (pp. 480–487). 1998. + """ + + def _pass(e, v, f, n): + """Whether a ball entering node `v` along edge `e` passes to `n` along `f`. + + Boolean function defined on page 6 of [1]_. + + Parameters + ---------- + e : bool + Directed edge by which the ball got to node `v`; `True` iff directed into `v`. + v : node + Node where the ball is. + f : bool + Directed edge connecting nodes `v` and `n`; `True` iff directed `n`. + n : node + Checking whether the ball passes to this node. + + Returns + ------- + b : bool + Whether the ball passes or not. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. 
+ """ + is_element_of_A = n in a + # almost_definite_status = True # always true for DAGs; not so for RCGs + collider_if_in_Z = v not in z or (e and not f) + return is_element_of_A and collider_if_in_Z # and almost_definite_status + + queue = deque([]) + for node in x: + if bool(G.pred[node]): + queue.append((True, node)) + if bool(G.succ[node]): + queue.append((False, node)) + processed = queue.copy() + + while any(queue): + e, v = queue.popleft() + preds = ((False, n) for n in G.pred[v]) + succs = ((True, n) for n in G.succ[v]) + f_n_pairs = chain(preds, succs) + for f, n in f_n_pairs: + if (f, n) not in processed and _pass(e, v, f, n): + queue.append((f, n)) + processed.append((f, n)) + + return {w for (_, w) in processed} + + +# Deprecated functions: +def d_separated(G, x, y, z): + """Return whether nodes sets ``x`` and ``y`` are d-separated by ``z``. + + .. deprecated:: 3.3 + + This function is deprecated and will be removed in NetworkX v3.5. + Please use `is_d_separator(G, x, y, z)`. + + """ + import warnings + + warnings.warn( + "d_separated is deprecated and will be removed in NetworkX v3.5." + "Please use `is_d_separator(G, x, y, z)`.", + category=DeprecationWarning, + stacklevel=2, + ) + return nx.is_d_separator(G, x, y, z) + + +def minimal_d_separator(G, u, v): + """Returns a minimal_d-separating set between `x` and `y` if possible + + .. deprecated:: 3.3 + + minimal_d_separator is deprecated and will be removed in NetworkX v3.5. + Please use `find_minimal_d_separator(G, x, y)`. 
+ """ - visited = {} - marked = set() - queue = [] - - visited[start_node] = None - queue.append(start_node) - while queue: - m = queue.pop(0) - - for nbr in G.neighbors(m): - if nbr not in visited: - # memoize where we visited so far - visited[nbr] = None - - # mark the node in Z' and do not continue along that path - if nbr in check_set: - marked.add(nbr) - else: - queue.append(nbr) - return marked + import warnings + + warnings.warn( + ( + "This function is deprecated and will be removed in NetworkX v3.5." + "Please use `is_d_separator(G, x, y)`." + ), + category=DeprecationWarning, + stacklevel=2, + ) + return nx.find_minimal_d_separator(G, u, v) diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index fb74df81c6d..a70e2c5efaf 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -36,7 +36,7 @@ chaini = chain.from_iterable -@nx._dispatch +@nx._dispatchable def descendants(G, source): """Returns all nodes reachable from `source` in `G`. @@ -73,7 +73,7 @@ def descendants(G, source): return {child for parent, child in nx.bfs_edges(G, source)} -@nx._dispatch +@nx._dispatchable def ancestors(G, source): """Returns all nodes having a path to `source` in `G`. @@ -110,7 +110,7 @@ def ancestors(G, source): return {child for parent, child in nx.bfs_edges(G, source, reverse=True)} -@nx._dispatch +@nx._dispatchable def has_cycle(G): """Decides whether the directed graph has a cycle.""" try: @@ -122,7 +122,7 @@ def has_cycle(G): return False -@nx._dispatch +@nx._dispatchable def is_directed_acyclic_graph(G): """Returns True if the graph `G` is a directed acyclic graph (DAG) or False if not. @@ -163,7 +163,7 @@ def is_directed_acyclic_graph(G): return G.is_directed() and not has_cycle(G) -@nx._dispatch +@nx._dispatchable def topological_generations(G): """Stratifies a DAG into generations. 
@@ -241,7 +241,7 @@ def topological_generations(G): ) -@nx._dispatch +@nx._dispatchable def topological_sort(G): """Returns a generator of nodes in topologically sorted order. @@ -310,7 +310,7 @@ def topological_sort(G): yield from generation -@nx._dispatch +@nx._dispatchable def lexicographical_topological_sort(G, key=None): """Generate the nodes in the unique lexicographical topological sort order. @@ -377,7 +377,7 @@ def lexicographical_topological_sort(G, key=None): The sort will fail for any graph with integer and string nodes. Comparison of integer to strings is not defined in python. Is 3 greater or less than 'red'? - >>> DG = nx.DiGraph([(1, 'red'), (3, 'red'), (1, 'green'), (2, 'blue')]) + >>> DG = nx.DiGraph([(1, "red"), (3, "red"), (1, "green"), (2, "blue")]) >>> list(nx.lexicographical_topological_sort(DG)) Traceback (most recent call last): ... @@ -453,7 +453,7 @@ def create_tuple(node): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_topological_sorts(G): """Returns a generator of _all_ topological sorts of the directed graph G. @@ -572,7 +572,7 @@ def all_topological_sorts(G): break -@nx._dispatch +@nx._dispatchable def is_aperiodic(G): """Returns True if `G` is aperiodic. 
@@ -641,7 +641,8 @@ def is_aperiodic(G): """ if not G.is_directed(): raise nx.NetworkXError("is_aperiodic not defined for undirected graphs") - + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") s = arbitrary_element(G) levels = {s: 0} this_level = [s] @@ -664,7 +665,7 @@ def is_aperiodic(G): return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels))) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def transitive_closure(G, reflexive=False): """Returns transitive closure of a graph @@ -757,7 +758,7 @@ def transitive_closure(G, reflexive=False): @not_implemented_for("undirected") -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def transitive_closure_dag(G, topo_order=None): """Returns the transitive closure of a directed acyclic graph. @@ -814,7 +815,7 @@ def transitive_closure_dag(G, topo_order=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def transitive_reduction(G): """Returns transitive reduction of a directed graph @@ -852,7 +853,7 @@ def transitive_reduction(G): To perform transitive reduction on a DiGraph and transfer node/edge data: >>> DG = nx.DiGraph() - >>> DG.add_edges_from([(1, 2), (2, 3), (1, 3)], color='red') + >>> DG.add_edges_from([(1, 2), (2, 3), (1, 3)], color="red") >>> TR = nx.transitive_reduction(DG) >>> TR.add_nodes_from(DG.nodes(data=True)) >>> TR.add_edges_from((u, v, DG.edges[u, v]) for u, v in TR.edges) @@ -887,7 +888,7 @@ def transitive_reduction(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def antichains(G, topo_order=None): """Generates antichains from a directed acyclic graph (DAG). 
@@ -954,7 +955,7 @@ def antichains(G, topo_order=None): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs={"weight": "default_weight"}) +@nx._dispatchable(edge_attrs={"weight": "default_weight"}) def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): """Returns the longest path in a directed acyclic graph (DAG). @@ -987,7 +988,7 @@ def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): Examples -------- - >>> DG = nx.DiGraph([(0, 1, {'cost':1}), (1, 2, {'cost':1}), (0, 2, {'cost':42})]) + >>> DG = nx.DiGraph([(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})]) >>> list(nx.all_simple_paths(DG, 0, 2)) [[0, 1, 2], [0, 2]] >>> nx.dag_longest_path(DG) @@ -1050,7 +1051,7 @@ def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): @not_implemented_for("undirected") -@nx._dispatch(edge_attrs={"weight": "default_weight"}) +@nx._dispatchable(edge_attrs={"weight": "default_weight"}) def dag_longest_path_length(G, weight="weight", default_weight=1): """Returns the longest path length in a DAG @@ -1077,7 +1078,7 @@ def dag_longest_path_length(G, weight="weight", default_weight=1): Examples -------- - >>> DG = nx.DiGraph([(0, 1, {'cost':1}), (1, 2, {'cost':1}), (0, 2, {'cost':42})]) + >>> DG = nx.DiGraph([(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})]) >>> list(nx.all_simple_paths(DG, 0, 2)) [[0, 1, 2], [0, 2]] >>> nx.dag_longest_path_length(DG) @@ -1102,7 +1103,7 @@ def dag_longest_path_length(G, weight="weight", default_weight=1): return path_length -@nx._dispatch +@nx._dispatchable def root_to_leaf_paths(G): """Yields root-to-leaf paths in a directed acyclic graph. 
@@ -1123,7 +1124,7 @@ def root_to_leaf_paths(G): @not_implemented_for("multigraph") @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def dag_to_branching(G): """Returns a branching representing all (overlapping) paths from root nodes to leaf nodes in the given directed acyclic graph. @@ -1221,7 +1222,7 @@ def dag_to_branching(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def compute_v_structures(G): """Iterate through the graph to compute all v-structures. @@ -1249,7 +1250,7 @@ def compute_v_structures(G): Notes ----- - https://en.wikipedia.org/wiki/Collider_(statistics) + `Wikipedia: Collider in causal graphs `_ """ for collider, preds in G.pred.items(): for common_parents in combinations(preds, r=2): diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 09bde0cc157..8215b470af3 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -12,6 +12,7 @@ "barycenter", "resistance_distance", "kemeny_constant", + "effective_graph_resistance", ] @@ -236,7 +237,7 @@ def _extrema_bounding(G, compute="diameter", weight=None): return None -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def eccentricity(G, v=None, sp=None, weight=None): """Returns the eccentricity of nodes in G. @@ -325,7 +326,7 @@ def eccentricity(G, v=None, sp=None, weight=None): return e -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def diameter(G, e=None, usebounds=False, weight=None): """Returns the diameter of the graph G. @@ -381,7 +382,7 @@ def diameter(G, e=None, usebounds=False, weight=None): return max(e.values()) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def periphery(G, e=None, usebounds=False, weight=None): """Returns the periphery of the graph G. 
@@ -440,7 +441,7 @@ def periphery(G, e=None, usebounds=False, weight=None): return p -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def radius(G, e=None, usebounds=False, weight=None): """Returns the radius of the graph G. @@ -493,7 +494,7 @@ def radius(G, e=None, usebounds=False, weight=None): return min(e.values()) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def center(G, e=None, usebounds=False, weight=None): """Returns the center of the graph G. @@ -552,7 +553,7 @@ def center(G, e=None, usebounds=False, weight=None): return p -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2}) def barycenter(G, weight=None, attr=None, sp=None): r"""Calculate barycenter of a connected graph, optionally with edge weights. @@ -632,9 +633,9 @@ def barycenter(G, weight=None, attr=None, sp=None): @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True): - """Returns the resistance distance between every pair of nodes on graph G. + """Returns the resistance distance between pairs of nodes in graph G. 
The resistance distance between two nodes of a graph is akin to treating the graph as a grid of resistors with a resistance equal to the provided @@ -734,14 +735,14 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr if nodeA is not None and nodeB is not None: i = node_list.index(nodeA) j = node_list.index(nodeB) - return Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) elif nodeA is not None: i = node_list.index(nodeA) d = {} for n in G: j = node_list.index(n) - d[n] = Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) return d elif nodeB is not None: @@ -749,7 +750,7 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr d = {} for n in G: i = node_list.index(n) - d[n] = Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) return d else: @@ -759,12 +760,105 @@ def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=Tr d[n] = {} for n2 in G: j = node_list.index(n2) - d[n][n2] = Linv[i, i] + Linv[j, j] - Linv[i, j] - Linv[j, i] + d[n][n2] = ( + Linv.item(i, i) + + Linv.item(j, j) + - Linv.item(i, j) + - Linv.item(j, i) + ) return d +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def effective_graph_resistance(G, weight=None, invert_weight=True): + """Returns the Effective graph resistance of G. + + Also known as the Kirchhoff index. + + The effective graph resistance is defined as the sum + of the resistance distance of every node pair in G [1]_. + + If weight is not provided, then a weight of 1 is used for all edges. + + The effective graph resistance of a disconnected graph is infinite. 
+ + Parameters + ---------- + G : NetworkX graph + A graph + + weight : string or None, optional (default=None) + The edge data key used to compute the effective graph resistance. + If None, then each edge has weight 1. + + invert_weight : boolean (default=True) + Proper calculation of resistance distance requires building the + Laplacian matrix with the reciprocal of the weight. Not required + if the weight is already inverted. Weight cannot be zero. + + Returns + ------- + RG : float + The effective graph resistance of `G`. + + Raises + ------ + NetworkXNotImplemented + If `G` is a directed graph. + + NetworkXError + If `G` does not contain any nodes. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> round(nx.effective_graph_resistance(G), 10) + 10.25 + + Notes + ----- + The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored. + Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights. + + References + ---------- + .. [1] Wolfram + "Kirchhoff Index." + https://mathworld.wolfram.com/KirchhoffIndex.html + .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij. + Effective graph resistance. + Lin. Alg. Appl. 435:2491-2506, 2011. 
+ """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + + # Disconnected graphs have infinite Effective graph resistance + if not nx.is_connected(G): + return float("inf") + + # Invert weights + G = G.copy() + if invert_weight and weight is not None: + if G.is_multigraph(): + for u, v, k, d in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for u, v, d in G.edges(data=True): + d[weight] = 1 / d[weight] + + # Get Laplacian eigenvalues + mu = np.sort(nx.laplacian_spectrum(G, weight=weight)) + + # Compute Effective graph resistance based on spectrum of the Laplacian + # Self-loops are ignored + return float(np.sum(1 / mu[1:]) * G.number_of_nodes()) + + @nx.utils.not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def kemeny_constant(G, *, weight=None): """Returns the Kemeny constant of the given graph. @@ -793,7 +887,7 @@ def kemeny_constant(G, *, weight=None): Returns ------- - K : float + float The Kemeny constant of the graph `G`. Raises @@ -852,4 +946,4 @@ def kemeny_constant(G, *, weight=None): eig = np.sort(sp.linalg.eigvalsh(H.todense())) # Compute the Kemeny constant - return np.sum(1 / (1 - eig[:-1])) + return float(np.sum(1 / (1 - eig[:-1]))) diff --git a/networkx/algorithms/distance_regular.py b/networkx/algorithms/distance_regular.py index 18c19ee00e0..27b4d0216e4 100644 --- a/networkx/algorithms/distance_regular.py +++ b/networkx/algorithms/distance_regular.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def is_distance_regular(G): """Returns True if the graph is distance regular, False otherwise. 
@@ -109,8 +109,9 @@ def global_parameters(b, c): return ((y, b[0] - x - y, x) for x, y in zip(b + [0], [0] + c)) -@not_implemented_for("directed", "multigraph") -@nx._dispatch +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable def intersection_array(G): """Returns the intersection array of a distance-regular graph. @@ -147,6 +148,8 @@ def intersection_array(G): global_parameters """ # test for regular graph (all degrees must be equal) + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") degree = iter(G.degree()) (_, k) = next(degree) for _, knext in degree: @@ -179,8 +182,9 @@ def intersection_array(G): # TODO There is a definition for directed strongly regular graphs. -@not_implemented_for("directed", "multigraph") -@nx._dispatch +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable def is_strongly_regular(G): """Returns True if and only if the given graph is strongly regular. diff --git a/networkx/algorithms/dominance.py b/networkx/algorithms/dominance.py index ffdbe7d2139..ab841fe21d5 100644 --- a/networkx/algorithms/dominance.py +++ b/networkx/algorithms/dominance.py @@ -11,7 +11,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def immediate_dominators(G, start): """Returns the immediate dominators of all nodes of a directed graph. @@ -84,7 +84,7 @@ def intersect(u, v): return idom -@nx._dispatch +@nx._dispatchable def dominance_frontiers(G, start): """Returns the dominance frontiers of all nodes of a directed graph. diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py index 97408ab4380..8e9a458f8e3 100644 --- a/networkx/algorithms/dominating.py +++ b/networkx/algorithms/dominating.py @@ -7,7 +7,7 @@ __all__ = ["dominating_set", "is_dominating_set"] -@nx._dispatch +@nx._dispatchable def dominating_set(G, start_with=None): r"""Finds a dominating set for the graph G. 
@@ -55,17 +55,17 @@ def dominating_set(G, start_with=None): while remaining_nodes: # Choose an arbitrary node and determine its undominated neighbors. v = remaining_nodes.pop() - undominated_neighbors = set(G[v]) - dominating_set + undominated_nbrs = set(G[v]) - dominating_set # Add the node to the dominating set and the neighbors to the # dominated set. Finally, remove all of those nodes from the set # of remaining nodes. dominating_set.add(v) - dominated_nodes |= undominated_neighbors - remaining_nodes -= undominated_neighbors + dominated_nodes |= undominated_nbrs + remaining_nodes -= undominated_nbrs return dominating_set -@nx._dispatch +@nx._dispatchable def is_dominating_set(G, nbunch): """Checks if `nbunch` is a dominating set for `G`. diff --git a/networkx/algorithms/efficiency_measures.py b/networkx/algorithms/efficiency_measures.py index 3beea38b013..2c99b011431 100644 --- a/networkx/algorithms/efficiency_measures.py +++ b/networkx/algorithms/efficiency_measures.py @@ -9,7 +9,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def efficiency(G, u, v): """Returns the efficiency of a pair of nodes in a graph. @@ -60,7 +60,7 @@ def efficiency(G, u, v): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def global_efficiency(G): """Returns the average global efficiency of the graph. @@ -121,7 +121,7 @@ def global_efficiency(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def local_efficiency(G): """Returns the average local efficiency of the graph. diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py index 9d61b5e4130..2e0e57358aa 100644 --- a/networkx/algorithms/euler.py +++ b/networkx/algorithms/euler.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def is_eulerian(G): """Returns True if and only if `G` is Eulerian. 
@@ -69,7 +69,7 @@ def is_eulerian(G): return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G) -@nx._dispatch +@nx._dispatchable def is_semieulerian(G): """Return True iff `G` is semi-Eulerian. @@ -154,7 +154,7 @@ def _multigraph_eulerian_circuit(G, source): G.remove_edge(current_vertex, next_vertex, next_key) -@nx._dispatch +@nx._dispatchable def eulerian_circuit(G, source=None, keys=False): """Returns an iterator over the edges of an Eulerian circuit in `G`. @@ -235,7 +235,7 @@ def eulerian_circuit(G, source=None, keys=False): yield from _simplegraph_eulerian_circuit(G, source) -@nx._dispatch +@nx._dispatchable def has_eulerian_path(G, source=None): """Return True iff `G` has an Eulerian path. @@ -330,7 +330,7 @@ def has_eulerian_path(G, source=None): return sum(d % 2 == 1 for v, d in G.degree()) == 2 and nx.is_connected(G) -@nx._dispatch +@nx._dispatchable def eulerian_path(G, source=None, keys=False): """Return an iterator over the edges of an Eulerian path in `G`. @@ -386,7 +386,7 @@ def eulerian_path(G, source=None, keys=False): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def eulerize(G): """Transforms a graph into an Eulerian graph. 
diff --git a/networkx/algorithms/flow/boykovkolmogorov.py b/networkx/algorithms/flow/boykovkolmogorov.py index f571c98aebc..e1c9486f527 100644 --- a/networkx/algorithms/flow/boykovkolmogorov.py +++ b/networkx/algorithms/flow/boykovkolmogorov.py @@ -10,11 +10,12 @@ __all__ = ["boykov_kolmogorov"] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def boykov_kolmogorov( G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None diff --git a/networkx/algorithms/flow/capacityscaling.py b/networkx/algorithms/flow/capacityscaling.py index 2c0002d86c1..bf68565c548 100644 --- a/networkx/algorithms/flow/capacityscaling.py +++ b/networkx/algorithms/flow/capacityscaling.py @@ -149,7 +149,9 @@ def _build_flow_dict(G, R, capacity, weight): return flow_dict -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def capacity_scaling( G, demand="demand", capacity="capacity", weight="weight", heap=BinaryHeap ): diff --git a/networkx/algorithms/flow/dinitz_alg.py b/networkx/algorithms/flow/dinitz_alg.py index 0348393d682..31c1a5e2a1c 100644 --- a/networkx/algorithms/flow/dinitz_alg.py +++ b/networkx/algorithms/flow/dinitz_alg.py @@ -10,11 +10,12 @@ __all__ = ["dinitz"] -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None): """Find a maximum single-commodity flow using Dinitz' algorithm. 
diff --git a/networkx/algorithms/flow/edmondskarp.py b/networkx/algorithms/flow/edmondskarp.py index 7c8440a4519..92d79f181f2 100644 --- a/networkx/algorithms/flow/edmondskarp.py +++ b/networkx/algorithms/flow/edmondskarp.py @@ -8,10 +8,11 @@ __all__ = ["edmonds_karp"] -@nx._dispatch( +@nx._dispatchable( graphs="R", preserve_edge_attrs={"R": {"capacity": float("inf"), "flow": 0}}, preserve_graph_attrs=True, + mutates_input=True, ) def edmonds_karp_core(R, s, t, cutoff): """Implementation of the Edmonds-Karp algorithm.""" @@ -122,11 +123,12 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): return R -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def edmonds_karp( G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None diff --git a/networkx/algorithms/flow/gomory_hu.py b/networkx/algorithms/flow/gomory_hu.py index 0be27d56494..951abaeb517 100644 --- a/networkx/algorithms/flow/gomory_hu.py +++ b/networkx/algorithms/flow/gomory_hu.py @@ -13,7 +13,7 @@ @not_implemented_for("directed") -@nx._dispatch(edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) def gomory_hu_tree(G, capacity="capacity", flow_func=None): r"""Returns the Gomory-Hu tree of an undirected graph G. 
diff --git a/networkx/algorithms/flow/maxflow.py b/networkx/algorithms/flow/maxflow.py index 35f359ee3b0..96bca029f00 100644 --- a/networkx/algorithms/flow/maxflow.py +++ b/networkx/algorithms/flow/maxflow.py @@ -16,7 +16,7 @@ __all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"] -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find a maximum single-commodity flow. @@ -140,9 +140,7 @@ def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): maximum flow by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> flow_value == nx.maximum_flow(G, "x", "y", flow_func=shortest_augmenting_path)[ - ... 0 - ... ] + >>> flow_value == nx.maximum_flow(G, "x", "y", flow_func=shortest_augmenting_path)[0] True """ @@ -163,7 +161,7 @@ def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): return (R.graph["flow_value"], flow_dict) -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find the value of maximum single-commodity flow. @@ -281,9 +279,7 @@ def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwa maximum flow by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> flow_value == nx.maximum_flow_value( - ... G, "x", "y", flow_func=shortest_augmenting_path - ... 
) + >>> flow_value == nx.maximum_flow_value(G, "x", "y", flow_func=shortest_augmenting_path) True """ @@ -303,7 +299,7 @@ def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwa return R.graph["flow_value"] -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value and the node partition of a minimum (s, t)-cut. @@ -467,7 +463,7 @@ def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): return (R.graph["flow_value"], partition) -@nx._dispatch(graphs="flowG", edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value of a minimum (s, t)-cut. @@ -582,9 +578,7 @@ def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwar minimum cut by using the flow_func parameter. >>> from networkx.algorithms.flow import shortest_augmenting_path - >>> cut_value == nx.minimum_cut_value( - ... G, "x", "y", flow_func=shortest_augmenting_path - ... ) + >>> cut_value == nx.minimum_cut_value(G, "x", "y", flow_func=shortest_augmenting_path) True """ diff --git a/networkx/algorithms/flow/mincost.py b/networkx/algorithms/flow/mincost.py index cc8626c7c3c..2f9390d7a1c 100644 --- a/networkx/algorithms/flow/mincost.py +++ b/networkx/algorithms/flow/mincost.py @@ -7,7 +7,9 @@ import networkx as nx -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight"): r"""Find the cost of a minimum cost flow satisfying all demands in digraph G. 
@@ -97,7 +99,9 @@ def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight") return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[0] -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): r"""Returns a minimum cost flow satisfying all demands in digraph G. @@ -182,11 +186,13 @@ def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): >>> G.add_edge("b", "d", weight=1, capacity=9) >>> G.add_edge("c", "d", weight=2, capacity=5) >>> flowDict = nx.min_cost_flow(G) + >>> flowDict + {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}} """ return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1] -@nx._dispatch(edge_attrs={"weight": 0}) +@nx._dispatchable(edge_attrs={"weight": 0}) def cost_of_flow(G, flowDict, weight="weight"): """Compute the cost of the flow given by flowDict on graph G. @@ -227,11 +233,26 @@ def cost_of_flow(G, flowDict, weight="weight"): cause problems). As a workaround you can use integer numbers by multiplying the relevant edge attributes by a convenient constant factor (eg 100). 
+ + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) + >>> flowDict = nx.min_cost_flow(G) + >>> flowDict + {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}} + >>> nx.cost_of_flow(G, flowDict) + 24 """ return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True))) -@nx._dispatch(edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable(edge_attrs={"capacity": float("inf"), "weight": 0}) def max_flow_min_cost(G, s, t, capacity="capacity", weight="weight"): """Returns a maximum (s, t)-flow of minimum cost. diff --git a/networkx/algorithms/flow/networksimplex.py b/networkx/algorithms/flow/networksimplex.py index 9fa3589a0c9..a9822d96880 100644 --- a/networkx/algorithms/flow/networksimplex.py +++ b/networkx/algorithms/flow/networksimplex.py @@ -326,7 +326,9 @@ def find_leaving_edge(self, Wn, We): @not_implemented_for("undirected") -@nx._dispatch(node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}) +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) def network_simplex(G, demand="demand", capacity="capacity", weight="weight"): r"""Find a minimum cost flow satisfying all demands in digraph G. 
diff --git a/networkx/algorithms/flow/preflowpush.py b/networkx/algorithms/flow/preflowpush.py index 05b982ba1ff..5afa548060c 100644 --- a/networkx/algorithms/flow/preflowpush.py +++ b/networkx/algorithms/flow/preflowpush.py @@ -288,11 +288,12 @@ def global_relabel(from_sink): return R -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def preflow_push( G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False diff --git a/networkx/algorithms/flow/shortestaugmentingpath.py b/networkx/algorithms/flow/shortestaugmentingpath.py index d06a88b7bc8..c2583d16646 100644 --- a/networkx/algorithms/flow/shortestaugmentingpath.py +++ b/networkx/algorithms/flow/shortestaugmentingpath.py @@ -163,11 +163,12 @@ def relabel(u): return R -@nx._dispatch( +@nx._dispatchable( graphs={"G": 0, "residual?": 4}, edge_attrs={"capacity": float("inf")}, preserve_edge_attrs={"residual": {"capacity": float("inf")}}, preserve_graph_attrs={"residual"}, + returns_graph=True, ) def shortest_augmenting_path( G, diff --git a/networkx/algorithms/flow/utils.py b/networkx/algorithms/flow/utils.py index 349be4b302a..dcb663f3b64 100644 --- a/networkx/algorithms/flow/utils.py +++ b/networkx/algorithms/flow/utils.py @@ -72,7 +72,7 @@ def clear_work(self): self._work = 0 -@nx._dispatch(edge_attrs={"capacity": float("inf")}) +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) def build_residual_network(G, capacity): """Build a residual network and initialize a zero flow. 
@@ -154,7 +154,7 @@ def build_residual_network(G, capacity): return R -@nx._dispatch( +@nx._dispatchable( graphs="R", preserve_edge_attrs={"R": {"capacity": float("inf")}}, preserve_graph_attrs=True, @@ -176,7 +176,7 @@ def detect_unboundedness(R, s, t): q.append(v) -@nx._dispatch(graphs={"G": 0, "R": 1}, preserve_edge_attrs={"R": {"flow": None}}) +@nx._dispatchable(graphs={"G": 0, "R": 1}, preserve_edge_attrs={"R": {"flow": None}}) def build_flow_dict(G, R): """Build a flow dictionary from a residual network.""" flow_dict = {} diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py index d85a44a3604..bf78b7fb707 100644 --- a/networkx/algorithms/graph_hashing.py +++ b/networkx/algorithms/graph_hashing.py @@ -37,13 +37,13 @@ def _neighborhood_aggregate(G, node, node_labels, edge_attr=None): return node_labels[node] + "".join(sorted(label_list)) -@nx._dispatch(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def weisfeiler_lehman_graph_hash( G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 ): """Return Weisfeiler Lehman (WL) graph hash. - The function iteratively aggregates and hashes neighbourhoods of each node. + The function iteratively aggregates and hashes neighborhoods of each node. After each node's neighbors are hashed to obtain updated node labels, a hashed histogram of resulting labels is returned as the final hash. 
@@ -160,7 +160,7 @@ def weisfeiler_lehman_step(G, labels, edge_attr=None): return _hash_label(str(tuple(subgraph_hash_counts)), digest_size) -@nx._dispatch(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") def weisfeiler_lehman_subgraph_hashes( G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 ): @@ -176,7 +176,7 @@ def weisfeiler_lehman_subgraph_hashes( additionally a hash of the initial node label (or equivalently a subgraph of depth 0) - The function iteratively aggregates and hashes neighbourhoods of each node. + The function iteratively aggregates and hashes neighborhoods of each node. This is achieved for each step by replacing for each node its label from the previous iteration with its hashed 1-hop neighborhood aggregate. The new node label is then appended to a list of node labels for each @@ -234,13 +234,9 @@ def weisfeiler_lehman_subgraph_hashes( Finding similar nodes in different graphs: >>> G1 = nx.Graph() - >>> G1.add_edges_from([ - ... (1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7) - ... ]) + >>> G1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7)]) >>> G2 = nx.Graph() - >>> G2.add_edges_from([ - ... (1, 3), (2, 3), (1, 6), (1, 5), (4, 6) - ... ]) + >>> G2.add_edges_from([(1, 3), (2, 3), (1, 6), (1, 5), (4, 6)]) >>> g1_hashes = nx.weisfeiler_lehman_subgraph_hashes(G1, iterations=3, digest_size=8) >>> g2_hashes = nx.weisfeiler_lehman_subgraph_hashes(G2, iterations=3, digest_size=8) @@ -254,7 +250,7 @@ def weisfeiler_lehman_subgraph_hashes( The first 2 WL subgraph hashes match. From this we can conclude that it's very likely the neighborhood of 4 hops around these nodes are isomorphic: each - iteration aggregates 1-hop neighbourhoods meaning hashes at depth $n$ are influenced + iteration aggregates 1-hop neighborhoods meaning hashes at depth $n$ are influenced by every node within $2n$ hops. 
However the neighborhood of 6 hops is no longer isomorphic since their 3rd hash does diff --git a/networkx/algorithms/graphical.py b/networkx/algorithms/graphical.py index cb1664427fd..b2ce6c33a9a 100644 --- a/networkx/algorithms/graphical.py +++ b/networkx/algorithms/graphical.py @@ -14,7 +14,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_graphical(sequence, method="eg"): """Returns True if sequence is a valid degree sequence. @@ -93,7 +93,7 @@ def _basic_graphical_tests(deg_sequence): return dmax, dmin, dsum, n, num_degs -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_valid_degree_sequence_havel_hakimi(deg_sequence): r"""Returns True if deg_sequence can be realized by a simple graph. @@ -183,7 +183,7 @@ def is_valid_degree_sequence_havel_hakimi(deg_sequence): return True -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_valid_degree_sequence_erdos_gallai(deg_sequence): r"""Returns True if deg_sequence can be realized by a simple graph. @@ -274,7 +274,7 @@ def is_valid_degree_sequence_erdos_gallai(deg_sequence): return True -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_multigraphical(sequence): """Returns True if some multigraph can realize the sequence. @@ -325,7 +325,7 @@ def is_multigraphical(sequence): return True -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_pseudographical(sequence): """Returns True if some pseudograph can realize the sequence. @@ -372,7 +372,7 @@ def is_pseudographical(sequence): return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0 -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def is_digraphical(in_sequence, out_sequence): r"""Returns True if some directed graph can realize the in- and out-degree sequences. 
diff --git a/networkx/algorithms/hierarchy.py b/networkx/algorithms/hierarchy.py index 6dc63a741b5..4bb01cb4568 100644 --- a/networkx/algorithms/hierarchy.py +++ b/networkx/algorithms/hierarchy.py @@ -6,7 +6,7 @@ __all__ = ["flow_hierarchy"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def flow_hierarchy(G, weight=None): """Returns the flow hierarchy of a directed network. diff --git a/networkx/algorithms/hybrid.py b/networkx/algorithms/hybrid.py index 347f5c2f199..4d0e5c36e80 100644 --- a/networkx/algorithms/hybrid.py +++ b/networkx/algorithms/hybrid.py @@ -10,7 +10,7 @@ __all__ = ["kl_connected_subgraph", "is_kl_connected"] -@nx._dispatch +@nx._dispatchable(returns_graph=True) def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): """Returns the maximum locally `(k, l)`-connected subgraph of `G`. @@ -115,7 +115,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): return H -@nx._dispatch +@nx._dispatchable def is_kl_connected(G, k, l, low_memory=False): """Returns True if and only if `G` is locally `(k, l)`-connected. diff --git a/networkx/algorithms/isolate.py b/networkx/algorithms/isolate.py index f9983282a63..23ac23875db 100644 --- a/networkx/algorithms/isolate.py +++ b/networkx/algorithms/isolate.py @@ -6,7 +6,7 @@ __all__ = ["is_isolate", "isolates", "number_of_isolates"] -@nx._dispatch +@nx._dispatchable def is_isolate(G, n): """Determines whether a node is an isolate. @@ -39,7 +39,7 @@ def is_isolate(G, n): return G.degree(n) == 0 -@nx._dispatch +@nx._dispatchable def isolates(G): """Iterator over isolates in the graph. @@ -85,7 +85,7 @@ def isolates(G): return (n for n, d in G.degree() if d == 0) -@nx._dispatch +@nx._dispatchable def number_of_isolates(G): """Returns the number of isolates in the graph. 
diff --git a/networkx/algorithms/isomorphism/ismags.py b/networkx/algorithms/isomorphism/ismags.py index 25ce94c87ec..24819faf95c 100644 --- a/networkx/algorithms/isomorphism/ismags.py +++ b/networkx/algorithms/isomorphism/ismags.py @@ -848,11 +848,11 @@ def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=No left_to_map = to_be_mapped - set(mapping.keys()) new_candidates = candidates.copy() - sgn_neighbours = set(self.subgraph[sgn]) - not_gn_neighbours = set(self.graph.nodes) - set(self.graph[gn]) + sgn_nbrs = set(self.subgraph[sgn]) + not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn]) for sgn2 in left_to_map: - if sgn2 not in sgn_neighbours: - gn2_options = not_gn_neighbours + if sgn2 not in sgn_nbrs: + gn2_options = not_gn_nbrs else: # Get all edges to gn of the right color: g_edges = self._edges_of_same_color(sgn, sgn2) @@ -882,10 +882,7 @@ def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=No # The next node is the one that is unmapped and has fewest # candidates - # Pylint disables because it's a one-shot function. - next_sgn = min( - left_to_map, key=lambda n: min(new_candidates[n], key=len) - ) # pylint: disable=cell-var-from-loop + next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len)) yield from self._map_nodes( next_sgn, new_candidates, @@ -909,10 +906,7 @@ def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None): # "part of" the subgraph in to_be_mapped, and we make it a little # smaller every iteration. 
- # pylint disable because it's guarded against by default value - current_size = len( - next(iter(to_be_mapped), []) - ) # pylint: disable=stop-iteration-return + current_size = len(next(iter(to_be_mapped), [])) found_iso = False if current_size <= len(self.graph): diff --git a/networkx/algorithms/isomorphism/isomorph.py b/networkx/algorithms/isomorphism/isomorph.py index 6f562400a3b..00395b71cc1 100644 --- a/networkx/algorithms/isomorphism/isomorph.py +++ b/networkx/algorithms/isomorphism/isomorph.py @@ -12,7 +12,7 @@ ] -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. @@ -60,7 +60,7 @@ def could_be_isomorphic(G1, G2): graph_could_be_isomorphic = could_be_isomorphic -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def fast_could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. @@ -101,7 +101,7 @@ def fast_could_be_isomorphic(G1, G2): fast_graph_could_be_isomorphic = fast_could_be_isomorphic -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def faster_could_be_isomorphic(G1, G2): """Returns False if graphs are definitely not isomorphic. 
@@ -134,7 +134,7 @@ def faster_could_be_isomorphic(G1, G2): faster_graph_could_be_isomorphic = faster_could_be_isomorphic -@nx._dispatch( +@nx._dispatchable( graphs={"G1": 0, "G2": 1}, preserve_edge_attrs="edge_match", preserve_node_attrs="node_match", diff --git a/networkx/algorithms/isomorphism/matchhelpers.py b/networkx/algorithms/isomorphism/matchhelpers.py index 5239ed77e6c..8185f34ebde 100644 --- a/networkx/algorithms/isomorphism/matchhelpers.py +++ b/networkx/algorithms/isomorphism/matchhelpers.py @@ -311,7 +311,6 @@ def generic_multiedge_match(attr, default, op): >>> nm = generic_node_match("weight", 1.0, isclose) >>> nm = generic_node_match("color", "red", eq) >>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq]) - ... """ diff --git a/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/networkx/algorithms/isomorphism/temporalisomorphvf2.py index 62cacc77887..b78ecf149f5 100644 --- a/networkx/algorithms/isomorphism/temporalisomorphvf2.py +++ b/networkx/algorithms/isomorphism/temporalisomorphvf2.py @@ -89,9 +89,7 @@ def __init__(self, G1, G2, temporal_attribute_name, delta): >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph())) - >>> GM = isomorphism.TimeRespectingGraphMatcher( - ... G1, G2, "date", timedelta(days=1) - ... ) + >>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, "date", timedelta(days=1)) """ self.temporal_attribute_name = temporal_attribute_name self.delta = delta @@ -158,9 +156,7 @@ def __init__(self, G1, G2, temporal_attribute_name, delta): >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) - >>> GM = isomorphism.TimeRespectingDiGraphMatcher( - ... G1, G2, "date", timedelta(days=1) - ... 
) + >>> GM = isomorphism.TimeRespectingDiGraphMatcher(G1, G2, "date", timedelta(days=1)) """ self.temporal_attribute_name = temporal_attribute_name self.delta = delta diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/networkx/algorithms/isomorphism/tests/test_isomorphism.py index c669040390d..548af808ffd 100644 --- a/networkx/algorithms/isomorphism/tests/test_isomorphism.py +++ b/networkx/algorithms/isomorphism/tests/test_isomorphism.py @@ -1,3 +1,5 @@ +import pytest + import networkx as nx from networkx.algorithms import isomorphism as iso @@ -38,3 +40,9 @@ def test_faster_could_be_isomorphic(self): def test_is_isomorphic(self): assert iso.is_isomorphic(self.G1, self.G2) assert not iso.is_isomorphic(self.G1, self.G4) + assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed()) + assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed()) + with pytest.raises( + nx.NetworkXError, match="Graphs G1 and G2 are not of the same type." + ): + iso.is_isomorphic(self.G1.to_directed(), self.G1) diff --git a/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py b/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py index 95e5fec8728..fa1ab9bbaef 100644 --- a/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py +++ b/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py @@ -1,6 +1,8 @@ import random import time +import pytest + import networkx as nx from networkx.algorithms.isomorphism.tree_isomorphism import ( rooted_tree_isomorphism, @@ -9,6 +11,14 @@ from networkx.classes.function import is_directed +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_tree_isomorphism_raises_on_directed_and_multigraphs(graph_constructor): + t1 = graph_constructor([(0, 1)]) + t2 = graph_constructor([(1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.isomorphism.tree_isomorphism(t1, t2) + + # have this work for graph # given two trees (either the directed or 
undirected) # transform t2 according to the isomorphism diff --git a/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py b/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py index 1a46c38f501..0e29b1be617 100644 --- a/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py +++ b/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py @@ -57,8 +57,9 @@ def test_single_node(self): dict(zip(G2, it.cycle(labels_many))), "label", ) - l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes( - G2, "label" + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), ) gparams = _GraphParameters( @@ -118,8 +119,9 @@ def test_matching_order(self): dict(zip(G2, it.cycle(labels))), "label", ) - l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes( - G2, "label" + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), ) gparams = _GraphParameters( G1, @@ -155,8 +157,9 @@ def test_matching_order_all_branches(self): G2.nodes[4]["label"] = "red" G2.nodes[5]["label"] = "blue" - l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes( - G2, "label" + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), ) gparams = _GraphParameters( G1, diff --git a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py index 6e935063336..e409d515f1c 100644 --- a/networkx/algorithms/isomorphism/tree_isomorphism.py +++ b/networkx/algorithms/isomorphism/tree_isomorphism.py @@ -24,7 +24,7 @@ __all__ = ["rooted_tree_isomorphism", "tree_isomorphism"] -@nx._dispatch(graphs={"t1": 0, "t2": 2}) +@nx._dispatchable(graphs={"t1": 0, "t2": 2}, returns_graph=True) def root_trees(t1, root1, t2, root2): """Create a single digraph dT of free trees t1 and t2 # with roots root1 and root2 respectively @@ -72,7 +72,7 @@ def root_trees(t1, root1, t2, root2): # figure out the level of each node, with 0 at 
root -@nx._dispatch +@nx._dispatchable def assign_levels(G, root): level = {} level[root] = 0 @@ -102,7 +102,7 @@ def generate_isomorphism(v, w, M, ordered_children): generate_isomorphism(x, y, M, ordered_children) -@nx._dispatch(graphs={"t1": 0, "t2": 2}) +@nx._dispatchable(graphs={"t1": 0, "t2": 2}) def rooted_tree_isomorphism(t1, root1, t2, root2): """ Given two rooted trees `t1` and `t2`, @@ -209,8 +209,9 @@ def rooted_tree_isomorphism(t1, root1, t2, root2): return isomorphism -@not_implemented_for("directed", "multigraph") -@nx._dispatch(graphs={"t1": 0, "t2": 1}) +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(graphs={"t1": 0, "t2": 1}) def tree_isomorphism(t1, t2): """ Given two undirected (or free) trees `t1` and `t2`, diff --git a/networkx/algorithms/isomorphism/vf2pp.py b/networkx/algorithms/isomorphism/vf2pp.py index 953204f669b..589e06447cc 100644 --- a/networkx/algorithms/isomorphism/vf2pp.py +++ b/networkx/algorithms/isomorphism/vf2pp.py @@ -97,7 +97,7 @@ ) -@nx._dispatch(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None): """Return an isomorphic mapping between `G1` and `G2` if it exists. @@ -128,7 +128,7 @@ def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None): return None -@nx._dispatch(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None): """Examines whether G1 and G2 are isomorphic. 
@@ -157,7 +157,7 @@ def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None): return False -@nx._dispatch(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None): """Yields all the possible mappings between G1 and G2. @@ -476,8 +476,8 @@ def _find_candidates( G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params - covered_neighbors = [nbr for nbr in G1[u] if nbr in mapping] - if not covered_neighbors: + covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping] + if not covered_nbrs: candidates = set(nodes_of_G2Labels[G1_labels[u]]) candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]]) candidates.intersection_update(T2_tilde) @@ -492,10 +492,10 @@ def _find_candidates( ) return candidates - nbr1 = covered_neighbors[0] + nbr1 = covered_nbrs[0] common_nodes = set(G2[mapping[nbr1]]) - for nbr1 in covered_neighbors[1:]: + for nbr1 in covered_nbrs[1:]: common_nodes.intersection_update(G2[mapping[nbr1]]) common_nodes.difference_update(reverse_mapping) diff --git a/networkx/algorithms/link_analysis/hits_alg.py b/networkx/algorithms/link_analysis/hits_alg.py index 8723c7d4932..e7b5141aa53 100644 --- a/networkx/algorithms/link_analysis/hits_alg.py +++ b/networkx/algorithms/link_analysis/hits_alg.py @@ -5,7 +5,7 @@ __all__ = ["hits"] -@nx._dispatch(preserve_edge_attrs={"G": {"weight": 1}}) +@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}}) def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): """Returns HITS hubs and authorities values for nodes. 
diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py index 371dd60edd9..2a4af6e5f12 100644 --- a/networkx/algorithms/link_analysis/pagerank_alg.py +++ b/networkx/algorithms/link_analysis/pagerank_alg.py @@ -6,7 +6,7 @@ __all__ = ["pagerank", "google_matrix"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def pagerank( G, alpha=0.85, @@ -172,7 +172,7 @@ def _pagerank_python( raise nx.PowerIterationFailedConvergence(max_iter) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def google_matrix( G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None ): diff --git a/networkx/algorithms/link_analysis/tests/test_pagerank.py b/networkx/algorithms/link_analysis/tests/test_pagerank.py index 6a30f0cd12c..db0f8c8d5bb 100644 --- a/networkx/algorithms/link_analysis/tests/test_pagerank.py +++ b/networkx/algorithms/link_analysis/tests/test_pagerank.py @@ -83,7 +83,7 @@ def test_numpy_pagerank(self): for n in G: assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) - # This additionally tests the @nx._dispatch mechanism, treating + # This additionally tests the @nx._dispatchable mechanism, treating # nx.google_matrix as if it were a re-implementation from another package @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert]) def test_google_matrix(self, wrapper): diff --git a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py index 7335a77f5e0..1fb24243a16 100644 --- a/networkx/algorithms/link_prediction.py +++ b/networkx/algorithms/link_prediction.py @@ -37,12 +37,18 @@ def _apply_prediction(G, func, ebunch=None): """ if ebunch is None: ebunch = nx.non_edges(G) + else: + for u, v in ebunch: + if u not in G: + raise nx.NodeNotFound(f"Node {u} not in G.") + if v not in G: + raise nx.NodeNotFound(f"Node {v} not in G.") return ((u, v, func(u, v)) for u, v in ebunch) 
@not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def resource_allocation_index(G, ebunch=None): r"""Compute the resource allocation index of all node pairs in ebunch. @@ -72,6 +78,14 @@ def resource_allocation_index(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their resource allocation index. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -97,7 +111,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def jaccard_coefficient(G, ebunch=None): r"""Compute the Jaccard coefficient of all node pairs in ebunch. @@ -127,6 +141,14 @@ def jaccard_coefficient(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their Jaccard coefficient. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -147,14 +169,14 @@ def predict(u, v): union_size = len(set(G[u]) | set(G[v])) if union_size == 0: return 0 - return len(list(nx.common_neighbors(G, u, v))) / union_size + return len(nx.common_neighbors(G, u, v)) / union_size return _apply_prediction(G, predict, ebunch) @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def adamic_adar_index(G, ebunch=None): r"""Compute the Adamic-Adar index of all node pairs in ebunch. @@ -186,6 +208,14 @@ def adamic_adar_index(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their Adamic-Adar index. 
+ Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -210,7 +240,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def common_neighbor_centrality(G, ebunch=None, alpha=0.8): r"""Return the CCPA score for each pair of nodes. @@ -264,6 +294,17 @@ def common_neighbor_centrality(G, ebunch=None, alpha=0.8): pair of nodes and p is their Common Neighbor and Centrality based Parameterized Algorithm(CCPA) score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If self loops exist in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -286,9 +327,9 @@ def common_neighbor_centrality(G, ebunch=None, alpha=0.8): def predict(u, v): if u == v: - raise nx.NetworkXAlgorithmError("Self links are not supported") + raise nx.NetworkXAlgorithmError("Self loops are not supported") - return sum(1 for _ in nx.common_neighbors(G, u, v)) + return len(nx.common_neighbors(G, u, v)) else: spl = dict(nx.shortest_path_length(G)) @@ -296,19 +337,18 @@ def predict(u, v): def predict(u, v): if u == v: - raise nx.NetworkXAlgorithmError("Self links are not supported") + raise nx.NetworkXAlgorithmError("Self loops are not supported") path_len = spl[u].get(v, inf) - return alpha * sum(1 for _ in nx.common_neighbors(G, u, v)) + ( - 1 - alpha - ) * (G.number_of_nodes() / path_len) + n_nbrs = len(nx.common_neighbors(G, u, v)) + return alpha * n_nbrs + (1 - alpha) * len(G) / path_len return _apply_prediction(G, predict, ebunch) @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def preferential_attachment(G, ebunch=None): 
r"""Compute the preferential attachment score of all node pairs in ebunch. @@ -338,6 +378,14 @@ def preferential_attachment(G, ebunch=None): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their preferential attachment score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.complete_graph(5) @@ -362,7 +410,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs="community") +@nx._dispatchable(node_attrs="community") def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Count the number of common neighbors of all node pairs in ebunch using community information. @@ -402,6 +450,17 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. 
+ Examples -------- >>> G = nx.path_graph(3) @@ -426,7 +485,7 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): def predict(u, v): Cu = _community(G, u, community) Cv = _community(G, v, community) - cnbors = list(nx.common_neighbors(G, u, v)) + cnbors = nx.common_neighbors(G, u, v) neighbors = ( sum(_community(G, w, community) == Cu for w in cnbors) if Cu == Cv else 0 ) @@ -437,7 +496,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs="community") +@nx._dispatchable(node_attrs="community") def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Compute the resource allocation index of all node pairs in ebunch using community information. @@ -477,6 +536,17 @@ def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their score. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.Graph() @@ -513,7 +583,7 @@ def predict(u, v): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs="community") +@nx._dispatchable(node_attrs="community") def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): """Compute the ratio of within- and inter-cluster common neighbors of all node pairs in ebunch. @@ -553,6 +623,18 @@ def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): An iterator of 3-tuples in the form (u, v, p) where (u, v) is a pair of nodes and p is their WIC measure. + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. 
+ + NetworkXAlgorithmError + - If `delta` is less than or equal to zero. + - If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + Examples -------- >>> G = nx.Graph() @@ -587,7 +669,7 @@ def predict(u, v): Cv = _community(G, v, community) if Cu != Cv: return 0 - cnbors = set(nx.common_neighbors(G, u, v)) + cnbors = nx.common_neighbors(G, u, v) within = {w for w in cnbors if _community(G, w, community) == Cu} inter = cnbors - within return len(within) / (len(inter) + delta) @@ -601,4 +683,6 @@ def _community(G, u, community): try: return node_u[community] except KeyError as err: - raise nx.NetworkXAlgorithmError("No community information") from err + raise nx.NetworkXAlgorithmError( + f"No community information available for Node {u}" + ) from err diff --git a/networkx/algorithms/lowest_common_ancestors.py b/networkx/algorithms/lowest_common_ancestors.py index 3cad18bff39..f695ec208d3 100644 --- a/networkx/algorithms/lowest_common_ancestors.py +++ b/networkx/algorithms/lowest_common_ancestors.py @@ -14,7 +14,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_pairs_lowest_common_ancestor(G, pairs=None): """Return the lowest common ancestor of all pairs or the provided pairs @@ -112,7 +112,7 @@ def generate_lca_from_pairs(G, pairs): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def lowest_common_ancestor(G, node1, node2, default=None): """Compute the lowest common ancestor of the given pair of nodes. @@ -150,7 +150,7 @@ def lowest_common_ancestor(G, node1, node2, default=None): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): r"""Yield the lowest common ancestor for sets of pairs in a tree. 
diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py index b20d7f6970c..f346c2e052f 100644 --- a/networkx/algorithms/matching.py +++ b/networkx/algorithms/matching.py @@ -17,7 +17,7 @@ @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def maximal_matching(G): r"""Find a maximal matching in the graph. @@ -82,7 +82,7 @@ def matching_dict_to_set(matching): return edges -@nx._dispatch +@nx._dispatchable def is_matching(G, matching): """Return True if ``matching`` is a valid matching of ``G`` @@ -143,7 +143,7 @@ def is_matching(G, matching): return True -@nx._dispatch +@nx._dispatchable def is_maximal_matching(G, matching): """Return True if ``matching`` is a maximal matching of ``G`` @@ -205,7 +205,7 @@ def is_maximal_matching(G, matching): return True -@nx._dispatch +@nx._dispatchable def is_perfect_matching(G, matching): """Return True if ``matching`` is a perfect matching for ``G`` @@ -259,7 +259,7 @@ def is_perfect_matching(G, matching): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def min_weight_matching(G, weight="weight"): """Computing a minimum-weight maximal matching of G. @@ -320,7 +320,7 @@ def min_weight_matching(G, weight="weight"): @not_implemented_for("multigraph") @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def max_weight_matching(G, maxcardinality=False, weight="weight"): """Compute a maximum-weighted matching of G. @@ -410,7 +410,7 @@ class Blossom: # and w is a vertex in b.childs[wrap(i+1)]. # If b is a top-level S-blossom, - # b.mybestedges is a list of least-slack edges to neighbouring + # b.mybestedges is a list of least-slack edges to neighboring # S-blossoms, or None if no such list has been computed yet. # This is used for efficient computation of delta3. 
@@ -738,12 +738,12 @@ def _recurse(b, endstage): j += jstep while b.childs[j] != entrychild: # Examine the vertices of the sub-blossom to see whether - # it is reachable from a neighbouring S-vertex outside the + # it is reachable from a neighboring S-vertex outside the # expanding blossom. bv = b.childs[j] if label.get(bv) == 1: # This sub-blossom just got label S through one of its - # neighbours; leave it be. + # neighbors; leave it be. j += jstep continue if isinstance(bv, Blossom): @@ -972,11 +972,11 @@ def verifyOptimum(): v = queue.pop() assert label[inblossom[v]] == 1 - # Scan its neighbours: + # Scan its neighbors: for w in G.neighbors(v): if w == v: continue # ignore self-loops - # w is a neighbour to v + # w is a neighbor to v bv = inblossom[v] bw = inblossom[w] if bv == bw: diff --git a/networkx/algorithms/minors/contraction.py b/networkx/algorithms/minors/contraction.py index 1b4da352296..9f4d89fa802 100644 --- a/networkx/algorithms/minors/contraction.py +++ b/networkx/algorithms/minors/contraction.py @@ -68,8 +68,9 @@ def equivalence_classes(iterable, relation): `X` and a function implementation of `R`. >>> X = set(range(10)) - >>> def mod3(x, y): return (x - y) % 3 == 0 - >>> equivalence_classes(X, mod3) # doctest: +SKIP + >>> def mod3(x, y): + ... return (x - y) % 3 == 0 + >>> equivalence_classes(X, mod3) # doctest: +SKIP {frozenset({1, 4, 7}), frozenset({8, 2, 5}), frozenset({0, 9, 3, 6})} """ # For simplicity of implementation, we initialize the return value as a @@ -94,7 +95,7 @@ def equivalence_classes(iterable, relation): return {frozenset(block) for block in blocks} -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def quotient_graph( G, partition, @@ -202,9 +203,7 @@ def quotient_graph( are equivalent if they are not adjacent but have the same neighbor set. >>> G = nx.complete_bipartite_graph(2, 3) - >>> same_neighbors = lambda u, v: ( - ... u not in G[v] and v not in G[u] and G[u] == G[v] - ... 
) + >>> same_neighbors = lambda u, v: (u not in G[v] and v not in G[u] and G[u] == G[v]) >>> Q = nx.quotient_graph(G, same_neighbors) >>> K2 = nx.complete_graph(2) >>> nx.is_isomorphic(Q, K2) @@ -425,7 +424,9 @@ def edge_data(b, c): return H -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable( + preserve_all_attrs=True, mutates_input={"not copy": 4}, returns_graph=True +) def contracted_nodes(G, u, v, self_loops=True, copy=True): """Returns the graph that results from contracting `u` and `v`. @@ -560,7 +561,9 @@ def contracted_nodes(G, u, v, self_loops=True, copy=True): identified_nodes = contracted_nodes -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable( + preserve_edge_attrs=True, mutates_input={"not copy": 3}, returns_graph=True +) def contracted_edge(G, edge, self_loops=True, copy=True): """Returns the graph that results from contracting the specified edge. diff --git a/networkx/algorithms/mis.py b/networkx/algorithms/mis.py index 00d101c5288..fc70514d9ea 100644 --- a/networkx/algorithms/mis.py +++ b/networkx/algorithms/mis.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @py_random_state(2) -@nx._dispatch +@nx._dispatchable def maximal_independent_set(G, nodes=None, seed=None): """Returns a random maximal independent set guaranteed to contain a given set of nodes. 
diff --git a/networkx/algorithms/moral.py b/networkx/algorithms/moral.py index af187259251..e2acf80f6c3 100644 --- a/networkx/algorithms/moral.py +++ b/networkx/algorithms/moral.py @@ -9,7 +9,7 @@ @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def moral_graph(G): r"""Return the Moral Graph diff --git a/networkx/algorithms/node_classification.py b/networkx/algorithms/node_classification.py index c1b46545abb..42e7e6ba2ff 100644 --- a/networkx/algorithms/node_classification.py +++ b/networkx/algorithms/node_classification.py @@ -28,7 +28,7 @@ @nx.utils.not_implemented_for("directed") -@nx._dispatch(node_attrs="label_name") +@nx._dispatchable(node_attrs="label_name") def harmonic_function(G, max_iter=30, label_name="label"): """Node classification by Harmonic function @@ -105,7 +105,7 @@ def harmonic_function(G, max_iter=30, label_name="label"): @nx.utils.not_implemented_for("directed") -@nx._dispatch(node_attrs="label_name") +@nx._dispatchable(node_attrs="label_name") def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"): """Node classification by Local and Global Consistency diff --git a/networkx/algorithms/non_randomness.py b/networkx/algorithms/non_randomness.py index 777cecbc969..85483d330fa 100644 --- a/networkx/algorithms/non_randomness.py +++ b/networkx/algorithms/non_randomness.py @@ -11,7 +11,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def non_randomness(G, k=None, weight="weight"): """Compute the non-randomness of graph G. @@ -57,7 +57,7 @@ def non_randomness(G, k=None, weight="weight"): -------- >>> G = nx.karate_club_graph() >>> nr, nr_rd = nx.non_randomness(G, 2) - >>> nr, nr_rd = nx.non_randomness(G, 2, 'weight') + >>> nr, nr_rd = nx.non_randomness(G, 2, "weight") Notes ----- @@ -84,7 +84,7 @@ def non_randomness(G, k=None, weight="weight"): # eq. 
4.4 eigenvalues = np.linalg.eigvals(nx.to_numpy_array(G, weight=weight)) - nr = np.real(np.sum(eigenvalues[:k])) + nr = float(np.real(np.sum(eigenvalues[:k]))) n = G.number_of_nodes() m = G.number_of_edges() diff --git a/networkx/algorithms/operators/all.py b/networkx/algorithms/operators/all.py index 1a9317b168b..ba1304b6c4f 100644 --- a/networkx/algorithms/operators/all.py +++ b/networkx/algorithms/operators/all.py @@ -7,7 +7,7 @@ __all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"] -@nx._dispatch(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) def union_all(graphs, rename=()): """Returns the union of all graphs. @@ -110,7 +110,7 @@ def label(x): return R -@nx._dispatch(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) def disjoint_union_all(graphs): """Returns the disjoint union of all graphs. @@ -164,7 +164,7 @@ def yield_relabeled(graphs): return R -@nx._dispatch(graphs="[graphs]", preserve_all_attrs=True) +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) def compose_all(graphs): """Returns the composition of all graphs. @@ -230,7 +230,7 @@ def compose_all(graphs): return R -@nx._dispatch(graphs="[graphs]") +@nx._dispatchable(graphs="[graphs]", returns_graph=True) def intersection_all(graphs): """Returns a new graph that contains only the nodes and the edges that exist in all graphs. @@ -270,8 +270,11 @@ def intersection_all(graphs): >>> gh = nx.intersection_all([g, h]) - >>> new_node_attr = {n: min(*(anyG.nodes[n].get('capacity', float('inf')) for anyG in [g, h])) for n in gh} - >>> nx.set_node_attributes(gh, new_node_attr, 'new_capacity') + >>> new_node_attr = { + ... n: min(*(anyG.nodes[n].get("capacity", float("inf")) for anyG in [g, h])) + ... for n in gh + ... 
} + >>> nx.set_node_attributes(gh, new_node_attr, "new_capacity") >>> gh.nodes(data=True) NodeDataView({0: {'new_capacity': 2}, 1: {'new_capacity': 3}}) diff --git a/networkx/algorithms/operators/binary.py b/networkx/algorithms/operators/binary.py index c36ea26c25c..0ca3a7b6bd2 100644 --- a/networkx/algorithms/operators/binary.py +++ b/networkx/algorithms/operators/binary.py @@ -15,7 +15,7 @@ _G_H = {"G": 0, "H": 1} -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def union(G, H, rename=()): """Combine graphs G and H. The names of nodes must be unique. @@ -71,7 +71,7 @@ def union(G, H, rename=()): return nx.union_all([G, H], rename) -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def disjoint_union(G, H): """Combine graphs G and H. The nodes are assumed to be unique (disjoint). @@ -125,7 +125,7 @@ def disjoint_union(G, H): return nx.disjoint_union_all([G, H]) -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def intersection(G, H): """Returns a new graph that contains only the nodes and the edges that exist in both G and H. @@ -170,7 +170,7 @@ def intersection(G, H): return nx.intersection_all([G, H]) -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def difference(G, H): """Returns a new graph that contains the edges that exist in G but not in H. @@ -225,7 +225,7 @@ def difference(G, H): return R -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def symmetric_difference(G, H): """Returns new graph with edges that exist in either G or H but not both. 
@@ -288,7 +288,7 @@ def symmetric_difference(G, H): return R -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def compose(G, H): """Compose graph G with H by combining nodes and edges into a single graph. @@ -336,36 +336,40 @@ def compose(G, H): By default, the attributes from `H` take precedent over attributes from `G`. If you prefer another way of combining attributes, you can update them after the compose operation: - >>> G = nx.Graph([(0, 1, {'weight': 2.0}), (3, 0, {'weight': 100.0})]) - >>> H = nx.Graph([(0, 1, {'weight': 10.0}), (1, 2, {'weight': -1.0})]) - >>> nx.set_node_attributes(G, {0: 'dark', 1: 'light', 3: 'black'}, name='color') - >>> nx.set_node_attributes(H, {0: 'green', 1: 'orange', 2: 'yellow'}, name='color') + >>> G = nx.Graph([(0, 1, {"weight": 2.0}), (3, 0, {"weight": 100.0})]) + >>> H = nx.Graph([(0, 1, {"weight": 10.0}), (1, 2, {"weight": -1.0})]) + >>> nx.set_node_attributes(G, {0: "dark", 1: "light", 3: "black"}, name="color") + >>> nx.set_node_attributes(H, {0: "green", 1: "orange", 2: "yellow"}, name="color") >>> GcomposeH = nx.compose(G, H) Normally, color attribute values of nodes of GcomposeH come from H. We can workaround this as follows: - >>> node_data = {n: G.nodes[n]['color'] + " " + H.nodes[n]['color'] for n in G.nodes & H.nodes} - >>> nx.set_node_attributes(GcomposeH, node_data, 'color') - >>> print(GcomposeH.nodes[0]['color']) + >>> node_data = { + ... n: G.nodes[n]["color"] + " " + H.nodes[n]["color"] for n in G.nodes & H.nodes + ... 
} + >>> nx.set_node_attributes(GcomposeH, node_data, "color") + >>> print(GcomposeH.nodes[0]["color"]) dark green - >>> print(GcomposeH.nodes[3]['color']) + >>> print(GcomposeH.nodes[3]["color"]) black Similarly, we can update edge attributes after the compose operation in a way we prefer: - >>> edge_data = {e: G.edges[e]['weight'] * H.edges[e]['weight'] for e in G.edges & H.edges} - >>> nx.set_edge_attributes(GcomposeH, edge_data, 'weight') - >>> print(GcomposeH.edges[(0, 1)]['weight']) + >>> edge_data = { + ... e: G.edges[e]["weight"] * H.edges[e]["weight"] for e in G.edges & H.edges + ... } + >>> nx.set_edge_attributes(GcomposeH, edge_data, "weight") + >>> print(GcomposeH.edges[(0, 1)]["weight"]) 20.0 - >>> print(GcomposeH.edges[(3, 0)]['weight']) + >>> print(GcomposeH.edges[(3, 0)]["weight"]) 100.0 """ return nx.compose_all([G, H]) -@nx._dispatch(graphs=_G_H, preserve_all_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) def full_join(G, H, rename=(None, None)): """Returns the full join of graphs G and H. diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py index e7da4fc73be..dc342700480 100644 --- a/networkx/algorithms/operators/product.py +++ b/networkx/algorithms/operators/product.py @@ -14,6 +14,7 @@ "power", "rooted_product", "corona_product", + "modular_product", ] _G_H = {"G": 0, "H": 1} @@ -123,12 +124,12 @@ def _init_product_graph(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def tensor_product(G, H): r"""Returns the tensor product of G and H. The tensor product $P$ of the graphs $G$ and $H$ has a node set that - is the tensor product of the node sets, $V(P)=V(G) \times V(H)$. + is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. $P$ has an edge $((u,v), (x,y))$ if and only if $(u,x)$ is an edge in $G$ and $(v,y)$ is an edge in $H$. 
@@ -179,7 +180,7 @@ def tensor_product(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def cartesian_product(G, H): r"""Returns the Cartesian product of G and H. @@ -231,7 +232,7 @@ def cartesian_product(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def lexicographic_product(G, H): r"""Returns the lexicographic product of G and H. @@ -284,7 +285,7 @@ def lexicographic_product(G, H): return GH -@nx._dispatch(graphs=_G_H, preserve_node_attrs=True) +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) def strong_product(G, H): r"""Returns the strong product of G and H. @@ -342,7 +343,7 @@ def strong_product(G, H): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def power(G, k): """Returns the specified power of a graph. @@ -431,7 +432,7 @@ def power(G, k): @not_implemented_for("multigraph") -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def rooted_product(G, H, root): """Return the rooted product of graphs G and H rooted at root in H. @@ -471,7 +472,7 @@ def rooted_product(G, H, root): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(graphs=_G_H) +@nx._dispatchable(graphs=_G_H, returns_graph=True) def corona_product(G, H): r"""Returns the Corona product of G and H. @@ -532,3 +533,98 @@ def corona_product(G, H): GH.add_edges_from((G_node, (G_node, H_node)) for H_node in H) return GH + + +@nx._dispatchable( + graphs=_G_H, preserve_edge_attrs=True, preserve_node_attrs=True, returns_graph=True +) +def modular_product(G, H): + r"""Returns the Modular product of G and H. 
+ + The modular product of `G` and `H` is the graph $M = G \nabla H$, + consisting of the node set $V(M) = V(G) \times V(H)$ that is the Cartesian + product of the node sets of `G` and `H`. Further, M contains an edge ((u, v), (x, y)): + + - if u is adjacent to x in `G` and v is adjacent to y in `H`, or + - if u is not adjacent to x in `G` and v is not adjacent to y in `H`. + + More formally:: + + E(M) = {((u, v), (x, y)) | ((u, x) in E(G) and (v, y) in E(H)) or + ((u, x) not in E(G) and (v, y) not in E(H))} + + Parameters + ---------- + G, H: NetworkX graphs + The graphs to take the modular product of. + + Returns + ------- + M: NetworkX graph + The Modular product of `G` and `H`. + + Raises + ------ + NetworkXNotImplemented + If `G` is not a simple graph. + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> H = nx.path_graph(2) + >>> M = nx.modular_product(G, H) + >>> list(M) + [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)] + >>> print(M) + Graph with 8 nodes and 8 edges + + Notes + ----- + The *modular product* is defined in [1]_ and was first + introduced as the *weak modular product*. + + The modular product reduces the problem of counting isomorphic subgraphs + in `G` and `H` to the problem of counting cliques in M. The subgraphs of + `G` and `H` that are induced by the nodes of a clique in M are + isomorphic [2]_ [3]_. + + References + ---------- + .. [1] R. Hammack, W. Imrich, and S. Klavžar, + "Handbook of Product Graphs", CRC Press, 2011. + + .. [2] H. G. Barrow and R. M. Burstall, + "Subgraph isomorphism, matching relational structures and maximal + cliques", Information Processing Letters, vol. 4, issue 4, pp. 83-84, + 1976, https://doi.org/10.1016/0020-0190(76)90049-1. + + .. [3] V. G. Vizing, "Reduction of the problem of isomorphism and isomorphic + entrance to the task of finding the nondensity of a graph." Proc. Third + All-Union Conference on Problems of Theoretical Cybernetics. 1974. 
+ """ + if G.is_directed() or H.is_directed(): + raise nx.NetworkXNotImplemented( + "Modular product not implemented for directed graphs" + ) + if G.is_multigraph() or H.is_multigraph(): + raise nx.NetworkXNotImplemented( + "Modular product not implemented for multigraphs" + ) + + GH = _init_product_graph(G, H) + GH.add_nodes_from(_node_product(G, H)) + + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + GH.add_edge((u, x), (v, y), **_dict_product(c, d)) + GH.add_edge((v, x), (u, y), **_dict_product(c, d)) + + G = nx.complement(G) + H = nx.complement(H) + + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + GH.add_edge((u, x), (v, y), **_dict_product(c, d)) + GH.add_edge((v, x), (u, y), **_dict_product(c, d)) + + return GH diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py index 4ce6f9bf033..9693e6332f5 100644 --- a/networkx/algorithms/operators/tests/test_binary.py +++ b/networkx/algorithms/operators/tests/test_binary.py @@ -43,7 +43,7 @@ def test_intersection(): assert sorted(I.edges()) == [(2, 3)] ################## - # Tests for @nx._dispatch mechanism with multiple graph arguments + # Tests for @nx._dispatchable mechanism with multiple graph arguments # nx.intersection is called as if it were a re-implementation # from another package. 
################### @@ -53,7 +53,7 @@ def test_intersection(): assert set(I2.nodes()) == {1, 2, 3, 4} assert sorted(I2.edges()) == [(2, 3)] # Only test if not performing auto convert testing of backend implementations - if not nx.utils.backends._dispatch._automatic_backends: + if not nx.utils.backends._dispatchable._automatic_backends: with pytest.raises(TypeError): nx.intersection(G2, H) with pytest.raises(TypeError): diff --git a/networkx/algorithms/operators/tests/test_product.py b/networkx/algorithms/operators/tests/test_product.py index 50bc7b7e59e..2eb788bc302 100644 --- a/networkx/algorithms/operators/tests/test_product.py +++ b/networkx/algorithms/operators/tests/test_product.py @@ -433,3 +433,59 @@ def test_corona_product(): C = nx.corona_product(G, H) assert len(C) == (len(G) * len(H)) + len(G) assert C.size() == G.size() + len(G) * H.size() + len(G) * len(H) + + +def test_modular_product(): + G = nx.path_graph(3) + H = nx.path_graph(4) + M = nx.modular_product(G, H) + assert len(M) == len(G) * len(H) + + assert edges_equal( + list(M.edges()), + [ + ((0, 0), (1, 1)), + ((0, 0), (2, 2)), + ((0, 0), (2, 3)), + ((0, 1), (1, 0)), + ((0, 1), (1, 2)), + ((0, 1), (2, 3)), + ((0, 2), (1, 1)), + ((0, 2), (1, 3)), + ((0, 2), (2, 0)), + ((0, 3), (1, 2)), + ((0, 3), (2, 0)), + ((0, 3), (2, 1)), + ((1, 0), (2, 1)), + ((1, 1), (2, 0)), + ((1, 1), (2, 2)), + ((1, 2), (2, 1)), + ((1, 2), (2, 3)), + ((1, 3), (2, 2)), + ], + ) + + +def test_modular_product_raises(): + G = nx.Graph([(0, 1), (1, 2), (2, 0)]) + H = nx.Graph([(0, 1), (1, 2), (2, 0)]) + DG = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + DH = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(G, DH) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(DG, H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(DG, DH) + + MG = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)]) + MH = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)]) + 
with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(G, MH) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(MG, H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(MG, MH) + with pytest.raises(nx.NetworkXNotImplemented): + # check multigraph with no multiedges + nx.modular_product(nx.MultiGraph(G), H) diff --git a/networkx/algorithms/operators/unary.py b/networkx/algorithms/operators/unary.py index ce6d9be9057..64be249f634 100644 --- a/networkx/algorithms/operators/unary.py +++ b/networkx/algorithms/operators/unary.py @@ -4,7 +4,7 @@ __all__ = ["complement", "reverse"] -@nx._dispatch +@nx._dispatchable(returns_graph=True) def complement(G): """Returns the graph complement of G. @@ -28,7 +28,7 @@ def complement(G): -------- >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)]) >>> G_complement = nx.complement(G) - >>> G_complement.edges() # This shows the edges of the complemented graph + >>> G_complement.edges() # This shows the edges of the complemented graph EdgeView([(1, 4), (1, 5), (2, 4), (2, 5), (4, 5)]) """ @@ -40,7 +40,7 @@ def complement(G): return R -@nx._dispatch +@nx._dispatchable(returns_graph=True) def reverse(G, copy=True): """Returns the reverse directed graph of G. 
diff --git a/networkx/algorithms/planar_drawing.py b/networkx/algorithms/planar_drawing.py index 47f94f17215..ea25809b6ae 100644 --- a/networkx/algorithms/planar_drawing.py +++ b/networkx/algorithms/planar_drawing.py @@ -78,18 +78,18 @@ def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): left_t_child[v3] = None for k in range(3, len(node_list)): - vk, contour_neighbors = node_list[k] - wp = contour_neighbors[0] - wp1 = contour_neighbors[1] - wq = contour_neighbors[-1] - wq1 = contour_neighbors[-2] - adds_mult_tri = len(contour_neighbors) > 2 + vk, contour_nbrs = node_list[k] + wp = contour_nbrs[0] + wp1 = contour_nbrs[1] + wq = contour_nbrs[-1] + wq1 = contour_nbrs[-2] + adds_mult_tri = len(contour_nbrs) > 2 # Stretch gaps: delta_x[wp1] += 1 delta_x[wq] += 1 - delta_x_wp_wq = sum(delta_x[x] for x in contour_neighbors[1:]) + delta_x_wp_wq = sum(delta_x[x] for x in contour_nbrs[1:]) # Adjust offsets delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 @@ -326,8 +326,8 @@ def triangulate_face(embedding, v1, v2): v1, v2, v3 = v2, v3, v4 else: # Add edge for triangulation - embedding.add_half_edge_cw(v1, v3, v2) - embedding.add_half_edge_ccw(v3, v1, v2) + embedding.add_half_edge(v1, v3, ccw=v2) + embedding.add_half_edge(v3, v1, cw=v2) v1, v2, v3 = v1, v3, v4 # Get next node _, v4 = embedding.next_face_half_edge(v2, v3) @@ -445,8 +445,8 @@ def make_bi_connected(embedding, starting_node, outgoing_node, edges_counted): # cycle is not completed yet if v2 in face_set: # v2 encountered twice: Add edge to ensure 2-connectedness - embedding.add_half_edge_cw(v1, v3, v2) - embedding.add_half_edge_ccw(v3, v1, v2) + embedding.add_half_edge(v1, v3, ccw=v2) + embedding.add_half_edge(v3, v1, cw=v2) edges_counted.add((v2, v3)) edges_counted.add((v3, v1)) v2 = v1 diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index ad46f4739e5..b8dcda60c83 100644 --- a/networkx/algorithms/planarity.py +++ 
b/networkx/algorithms/planarity.py @@ -5,7 +5,7 @@ __all__ = ["check_planarity", "is_planar", "PlanarEmbedding"] -@nx._dispatch +@nx._dispatchable def is_planar(G): """Returns True if and only if `G` is planar. @@ -38,7 +38,7 @@ def is_planar(G): return check_planarity(G, counterexample=False)[0] -@nx._dispatch +@nx._dispatchable(returns_graph=True) def check_planarity(G, counterexample=False): """Check if a graph is planar and return a counterexample or an embedding. @@ -114,7 +114,7 @@ def check_planarity(G, counterexample=False): return True, embedding -@nx._dispatch +@nx._dispatchable(returns_graph=True) def check_planarity_recursive(G, counterexample=False): """Recursive version of :meth:`check_planarity`.""" planarity_state = LRPlanarity(G) @@ -130,7 +130,7 @@ def check_planarity_recursive(G, counterexample=False): return True, embedding -@nx._dispatch +@nx._dispatchable(returns_graph=True) def get_counterexample(G): """Obtains a Kuratowski subgraph. @@ -169,7 +169,7 @@ def get_counterexample(G): return subgraph -@nx._dispatch +@nx._dispatchable(returns_graph=True) def get_counterexample_recursive(G): """Recursive version of :meth:`get_counterexample`.""" @@ -376,7 +376,7 @@ def lr_planarity(self): # initialize the embedding previous_node = None for w in self.ordered_adjs[v]: - self.embedding.add_half_edge_cw(v, w, previous_node) + self.embedding.add_half_edge(v, w, ccw=previous_node) previous_node = w # Free no longer used variables @@ -436,7 +436,7 @@ def lr_planarity_recursive(self): # initialize the embedding previous_node = None for w in self.ordered_adjs[v]: - self.embedding.add_half_edge_cw(v, w, previous_node) + self.embedding.add_half_edge(v, w, ccw=previous_node) previous_node = w # compute the complete embedding @@ -714,9 +714,9 @@ def dfs_embedding(self, v): break # handle next node in dfs_stack (i.e. 
w) else: # back edge if self.side[ei] == 1: - self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) + self.embedding.add_half_edge(w, v, ccw=self.right_ref[w]) else: - self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) + self.embedding.add_half_edge(w, v, cw=self.left_ref[w]) self.left_ref[w] = v def dfs_embedding_recursive(self, v): @@ -731,10 +731,10 @@ def dfs_embedding_recursive(self, v): else: # back edge if self.side[ei] == 1: # place v directly after right_ref[w] in embed. list of w - self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) + self.embedding.add_half_edge(w, v, ccw=self.right_ref[w]) else: # place v directly before left_ref[w] in embed. list of w - self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) + self.embedding.add_half_edge(w, v, cw=self.left_ref[w]) self.left_ref[w] = v def sign(self, e): @@ -791,15 +791,12 @@ class PlanarEmbedding(nx.DiGraph): * Edges must go in both directions (because the edge attributes differ) * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a correct planar embedding. - * A node with non zero degree must have a node attribute 'first_nbr'. As long as a PlanarEmbedding is invalid only the following methods should be called: - * :meth:`add_half_edge_ccw` - * :meth:`add_half_edge_cw` + * :meth:`add_half_edge` * :meth:`connect_components` - * :meth:`add_half_edge_first` Even though the graph is a subclass of nx.DiGraph, it can still be used for algorithms that require undirected graphs, because the method @@ -808,14 +805,14 @@ class PlanarEmbedding(nx.DiGraph): **Half edges:** - In methods like `add_half_edge_ccw` the term "half-edge" is used, which is + In methods like `add_half_edge` the term "half-edge" is used, which is a term that is used in `doubly connected edge lists `_. It is used to emphasize that the edge is only in one direction and there exists another half-edge in the opposite direction. 
While conventional edges always have two faces (including outer face) next to them, it is possible to assign each half-edge *exactly one* face. - For a half-edge (u, v) that is orientated such that u is below v then the + For a half-edge (u, v) that is oriented such that u is below v then the face that belongs to (u, v) is to the right of this half-edge. See Also @@ -833,23 +830,23 @@ class PlanarEmbedding(nx.DiGraph): Create an embedding of a star graph (compare `nx.star_graph(3)`): >>> G = nx.PlanarEmbedding() - >>> G.add_half_edge_cw(0, 1, None) - >>> G.add_half_edge_cw(0, 2, 1) - >>> G.add_half_edge_cw(0, 3, 2) - >>> G.add_half_edge_cw(1, 0, None) - >>> G.add_half_edge_cw(2, 0, None) - >>> G.add_half_edge_cw(3, 0, None) + >>> G.add_half_edge(0, 1) + >>> G.add_half_edge(0, 2, ccw=1) + >>> G.add_half_edge(0, 3, ccw=2) + >>> G.add_half_edge(1, 0) + >>> G.add_half_edge(2, 0) + >>> G.add_half_edge(3, 0) Alternatively the same embedding can also be defined in counterclockwise orientation. 
The following results in exactly the same PlanarEmbedding: >>> G = nx.PlanarEmbedding() - >>> G.add_half_edge_ccw(0, 1, None) - >>> G.add_half_edge_ccw(0, 3, 1) - >>> G.add_half_edge_ccw(0, 2, 3) - >>> G.add_half_edge_ccw(1, 0, None) - >>> G.add_half_edge_ccw(2, 0, None) - >>> G.add_half_edge_ccw(3, 0, None) + >>> G.add_half_edge(0, 1) + >>> G.add_half_edge(0, 3, cw=1) + >>> G.add_half_edge(0, 2, cw=3) + >>> G.add_half_edge(1, 0) + >>> G.add_half_edge(2, 0) + >>> G.add_half_edge(3, 0) After creating a graph, it is possible to validate that the PlanarEmbedding object is correct: @@ -858,6 +855,22 @@ class PlanarEmbedding(nx.DiGraph): """ + def __init__(self, incoming_graph_data=None, **attr): + super().__init__(incoming_graph_data=incoming_graph_data, **attr) + self.add_edge = self.__forbidden + self.add_edges_from = self.__forbidden + self.add_weighted_edges_from = self.__forbidden + + def __forbidden(self, *args, **kwargs): + """Forbidden operation + + Any edge additions to a PlanarEmbedding should be done using + method `add_half_edge`. + """ + raise NotImplementedError( + "Use `add_half_edge` method to add edges to a PlanarEmbedding." + ) + def get_data(self): """Converts the adjacency structure into a better readable structure. @@ -894,8 +907,79 @@ def set_data(self, data): """ for v in data: + ref = None for w in reversed(data[v]): - self.add_half_edge_first(v, w) + self.add_half_edge(v, w, cw=ref) + ref = w + + def remove_node(self, n): + """Remove node n. + + Removes the node n and all adjacent edges, updating the + PlanarEmbedding to account for any resulting edge removal. + Attempting to remove a non-existent node will raise an exception. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. 
+ + See Also + -------- + remove_nodes_from + + """ + try: + for u in self._pred[n]: + succs_u = self._succ[u] + un_cw = succs_u[n]["cw"] + un_ccw = succs_u[n]["ccw"] + del succs_u[n] + del self._pred[u][n] + if n != un_cw: + succs_u[un_cw]["ccw"] = un_ccw + succs_u[un_ccw]["cw"] = un_cw + del self._node[n] + del self._succ[n] + del self._pred[n] + except KeyError as err: # NetworkXError if n not in self + raise nx.NetworkXError( + f"The node {n} is not in the planar embedding." + ) from err + + def remove_nodes_from(self, nodes): + """Remove multiple nodes. + + Parameters + ---------- + nodes : iterable container + A container of nodes (list, dict, set, etc.). If a node + in the container is not in the graph it is silently ignored. + + See Also + -------- + remove_node + + Notes + ----- + When removing nodes from an iterator over the graph you are changing, + a `RuntimeError` will be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.remove_nodes_from`. + + """ + for n in nodes: + if n in self._node: + self.remove_node(n) + # silently skip non-existing nodes def neighbors_cw_order(self, v): """Generator for the neighbors of v in clockwise order. 
@@ -909,15 +993,91 @@ def neighbors_cw_order(self, v): node """ - if len(self[v]) == 0: + succs = self._succ[v] + if not succs: # v has no neighbors return - start_node = self.nodes[v]["first_nbr"] + start_node = next(reversed(succs)) yield start_node - current_node = self[v][start_node]["cw"] + current_node = succs[start_node]["cw"] while start_node != current_node: yield current_node - current_node = self[v][current_node]["cw"] + current_node = succs[current_node]["cw"] + + def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): + """Adds a half-edge from `start_node` to `end_node`. + + If the half-edge is not the first one out of `start_node`, a reference + node must be provided either in the clockwise (parameter `cw`) or in + the counterclockwise (parameter `ccw`) direction. Only one of `cw`/`ccw` + can be specified (or neither in the case of the first edge). + Note that specifying a reference in the clockwise (`cw`) direction means + inserting the new edge in the first counterclockwise position with + respect to the reference (and vice-versa). + + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + cw, ccw: node + End node of reference edge. + Omit or pass `None` if adding the first out-half-edge of `start_node`. + + + Raises + ------ + NetworkXException + If the `cw` or `ccw` node is not a successor of `start_node`. + If `start_node` has successors, but neither `cw` or `ccw` is provided. + If both `cw` and `ccw` are specified. 
+ + See Also + -------- + connect_components + """ + + succs = self._succ.get(start_node) + if succs: + # there is already some edge out of start_node + leftmost_nbr = next(reversed(self._succ[start_node])) + if cw is not None: + if cw not in succs: + raise nx.NetworkXError("Invalid clockwise reference node.") + if ccw is not None: + raise nx.NetworkXError("Only one of cw/ccw can be specified.") + ref_ccw = succs[cw]["ccw"] + super().add_edge(start_node, end_node, cw=cw, ccw=ref_ccw) + succs[ref_ccw]["cw"] = end_node + succs[cw]["ccw"] = end_node + # when (cw == leftmost_nbr), the newly added neighbor is + # already at the end of dict self._succ[start_node] and + # takes the place of the former leftmost_nbr + move_leftmost_nbr_to_end = cw != leftmost_nbr + elif ccw is not None: + if ccw not in succs: + raise nx.NetworkXError("Invalid counterclockwise reference node.") + ref_cw = succs[ccw]["cw"] + super().add_edge(start_node, end_node, cw=ref_cw, ccw=ccw) + succs[ref_cw]["ccw"] = end_node + succs[ccw]["cw"] = end_node + move_leftmost_nbr_to_end = True + else: + raise nx.NetworkXError( + "Node already has out-half-edge(s), either cw or ccw reference node required." + ) + if move_leftmost_nbr_to_end: + # LRPlanarity (via self.add_half_edge_first()) requires that + # we keep track of the leftmost neighbor, which we accomplish + # by keeping it as the last key in dict self._succ[start_node] + succs[leftmost_nbr] = succs.pop(leftmost_nbr) + + else: + if cw is not None or ccw is not None: + raise nx.NetworkXError("Invalid reference node.") + # adding the first edge out of start_node + super().add_edge(start_node, end_node, ccw=end_node, cw=end_node) def check_structure(self): """Runs without exceptions if this object is valid. @@ -927,7 +1087,6 @@ def check_structure(self): * Edges go in both directions (because the edge attributes differ). * Every edge has a 'cw' and 'ccw' attribute which corresponds to a correct planar embedding. 
- * A node with a degree larger than 0 has a node attribute 'first_nbr'. Running this method verifies that the underlying Graph must be planar. @@ -1000,24 +1159,12 @@ def add_half_edge_ccw(self, start_node, end_node, reference_neighbor): See Also -------- + add_half_edge add_half_edge_cw connect_components - add_half_edge_first """ - if reference_neighbor is None: - # The start node has no neighbors - self.add_edge(start_node, end_node) # Add edge to graph - self[start_node][end_node]["cw"] = end_node - self[start_node][end_node]["ccw"] = end_node - self.nodes[start_node]["first_nbr"] = end_node - else: - ccw_reference = self[start_node][reference_neighbor]["ccw"] - self.add_half_edge_cw(start_node, end_node, ccw_reference) - - if reference_neighbor == self.nodes[start_node].get("first_nbr", None): - # Update first neighbor - self.nodes[start_node]["first_nbr"] = end_node + self.add_half_edge(start_node, end_node, cw=reference_neighbor) def add_half_edge_cw(self, start_node, end_node, reference_neighbor): """Adds a half-edge from start_node to end_node. @@ -1041,31 +1188,84 @@ def add_half_edge_cw(self, start_node, end_node, reference_neighbor): See Also -------- + add_half_edge add_half_edge_ccw connect_components - add_half_edge_first """ - self.add_edge(start_node, end_node) # Add edge to graph + self.add_half_edge(start_node, end_node, ccw=reference_neighbor) - if reference_neighbor is None: - # The start node has no neighbors - self[start_node][end_node]["cw"] = end_node - self[start_node][end_node]["ccw"] = end_node - self.nodes[start_node]["first_nbr"] = end_node - return + def remove_edge(self, u, v): + """Remove the edge between u and v. - if reference_neighbor not in self[start_node]: - raise nx.NetworkXException( - "Cannot add edge. Reference neighbor does not exist" - ) + Parameters + ---------- + u, v : nodes + Remove the half-edges (u, v) and (v, u) and update the + edge ordering around the removed edge. 
+ + Raises + ------ + NetworkXError + If there is not an edge between u and v. - # Get half-edge at the other side - cw_reference = self[start_node][reference_neighbor]["cw"] - # Alter half-edge data structures - self[start_node][reference_neighbor]["cw"] = end_node - self[start_node][end_node]["cw"] = cw_reference - self[start_node][cw_reference]["ccw"] = end_node - self[start_node][end_node]["ccw"] = reference_neighbor + See Also + -------- + remove_edges_from : remove a collection of edges + """ + try: + succs_u = self._succ[u] + succs_v = self._succ[v] + uv_cw = succs_u[v]["cw"] + uv_ccw = succs_u[v]["ccw"] + vu_cw = succs_v[u]["cw"] + vu_ccw = succs_v[u]["ccw"] + del succs_u[v] + del self._pred[v][u] + del succs_v[u] + del self._pred[u][v] + if v != uv_cw: + succs_u[uv_cw]["ccw"] = uv_ccw + succs_u[uv_ccw]["cw"] = uv_cw + if u != vu_cw: + succs_v[vu_cw]["ccw"] = vu_ccw + succs_v[vu_ccw]["cw"] = vu_cw + except KeyError as err: + raise nx.NetworkXError( + f"The edge {u}-{v} is not in the planar embedding." + ) from err + + def remove_edges_from(self, ebunch): + """Remove all edges specified in ebunch. + + Parameters + ---------- + ebunch: list or container of edge tuples + Each pair of half-edges between the nodes given in the tuples + will be removed from the graph. The nodes can be passed as: + + - 2-tuples (u, v) half-edges (u, v) and (v, u). + - 3-tuples (u, v, k) where k is ignored. + + See Also + -------- + remove_edge : remove a single edge + + Notes + ----- + Will fail silently if an edge in ebunch is not in the graph. 
+ + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> ebunch = [(1, 2), (2, 3)] + >>> G.remove_edges_from(ebunch) + """ + for e in ebunch: + u, v = e[:2] # ignore edge data + # assuming that the PlanarEmbedding is valid, if the half_edge + # (u, v) is in the graph, then so is half_edge (v, u) + if u in self._succ and v in self._succ[u]: + self.remove_edge(u, v) def connect_components(self, v, w): """Adds half-edges for (v, w) and (w, v) at some position. @@ -1084,15 +1284,24 @@ def connect_components(self, v, w): See Also -------- - add_half_edge_ccw - add_half_edge_cw - add_half_edge_first + add_half_edge """ - self.add_half_edge_first(v, w) - self.add_half_edge_first(w, v) + if v in self._succ and self._succ[v]: + ref = next(reversed(self._succ[v])) + else: + ref = None + self.add_half_edge(v, w, cw=ref) + if w in self._succ and self._succ[w]: + ref = next(reversed(self._succ[w])) + else: + ref = None + self.add_half_edge(w, v, cw=ref) def add_half_edge_first(self, start_node, end_node): - """The added half-edge is inserted at the first position in the order. + """Add a half-edge and set end_node as start_node's leftmost neighbor. + + The new edge is inserted counterclockwise with respect to the current + leftmost neighbor, if there is one. 
Parameters ---------- @@ -1101,15 +1310,14 @@ def add_half_edge_first(self, start_node, end_node): See Also -------- - add_half_edge_ccw - add_half_edge_cw + add_half_edge connect_components """ - if start_node in self and "first_nbr" in self.nodes[start_node]: - reference = self.nodes[start_node]["first_nbr"] - else: - reference = None - self.add_half_edge_ccw(start_node, end_node, reference) + succs = self._succ.get(start_node) + # the leftmost neighbor is the last entry in the + # self._succ[start_node] dict + leftmost_nbr = next(reversed(succs)) if succs else None + self.add_half_edge(start_node, end_node, cw=leftmost_nbr) def next_face_half_edge(self, v, w): """Returns the following half-edge left of a face. @@ -1177,3 +1385,16 @@ def is_directed(self): contained. """ return False + + def copy(self, as_view=False): + if as_view is True: + return nx.graphviews.generic_graph_view(self) + G = self.__class__() + G.graph.update(self.graph) + G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) + super(self.__class__, G).add_edges_from( + (u, v, datadict.copy()) + for u, nbrs in self._adj.items() + for v, datadict in nbrs.items() + ) + return G diff --git a/networkx/algorithms/polynomials.py b/networkx/algorithms/polynomials.py index 57ecf0d09a9..217c7dbe37b 100644 --- a/networkx/algorithms/polynomials.py +++ b/networkx/algorithms/polynomials.py @@ -30,7 +30,7 @@ @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def tutte_polynomial(G): r"""Returns the Tutte polynomial of `G` @@ -180,7 +180,7 @@ def tutte_polynomial(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def chromatic_polynomial(G): r"""Returns the chromatic polynomial of `G` diff --git a/networkx/algorithms/reciprocity.py b/networkx/algorithms/reciprocity.py index cb36ae9d551..25b0fa1ba9c 100644 --- a/networkx/algorithms/reciprocity.py +++ b/networkx/algorithms/reciprocity.py @@ -8,7 +8,7 @@ @not_implemented_for("undirected", "multigraph") -@nx._dispatch 
+@nx._dispatchable def reciprocity(G, nodes=None): r"""Compute the reciprocity in a directed graph. @@ -76,7 +76,7 @@ def _reciprocity_iter(G, nodes): @not_implemented_for("undirected", "multigraph") -@nx._dispatch +@nx._dispatchable def overall_reciprocity(G): """Compute the reciprocity for the whole graph. diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py index d3332342794..058ad365457 100644 --- a/networkx/algorithms/regular.py +++ b/networkx/algorithms/regular.py @@ -5,7 +5,7 @@ __all__ = ["is_regular", "is_k_regular", "k_factor"] -@nx._dispatch +@nx._dispatchable def is_regular(G): """Determines whether the graph ``G`` is a regular graph. @@ -29,6 +29,8 @@ def is_regular(G): True """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") n1 = nx.utils.arbitrary_element(G) if not G.is_directed(): d1 = G.degree(n1) @@ -42,7 +44,7 @@ def is_regular(G): @not_implemented_for("directed") -@nx._dispatch +@nx._dispatchable def is_k_regular(G, k): """Determines whether the graph ``G`` is a k-regular graph. @@ -69,7 +71,7 @@ def is_k_regular(G, k): @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True) def k_factor(G, k, matching_weight="weight"): """Compute a k-factor of G diff --git a/networkx/algorithms/richclub.py b/networkx/algorithms/richclub.py index 6fe300fb538..445b27d1425 100644 --- a/networkx/algorithms/richclub.py +++ b/networkx/algorithms/richclub.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def rich_club_coefficient(G, normalized=True, Q=100, seed=None): r"""Returns the rich-club coefficient of the graph `G`. @@ -44,6 +44,12 @@ def rich_club_coefficient(G, normalized=True, Q=100, seed=None): rc : dictionary A dictionary, keyed by degree, with rich-club coefficient values. 
+ Raises + ------ + NetworkXError + If `G` has fewer than four nodes and ``normalized=True``. + A randomly sampled graph for normalization cannot be generated in this case. + Examples -------- >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)]) @@ -57,6 +63,14 @@ def rich_club_coefficient(G, normalized=True, Q=100, seed=None): algorithm ignores any edge weights and is not defined for directed graphs or graphs with parallel edges or self loops. + Normalization is done by computing the rich club coefficient for a randomly + sampled graph with the same degree distribution as `G` by + repeatedly swapping the endpoints of existing edges. For graphs with fewer than 4 + nodes, it is not possible to generate a random graph with a prescribed + degree distribution, as the degree distribution fully determines the graph + (hence making the coefficients trivially normalized to 1). + This function raises an exception in this case. + Estimates for appropriate values of `Q` are found in [2]_. References @@ -108,6 +122,9 @@ def _compute_rc(G): # side of the list, which would have a linear time cost. edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True) ek = G.number_of_edges() + if ek == 0: + return {} + k1, k2 = edge_degrees.pop() rc = {} for d, nk in enumerate(nks): diff --git a/networkx/algorithms/shortest_paths/astar.py b/networkx/algorithms/shortest_paths/astar.py index 64696353e40..1a36561fe80 100644 --- a/networkx/algorithms/shortest_paths/astar.py +++ b/networkx/algorithms/shortest_paths/astar.py @@ -9,7 +9,7 @@ __all__ = ["astar_path", "astar_path_length"] -@nx._dispatch(edge_attrs="weight", preserve_node_attrs="heuristic") +@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic") def astar_path(G, source, target, heuristic=None, weight="weight", *, cutoff=None): """Returns a list of nodes in a shortest path between source and target using the A* ("A-star") algorithm. 
@@ -171,7 +171,7 @@ def heuristic(u, v): raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") -@nx._dispatch(edge_attrs="weight", preserve_node_attrs="heuristic") +@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic") def astar_path_length( G, source, target, heuristic=None, weight="weight", *, cutoff=None ): diff --git a/networkx/algorithms/shortest_paths/dense.py b/networkx/algorithms/shortest_paths/dense.py index 08339b189dd..26e5503d7f9 100644 --- a/networkx/algorithms/shortest_paths/dense.py +++ b/networkx/algorithms/shortest_paths/dense.py @@ -10,7 +10,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def floyd_warshall_numpy(G, nodelist=None, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. @@ -84,7 +84,7 @@ def floyd_warshall_numpy(G, nodelist=None, weight="weight"): return A -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def floyd_warshall_predecessor_and_distance(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. @@ -167,7 +167,7 @@ def floyd_warshall_predecessor_and_distance(G, weight="weight"): return dict(pred), dict(dist) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None) def reconstruct_path(source, target, predecessors): """Reconstruct a path from source to target using the predecessors dict as returned by floyd_warshall_predecessor_and_distance @@ -211,7 +211,7 @@ def reconstruct_path(source, target, predecessors): return list(reversed(path)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def floyd_warshall(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. 
@@ -233,7 +233,7 @@ def floyd_warshall(G, weight="weight"): -------- >>> G = nx.DiGraph() >>> G.add_weighted_edges_from([(0, 1, 5), (1, 2, 2), (2, 3, -3), (1, 3, 10), (3, 2, 8)]) - >>> fw = nx.floyd_warshall(G, weight='weight') + >>> fw = nx.floyd_warshall(G, weight="weight") >>> results = {a: dict(b) for a, b in fw.items()} >>> print(results) {0: {0: 0, 1: 5, 2: 7, 3: 4}, 1: {1: 0, 2: 2, 3: -1, 0: inf}, 2: {2: 0, 3: -3, 0: inf, 1: inf}, 3: {3: 0, 2: 8, 0: inf, 1: inf}} diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index e47c4b4f5a1..81b72419ace 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -19,7 +19,7 @@ ] -@nx._dispatch +@nx._dispatchable def has_path(G, source, target): """Returns *True* if *G* has a path from *source* to *target*. @@ -40,7 +40,7 @@ def has_path(G, source, target): return True -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest paths in the graph. 
@@ -106,13 +106,13 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): >>> print(nx.shortest_path(G, source=0, target=4)) [0, 1, 2, 3, 4] >>> p = nx.shortest_path(G, source=0) # target not specified - >>> p[3] # shortest path from source=0 to target=3 + >>> p[3] # shortest path from source=0 to target=3 [0, 1, 2, 3] >>> p = nx.shortest_path(G, target=4) # source not specified - >>> p[1] # shortest path from source=1 to target=4 + >>> p[1] # shortest path from source=1 to target=4 [1, 2, 3, 4] - >>> p = nx.shortest_path(G) # source, target not specified - >>> p[2][4] # shortest path from source=2 to target=4 + >>> p = dict(nx.shortest_path(G)) # source, target not specified + >>> p[2][4] # shortest path from source=2 to target=4 [2, 3, 4] Notes @@ -135,16 +135,25 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): method = "unweighted" if weight is None else method if source is None: if target is None: - msg = "shortest_path for all_pairs will return an iterator in v3.3" - warnings.warn(msg, DeprecationWarning) + warnings.warn( + ( + "\n\nshortest_path will return an iterator that yields\n" + "(node, path) pairs instead of a dictionary when source\n" + "and target are unspecified beginning in version 3.5\n\n" + "To keep the current behavior, use:\n\n" + "\tdict(nx.shortest_path(G))" + ), + FutureWarning, + stacklevel=3, + ) # Find paths between all pairs. if method == "unweighted": - paths = dict(nx.all_pairs_shortest_path(G)) + paths = nx.all_pairs_shortest_path(G) elif method == "dijkstra": - paths = dict(nx.all_pairs_dijkstra_path(G, weight=weight)) + paths = nx.all_pairs_dijkstra_path(G, weight=weight) else: # method == 'bellman-ford': - paths = dict(nx.all_pairs_bellman_ford_path(G, weight=weight)) + paths = nx.all_pairs_bellman_ford_path(G, weight=weight) else: # Find paths from all nodes co-accessible to the target. 
if G.is_directed(): @@ -178,7 +187,7 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): return paths -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest path lengths in the graph. @@ -322,7 +331,7 @@ def shortest_path_length(G, source=None, target=None, weight=None, method="dijks return paths -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def average_shortest_path_length(G, weight=None, method=None): r"""Returns the average shortest path length. @@ -435,11 +444,11 @@ def path_length(v): all_pairs = nx.floyd_warshall(G, weight=weight) s = sum(sum(t.values()) for t in all_pairs.values()) elif method == "floyd-warshall-numpy": - s = nx.floyd_warshall_numpy(G, weight=weight).sum() + s = float(nx.floyd_warshall_numpy(G, weight=weight).sum()) return s / (n * (n - 1)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_shortest_paths(G, source, target, weight=None, method="dijkstra"): """Compute all shortest simple paths in the graph. @@ -517,7 +526,7 @@ def all_shortest_paths(G, source, target, weight=None, method="dijkstra"): return _build_paths_from_predecessors({source}, target, pred) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_all_shortest_paths(G, source, weight=None, method="dijkstra"): """Compute all shortest simple paths from the given source in the graph. @@ -593,7 +602,7 @@ def single_source_all_shortest_paths(G, source, weight=None, method="dijkstra"): pass -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_all_shortest_paths(G, weight=None, method="dijkstra"): """Compute all shortest paths between all nodes. 
@@ -647,8 +656,9 @@ def all_pairs_all_shortest_paths(G, weight=None, method="dijkstra"): single_source_all_shortest_paths """ for n in G: - yield n, dict( - single_source_all_shortest_paths(G, n, weight=weight, method=method) + yield ( + n, + dict(single_source_all_shortest_paths(G, n, weight=weight, method=method)), ) diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py index 863f1dcb947..9fcc8c396d5 100644 --- a/networkx/algorithms/shortest_paths/tests/test_generic.py +++ b/networkx/algorithms/shortest_paths/tests/test_generic.py @@ -212,22 +212,22 @@ def test_single_source_all_shortest_paths(self): assert sorted(ans[4]) == [[4]] def test_all_pairs_shortest_path(self): - p = nx.shortest_path(self.cycle) + p = dict(nx.shortest_path(self.cycle)) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_shortest_path(self.cycle)) - p = nx.shortest_path(self.grid) + p = dict(nx.shortest_path(self.grid)) validate_grid_path(4, 4, 1, 12, p[1][12]) # now with weights - p = nx.shortest_path(self.cycle, weight="weight") + p = dict(nx.shortest_path(self.cycle, weight="weight")) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) - p = nx.shortest_path(self.grid, weight="weight") + p = dict(nx.shortest_path(self.grid, weight="weight")) validate_grid_path(4, 4, 1, 12, p[1][12]) # weights and method specified - p = nx.shortest_path(self.cycle, weight="weight", method="dijkstra") + p = dict(nx.shortest_path(self.cycle, weight="weight", method="dijkstra")) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) - p = nx.shortest_path(self.cycle, weight="weight", method="bellman-ford") + p = dict(nx.shortest_path(self.cycle, weight="weight", method="bellman-ford")) assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_bellman_ford_path(self.cycle)) diff --git a/networkx/algorithms/shortest_paths/unweighted.py 
b/networkx/algorithms/shortest_paths/unweighted.py index 1503ba74784..bb587bba991 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -17,7 +17,7 @@ ] -@nx._dispatch +@nx._dispatchable def single_source_shortest_path_length(G, source, cutoff=None): """Compute the shortest path lengths from source to all reachable nodes. @@ -95,7 +95,7 @@ def _single_shortest_path_length(adj, firstlevel, cutoff): return -@nx._dispatch +@nx._dispatchable def single_target_shortest_path_length(G, target, cutoff=None): """Compute the shortest path lengths to target from all reachable nodes. @@ -135,8 +135,14 @@ def single_target_shortest_path_length(G, target, cutoff=None): if target not in G: raise nx.NodeNotFound(f"Target {target} is not in G") - msg = "single_target_shortest_path_length will return a dict starting in v3.3" - warnings.warn(msg, DeprecationWarning) + warnings.warn( + ( + "\n\nsingle_target_shortest_path_length will return a dict instead of" + "\nan iterator in version 3.5" + ), + FutureWarning, + stacklevel=3, + ) if cutoff is None: cutoff = float("inf") @@ -148,7 +154,7 @@ def single_target_shortest_path_length(G, target, cutoff=None): return _single_shortest_path_length(adj, nextlevel, cutoff) -@nx._dispatch +@nx._dispatchable def all_pairs_shortest_path_length(G, cutoff=None): """Computes the shortest path lengths between all nodes in `G`. @@ -193,7 +199,7 @@ def all_pairs_shortest_path_length(G, cutoff=None): yield (n, length(G, n, cutoff=cutoff)) -@nx._dispatch +@nx._dispatchable def bidirectional_shortest_path(G, source, target): """Returns a list of nodes in a shortest path between source and target. 
@@ -309,7 +315,7 @@ def _bidirectional_pred_succ(G, source, target): raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -@nx._dispatch +@nx._dispatchable def single_source_shortest_path(G, source, cutoff=None): """Compute shortest path between source and all other nodes reachable from source. @@ -393,7 +399,7 @@ def _single_shortest_path(adj, firstlevel, paths, cutoff, join): return paths -@nx._dispatch +@nx._dispatchable def single_target_shortest_path(G, target, cutoff=None): """Compute shortest path to target from all nodes that reach target. @@ -445,7 +451,7 @@ def join(p1, p2): return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join)) -@nx._dispatch +@nx._dispatchable def all_pairs_shortest_path(G, cutoff=None): """Compute shortest paths between all nodes. @@ -485,7 +491,7 @@ def all_pairs_shortest_path(G, cutoff=None): yield (n, single_source_shortest_path(G, n, cutoff=cutoff)) -@nx._dispatch +@nx._dispatchable def predecessor(G, source, target=None, cutoff=None, return_seen=None): """Returns dict of predecessors for the path from source to all nodes in G. diff --git a/networkx/algorithms/shortest_paths/weighted.py b/networkx/algorithms/shortest_paths/weighted.py index bbbb03d9eee..6299d276cb1 100644 --- a/networkx/algorithms/shortest_paths/weighted.py +++ b/networkx/algorithms/shortest_paths/weighted.py @@ -78,7 +78,7 @@ def _weight_function(G, weight): return lambda u, v, data: data.get(weight, 1) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def dijkstra_path(G, source, target, weight="weight"): """Returns the shortest weighted path from source to target in G. 
@@ -133,7 +133,10 @@ def dijkstra_path(G, source, target, weight="weight"): >>> G.add_weighted_edges_from([(1, 2, 0.75), (1, 2, 0.5), (2, 3, 0.5), (1, 3, 1.5)]) >>> nodes = nx.dijkstra_path(G, 1, 3) >>> edges = nx.utils.pairwise(nodes) - >>> list((u, v, min(G[u][v], key=lambda k: G[u][v][k].get('weight', 1))) for u, v in edges) + >>> list( + ... (u, v, min(G[u][v], key=lambda k: G[u][v][k].get("weight", 1))) + ... for u, v in edges + ... ) [(1, 2, 1), (2, 3, 0)] Notes @@ -169,7 +172,7 @@ def dijkstra_path(G, source, target, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def dijkstra_path_length(G, source, target, weight="weight"): """Returns the shortest weighted path length in G from source to target. @@ -249,7 +252,7 @@ def dijkstra_path_length(G, source, target, weight="weight"): raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") from err -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a source node. @@ -314,7 +317,7 @@ def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"): return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a source node. @@ -386,7 +389,7 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"): return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, weight=weight) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a source node. 
@@ -488,7 +491,7 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight") ) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a given set of source nodes. @@ -562,7 +565,7 @@ def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a given set of source nodes. @@ -644,7 +647,7 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): return _dijkstra_multisource(G, sources, weight, cutoff=cutoff) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a given set of source nodes. @@ -881,7 +884,7 @@ def _dijkstra_multisource( return dist -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): """Compute weighted shortest path length and predecessors. @@ -954,7 +957,7 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): return (pred, _dijkstra(G, source, weight, pred=pred, cutoff=cutoff)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_dijkstra(G, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths between all nodes. 
@@ -1023,7 +1026,7 @@ def all_pairs_dijkstra(G, cutoff=None, weight="weight"): yield (n, (dist, path)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"): """Compute shortest path lengths between all nodes in a weighted graph. @@ -1082,7 +1085,7 @@ def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"): yield (n, length(G, n, cutoff=cutoff, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): """Compute shortest paths between all nodes in a weighted graph. @@ -1136,7 +1139,7 @@ def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): yield (n, path(G, n, cutoff=cutoff, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bellman_ford_predecessor_and_distance( G, source, target=None, weight="weight", heuristic=False ): @@ -1484,7 +1487,7 @@ def _inner_bellman_ford( return None -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bellman_ford_path(G, source, target, weight="weight"): """Returns the shortest path from source to target in a weighted graph G. @@ -1543,7 +1546,7 @@ def bellman_ford_path(G, source, target, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bellman_ford_path_length(G, source, target, weight="weight"): """Returns the shortest path length from source to target in a weighted graph. @@ -1614,7 +1617,7 @@ def bellman_ford_path_length(G, source, target, weight="weight"): raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from err -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_bellman_ford_path(G, source, weight="weight"): """Compute shortest path between source and all other reachable nodes for a weighted graph. 
@@ -1670,7 +1673,7 @@ def single_source_bellman_ford_path(G, source, weight="weight"): return path -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_bellman_ford_path_length(G, source, weight="weight"): """Compute the shortest path length between source and all other reachable nodes for a weighted graph. @@ -1733,7 +1736,7 @@ def single_source_bellman_ford_path_length(G, source, weight="weight"): return _bellman_ford(G, [source], weight) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def single_source_bellman_ford(G, source, target=None, weight="weight"): """Compute shortest paths and lengths in a weighted graph G. @@ -1827,7 +1830,7 @@ def single_source_bellman_ford(G, source, target=None, weight="weight"): raise nx.NetworkXNoPath(msg) from err -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_bellman_ford_path_length(G, weight="weight"): """Compute shortest path lengths between all nodes in a weighted graph. @@ -1882,7 +1885,7 @@ def all_pairs_bellman_ford_path_length(G, weight="weight"): yield (n, dict(length(G, n, weight=weight))) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def all_pairs_bellman_ford_path(G, weight="weight"): """Compute shortest paths between all nodes in a weighted graph. @@ -1932,7 +1935,7 @@ def all_pairs_bellman_ford_path(G, weight="weight"): yield (n, path(G, n, weight=weight)) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def goldberg_radzik(G, source, weight="weight"): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. @@ -2119,7 +2122,7 @@ def relax(to_scan): return pred, d -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def negative_edge_cycle(G, weight="weight", heuristic=True): """Returns True if there exists a negative edge cycle anywhere in G. 
@@ -2190,7 +2193,7 @@ def negative_edge_cycle(G, weight="weight", heuristic=True): return False -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def find_negative_cycle(G, source, weight="weight"): """Returns a cycle with negative total weight if it exists. @@ -2283,7 +2286,7 @@ def find_negative_cycle(G, source, weight="weight"): raise nx.NetworkXUnbounded(msg) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def bidirectional_dijkstra(G, source, target, weight="weight"): r"""Dijkstra's algorithm for shortest paths using bidirectional search. @@ -2431,7 +2434,7 @@ def bidirectional_dijkstra(G, source, target, weight="weight"): raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def johnson(G, weight="weight"): r"""Uses Johnson's Algorithm to compute shortest paths. diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index 3d943c20d7e..24f303b13c9 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -20,6 +20,7 @@ from itertools import product import networkx as nx +from networkx.utils import np_random_state __all__ = [ "graph_edit_distance", @@ -36,7 +37,7 @@ def debug_print(*args, **kwargs): print(*args, **kwargs) -@nx._dispatch( +@nx._dispatchable( graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True ) def graph_edit_distance( @@ -210,7 +211,7 @@ def graph_edit_distance( return bestcost -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def optimal_edit_paths( G1, G2, @@ -322,7 +323,8 @@ def optimal_edit_paths( edge_edit_path : list of tuples ((u1, v1), (u2, v2)) cost : numeric - Optimal edit path cost (graph edit distance). + Optimal edit path cost (graph edit distance). When the cost + is zero, it indicates that `G1` and `G2` are isomorphic. 
Examples -------- @@ -334,6 +336,14 @@ def optimal_edit_paths( >>> cost 5.0 + Notes + ----- + To transform `G1` into a graph isomorphic to `G2`, apply the node + and edge edits in the returned ``edit_paths``. + In the case of isomorphic graphs, the cost is zero, and the paths + represent different isomorphic mappings (isomorphisms). That is, the + edits involve renaming nodes and edges to match the structure of `G2`. + See Also -------- graph_edit_distance, optimize_edit_paths @@ -373,7 +383,7 @@ def optimal_edit_paths( return paths, bestcost -@nx._dispatch(graphs={"G1": 0, "G2": 1}) +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) def optimize_graph_edit_distance( G1, G2, @@ -524,7 +534,7 @@ def optimize_graph_edit_distance( yield cost -@nx._dispatch( +@nx._dispatchable( graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True ) def optimize_edit_paths( @@ -1200,10 +1210,10 @@ def prune(cost): # assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None) # print(vertex_path, edge_path, cost, file = sys.stderr) # assert cost == maxcost_value - yield list(vertex_path), list(edge_path), cost + yield list(vertex_path), list(edge_path), float(cost) -@nx._dispatch +@nx._dispatchable def simrank_similarity( G, source=None, @@ -1223,9 +1233,9 @@ def simrank(G, u, v): in_neighbors_u = G.predecessors(u) in_neighbors_v = G.predecessors(v) scale = C / (len(in_neighbors_u) * len(in_neighbors_v)) - return scale * sum(simrank(G, w, x) - for w, x in product(in_neighbors_u, - in_neighbors_v)) + return scale * sum( + simrank(G, w, x) for w, x in product(in_neighbors_u, in_neighbors_v) + ) where ``G`` is the graph, ``u`` is the source, ``v`` is the target, and ``C`` is a float decay or importance factor between 0 and 1. @@ -1275,6 +1285,14 @@ def simrank(G, u, v): If neither ``source`` nor ``target`` is ``None``, this returns the similarity value for the given pair of nodes. 
+ Raises + ------ + ExceededMaxIterations + If the algorithm does not converge within ``max_iterations``. + + NodeNotFound + If either ``source`` or ``target`` is not in `G`. + Examples -------- >>> G = nx.cycle_graph(2) @@ -1311,8 +1329,21 @@ def simrank(G, u, v): import numpy as np nodelist = list(G) - s_indx = None if source is None else nodelist.index(source) - t_indx = None if target is None else nodelist.index(target) + if source is not None: + if source not in nodelist: + raise nx.NodeNotFound(f"Source node {source} not in G") + else: + s_indx = nodelist.index(source) + else: + s_indx = None + + if target is not None: + if target not in nodelist: + raise nx.NodeNotFound(f"Target node {target} not in G") + else: + t_indx = nodelist.index(target) + else: + t_indx = None x = _simrank_similarity_numpy( G, s_indx, t_indx, importance_factor, max_iterations, tolerance @@ -1320,10 +1351,10 @@ def simrank(G, u, v): if isinstance(x, np.ndarray): if x.ndim == 1: - return dict(zip(G, x)) + return dict(zip(G, x.tolist())) # else x.ndim == 2 - return {u: dict(zip(G, row)) for u, row in zip(G, x)} - return x + return {u: dict(zip(G, row)) for u, row in zip(G, x.tolist())} + return float(x) def _simrank_similarity_python( @@ -1495,7 +1526,7 @@ def _simrank_similarity_numpy( return newsim -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def panther_similarity( G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None, weight="weight" ): @@ -1511,7 +1542,7 @@ def panther_similarity( source : node Source node for which to find the top `k` similar other nodes k : int (default = 5) - The number of most similar nodes to return + The number of most similar nodes to return. path_length : int (default = 5) How long the randomly generated paths should be (``T`` in [1]_) c : float (default = 0.5) @@ -1533,7 +1564,20 @@ def panther_similarity( similarity : dictionary Dictionary of nodes to similarity scores (as floats). 
Note: the self-similarity (i.e., ``v``) will not be included in - the returned dictionary. + the returned dictionary. So, for ``k = 5``, a dictionary of + top 4 nodes and their similarity scores will be returned. + + Raises + ------ + NetworkXUnfeasible + If `source` is an isolated node. + + NodeNotFound + If `source` is not in `G`. + + Notes + ----- + The isolated nodes in `G` are ignored. Examples -------- @@ -1550,6 +1594,18 @@ def panther_similarity( """ import numpy as np + if source not in G: + raise nx.NodeNotFound(f"Source node {source} not in G") + + isolates = set(nx.isolates(G)) + + if source in isolates: + raise nx.NetworkXUnfeasible( + f"Panther similarity is not defined for the isolated source node {source}." + ) + + G = G.subgraph([node for node in G.nodes if node not in isolates]).copy() + num_nodes = G.number_of_nodes() if num_nodes < k: warnings.warn( @@ -1598,17 +1654,19 @@ def panther_similarity( top_k_sorted = top_k_unsorted[np.argsort(S[top_k_unsorted])][::-1] # Add back the similarity scores - top_k_sorted_names = (node_map[n] for n in top_k_sorted) - top_k_with_val = dict(zip(top_k_sorted_names, S[top_k_sorted])) + top_k_with_val = dict( + zip(node_map[top_k_sorted].tolist(), S[top_k_sorted].tolist()) + ) # Remove the self-similarity top_k_with_val.pop(source, None) return top_k_with_val -@nx._dispatch(edge_attrs="weight") +@np_random_state(5) +@nx._dispatchable(edge_attrs="weight") def generate_random_paths( - G, sample_size, path_length=5, index_map=None, weight="weight" + G, sample_size, path_length=5, index_map=None, weight="weight", seed=None ): """Randomly generate `sample_size` paths of length `path_length`. @@ -1629,6 +1687,9 @@ def generate_random_paths( weight : string or None, optional (default="weight") The name of an edge attribute that holds the numerical value used as a weight. If None then each edge has weight 1. + seed : integer, random_state, or None (default) + Indicator of random number generation state. 
+ See :ref:`Randomness`. Returns ------- @@ -1648,7 +1709,9 @@ def generate_random_paths( >>> G = nx.star_graph(3) >>> index_map = {} >>> random_path = nx.generate_random_paths(G, 3, index_map=index_map) - >>> paths_containing_node_0 = [random_path[path_idx] for path_idx in index_map.get(0, [])] + >>> paths_containing_node_0 = [ + ... random_path[path_idx] for path_idx in index_map.get(0, []) + ... ] References ---------- @@ -1660,18 +1723,22 @@ def generate_random_paths( """ import numpy as np + randint_fn = ( + seed.integers if isinstance(seed, np.random.Generator) else seed.randint + ) + # Calculate transition probabilities between # every pair of vertices according to Eq. (3) adj_mat = nx.to_numpy_array(G, weight=weight) inv_row_sums = np.reciprocal(adj_mat.sum(axis=1)).reshape(-1, 1) transition_probabilities = adj_mat * inv_row_sums - node_map = np.array(G) + node_map = list(G) num_nodes = G.number_of_nodes() for path_index in range(sample_size): # Sample current vertex v = v_i uniformly at random - node_index = np.random.randint(0, high=num_nodes) + node_index = randint_fn(num_nodes) node = node_map[node_index] # Add v into p_r and add p_r into the path set @@ -1689,22 +1756,22 @@ def generate_random_paths( for _ in range(path_length): # Randomly sample a neighbor (v_j) according # to transition probabilities from ``node`` (v) to its neighbors - neighbor_index = np.random.choice( + nbr_index = seed.choice( num_nodes, p=transition_probabilities[starting_index] ) # Set current vertex (v = v_j) - starting_index = neighbor_index + starting_index = nbr_index # Add v into p_r - neighbor_node = node_map[neighbor_index] - path.append(neighbor_node) + nbr_node = node_map[nbr_index] + path.append(nbr_node) # Add p_r into P_v if index_map is not None: - if neighbor_node in index_map: - index_map[neighbor_node].add(path_index) + if nbr_node in index_map: + index_map[nbr_node].add(path_index) else: - index_map[neighbor_node] = {path_index} + index_map[nbr_node] = 
{path_index} yield path diff --git a/networkx/algorithms/simple_paths.py b/networkx/algorithms/simple_paths.py index 6324ad67e2d..1bd2feb707b 100644 --- a/networkx/algorithms/simple_paths.py +++ b/networkx/algorithms/simple_paths.py @@ -13,7 +13,7 @@ ] -@nx._dispatch +@nx._dispatchable def is_simple_path(G, nodes): """Returns True if and only if `nodes` form a simple path in `G`. @@ -91,7 +91,7 @@ def is_simple_path(G, nodes): return all(v in G[u] for u, v in pairwise(nodes)) -@nx._dispatch +@nx._dispatchable def all_simple_paths(G, source, target, cutoff=None): """Generate all simple paths in the graph G from source to target. @@ -257,7 +257,7 @@ def all_simple_paths(G, source, target, cutoff=None): yield [source] + [edge[1] for edge in edge_path] -@nx._dispatch +@nx._dispatchable def all_simple_edge_paths(G, source, target, cutoff=None): """Generate lists of edges for all simple paths in G from source to target. @@ -319,7 +319,7 @@ def all_simple_edge_paths(G, source, target, cutoff=None): >>> G.add_node(0) >>> paths = list(nx.all_simple_edge_paths(G, 0, 0)) >>> for path in paths: - ... print (path) + ... print(path) [] >>> len(paths) 1 @@ -402,7 +402,7 @@ def _all_simple_edge_paths(G, source, targets, cutoff): @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def shortest_simple_paths(G, source, target, weight=None): """Generate all simple paths in the graph G from source to target, starting from shortest ones. diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py index 172c4f9a879..05ae1708202 100644 --- a/networkx/algorithms/smallworld.py +++ b/networkx/algorithms/smallworld.py @@ -23,7 +23,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable(returns_graph=True) def random_reference(G, niter=1, connectivity=True, seed=None): """Compute a random graph by swapping edges of a given graph. 
@@ -121,7 +121,7 @@ def random_reference(G, niter=1, connectivity=True, seed=None): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(4) -@nx._dispatch +@nx._dispatchable(returns_graph=True) def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None): """Latticize the given graph by swapping edges. @@ -245,7 +245,7 @@ def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None): @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def sigma(G, niter=100, nrand=10, seed=None): """Returns the small-world coefficient (sigma) of the given graph. @@ -308,13 +308,13 @@ def sigma(G, niter=100, nrand=10, seed=None): sigma = (C / Cr) / (L / Lr) - return sigma + return float(sigma) @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch +@nx._dispatchable def omega(G, niter=5, nrand=10, seed=None): """Returns the small-world coefficient (omega) of a graph @@ -400,4 +400,4 @@ def omega(G, niter=5, nrand=10, seed=None): omega = (Lr / L) - (C / Cl) - return omega + return float(omega) diff --git a/networkx/algorithms/smetric.py b/networkx/algorithms/smetric.py index 80ae314bbdd..5a27014ee55 100644 --- a/networkx/algorithms/smetric.py +++ b/networkx/algorithms/smetric.py @@ -3,7 +3,7 @@ __all__ = ["s_metric"] -@nx._dispatch +@nx._dispatchable def s_metric(G, **kwargs): """Returns the s-metric [1]_ of graph. 
diff --git a/networkx/algorithms/sparsifiers.py b/networkx/algorithms/sparsifiers.py index a94aee0d09d..870b7ba6f37 100644 --- a/networkx/algorithms/sparsifiers.py +++ b/networkx/algorithms/sparsifiers.py @@ -10,7 +10,7 @@ @not_implemented_for("directed") @not_implemented_for("multigraph") @py_random_state(3) -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) def spanner(G, stretch, weight=None, seed=None): """Returns a spanner of the given graph with the given stretch. @@ -136,11 +136,11 @@ def spanner(G, stretch, weight=None, seed=None): # remove edges to centers with edge weight less than # closest_center_weight for neighbor in residual_graph.adj[v]: - neighbor_cluster = clustering[neighbor] - neighbor_weight = lightest_edge_weight[neighbor_cluster] + nbr_cluster = clustering[neighbor] + nbr_weight = lightest_edge_weight[nbr_cluster] if ( - neighbor_cluster == closest_center - or neighbor_weight < closest_center_weight + nbr_cluster == closest_center + or nbr_weight < closest_center_weight ): edges_to_remove.add((v, neighbor)) @@ -257,14 +257,14 @@ def _lightest_edge_dicts(residual_graph, clustering, node): lightest_edge_neighbor = {} lightest_edge_weight = {} for neighbor in residual_graph.adj[node]: - neighbor_center = clustering[neighbor] + nbr_center = clustering[neighbor] weight = residual_graph[node][neighbor]["weight"] if ( - neighbor_center not in lightest_edge_weight - or weight < lightest_edge_weight[neighbor_center] + nbr_center not in lightest_edge_weight + or weight < lightest_edge_weight[nbr_center] ): - lightest_edge_neighbor[neighbor_center] = neighbor - lightest_edge_weight[neighbor_center] = weight + lightest_edge_neighbor[nbr_center] = neighbor + lightest_edge_weight[nbr_center] = weight return lightest_edge_neighbor, lightest_edge_weight diff --git a/networkx/algorithms/structuralholes.py b/networkx/algorithms/structuralholes.py index c676177b38e..bae42d060af 100644 --- 
a/networkx/algorithms/structuralholes.py +++ b/networkx/algorithms/structuralholes.py @@ -5,7 +5,7 @@ __all__ = ["constraint", "local_constraint", "effective_size"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def mutual_weight(G, u, v, weight=None): """Returns the sum of the weights of the edge from `u` to `v` and the edge from `v` to `u` in `G`. @@ -28,7 +28,7 @@ def mutual_weight(G, u, v, weight=None): return a_uv + a_vu -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def normalized_mutual_weight(G, u, v, norm=sum, weight=None): """Returns normalized mutual weight of the edges from `u` to `v` with respect to the mutual weights of the neighbors of `u` in `G`. @@ -49,7 +49,7 @@ def normalized_mutual_weight(G, u, v, norm=sum, weight=None): return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def effective_size(G, nodes=None, weight=None): r"""Returns the effective size of all nodes in the graph ``G``. @@ -162,7 +162,7 @@ def redundancy(G, u, v, weight=None): return effective_size -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def constraint(G, nodes=None, weight=None): r"""Returns the constraint on all nodes in the graph ``G``. @@ -223,13 +223,13 @@ def constraint(G, nodes=None, weight=None): return constraint -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def local_constraint(G, u, v, weight=None): r"""Returns the local constraint on the node ``u`` with respect to the node ``v`` in the graph ``G``. Formally, the *local constraint on u with respect to v*, denoted - $\ell(v)$, is defined by + $\ell(u, v)$, is defined by .. 
math:: diff --git a/networkx/algorithms/summarization.py b/networkx/algorithms/summarization.py index 26665e09b1a..794a77ab34a 100644 --- a/networkx/algorithms/summarization.py +++ b/networkx/algorithms/summarization.py @@ -65,7 +65,7 @@ __all__ = ["dedensify", "snap_aggregation"] -@nx._dispatch +@nx._dispatchable(mutates_input={"not copy": 3}, returns_graph=True) def dedensify(G, threshold, prefix=None, copy=True): """Compresses neighborhoods around high-degree nodes @@ -180,12 +180,12 @@ def dedensify(G, threshold, prefix=None, copy=True): auxiliary = {} for node in G: - high_degree_neighbors = frozenset(high_degree_nodes & set(G[node])) - if high_degree_neighbors: - if high_degree_neighbors in auxiliary: - auxiliary[high_degree_neighbors].add(node) + high_degree_nbrs = frozenset(high_degree_nodes & set(G[node])) + if high_degree_nbrs: + if high_degree_nbrs in auxiliary: + auxiliary[high_degree_nbrs].add(node) else: - auxiliary[high_degree_neighbors] = {node} + auxiliary[high_degree_nbrs] = {node} if copy: G = G.copy() @@ -326,33 +326,33 @@ def _snap_eligible_group(G, groups, group_lookup, edge_types): ------- tuple: group ID to split, and neighbor-groups participation_counts data structure """ - neighbor_info = {node: {gid: Counter() for gid in groups} for node in group_lookup} + nbr_info = {node: {gid: Counter() for gid in groups} for node in group_lookup} for group_id in groups: current_group = groups[group_id] - # build neighbor_info for nodes in group + # build nbr_info for nodes in group for node in current_group: - neighbor_info[node] = {group_id: Counter() for group_id in groups} + nbr_info[node] = {group_id: Counter() for group_id in groups} edges = G.edges(node, keys=True) if G.is_multigraph() else G.edges(node) for edge in edges: neighbor = edge[1] edge_type = edge_types[edge] neighbor_group_id = group_lookup[neighbor] - neighbor_info[node][neighbor_group_id][edge_type] += 1 + nbr_info[node][neighbor_group_id][edge_type] += 1 # check if group_id is 
eligible to be split group_size = len(current_group) for other_group_id in groups: edge_counts = Counter() for node in current_group: - edge_counts.update(neighbor_info[node][other_group_id].keys()) + edge_counts.update(nbr_info[node][other_group_id].keys()) if not all(count == group_size for count in edge_counts.values()): - # only the neighbor_info of the returned group_id is required for handling group splits - return group_id, neighbor_info + # only the nbr_info of the returned group_id is required for handling group splits + return group_id, nbr_info - # if no eligible groups, complete neighbor_info is calculated - return None, neighbor_info + # if no eligible groups, complete nbr_info is calculated + return None, nbr_info def _snap_split(groups, neighbor_info, group_lookup, group_id): @@ -404,7 +404,9 @@ def _snap_split(groups, neighbor_info, group_lookup, group_id): return groups -@nx._dispatch(node_attrs="[node_attributes]", edge_attrs="[edge_attributes]") +@nx._dispatchable( + node_attrs="[node_attributes]", edge_attrs="[edge_attributes]", returns_graph=True +) def snap_aggregation( G, node_attributes, @@ -492,13 +494,13 @@ def snap_aggregation( >>> for node in nodes: ... attributes = nodes[node] ... G.add_node(node, **attributes) - ... >>> for source, target, type in edges: ... G.add_edge(source, target, type=type) - ... - >>> node_attributes = ('color', ) - >>> edge_attributes = ('type', ) - >>> summary_graph = nx.snap_aggregation(G, node_attributes=node_attributes, edge_attributes=edge_attributes) + >>> node_attributes = ("color",) + >>> edge_attributes = ("type",) + >>> summary_graph = nx.snap_aggregation( + ... G, node_attributes=node_attributes, edge_attributes=edge_attributes + ... 
) Notes ----- @@ -540,12 +542,12 @@ def snap_aggregation( for node, node_type in group_lookup.items(): groups[node_type].add(node) - eligible_group_id, neighbor_info = _snap_eligible_group( + eligible_group_id, nbr_info = _snap_eligible_group( G, groups, group_lookup, edge_types ) while eligible_group_id: - groups = _snap_split(groups, neighbor_info, group_lookup, eligible_group_id) - eligible_group_id, neighbor_info = _snap_eligible_group( + groups = _snap_split(groups, nbr_info, group_lookup, eligible_group_id) + eligible_group_id, nbr_info = _snap_eligible_group( G, groups, group_lookup, edge_types ) return _snap_build_graph( @@ -553,7 +555,7 @@ def snap_aggregation( groups, node_attributes, edge_attributes, - neighbor_info, + nbr_info, edge_types, prefix, supernode_attribute, diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py index 926be49831e..c190f970b6f 100644 --- a/networkx/algorithms/swap.py +++ b/networkx/algorithms/swap.py @@ -11,7 +11,7 @@ @nx.utils.not_implemented_for("undirected") @py_random_state(3) -@nx._dispatch +@nx._dispatchable(mutates_input=True, returns_graph=True) def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): """Swap three edges in a directed graph while keeping the node degrees fixed. @@ -57,6 +57,8 @@ def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): The graph G is modified in place. + A later swap is allowed to undo a previous swap. + References ---------- .. [1] Erdős, Péter L., et al. “A Simple Havel-Hakimi Type Algorithm to Realize @@ -131,7 +133,7 @@ def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None): @py_random_state(3) -@nx._dispatch +@nx._dispatchable(mutates_input=True, returns_graph=True) def double_edge_swap(G, nswap=1, max_tries=100, seed=None): """Swap two edges in the graph while keeping the node degrees fixed. 
@@ -229,7 +231,7 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None): @py_random_state(3) -@nx._dispatch +@nx._dispatchable(mutates_input=True) def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None): """Attempts the specified number of double-edge swaps in the graph `G`. diff --git a/networkx/algorithms/tests/test_bridges.py b/networkx/algorithms/tests/test_bridges.py index 9c3ceba607f..b47f5860083 100644 --- a/networkx/algorithms/tests/test_bridges.py +++ b/networkx/algorithms/tests/test_bridges.py @@ -127,7 +127,7 @@ def test_no_weight(self): inf = float("inf") expected = {(3, 4, inf), (4, 3, inf)} assert next(nx.local_bridges(self.BB)) in expected - expected = {(u, v, 3) for u, v, in self.square.edges} + expected = {(u, v, 3) for u, v in self.square.edges} assert set(nx.local_bridges(self.square)) == expected assert list(nx.local_bridges(self.tri)) == [] diff --git a/networkx/algorithms/tests/test_broadcasting.py b/networkx/algorithms/tests/test_broadcasting.py new file mode 100644 index 00000000000..8ce34cf2bcc --- /dev/null +++ b/networkx/algorithms/tests/test_broadcasting.py @@ -0,0 +1,81 @@ +"""Unit tests for the broadcasting module.""" +import math + +import networkx as nx + + +def test_example_tree_broadcast(): + """ + Test the BROADCAST algorithm on the example in the paper titled: "Information Dissemination in Trees" + """ + edge_list = [ + (0, 1), + (1, 2), + (2, 7), + (3, 4), + (5, 4), + (4, 7), + (6, 7), + (7, 9), + (8, 9), + (9, 13), + (13, 14), + (14, 15), + (14, 16), + (14, 17), + (13, 11), + (11, 10), + (11, 12), + (13, 18), + (18, 19), + (18, 20), + ] + G = nx.Graph(edge_list) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == 6 + assert b_C == {13, 9} + # test broadcast time from specific vertex + assert nx.tree_broadcast_time(G, 17) == 8 + assert nx.tree_broadcast_time(G, 3) == 9 + # test broadcast time of entire tree + assert nx.tree_broadcast_time(G) == 10 + + +def test_path_broadcast(): + for i in range(2, 
12): + G = nx.path_graph(i) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == math.ceil(i / 2) + assert b_C == { + math.ceil(i / 2), + math.floor(i / 2), + math.ceil(i / 2 - 1), + math.floor(i / 2 - 1), + } + assert nx.tree_broadcast_time(G) == i - 1 + + +def test_empty_graph_broadcast(): + H = nx.empty_graph(1) + b_T, b_C = nx.tree_broadcast_center(H) + assert b_T == 0 + assert b_C == {0} + assert nx.tree_broadcast_time(H) == 0 + + +def test_star_broadcast(): + for i in range(4, 12): + G = nx.star_graph(i) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == i + assert b_C == set(G.nodes()) + assert nx.tree_broadcast_time(G) == b_T + + +def test_binomial_tree_broadcast(): + for i in range(2, 8): + G = nx.binomial_tree(i) + b_T, b_C = nx.tree_broadcast_center(G) + assert b_T == i + assert b_C == {0, 2 ** (i - 1)} + assert nx.tree_broadcast_time(G) == 2 * i - 1 diff --git a/networkx/algorithms/tests/test_cluster.py b/networkx/algorithms/tests/test_cluster.py index d69f036ff6c..b656ba81553 100644 --- a/networkx/algorithms/tests/test_cluster.py +++ b/networkx/algorithms/tests/test_cluster.py @@ -457,6 +457,12 @@ def test_peng_square_clustering(self): G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (3, 6)]) assert nx.square_clustering(G, [1])[1] == 1 / 3 + def test_self_loops_square_clustering(self): + G = nx.path_graph(5) + assert nx.square_clustering(G) == {0: 0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0} + G.add_edges_from([(0, 0), (1, 1), (2, 2)]) + assert nx.square_clustering(G) == {0: 1, 1: 0.5, 2: 0.2, 3: 0.0, 4: 0} + class TestAverageClustering: @classmethod diff --git a/networkx/algorithms/tests/test_d_separation.py b/networkx/algorithms/tests/test_d_separation.py index a94d4dd4cfb..6f62971301b 100644 --- a/networkx/algorithms/tests/test_d_separation.py +++ b/networkx/algorithms/tests/test_d_separation.py @@ -81,6 +81,41 @@ def asia_graph_fixture(): return asia_graph() +@pytest.fixture() +def large_collider_graph(): + edge_list = [("A", "B"), ("C", "B"), 
("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def chain_and_fork_graph(): + edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def no_separating_set_graph(): + edge_list = [("A", "B")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def large_no_separating_set_graph(): + edge_list = [("A", "B"), ("C", "A"), ("C", "B")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def collider_trek_graph(): + edge_list = [("A", "B"), ("C", "B"), ("C", "D")] + G = nx.DiGraph(edge_list) + return G + + @pytest.mark.parametrize( "graph", [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph()], @@ -90,40 +125,40 @@ def test_markov_condition(graph): for node in graph.nodes: parents = set(graph.predecessors(node)) non_descendants = graph.nodes - nx.descendants(graph, node) - {node} - parents - assert nx.d_separated(graph, {node}, non_descendants, parents) + assert nx.is_d_separator(graph, {node}, non_descendants, parents) def test_path_graph_dsep(path_graph): """Example-based test of d-separation for path_graph.""" - assert nx.d_separated(path_graph, {0}, {2}, {1}) - assert not nx.d_separated(path_graph, {0}, {2}, {}) + assert nx.is_d_separator(path_graph, {0}, {2}, {1}) + assert not nx.is_d_separator(path_graph, {0}, {2}, set()) def test_fork_graph_dsep(fork_graph): """Example-based test of d-separation for fork_graph.""" - assert nx.d_separated(fork_graph, {1}, {2}, {0}) - assert not nx.d_separated(fork_graph, {1}, {2}, {}) + assert nx.is_d_separator(fork_graph, {1}, {2}, {0}) + assert not nx.is_d_separator(fork_graph, {1}, {2}, set()) def test_collider_graph_dsep(collider_graph): """Example-based test of d-separation for collider_graph.""" - assert nx.d_separated(collider_graph, {0}, {1}, {}) - assert not nx.d_separated(collider_graph, {0}, {1}, {2}) + assert nx.is_d_separator(collider_graph, 
{0}, {1}, set()) + assert not nx.is_d_separator(collider_graph, {0}, {1}, {2}) def test_naive_bayes_dsep(naive_bayes_graph): """Example-based test of d-separation for naive_bayes_graph.""" for u, v in combinations(range(1, 5), 2): - assert nx.d_separated(naive_bayes_graph, {u}, {v}, {0}) - assert not nx.d_separated(naive_bayes_graph, {u}, {v}, {}) + assert nx.is_d_separator(naive_bayes_graph, {u}, {v}, {0}) + assert not nx.is_d_separator(naive_bayes_graph, {u}, {v}, set()) def test_asia_graph_dsep(asia_graph): """Example-based test of d-separation for asia_graph.""" - assert nx.d_separated( + assert nx.is_d_separator( asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, {"bronchitis", "either"} ) - assert nx.d_separated( + assert nx.is_d_separator( asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"} ) @@ -137,11 +172,11 @@ def test_undirected_graphs_are_not_supported(): """ g = nx.path_graph(3, nx.Graph) with pytest.raises(nx.NetworkXNotImplemented): - nx.d_separated(g, {0}, {1}, {2}) + nx.is_d_separator(g, {0}, {1}, {2}) with pytest.raises(nx.NetworkXNotImplemented): nx.is_minimal_d_separator(g, {0}, {1}, {2}) with pytest.raises(nx.NetworkXNotImplemented): - nx.minimal_d_separator(g, {0}, {1}) + nx.find_minimal_d_separator(g, {0}, {1}) def test_cyclic_graphs_raise_error(): @@ -152,60 +187,128 @@ def test_cyclic_graphs_raise_error(): """ g = nx.cycle_graph(3, nx.DiGraph) with pytest.raises(nx.NetworkXError): - nx.d_separated(g, {0}, {1}, {2}) + nx.is_d_separator(g, {0}, {1}, {2}) with pytest.raises(nx.NetworkXError): - nx.minimal_d_separator(g, 0, 1) + nx.find_minimal_d_separator(g, {0}, {1}) with pytest.raises(nx.NetworkXError): - nx.is_minimal_d_separator(g, 0, 1, {2}) + nx.is_minimal_d_separator(g, {0}, {1}, {2}) def test_invalid_nodes_raise_error(asia_graph): """ Test that graphs that have invalid nodes passed in raise errors. 
""" + # Check both set and node arguments + with pytest.raises(nx.NodeNotFound): + nx.is_d_separator(asia_graph, {0}, {1}, {2}) + with pytest.raises(nx.NodeNotFound): + nx.is_d_separator(asia_graph, 0, 1, 2) + with pytest.raises(nx.NodeNotFound): + nx.is_minimal_d_separator(asia_graph, {0}, {1}, {2}) with pytest.raises(nx.NodeNotFound): - nx.d_separated(asia_graph, {0}, {1}, {2}) + nx.is_minimal_d_separator(asia_graph, 0, 1, 2) with pytest.raises(nx.NodeNotFound): - nx.is_minimal_d_separator(asia_graph, 0, 1, {2}) + nx.find_minimal_d_separator(asia_graph, {0}, {1}) with pytest.raises(nx.NodeNotFound): - nx.minimal_d_separator(asia_graph, 0, 1) + nx.find_minimal_d_separator(asia_graph, 0, 1) -def test_minimal_d_separator(): +def test_nondisjoint_node_sets_raise_error(collider_graph): + """ + Test that error is raised when node sets aren't disjoint. + """ + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 1, 0) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 2, 0) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 0, 1) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 1, 0, 0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 0, 0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 0, 1, included=0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 1, 0, included=0) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 0, 0, set()) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 0, 1, set(), included=0) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 1, 0, set(), included=0) + + +def test_is_minimal_d_separator( + large_collider_graph, + chain_and_fork_graph, + no_separating_set_graph, + large_no_separating_set_graph, + collider_trek_graph, +): # 
Case 1: # create a graph A -> B <- C # B -> D -> E; # B -> F; # G -> E; - edge_list = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")] - G = nx.DiGraph(edge_list) - assert not nx.d_separated(G, {"B"}, {"E"}, set()) + assert not nx.is_d_separator(large_collider_graph, {"B"}, {"E"}, set()) # minimal set of the corresponding graph # for B and E should be (D,) - Zmin = nx.minimal_d_separator(G, "B", "E") - - # the minimal separating set should pass the test for minimality - assert nx.is_minimal_d_separator(G, "B", "E", Zmin) + Zmin = nx.find_minimal_d_separator(large_collider_graph, "B", "E") + # check that the minimal d-separator is a d-separating set + assert nx.is_d_separator(large_collider_graph, "B", "E", Zmin) + # the minimal separating set should also pass the test for minimality + assert nx.is_minimal_d_separator(large_collider_graph, "B", "E", Zmin) + # function should also work with set arguments + assert nx.is_minimal_d_separator(large_collider_graph, {"A", "B"}, {"G", "E"}, Zmin) assert Zmin == {"D"} # Case 2: # create a graph A -> B -> C # B -> D -> C; - edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")] - G = nx.DiGraph(edge_list) - assert not nx.d_separated(G, {"A"}, {"C"}, set()) - Zmin = nx.minimal_d_separator(G, "A", "C") + assert not nx.is_d_separator(chain_and_fork_graph, {"A"}, {"C"}, set()) + Zmin = nx.find_minimal_d_separator(chain_and_fork_graph, "A", "C") # the minimal separating set should pass the test for minimality - assert nx.is_minimal_d_separator(G, "A", "C", Zmin) + assert nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Zmin) assert Zmin == {"B"} - Znotmin = Zmin.union({"D"}) - assert not nx.is_minimal_d_separator(G, "A", "C", Znotmin) + assert not nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Znotmin) + + # Case 3: + # create a graph A -> B + + # there is no m-separating set between A and B at all, so + # no minimal m-separating set can exist + assert not 
nx.is_d_separator(no_separating_set_graph, {"A"}, {"B"}, set()) + assert nx.find_minimal_d_separator(no_separating_set_graph, "A", "B") is None + + # Case 4: + # create a graph A -> B with A <- C -> B + + # there is no m-separating set between A and B at all, so + # no minimal m-separating set can exist + # however, the algorithm will initially propose C as a + # minimal (but invalid) separating set + assert not nx.is_d_separator(large_no_separating_set_graph, {"A"}, {"B"}, {"C"}) + assert nx.find_minimal_d_separator(large_no_separating_set_graph, "A", "B") is None + + # Test `included` and `excluded` args + # create graph A -> B <- C -> D + assert nx.find_minimal_d_separator(collider_trek_graph, "A", "D", included="B") == { + "B", + "C", + } + assert ( + nx.find_minimal_d_separator( + collider_trek_graph, "A", "D", included="B", restricted="B" + ) + is None + ) -def test_minimal_d_separator_checks_dsep(): +def test_is_minimal_d_separator_checks_dsep(): """Test that is_minimal_d_separator checks for d-separation as well.""" g = nx.DiGraph() g.add_edges_from( @@ -221,8 +324,25 @@ def test_minimal_d_separator_checks_dsep(): ] ) - assert not nx.d_separated(g, {"C"}, {"F"}, {"D"}) + assert not nx.is_d_separator(g, {"C"}, {"F"}, {"D"}) # since {'D'} and {} are not d-separators, we return false assert not nx.is_minimal_d_separator(g, "C", "F", {"D"}) - assert not nx.is_minimal_d_separator(g, "C", "F", {}) + assert not nx.is_minimal_d_separator(g, "C", "F", set()) + + +def test__reachable(large_collider_graph): + reachable = nx.algorithms.d_separation._reachable + g = large_collider_graph + x = {"F", "D"} + ancestors = {"A", "B", "C", "D", "F"} + assert reachable(g, x, ancestors, {"B"}) == {"B", "F", "D"} + assert reachable(g, x, ancestors, set()) == ancestors + + +def test_deprecations(): + G = nx.DiGraph([(0, 1), (1, 2)]) + with pytest.deprecated_call(): + nx.d_separated(G, 0, 2, {1}) + with pytest.deprecated_call(): + z = nx.minimal_d_separator(G, 0, 2) diff --git 
a/networkx/algorithms/tests/test_dag.py b/networkx/algorithms/tests/test_dag.py index 540c0c55e4d..d26c9fd3b4d 100644 --- a/networkx/algorithms/tests/test_dag.py +++ b/networkx/algorithms/tests/test_dag.py @@ -618,11 +618,17 @@ def test_is_aperiodic_selfloop(): assert nx.is_aperiodic(G) -def test_is_aperiodic_raise(): +def test_is_aperiodic_undirected_raises(): G = nx.Graph() pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) +def test_is_aperiodic_empty_graph(): + G = nx.empty_graph(create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes."): + nx.is_aperiodic(G) + + def test_is_aperiodic_bipartite(): # Bipartite graph G = nx.DiGraph(nx.davis_southern_women_graph()) diff --git a/networkx/algorithms/tests/test_distance_measures.py b/networkx/algorithms/tests/test_distance_measures.py index d4b5cf2f119..5cfe2cc9cae 100644 --- a/networkx/algorithms/tests/test_distance_measures.py +++ b/networkx/algorithms/tests/test_distance_measures.py @@ -423,6 +423,91 @@ def test_resistance_distance_all(self): assert round(rd[1][3], 5) == 1 +class TestEffectiveGraphResistance: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def setup_method(self): + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=4) + self.G = G + + def test_effective_graph_resistance_directed_graph(self): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXNotImplemented): + nx.effective_graph_resistance(G) + + def test_effective_graph_resistance_empty(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.effective_graph_resistance(G) + + def test_effective_graph_resistance_not_connected(self): + G = nx.Graph([(1, 2), (3, 4)]) + RG = nx.effective_graph_resistance(G) + assert np.isinf(RG) + + def test_effective_graph_resistance(self): + RG = nx.effective_graph_resistance(self.G, "weight", True) + rd12 = 1 / (1 / (1 + 4) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / 4) 
+ rd23 = 1 / (1 / (2 + 4) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_noinv(self): + RG = nx.effective_graph_resistance(self.G, "weight", False) + rd12 = 1 / (1 / (1 / 1 + 1 / 4) + 1 / (1 / 2)) + rd13 = 1 / (1 / (1 / 1 + 1 / 2) + 1 / (1 / 4)) + rd23 = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1)) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_no_weight(self): + RG = nx.effective_graph_resistance(self.G) + assert np.isclose(RG, 2) + + def test_effective_graph_resistance_neg_weight(self): + self.G[2][3]["weight"] = -4 + RG = nx.effective_graph_resistance(self.G, "weight", True) + rd12 = 1 / (1 / (1 + -4) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / (-4)) + rd23 = 1 / (1 / (2 + -4) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=1) + G.add_edge(2, 3, weight=3) + RG = nx.effective_graph_resistance(G, "weight", True) + edge23 = 1 / (1 / 1 + 1 / 3) + rd12 = 1 / (1 / (1 + edge23) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / edge23) + rd23 = 1 / (1 / (2 + edge23) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_div0(self): + with pytest.raises(ZeroDivisionError): + self.G[1][2]["weight"] = 0 + nx.effective_graph_resistance(self.G, "weight") + + def test_effective_graph_resistance_complete_graph(self): + N = 10 + G = nx.complete_graph(N) + RG = nx.effective_graph_resistance(G) + assert np.isclose(RG, N - 1) + + def test_effective_graph_resistance_path_graph(self): + N = 10 + G = nx.path_graph(N) + RG = nx.effective_graph_resistance(G) + assert np.isclose(RG, (N - 1) * N * (N + 1) // 6) + + class TestBarycenter: """Test :func:`networkx.algorithms.distance_measures.barycenter`.""" diff --git a/networkx/algorithms/tests/test_distance_regular.py b/networkx/algorithms/tests/test_distance_regular.py 
index d336b188214..545fb6dee6a 100644 --- a/networkx/algorithms/tests/test_distance_regular.py +++ b/networkx/algorithms/tests/test_distance_regular.py @@ -1,7 +1,19 @@ +import pytest + import networkx as nx from networkx import is_strongly_regular +@pytest.mark.parametrize( + "f", (nx.is_distance_regular, nx.intersection_array, nx.is_strongly_regular) +) +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(f, graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + f(G) + + class TestDistanceRegular: def test_is_distance_regular(self): assert nx.is_distance_regular(nx.icosahedral_graph()) @@ -41,6 +53,13 @@ def test_intersection_array(self): assert c == [1, 2, 5] +@pytest.mark.parametrize("f", (nx.is_distance_regular, nx.is_strongly_regular)) +def test_empty_graph_raises(f): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"): + f(G) + + class TestStronglyRegular: """Unit tests for the :func:`~networkx.is_strongly_regular` function. 
diff --git a/networkx/algorithms/tests/test_euler.py b/networkx/algorithms/tests/test_euler.py index 08eaf7fccc6..b5871f09b5a 100644 --- a/networkx/algorithms/tests/test_euler.py +++ b/networkx/algorithms/tests/test_euler.py @@ -5,6 +5,13 @@ import networkx as nx +@pytest.mark.parametrize("f", (nx.is_eulerian, nx.is_semieulerian)) +def test_empty_graph_raises(f): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"): + f(G) + + class TestIsEulerian: def test_is_eulerian(self): assert nx.is_eulerian(nx.complete_graph(5)) diff --git a/networkx/algorithms/tests/test_link_prediction.py b/networkx/algorithms/tests/test_link_prediction.py index 7fc04d20672..b4643b3420d 100644 --- a/networkx/algorithms/tests/test_link_prediction.py +++ b/networkx/algorithms/tests/test_link_prediction.py @@ -34,23 +34,17 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(1, 2)], [(1, 2, 0.25)]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -80,23 +74,17 @@ def test_P4(self): G = nx.path_graph(4) self.test(G, [(0, 2)], [(0, 2, 0.5)]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - 
assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_edges_from([(0, 1), (2, 3)]) @@ -131,23 +119,17 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -190,6 +172,11 @@ def test_notimplemented(self, graph_type): nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ 
-223,23 +210,17 @@ def test_S4(self): G = nx.star_graph(4) self.test(G, [(0, 2)], [(0, 2, 4)]) - def test_notimplemented(self): - assert pytest.raises( - nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] - ) - assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiGraph([(0, 1), (1, 2)]), - [(0, 2)], - ) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): assert pytest.raises( - nx.NetworkXNotImplemented, - self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), - [(0, 2)], + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] ) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_zero_degrees(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -282,17 +263,21 @@ def test_S4(self): G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 2)]) - def test_notimplemented(self): - G = nx.DiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiDiGraph([(0, 1), (1, 2)]) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = 
nx.Graph() G.add_nodes_from([0, 1]) @@ -387,17 +372,21 @@ def test_S4(self): G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 0.25)]) - def test_notimplemented(self): - G = nx.DiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiDiGraph([(0, 1), (1, 2)]) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) @@ -495,17 +484,21 @@ def test_S4(self): G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)]) - def test_notimplemented(self): - G = nx.DiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiGraph([(0, 1), (1, 2)]) - G.add_nodes_from([0, 1, 2], community=0) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) - G = nx.MultiDiGraph([(0, 1), (1, 2)]) + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) G.add_nodes_from([0, 1, 2], community=0) assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + def test_node_not_found(self): + G = nx.Graph() + 
G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) diff --git a/networkx/algorithms/tests/test_mis.py b/networkx/algorithms/tests/test_mis.py index 379c5c07c7a..02be02d4c33 100644 --- a/networkx/algorithms/tests/test_mis.py +++ b/networkx/algorithms/tests/test_mis.py @@ -58,5 +58,5 @@ def test_random_graphs(): G = nx.erdos_renyi_graph(i * 10 + 1, random.random()) IS = nx.maximal_independent_set(G) assert G.subgraph(IS).number_of_edges() == 0 - neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) - assert all(v in neighbors_of_MIS for v in set(G.nodes()).difference(IS)) + nbrs_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) + assert all(v in nbrs_of_MIS for v in set(G.nodes()).difference(IS)) diff --git a/networkx/algorithms/tests/test_planarity.py b/networkx/algorithms/tests/test_planarity.py index 470b1d23bb8..99bcff4184a 100644 --- a/networkx/algorithms/tests/test_planarity.py +++ b/networkx/algorithms/tests/test_planarity.py @@ -277,6 +277,20 @@ def test_counterexample_planar_recursive(self): G.add_node(1) get_counterexample_recursive(G) + def test_edge_removal_from_planar_embedding(self): + # PlanarEmbedding.check_structure() must succeed after edge removal + edges = ((0, 1), (1, 2), (2, 3), (3, 4), (4, 0), (0, 2), (0, 3)) + G = nx.Graph(edges) + cert, P = nx.check_planarity(G) + assert cert is True + P.remove_edge(0, 2) + self.check_graph(P, is_planar=True) + P.add_half_edge_ccw(1, 3, 2) + P.add_half_edge_cw(3, 1, 2) + self.check_graph(P, is_planar=True) + P.remove_edges_from(((0, 3), (1, 3))) + self.check_graph(P, is_planar=True) + def check_embedding(G, embedding): """Raises an exception if the combinatorial embedding is not correct @@ -372,48 +386,114 @@ def check_counterexample(G, 
sub_graph): class TestPlanarEmbeddingClass: + def test_add_half_edge(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(0, 1) + with pytest.raises( + nx.NetworkXException, match="Invalid clockwise reference node." + ): + embedding.add_half_edge(0, 2, cw=3) + with pytest.raises( + nx.NetworkXException, match="Invalid counterclockwise reference node." + ): + embedding.add_half_edge(0, 2, ccw=3) + with pytest.raises( + nx.NetworkXException, match="Only one of cw/ccw can be specified." + ): + embedding.add_half_edge(0, 2, cw=1, ccw=1) + with pytest.raises( + nx.NetworkXException, + match=( + r"Node already has out-half-edge\(s\), either" + " cw or ccw reference node required." + ), + ): + embedding.add_half_edge(0, 2) + # these should work + embedding.add_half_edge(0, 2, cw=1) + embedding.add_half_edge(0, 3, ccw=1) + assert sorted(embedding.edges(data=True)) == [ + (0, 1, {"ccw": 2, "cw": 3}), + (0, 2, {"cw": 1, "ccw": 3}), + (0, 3, {"cw": 2, "ccw": 1}), + ] + def test_get_data(self): - embedding = self.get_star_embedding(3) + embedding = self.get_star_embedding(4) data = embedding.get_data() - data_cmp = {0: [2, 1], 1: [0], 2: [0]} + data_cmp = {0: [3, 2, 1], 1: [0], 2: [0], 3: [0]} assert data == data_cmp + def test_edge_removal(self): + embedding = nx.PlanarEmbedding() + embedding.set_data( + { + 1: [2, 5, 7], + 2: [1, 3, 4, 5], + 3: [2, 4], + 4: [3, 6, 5, 2], + 5: [7, 1, 2, 4], + 6: [4, 7], + 7: [6, 1, 5], + } + ) + # remove_edges_from() calls remove_edge(), so both are tested here + embedding.remove_edges_from(((5, 4), (1, 5))) + embedding.check_structure() + embedding_expected = nx.PlanarEmbedding() + embedding_expected.set_data( + { + 1: [2, 7], + 2: [1, 3, 4, 5], + 3: [2, 4], + 4: [3, 6, 2], + 5: [7, 2], + 6: [4, 7], + 7: [6, 1, 5], + } + ) + assert nx.utils.graphs_equal(embedding, embedding_expected) + def test_missing_edge_orientation(self): + embedding = nx.PlanarEmbedding({1: {2: {}}, 2: {1: {}}}) with pytest.raises(nx.NetworkXException): - 
embedding = nx.PlanarEmbedding() - embedding.add_edge(1, 2) - embedding.add_edge(2, 1) # Invalid structure because the orientation of the edge was not set embedding.check_structure() def test_invalid_edge_orientation(self): + embedding = nx.PlanarEmbedding( + { + 1: {2: {"cw": 2, "ccw": 2}}, + 2: {1: {"cw": 1, "ccw": 1}}, + 1: {3: {}}, + 3: {1: {}}, + } + ) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge_first(1, 2) - embedding.add_half_edge_first(2, 1) - embedding.add_edge(1, 3) embedding.check_structure() def test_missing_half_edge(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(1, 2) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge_first(1, 2) # Invalid structure because other half edge is missing embedding.check_structure() def test_not_fulfilling_euler_formula(self): + embedding = nx.PlanarEmbedding() + for i in range(5): + ref = None + for j in range(5): + if i != j: + embedding.add_half_edge(i, j, cw=ref) + ref = j with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - for i in range(5): - for j in range(5): - if i != j: - embedding.add_half_edge_first(i, j) embedding.check_structure() def test_missing_reference(self): - with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_half_edge_cw(1, 2, 3) + embedding = nx.PlanarEmbedding() + with pytest.raises(nx.NetworkXException, match="Invalid reference node."): + embedding.add_half_edge(1, 2, ccw=3) def test_connect_components(self): embedding = nx.PlanarEmbedding() @@ -421,22 +501,35 @@ def test_connect_components(self): def test_successful_face_traversal(self): embedding = nx.PlanarEmbedding() - embedding.add_half_edge_first(1, 2) - embedding.add_half_edge_first(2, 1) + embedding.add_half_edge(1, 2) + embedding.add_half_edge(2, 1) face = embedding.traverse_face(1, 2) assert face == [1, 2] def 
test_unsuccessful_face_traversal(self): + embedding = nx.PlanarEmbedding( + {1: {2: {"cw": 3, "ccw": 2}}, 2: {1: {"cw": 3, "ccw": 1}}} + ) with pytest.raises(nx.NetworkXException): - embedding = nx.PlanarEmbedding() - embedding.add_edge(1, 2, ccw=2, cw=3) - embedding.add_edge(2, 1, ccw=1, cw=3) embedding.traverse_face(1, 2) + def test_forbidden_methods(self): + embedding = nx.PlanarEmbedding() + embedding.add_node(42) # no exception + embedding.add_nodes_from([(23, 24)]) # no exception + with pytest.raises(NotImplementedError): + embedding.add_edge(1, 3) + with pytest.raises(NotImplementedError): + embedding.add_edges_from([(0, 2), (1, 4)]) + with pytest.raises(NotImplementedError): + embedding.add_weighted_edges_from([(0, 2, 350), (1, 4, 125)]) + @staticmethod def get_star_embedding(n): embedding = nx.PlanarEmbedding() + ref = None for i in range(1, n): - embedding.add_half_edge_first(0, i) - embedding.add_half_edge_first(i, 0) + embedding.add_half_edge(0, i, cw=ref) + ref = i + embedding.add_half_edge(i, 0) return embedding diff --git a/networkx/algorithms/tests/test_regular.py b/networkx/algorithms/tests/test_regular.py index 0c8e4e46582..a8b4c3a30de 100644 --- a/networkx/algorithms/tests/test_regular.py +++ b/networkx/algorithms/tests/test_regular.py @@ -68,6 +68,12 @@ def test_is_regular4(self): assert reg.is_regular(g) +def test_is_regular_empty_graph_raises(): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"): + nx.is_regular(G) + + class TestIsKRegular: def test_is_k_regular1(self): g = gen.cycle_graph(4) diff --git a/networkx/algorithms/tests/test_richclub.py b/networkx/algorithms/tests/test_richclub.py index 5638ddbf007..8d83abaea18 100644 --- a/networkx/algorithms/tests/test_richclub.py +++ b/networkx/algorithms/tests/test_richclub.py @@ -91,6 +91,58 @@ def test_rich_club_selfloop(): nx.rich_club_coefficient(G) +def test_rich_club_leq_3_nodes_unnormalized(): + # edgeless graphs upto 3 nodes + G = nx.Graph() + 
rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {} + + for i in range(3): + G.add_node(i) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {} + + # 2 nodes, single edge + G = nx.Graph() + G.add_edge(0, 1) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1} + + # 3 nodes, single edge + G = nx.Graph() + G.add_nodes_from([0, 1, 2]) + G.add_edge(0, 1) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1} + + # 3 nodes, 2 edges + G.add_edge(1, 2) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 2 / 3} + + # 3 nodes, 3 edges + G.add_edge(0, 2) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1, 1: 1} + + +def test_rich_club_leq_3_nodes_normalized(): + G = nx.Graph() + with pytest.raises( + nx.exception.NetworkXError, + match="Graph has fewer than four nodes", + ): + rc = nx.rich_club_coefficient(G, normalized=True) + + for i in range(3): + G.add_node(i) + with pytest.raises( + nx.exception.NetworkXError, + match="Graph has fewer than four nodes", + ): + rc = nx.rich_club_coefficient(G, normalized=True) + + # def test_richclub2_normalized(): # T = nx.balanced_tree(2,10) # rcNorm = nx.richclub.rich_club_coefficient(T,Q=2) diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py index b7fcf90fa43..3836ccfe182 100644 --- a/networkx/algorithms/tests/test_similarity.py +++ b/networkx/algorithms/tests/test_similarity.py @@ -699,6 +699,16 @@ def test_simrank_max_iterations(self, alg): G = nx.cycle_graph(5) pytest.raises(nx.ExceededMaxIterations, alg, G, max_iterations=10) + def test_simrank_source_not_found(self): + G = nx.cycle_graph(5) + with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"): + nx.simrank_similarity(G, source=10) + + def test_simrank_target_not_found(self): + G = nx.cycle_graph(5) + with pytest.raises(nx.NodeNotFound, match="Target node 10 not in G"): + 
nx.simrank_similarity(G, target=10) + def test_simrank_between_versions(self): G = nx.cycle_graph(5) # _python tolerance 1e-4 @@ -819,9 +829,22 @@ def test_panther_similarity_weighted(self): sim = nx.panther_similarity(G, "v1", path_length=2, weight="w") assert sim == expected - def test_generate_random_paths_unweighted(self): - np.random.seed(42) + def test_panther_similarity_source_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)]) + with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"): + nx.panther_similarity(G, source=10) + + def test_panther_similarity_isolated(self): + G = nx.Graph() + G.add_nodes_from(range(5)) + with pytest.raises( + nx.NetworkXUnfeasible, + match="Panther similarity is not defined for the isolated source node 1.", + ): + nx.panther_similarity(G, source=1) + def test_generate_random_paths_unweighted(self): index_map = {} num_paths = 10 path_length = 2 @@ -832,7 +855,7 @@ def test_generate_random_paths_unweighted(self): G.add_edge(1, 2) G.add_edge(2, 4) paths = nx.generate_random_paths( - G, num_paths, path_length=path_length, index_map=index_map + G, num_paths, path_length=path_length, index_map=index_map, seed=42 ) expected_paths = [ [3, 0, 3], diff --git a/networkx/algorithms/tests/test_structuralholes.py b/networkx/algorithms/tests/test_structuralholes.py index 6f92baa4f32..215ce4530fa 100644 --- a/networkx/algorithms/tests/test_structuralholes.py +++ b/networkx/algorithms/tests/test_structuralholes.py @@ -52,7 +52,7 @@ def setup_method(self): ("G", "C"): 10, } - # This additionally tests the @nx._dispatch mechanism, treating + # This additionally tests the @nx._dispatchable mechanism, treating # nx.mutual_weight as if it were a re-implementation from another package @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert]) def test_constraint_directed(self, wrapper): diff --git a/networkx/algorithms/tests/test_swap.py 
b/networkx/algorithms/tests/test_swap.py index 49dd5f8e8c7..c4aeb0682e0 100644 --- a/networkx/algorithms/tests/test_swap.py +++ b/networkx/algorithms/tests/test_swap.py @@ -2,14 +2,36 @@ import networkx as nx - -def test_directed_edge_swap(): - graph = nx.path_graph(200, create_using=nx.DiGraph) - in_degrees = sorted((n, d) for n, d in graph.in_degree()) - out_degrees = sorted((n, d) for n, d in graph.out_degree()) - G = nx.directed_edge_swap(graph, nswap=40, max_tries=500, seed=1) - assert in_degrees == sorted((n, d) for n, d in G.in_degree()) - assert out_degrees == sorted((n, d) for n, d in G.out_degree()) +cycle = nx.cycle_graph(5, create_using=nx.DiGraph) +tree = nx.random_tree(10, create_using=nx.DiGraph, seed=42) +path = nx.path_graph(5, create_using=nx.DiGraph) +binomial = nx.binomial_tree(3, create_using=nx.DiGraph) +HH = nx.directed_havel_hakimi_graph([1, 2, 1, 2, 2, 2], [3, 1, 0, 1, 2, 3]) +balanced_tree = nx.balanced_tree(2, 3, create_using=nx.DiGraph) + + +@pytest.mark.parametrize("G", [path, binomial, HH, cycle, tree, balanced_tree]) +def test_directed_edge_swap(G): + in_degree = set(G.in_degree) + out_degree = set(G.out_degree) + edges = set(G.edges) + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert in_degree == set(G.in_degree) + assert out_degree == set(G.out_degree) + assert edges != set(G.edges) + assert 3 == sum(e not in edges for e in G.edges) + + +def test_directed_edge_swap_undo_previous_swap(): + G = nx.DiGraph(nx.path_graph(4).edges) # only 1 swap possible + edges = set(G.edges) + nx.directed_edge_swap(G, nswap=2, max_tries=100) + assert edges == set(G.edges) + + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert {(0, 2), (1, 3), (2, 1)} == set(G.edges) + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert edges == set(G.edges) def test_edge_cases_directed_edge_swap(): diff --git a/networkx/algorithms/tests/test_wiener.py b/networkx/algorithms/tests/test_wiener.py index 1cb404064fe..aded95143ca 
100644 --- a/networkx/algorithms/tests/test_wiener.py +++ b/networkx/algorithms/tests/test_wiener.py @@ -1,66 +1,123 @@ -"""Unit tests for the :mod:`networkx.algorithms.wiener` module.""" - - -from networkx import DiGraph, complete_graph, empty_graph, path_graph, wiener_index - - -class TestWienerIndex: - """Unit tests for computing the Wiener index of a graph.""" - - def test_disconnected_graph(self): - """Tests that the Wiener index of a disconnected graph is - positive infinity. - - """ - assert wiener_index(empty_graph(2)) == float("inf") - - def test_directed(self): - """Tests that each pair of nodes in the directed graph is - counted once when computing the Wiener index. - - """ - G = complete_graph(3) - H = DiGraph(G) - assert (2 * wiener_index(G)) == wiener_index(H) - - def test_complete_graph(self): - """Tests that the Wiener index of the complete graph is simply - the number of edges. - - """ - n = 10 - G = complete_graph(n) - assert wiener_index(G) == (n * (n - 1) / 2) - - def test_path_graph(self): - """Tests that the Wiener index of the path graph is correctly - computed. - - """ - # In P_n, there are n - 1 pairs of vertices at distance one, n - - # 2 pairs at distance two, n - 3 at distance three, ..., 1 at - # distance n - 1, so the Wiener index should be - # - # 1 * (n - 1) + 2 * (n - 2) + ... 
+ (n - 2) * 2 + (n - 1) * 1 - # - # For example, in P_5, - # - # 1 * 4 + 2 * 3 + 3 * 2 + 4 * 1 = 2 (1 * 4 + 2 * 3) - # - # and in P_6, - # - # 1 * 5 + 2 * 4 + 3 * 3 + 4 * 2 + 5 * 1 = 2 (1 * 5 + 2 * 4) + 3 * 3 - # - # assuming n is *odd*, this gives the formula - # - # 2 \sum_{i = 1}^{(n - 1) / 2} [i * (n - i)] - # - # assuming n is *even*, this gives the formula - # - # 2 \sum_{i = 1}^{n / 2} [i * (n - i)] - (n / 2) ** 2 - # - n = 9 - G = path_graph(n) - expected = 2 * sum(i * (n - i) for i in range(1, (n // 2) + 1)) - actual = wiener_index(G) - assert expected == actual +import networkx as nx + + +def test_wiener_index_of_disconnected_graph(): + assert nx.wiener_index(nx.empty_graph(2)) == float("inf") + + +def test_wiener_index_of_directed_graph(): + G = nx.complete_graph(3) + H = nx.DiGraph(G) + assert (2 * nx.wiener_index(G)) == nx.wiener_index(H) + + +def test_wiener_index_of_complete_graph(): + n = 10 + G = nx.complete_graph(n) + assert nx.wiener_index(G) == (n * (n - 1) / 2) + + +def test_wiener_index_of_path_graph(): + # In P_n, there are n - 1 pairs of vertices at distance one, n - + # 2 pairs at distance two, n - 3 at distance three, ..., 1 at + # distance n - 1, so the Wiener index should be + # + # 1 * (n - 1) + 2 * (n - 2) + ... 
+ (n - 2) * 2 + (n - 1) * 1 + # + # For example, in P_5, + # + # 1 * 4 + 2 * 3 + 3 * 2 + 4 * 1 = 2 (1 * 4 + 2 * 3) + # + # and in P_6, + # + # 1 * 5 + 2 * 4 + 3 * 3 + 4 * 2 + 5 * 1 = 2 (1 * 5 + 2 * 4) + 3 * 3 + # + # assuming n is *odd*, this gives the formula + # + # 2 \sum_{i = 1}^{(n - 1) / 2} [i * (n - i)] + # + # assuming n is *even*, this gives the formula + # + # 2 \sum_{i = 1}^{n / 2} [i * (n - i)] - (n / 2) ** 2 + # + n = 9 + G = nx.path_graph(n) + expected = 2 * sum(i * (n - i) for i in range(1, (n // 2) + 1)) + actual = nx.wiener_index(G) + assert expected == actual + + +def test_schultz_and_gutman_index_of_disconnected_graph(): + n = 4 + G = nx.Graph() + G.add_nodes_from(list(range(1, n + 1))) + expected = float("inf") + + G.add_edge(1, 2) + G.add_edge(3, 4) + + actual_1 = nx.schultz_index(G) + actual_2 = nx.gutman_index(G) + + assert expected == actual_1 + assert expected == actual_2 + + +def test_schultz_and_gutman_index_of_complete_bipartite_graph_1(): + n = 3 + m = 3 + cbg = nx.complete_bipartite_graph(n, m) + + expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n + actual_1 = nx.schultz_index(cbg) + + expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n + actual_2 = nx.gutman_index(cbg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_complete_bipartite_graph_2(): + n = 2 + m = 5 + cbg = nx.complete_bipartite_graph(n, m) + + expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n + actual_1 = nx.schultz_index(cbg) + + expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n + actual_2 = nx.gutman_index(cbg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_complete_graph(): + n = 5 + cg = nx.complete_graph(n) + + expected_1 = n * (n - 1) * (n - 1) + actual_1 = nx.schultz_index(cg) + + assert expected_1 == actual_1 + + expected_2 = n * (n - 1) * (n - 1) * (n - 1) 
/ 2 + actual_2 = nx.gutman_index(cg) + + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_odd_cycle_graph(): + k = 5 + n = 2 * k + 1 + ocg = nx.cycle_graph(n) + + expected_1 = 2 * n * k * (k + 1) + actual_1 = nx.schultz_index(ocg) + + expected_2 = 2 * n * k * (k + 1) + actual_2 = nx.gutman_index(ocg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py index 0839321de0d..bcc03d10621 100644 --- a/networkx/algorithms/threshold.py +++ b/networkx/algorithms/threshold.py @@ -9,7 +9,7 @@ __all__ = ["is_threshold_graph", "find_threshold_graph"] -@nx._dispatch +@nx._dispatchable def is_threshold_graph(G): """ Returns `True` if `G` is a threshold graph. @@ -301,7 +301,7 @@ def weights_to_creation_sequence( # Manipulating NetworkX.Graphs in context of threshold graphs -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def threshold_graph(creation_sequence, create_using=None): """ Create a threshold graph from the creation sequence or compact @@ -353,7 +353,7 @@ def threshold_graph(creation_sequence, create_using=None): return G -@nx._dispatch +@nx._dispatchable def find_alternating_4_cycle(G): """ Returns False if there aren't any alternating 4 cycles. @@ -369,7 +369,7 @@ def find_alternating_4_cycle(G): return False -@nx._dispatch +@nx._dispatchable(returns_graph=True) def find_threshold_graph(G, create_using=None): """ Returns a threshold subgraph that is close to largest in `G`. 
@@ -394,7 +394,7 @@ def find_threshold_graph(G, create_using=None): >>> from networkx.algorithms.threshold import find_threshold_graph >>> G = nx.barbell_graph(3, 3) >>> T = find_threshold_graph(G) - >>> T.nodes # may vary + >>> T.nodes # may vary NodeView((7, 8, 5, 6)) References @@ -404,7 +404,7 @@ def find_threshold_graph(G, create_using=None): return threshold_graph(find_creation_sequence(G), create_using) -@nx._dispatch +@nx._dispatchable def find_creation_sequence(G): """ Find a threshold subgraph that is close to largest in G. diff --git a/networkx/algorithms/time_dependent.py b/networkx/algorithms/time_dependent.py index e83f42ad92c..d67cdcf0b8e 100644 --- a/networkx/algorithms/time_dependent.py +++ b/networkx/algorithms/time_dependent.py @@ -8,7 +8,7 @@ @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(node_attrs={"time": None, "weight": 1}) +@nx._dispatchable(node_attrs={"time": None, "weight": 1}) def cd_index(G, node, time_delta, *, time="time", weight=None): r"""Compute the CD index for `node` within the graph `G`. @@ -55,10 +55,10 @@ def cd_index(G, node, time_delta, *, time="time", weight=None): >>> G = nx.DiGraph() >>> nodes = { ... 1: {"time": datetime(2015, 1, 1)}, - ... 2: {"time": datetime(2012, 1, 1), 'weight': 4}, + ... 2: {"time": datetime(2012, 1, 1), "weight": 4}, ... 3: {"time": datetime(2010, 1, 1)}, ... 4: {"time": datetime(2008, 1, 1)}, - ... 5: {"time": datetime(2014, 1, 1)} + ... 5: {"time": datetime(2014, 1, 1)}, ... 
} >>> G.add_nodes_from([(n, nodes[n]) for n in nodes]) >>> edges = [(1, 3), (1, 4), (2, 3), (3, 4), (3, 5)] diff --git a/networkx/algorithms/tournament.py b/networkx/algorithms/tournament.py index 0b164cb3b16..43a71faa70e 100644 --- a/networkx/algorithms/tournament.py +++ b/networkx/algorithms/tournament.py @@ -65,7 +65,7 @@ def index_satisfying(iterable, condition): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_tournament(G): """Returns True if and only if `G` is a tournament. @@ -104,7 +104,7 @@ def is_tournament(G): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def hamiltonian_path(G): """Returns a Hamiltonian path in the given tournament graph. @@ -151,7 +151,7 @@ def hamiltonian_path(G): @py_random_state(1) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def random_tournament(n, seed=None): r"""Returns a random tournament graph on `n` nodes. @@ -186,7 +186,7 @@ def random_tournament(n, seed=None): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def score_sequence(G): """Returns the score sequence for the given tournament graph. @@ -217,7 +217,7 @@ def score_sequence(G): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}}) def tournament_matrix(G): r"""Returns the tournament matrix for the given tournament graph. @@ -260,7 +260,7 @@ def tournament_matrix(G): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable def is_reachable(G, s, t): """Decides whether there is a path from `s` to `t` in the tournament. 
@@ -349,7 +349,7 @@ def is_closed(G, nodes): @not_implemented_for("undirected") @not_implemented_for("multigraph") -@nx._dispatch(name="tournament_is_strongly_connected") +@nx._dispatchable(name="tournament_is_strongly_connected") def is_strongly_connected(G): """Decides whether the given tournament is strongly connected. diff --git a/networkx/algorithms/traversal/beamsearch.py b/networkx/algorithms/traversal/beamsearch.py index 9b339e306f7..ab90f44aedb 100644 --- a/networkx/algorithms/traversal/beamsearch.py +++ b/networkx/algorithms/traversal/beamsearch.py @@ -4,7 +4,7 @@ __all__ = ["bfs_beam_edges"] -@nx._dispatch +@nx._dispatchable def bfs_beam_edges(G, source, value, width=None): """Iterates over edges in a beam search. diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py index 989c7f8622d..b87cca124a6 100644 --- a/networkx/algorithms/traversal/breadth_first_search.py +++ b/networkx/algorithms/traversal/breadth_first_search.py @@ -1,5 +1,4 @@ """Basic algorithms for breadth-first searching the nodes of a graph.""" -import math from collections import deque import networkx as nx @@ -16,7 +15,7 @@ ] -@nx._dispatch +@nx._dispatchable def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbors=None): """Iterate over edges in a breadth-first search. @@ -44,7 +43,7 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor depth_limit : int, optional(default=len(G)) Specify the maximum search depth. - sort_neighbors : Callable + sort_neighbors : Callable (default=None) .. deprecated:: 3.2 @@ -52,9 +51,9 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor version 3.4. A custom (e.g. sorted) ordering of neighbors can be specified with the `neighbors` parameter. - A function that takes the list of neighbors of a given node as input, - and returns an iterator over these neighbors but with a custom - ordering. 
+ A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Yields ------ @@ -130,7 +129,7 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor depth += 1 -@nx._dispatch +@nx._dispatchable def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """Iterate over edges in a breadth-first-search starting at source. @@ -149,9 +148,10 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Yields ------ @@ -210,7 +210,7 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): else: successors = G.neighbors - if callable(sort_neighbors): + if sort_neighbors is not None: yield from generic_bfs_edges( G, source, lambda node: iter(sort_neighbors(successors(node))), depth_limit ) @@ -218,7 +218,7 @@ def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): yield from generic_bfs_edges(G, source, successors, depth_limit) -@nx._dispatch +@nx._dispatchable(returns_graph=True) def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """Returns an oriented tree constructed from of a breadth-first-search starting at source. 
@@ -236,9 +236,10 @@ def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Returns ------- @@ -285,7 +286,7 @@ def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): return T -@nx._dispatch +@nx._dispatchable def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): """Returns an iterator of predecessors in breadth-first-search from source. @@ -299,9 +300,10 @@ def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Returns ------- @@ -350,7 +352,7 @@ def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): yield (t, s) -@nx._dispatch +@nx._dispatchable def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): """Returns an iterator of successors in breadth-first-search from source. 
@@ -364,9 +366,10 @@ def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): depth_limit : int, optional(default=len(G)) Specify the maximum search depth - sort_neighbors : function - A function that takes the list of neighbors of given node as input, and - returns an *iterator* over these neighbors but with custom ordering. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. Returns ------- @@ -423,7 +426,7 @@ def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): yield (parent, children) -@nx._dispatch +@nx._dispatchable def bfs_layers(G, sources): """Returns an iterator of all the layers in breadth-first search traversal. @@ -481,7 +484,7 @@ def bfs_layers(G, sources): LEVEL_EDGE = "level" -@nx._dispatch +@nx._dispatchable def bfs_labeled_edges(G, sources): """Iterate over edges in a breadth-first search (BFS) labeled by type. @@ -513,7 +516,7 @@ def bfs_labeled_edges(G, sources): Examples -------- - >>> G = nx.cycle_graph(4, create_using = nx.DiGraph) + >>> G = nx.cycle_graph(4, create_using=nx.DiGraph) >>> list(nx.bfs_labeled_edges(G, 0)) [(0, 1, 'tree'), (1, 2, 'tree'), (2, 3, 'tree'), (3, 0, 'reverse')] >>> G = nx.complete_graph(3) @@ -556,7 +559,7 @@ def bfs_labeled_edges(G, sources): visit(u) -@nx._dispatch +@nx._dispatchable def descendants_at_distance(G, source, distance): """Returns all nodes at a fixed `distance` from `source` in `G`. 
diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py index 185a99c3dd1..3ca0f91d964 100644 --- a/networkx/algorithms/traversal/depth_first_search.py +++ b/networkx/algorithms/traversal/depth_first_search.py @@ -14,8 +14,8 @@ ] -@nx._dispatch -def dfs_edges(G, source=None, depth_limit=None): +@nx._dispatchable +def dfs_edges(G, source=None, depth_limit=None, *, sort_neighbors=None): """Iterate over edges in a depth-first-search (DFS). Perform a depth-first-search over the nodes of `G` and yield @@ -33,6 +33,11 @@ def dfs_edges(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Yields ------ edge: 2-tuple of nodes @@ -78,12 +83,18 @@ def dfs_edges(G, source=None, depth_limit=None): if depth_limit is None: depth_limit = len(G) + get_children = ( + G.neighbors + if sort_neighbors is None + else lambda n: iter(sort_neighbors(G.neighbors(n))) + ) + visited = set() for start in nodes: if start in visited: continue visited.add(start) - stack = [(start, iter(G[start]))] + stack = [(start, get_children(start))] depth_now = 1 while stack: parent, children = stack[-1] @@ -92,7 +103,7 @@ def dfs_edges(G, source=None, depth_limit=None): yield parent, child visited.add(child) if depth_now < depth_limit: - stack.append((child, iter(G[child]))) + stack.append((child, get_children(child))) depth_now += 1 break else: @@ -100,8 +111,8 @@ def dfs_edges(G, source=None, depth_limit=None): depth_now -= 1 -@nx._dispatch -def dfs_tree(G, source=None, depth_limit=None): +@nx._dispatchable(returns_graph=True) +def dfs_tree(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns oriented tree constructed from a 
depth-first-search from source. Parameters @@ -114,6 +125,11 @@ def dfs_tree(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- T : NetworkX DiGraph @@ -134,20 +150,20 @@ def dfs_tree(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ T = nx.DiGraph() if source is None: T.add_nodes_from(G) else: T.add_node(source) - T.add_edges_from(dfs_edges(G, source, depth_limit)) + T.add_edges_from(dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors)) return T -@nx._dispatch -def dfs_predecessors(G, source=None, depth_limit=None): +@nx._dispatchable +def dfs_predecessors(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns dictionary of predecessors in depth-first-search from source. Parameters @@ -163,6 +179,11 @@ def dfs_predecessors(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
+ Returns ------- pred: dict @@ -194,14 +215,17 @@ def dfs_predecessors(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ - return {t: s for s, t in dfs_edges(G, source, depth_limit)} + return { + t: s + for s, t in dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors) + } -@nx._dispatch -def dfs_successors(G, source=None, depth_limit=None): +@nx._dispatchable +def dfs_successors(G, source=None, depth_limit=None, *, sort_neighbors=None): """Returns dictionary of successors in depth-first-search from source. Parameters @@ -217,6 +241,11 @@ def dfs_successors(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- succ: dict @@ -248,17 +277,22 @@ def dfs_successors(G, source=None, depth_limit=None): dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ d = defaultdict(list) - for s, t in dfs_edges(G, source=source, depth_limit=depth_limit): + for s, t in dfs_edges( + G, + source=source, + depth_limit=depth_limit, + sort_neighbors=sort_neighbors, + ): d[s].append(t) return dict(d) -@nx._dispatch -def dfs_postorder_nodes(G, source=None, depth_limit=None): +@nx._dispatchable +def dfs_postorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None): """Generate nodes in a depth-first-search post-ordering starting at source. 
Parameters @@ -271,6 +305,11 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + Returns ------- nodes: generator @@ -302,15 +341,17 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): dfs_edges dfs_preorder_nodes dfs_labeled_edges - edge_dfs - bfs_tree + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` """ - edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) + edges = nx.dfs_labeled_edges( + G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ) return (v for u, v, d in edges if d == "reverse") -@nx._dispatch -def dfs_preorder_nodes(G, source=None, depth_limit=None): +@nx._dispatchable +def dfs_preorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None): """Generate nodes in a depth-first-search pre-ordering starting at source. Parameters @@ -324,6 +365,11 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
+ Returns ------- nodes: generator @@ -355,14 +401,16 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): dfs_edges dfs_postorder_nodes dfs_labeled_edges - bfs_edges + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges` """ - edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) + edges = nx.dfs_labeled_edges( + G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ) return (v for u, v, d in edges if d == "forward") -@nx._dispatch -def dfs_labeled_edges(G, source=None, depth_limit=None): +@nx._dispatchable +def dfs_labeled_edges(G, source=None, depth_limit=None, *, sort_neighbors=None): """Iterate over edges in a depth-first-search (DFS) labeled by type. Parameters @@ -376,6 +424,11 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): depth_limit : int, optional (default=len(G)) Specify the maximum search depth. + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. 
+ Returns ------- edges: generator @@ -439,13 +492,19 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): if depth_limit is None: depth_limit = len(G) + get_children = ( + G.neighbors + if sort_neighbors is None + else lambda n: iter(sort_neighbors(G.neighbors(n))) + ) + visited = set() for start in nodes: if start in visited: continue yield start, start, "forward" visited.add(start) - stack = [(start, iter(G[start]))] + stack = [(start, get_children(start))] depth_now = 1 while stack: parent, children = stack[-1] @@ -456,7 +515,7 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): yield parent, child, "forward" visited.add(child) if depth_now < depth_limit: - stack.append((child, iter(G[child]))) + stack.append((child, iter(get_children(child)))) depth_now += 1 break else: diff --git a/networkx/algorithms/traversal/edgebfs.py b/networkx/algorithms/traversal/edgebfs.py index c29ef5e0269..484ae12b5ea 100644 --- a/networkx/algorithms/traversal/edgebfs.py +++ b/networkx/algorithms/traversal/edgebfs.py @@ -16,7 +16,7 @@ __all__ = ["edge_bfs"] -@nx._dispatch +@nx._dispatchable def edge_bfs(G, source=None, orientation=None): """A directed, breadth-first-search of edges in `G`, beginning at `source`. diff --git a/networkx/algorithms/traversal/edgedfs.py b/networkx/algorithms/traversal/edgedfs.py index 1e583de6ec4..010f68246c9 100644 --- a/networkx/algorithms/traversal/edgedfs.py +++ b/networkx/algorithms/traversal/edgedfs.py @@ -14,7 +14,7 @@ __all__ = ["edge_dfs"] -@nx._dispatch +@nx._dispatchable def edge_dfs(G, source=None, orientation=None): """A directed, depth-first-search of edges in `G`, beginning at `source`. 
diff --git a/networkx/algorithms/traversal/tests/test_dfs.py b/networkx/algorithms/traversal/tests/test_dfs.py index 0eb698b0f2d..e43d7d61629 100644 --- a/networkx/algorithms/traversal/tests/test_dfs.py +++ b/networkx/algorithms/traversal/tests/test_dfs.py @@ -55,6 +55,14 @@ def test_dfs_edges(self): edges = nx.dfs_edges(self.D) assert list(edges) == [(0, 1), (2, 3)] + def test_dfs_edges_sorting(self): + G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + edges_asc = nx.dfs_edges(G, source=0, sort_neighbors=sorted) + sorted_desc = lambda x: sorted(x, reverse=True) + edges_desc = nx.dfs_edges(G, source=0, sort_neighbors=sorted_desc) + assert list(edges_asc) == [(0, 1), (1, 2), (2, 4), (1, 3)] + assert list(edges_desc) == [(0, 4), (4, 2), (2, 1), (1, 3)] + def test_dfs_labeled_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=0)) forward = [(u, v) for (u, v, d) in edges if d == "forward"] @@ -80,6 +88,52 @@ def test_dfs_labeled_edges(self): (0, 0, "reverse"), ] + def test_dfs_labeled_edges_sorting(self): + G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + edges_asc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted) + sorted_desc = lambda x: sorted(x, reverse=True) + edges_desc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted_desc) + assert list(edges_asc) == [ + (0, 0, "forward"), + (0, 1, "forward"), + (1, 0, "nontree"), + (1, 2, "forward"), + (2, 1, "nontree"), + (2, 4, "forward"), + (4, 0, "nontree"), + (4, 2, "nontree"), + (2, 4, "reverse"), + (1, 2, "reverse"), + (1, 3, "forward"), + (3, 0, "nontree"), + (3, 1, "nontree"), + (1, 3, "reverse"), + (0, 1, "reverse"), + (0, 3, "nontree"), + (0, 4, "nontree"), + (0, 0, "reverse"), + ] + assert list(edges_desc) == [ + (0, 0, "forward"), + (0, 4, "forward"), + (4, 2, "forward"), + (2, 4, "nontree"), + (2, 1, "forward"), + (1, 3, "forward"), + (3, 1, "nontree"), + (3, 0, "nontree"), + (1, 3, "reverse"), + (1, 2, "nontree"), + (1, 0, "nontree"), + (2, 1, 
"reverse"), + (4, 2, "reverse"), + (4, 0, "nontree"), + (0, 4, "reverse"), + (0, 3, "nontree"), + (0, 1, "nontree"), + (0, 0, "reverse"), + ] + def test_dfs_labeled_disconnected_edges(self): edges = list(nx.dfs_labeled_edges(self.D)) forward = [(u, v) for (u, v, d) in edges if d == "forward"] diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py index 653266915e7..34593ea4100 100644 --- a/networkx/algorithms/tree/branchings.py +++ b/networkx/algorithms/tree/branchings.py @@ -28,7 +28,6 @@ # } import string from dataclasses import dataclass, field -from enum import Enum from operator import itemgetter from queue import PriorityQueue @@ -73,7 +72,7 @@ def _max_weight(weight): return weight -@nx._dispatch(edge_attrs={"attr": "default"}) +@nx._dispatchable(edge_attrs={"attr": "default"}) def branching_weight(G, attr="weight", default=1): """ Returns the total weight of a branching. @@ -108,7 +107,7 @@ def branching_weight(G, attr="weight", default=1): @py_random_state(4) -@nx._dispatch(edge_attrs={"attr": "default"}) +@nx._dispatchable(edge_attrs={"attr": "default"}, returns_graph=True) def greedy_branching(G, attr="weight", default=1, kind="max", seed=None): """ Returns a branching obtained through a greedy algorithm. 
@@ -470,7 +469,6 @@ def find_optimum( D = set() nodes = iter(list(G.nodes())) attr = self._attr - G_pred = G.pred def desired_edge(v): """ @@ -745,9 +743,10 @@ def is_root(G, u, edgekeys): return H -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": 0}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def maximum_branching( G, @@ -1173,30 +1172,32 @@ def is_root(G, u, edgekeys): return H -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def minimum_branching( G, attr="weight", default=1, preserve_attrs=False, partition=None ): for _, _, d in G.edges(data=True): - d[attr] = -d[attr] + d[attr] = -d.get(attr, default) B = maximum_branching(G, attr, default, preserve_attrs, partition) for _, _, d in G.edges(data=True): - d[attr] = -d[attr] + d[attr] = -d.get(attr, default) for _, _, d in B.edges(data=True): - d[attr] = -d[attr] + d[attr] = -d.get(attr, default) return B -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def minimal_branching( G, /, *, attr="weight", default=1, preserve_attrs=False, partition=None @@ -1233,7 +1234,7 @@ def minimal_branching( """ max_weight = -INF min_weight = INF - for _, _, w in G.edges(data=attr): + for _, _, w in G.edges(data=attr, default=default): if w > max_weight: max_weight = w if w < min_weight: @@ -1244,23 +1245,24 @@ def minimal_branching( # the difference between the max and min weights. 
This is important # in order to prevent the edge weights from becoming negative during # computation - d[attr] = max_weight + 1 + (max_weight - min_weight) - d[attr] + d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default) B = maximum_branching(G, attr, default, preserve_attrs, partition) # Reverse the weight transformations for _, _, d in G.edges(data=True): - d[attr] = max_weight + 1 + (max_weight - min_weight) - d[attr] + d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default) for _, _, d in B.edges(data=True): - d[attr] = max_weight + 1 + (max_weight - min_weight) - d[attr] + d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default) return B -@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def maximum_spanning_arborescence( G, attr="weight", default=1, preserve_attrs=False, partition=None @@ -1277,22 +1279,22 @@ def maximum_spanning_arborescence( min_weight = INF max_weight = -INF - for _, _, w in G.edges(data=attr): + for _, _, w in G.edges(data=attr, default=default): if w < min_weight: min_weight = w if w > max_weight: max_weight = w for _, _, d in G.edges(data=True): - d[attr] = d[attr] - min_weight + 1 - (min_weight - max_weight) + d[attr] = d.get(attr, default) - min_weight + 1 - (min_weight - max_weight) B = maximum_branching(G, attr, default, preserve_attrs, partition) for _, _, d in G.edges(data=True): - d[attr] = d[attr] + min_weight - 1 + (min_weight - max_weight) + d[attr] = d.get(attr, default) + min_weight - 1 + (min_weight - max_weight) for _, _, d in B.edges(data=True): - d[attr] = d[attr] + min_weight - 1 + (min_weight - max_weight) + d[attr] = d.get(attr, default) + min_weight - 1 + (min_weight - max_weight) if not is_arborescence(B): raise nx.exception.NetworkXException("No maximum spanning arborescence in G.") @@ -1300,9 +1302,10 @@ def maximum_spanning_arborescence( return B 
-@nx._dispatch( +@nx._dispatchable( edge_attrs={"attr": "default", "partition": None}, preserve_edge_attrs="preserve_attrs", + returns_graph=True, ) def minimum_spanning_arborescence( G, attr="weight", default=1, preserve_attrs=False, partition=None @@ -1365,8 +1368,8 @@ def minimum_spanning_arborescence( minimum_branching.__doc__ = ( docstring_branching.format(kind="minimum", style="branching") + """ -See Also --------- +See Also +-------- minimal_branching """ ) diff --git a/networkx/algorithms/tree/coding.py b/networkx/algorithms/tree/coding.py index a74fd48cff0..8cec023c228 100644 --- a/networkx/algorithms/tree/coding.py +++ b/networkx/algorithms/tree/coding.py @@ -32,7 +32,7 @@ class NotATree(nx.NetworkXException): @not_implemented_for("directed") -@nx._dispatch(graphs="T") +@nx._dispatchable(graphs="T") def to_nested_tuple(T, root, canonical_form=False): """Returns a nested tuple representation of the given tree. @@ -128,7 +128,7 @@ def _make_tuple(T, root, _parent): return _make_tuple(T, root, None) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_nested_tuple(sequence, sensible_relabeling=False): """Returns the rooted tree corresponding to the given nested tuple. @@ -213,7 +213,7 @@ def _make_tree(sequence): @not_implemented_for("directed") -@nx._dispatch(graphs="T") +@nx._dispatchable(graphs="T") def to_prufer_sequence(T): r"""Returns the Prüfer sequence of the given tree. @@ -314,7 +314,7 @@ def parents(u): return result -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_prufer_sequence(sequence): r"""Returns the tree corresponding to the given Prüfer sequence. 
diff --git a/networkx/algorithms/tree/decomposition.py b/networkx/algorithms/tree/decomposition.py index 0517100016f..c8b8f2477b4 100644 --- a/networkx/algorithms/tree/decomposition.py +++ b/networkx/algorithms/tree/decomposition.py @@ -10,7 +10,7 @@ @not_implemented_for("multigraph") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def junction_tree(G): r"""Returns a junction tree of a given graph. diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py index f4cec03dc30..72c1980cb15 100644 --- a/networkx/algorithms/tree/mst.py +++ b/networkx/algorithms/tree/mst.py @@ -18,6 +18,7 @@ "maximum_spanning_edges", "minimum_spanning_tree", "maximum_spanning_tree", + "number_of_spanning_trees", "random_spanning_tree", "partition_spanning_tree", "EdgePartition", @@ -41,7 +42,7 @@ class EdgePartition(Enum): @not_implemented_for("multigraph") -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def boruvka_mst_edges( G, minimum=True, weight="weight", keys=False, data=True, ignore_nan=False ): @@ -138,7 +139,7 @@ def best_edge(component): forest.union(u, v) -@nx._dispatch( +@nx._dispatchable( edge_attrs={"weight": None, "partition": None}, preserve_edge_attrs="data" ) def kruskal_mst_edges( @@ -251,7 +252,7 @@ def kruskal_mst_edges( subtrees.union(u, v) -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan=False): """Iterate over edges of Prim's algorithm min/max spanning tree. 
@@ -367,7 +368,7 @@ def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def minimum_spanning_edges( G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False ): @@ -462,7 +463,7 @@ def minimum_spanning_edges( @not_implemented_for("directed") -@nx._dispatch(edge_attrs="weight", preserve_edge_attrs="data") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") def maximum_spanning_edges( G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False ): @@ -555,7 +556,7 @@ def maximum_spanning_edges( ) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a minimum spanning tree or forest on an undirected graph `G`. @@ -615,7 +616,7 @@ def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=Fa return T -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def partition_spanning_tree( G, minimum=True, weight="weight", partition="partition", ignore_nan=False ): @@ -679,7 +680,7 @@ def partition_spanning_tree( return T -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a maximum spanning tree or forest on an undirected graph `G`. 
@@ -743,7 +744,7 @@ def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=Fa @py_random_state(3) -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True) def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None): """ Sample a random spanning tree using the edges weights of `G`. @@ -897,14 +898,15 @@ def spanning_tree_total_weight(G, weight): # itself. if G.number_of_edges() == 1: return G.edges(data=weight).__iter__().__next__()[2] - # 2. There are more than two edges in the graph. Then, we can find the + # 2. There are no edges or two or more edges in the graph. Then, we find the # total weight of the spanning trees using the formula in the - # reference paper: take the weight of that edge and multiple it by - # the number of spanning trees which have to include that edge. This + # reference paper: take the weight of each edge and multiply it by + # the number of spanning trees which include that edge. This # can be accomplished by contracting the edge and finding the # multiplicative total spanning tree weight if the weight of each edge # is assumed to be 1, which is conveniently built into networkx already, - # by calling total_spanning_tree_weight with weight=None + # by calling total_spanning_tree_weight with weight=None. + # Note that with no edges the returned value is just zero. else: total = 0 for u, v, w in G.edges(data=weight): @@ -913,6 +915,10 @@ def spanning_tree_total_weight(G, weight): ) return total + if G.number_of_nodes() < 2: + # no edges in the spanning tree + return nx.empty_graph(G.nodes) + U = set() st_cached_value = 0 V = set(G.edges()) @@ -1131,3 +1137,137 @@ def _clear_partition(self, G): for u, v, d in G.edges(data=True): if self.partition_key in d: del d[self.partition_key] + + +@nx._dispatchable(edge_attrs="weight") +def number_of_spanning_trees(G, *, root=None, weight=None): + """Returns the number of spanning trees in `G`. 
+ + A spanning tree for an undirected graph is a tree that connects + all nodes in the graph. For a directed graph, the analog of a + spanning tree is called a (spanning) arborescence. The arborescence + includes a unique directed path from the `root` node to each other node. + The graph must be weakly connected, and the root must be a node + that includes all nodes as successors [3]_. Note that to avoid + discussing sink-roots and reverse-arborescences, we have reversed + the edge orientation from [3]_ and use the in-degree laplacian. + + This function (when `weight` is `None`) returns the number of + spanning trees for an undirected graph and the number of + arborescences from a single root node for a directed graph. + When `weight` is the name of an edge attribute which holds the + weight value of each edge, the function returns the sum over + all trees of the multiplicative weight of each tree. That is, + the weight of the tree is the product of its edge weights. + + Kirchhoff's Tree Matrix Theorem states that any cofactor of the + Laplacian matrix of a graph is the number of spanning trees in the + graph. (Here we use cofactors for a diagonal entry so that the + cofactor becomes the determinant of the matrix with one row + and its matching column removed.) For a weighted Laplacian matrix, + the cofactor is the sum across all spanning trees of the + multiplicative weight of each tree. That is, the weight of each + tree is the product of its edge weights. The theorem is also + known as Kirchhoff's theorem [1]_ and the Matrix-Tree theorem [2]_. + + For directed graphs, a similar theorem (Tutte's Theorem) holds with + the cofactor chosen to be the one with row and column removed that + correspond to the root. The cofactor is the number of arborescences + with the specified node as root. And the weighted version gives the + sum of the arborescence weights with root `root`. The arborescence + weight is the product of its edge weights. 
+ + Parameters + ---------- + G : NetworkX graph + + root : node + A node in the directed graph `G` that has all nodes as descendants. + (This is ignored for undirected graphs.) + + weight : string or None, optional (default=None) + The name of the edge attribute holding the edge weight. + If `None`, then each edge is assumed to have a weight of 1. + + Returns + ------- + Number + Undirected graphs: + The number of spanning trees of the graph `G`. + Or the sum of all spanning tree weights of the graph `G` + where the weight of a tree is the product of its edge weights. + Directed graphs: + The number of arborescences of `G` rooted at node `root`. + Or the sum of all arborescence weights of the graph `G` with + specified root where the weight of an arborescence is the product + of its edge weights. + + Raises + ------ + NetworkXPointlessConcept + If `G` does not contain any nodes. + + NetworkXError + If the graph `G` is directed and the root node + is not specified or is not in G. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> round(nx.number_of_spanning_trees(G)) + 125 + + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=2) + >>> G.add_edge(1, 3, weight=1) + >>> G.add_edge(2, 3, weight=1) + >>> round(nx.number_of_spanning_trees(G, weight="weight")) + 5 + + Notes + ----- + Self-loops are excluded. Multi-edges are contracted in one edge + equal to the sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Kirchhoff's theorem." + https://en.wikipedia.org/wiki/Kirchhoff%27s_theorem + .. [2] Kirchhoff, G. R. + Über die Auflösung der Gleichungen, auf welche man + bei der Untersuchung der linearen Vertheilung + Galvanischer Ströme geführt wird + Annalen der Physik und Chemie, vol. 72, pp. 497-508, 1847. + .. [3] Margoliash, J. 
+ "Matrix-Tree Theorem for Directed Graphs" + https://www.math.uchicago.edu/~may/VIGRE/VIGRE2010/REUPapers/Margoliash.pdf + """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph G must contain at least one node.") + + # undirected G + if not nx.is_directed(G): + if not nx.is_connected(G): + return 0 + G_laplacian = nx.laplacian_matrix(G, weight=weight).toarray() + return float(np.linalg.det(G_laplacian[1:, 1:])) + + # directed G + if root is None: + raise nx.NetworkXError("Input `root` must be provided when G is directed") + if root not in G: + raise nx.NetworkXError("The node root is not in the graph G.") + if not nx.is_weakly_connected(G): + return 0 + + # Compute directed Laplacian matrix + nodelist = [root] + [n for n in G if n != root] + A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight) + D = np.diag(A.sum(axis=0)) + G_laplacian = D - A + + # Compute number of spanning trees + return float(np.linalg.det(G_laplacian[1:, 1:])) diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py index df1b4e7bec0..f4368d6a322 100644 --- a/networkx/algorithms/tree/operations.py +++ b/networkx/algorithms/tree/operations.py @@ -32,7 +32,7 @@ def join(rooted_trees, label_attribute=None): # Argument types don't match dispatching, but allow manual selection of backend -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def join_trees(rooted_trees, *, label_attribute=None, first_label=0): """Returns a new rooted tree made by joining `rooted_trees` diff --git a/networkx/algorithms/tree/recognition.py b/networkx/algorithms/tree/recognition.py index 15bbdf7d83b..a9eae98707a 100644 --- a/networkx/algorithms/tree/recognition.py +++ b/networkx/algorithms/tree/recognition.py @@ -79,7 +79,7 @@ @nx.utils.not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_arborescence(G): """ Returns True if `G` is an arborescence. 
@@ -119,7 +119,7 @@ def is_arborescence(G): @nx.utils.not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def is_branching(G): """ Returns True if `G` is a branching. @@ -158,7 +158,7 @@ def is_branching(G): return is_forest(G) and max(d for n, d in G.in_degree()) <= 1 -@nx._dispatch +@nx._dispatchable def is_forest(G): """ Returns True if `G` is a forest. @@ -215,7 +215,7 @@ def is_forest(G): return all(len(c) - 1 == c.number_of_edges() for c in components) -@nx._dispatch +@nx._dispatchable def is_tree(G): """ Returns True if `G` is a tree. diff --git a/networkx/algorithms/tree/tests/test_branchings.py b/networkx/algorithms/tree/tests/test_branchings.py index ad1cc33b9a4..9058606714f 100644 --- a/networkx/algorithms/tree/tests/test_branchings.py +++ b/networkx/algorithms/tree/tests/test_branchings.py @@ -630,3 +630,29 @@ def test_arborescence_iterator_initial_partition(): for e in excluded_edges: assert e not in B.edges assert arborescence_count == 16 + + +def test_branchings_with_default_weights(): + """ + Tests that various branching algorithms work on graphs without weights. + For more information, see issue #7279. + """ + graph = nx.erdos_renyi_graph(10, p=0.2, directed=True, seed=123) + + assert all( + "weight" not in d for (u, v, d) in graph.edges(data=True) + ), "test is for graphs without a weight attribute" + + # Calling these functions will modify graph inplace to add weights + # copy the graph to avoid this. 
+ nx.minimum_spanning_arborescence(graph.copy()) + nx.maximum_spanning_arborescence(graph.copy()) + nx.minimum_branching(graph.copy()) + nx.maximum_branching(graph.copy()) + nx.algorithms.tree.minimal_branching(graph.copy()) + nx.algorithms.tree.branching_weight(graph.copy()) + nx.algorithms.tree.greedy_branching(graph.copy()) + + assert all( + "weight" not in d for (u, v, d) in graph.edges(data=True) + ), "The above calls should not modify the initial graph in-place" diff --git a/networkx/algorithms/tree/tests/test_mst.py b/networkx/algorithms/tree/tests/test_mst.py index 373f16cf7a0..a6048d5472a 100644 --- a/networkx/algorithms/tree/tests/test_mst.py +++ b/networkx/algorithms/tree/tests/test_mst.py @@ -706,3 +706,150 @@ def test_random_spanning_tree_additive_large(): # Assert that p is greater than the significance level so that we do not # reject the null hypothesis assert not p < 0.05 + + +def test_random_spanning_tree_empty_graph(): + G = nx.Graph() + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 0 + assert len(rst.edges) == 0 + + +def test_random_spanning_tree_single_node_graph(): + G = nx.Graph() + G.add_node(0) + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 1 + assert len(rst.edges) == 0 + + +def test_random_spanning_tree_single_node_loop(): + G = nx.Graph() + G.add_node(0) + G.add_edge(0, 0) + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 1 + assert len(rst.edges) == 0 + + +class TestNumberSpanningTrees: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def test_nst_disconnected(self): + G = nx.empty_graph(2) + assert np.isclose(nx.number_of_spanning_trees(G), 0) + + def test_nst_no_nodes(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept): + nx.number_of_spanning_trees(G) + + def test_nst_weight(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=2) + # weights are ignored + assert 
np.isclose(nx.number_of_spanning_trees(G), 3) + # including weight + assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), 5) + + def test_nst_negative_weight(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=-1) + G.add_edge(2, 3, weight=-2) + # weights are ignored + assert np.isclose(nx.number_of_spanning_trees(G), 3) + # including weight + assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), -1) + + def test_nst_selfloop(self): + # self-loops are ignored + G = nx.complete_graph(3) + G.add_edge(1, 1) + assert np.isclose(nx.number_of_spanning_trees(G), 3) + + def test_nst_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2) + G.add_edge(1, 2) + G.add_edge(1, 3) + G.add_edge(2, 3) + assert np.isclose(nx.number_of_spanning_trees(G), 5) + + def test_nst_complete_graph(self): + # this is known as Cayley's formula + N = 5 + G = nx.complete_graph(N) + assert np.isclose(nx.number_of_spanning_trees(G), N ** (N - 2)) + + def test_nst_path_graph(self): + G = nx.path_graph(5) + assert np.isclose(nx.number_of_spanning_trees(G), 1) + + def test_nst_cycle_graph(self): + G = nx.cycle_graph(5) + assert np.isclose(nx.number_of_spanning_trees(G), 5) + + def test_nst_directed_noroot(self): + G = nx.empty_graph(3, create_using=nx.MultiDiGraph) + with pytest.raises(nx.NetworkXError): + nx.number_of_spanning_trees(G) + + def test_nst_directed_root_not_exist(self): + G = nx.empty_graph(3, create_using=nx.MultiDiGraph) + with pytest.raises(nx.NetworkXError): + nx.number_of_spanning_trees(G, root=42) + + def test_nst_directed_not_weak_connected(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(3, 4) + assert np.isclose(nx.number_of_spanning_trees(G, root=1), 0) + + def test_nst_directed_cycle_graph(self): + G = nx.DiGraph() + G = nx.cycle_graph(7, G) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1) + + def test_nst_directed_complete_graph(self): + G = nx.DiGraph() + G = nx.complete_graph(7, G) + assert 
np.isclose(nx.number_of_spanning_trees(G, root=0), 7**5) + + def test_nst_directed_multi(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.add_edge(1, 2) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 2) + + def test_nst_directed_selfloop(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.add_edge(1, 1) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1) + + def test_nst_directed_weak_connected(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.remove_edge(1, 2) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 0) + + def test_nst_directed_weighted(self): + # from root=1: + # arborescence 1: 1->2, 1->3, weight=2*1 + # arborescence 2: 1->2, 2->3, weight=2*3 + G = nx.DiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=3) + Nst = nx.number_of_spanning_trees(G, root=1, weight="weight") + assert np.isclose(Nst, 8) + Nst = nx.number_of_spanning_trees(G, root=2, weight="weight") + assert np.isclose(Nst, 0) + Nst = nx.number_of_spanning_trees(G, root=3, weight="weight") + assert np.isclose(Nst, 0) diff --git a/networkx/algorithms/tree/tests/test_recognition.py b/networkx/algorithms/tree/tests/test_recognition.py index a9c6c5aade9..105f5a89e9b 100644 --- a/networkx/algorithms/tree/tests/test_recognition.py +++ b/networkx/algorithms/tree/tests/test_recognition.py @@ -119,6 +119,12 @@ def test_emptybranch(): assert not nx.is_arborescence(G) +def test_is_branching_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."): + nx.is_branching(G) + + def test_path(): G = nx.DiGraph() nx.add_path(G, range(5)) @@ -160,3 +166,9 @@ def test_notarborescence2(): G.add_edge(6, 4) assert not nx.is_branching(G) assert not nx.is_arborescence(G) + + +def test_is_arborescense_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."): + nx.is_arborescence(G) diff --git 
a/networkx/algorithms/triads.py b/networkx/algorithms/triads.py index 07172465dae..1e67c145362 100644 --- a/networkx/algorithms/triads.py +++ b/networkx/algorithms/triads.py @@ -129,7 +129,7 @@ def _tricode(G, v, u, w): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def triadic_census(G, nodelist=None): """Determines the triadic census of a directed graph. @@ -155,7 +155,6 @@ def triadic_census(G, nodelist=None): >>> triadic_census = nx.triadic_census(G) >>> for key, value in triadic_census.items(): ... print(f"{key}: {value}") - ... 003: 0 012: 0 102: 0 @@ -281,7 +280,7 @@ def triadic_census(G, nodelist=None): return census -@nx._dispatch +@nx._dispatchable def is_triad(G): """Returns True if the graph G is a triad, else False. @@ -312,7 +311,7 @@ def is_triad(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def all_triplets(G): """Returns a generator of all possible sets of 3 nodes in a DiGraph. @@ -355,7 +354,7 @@ def all_triplets(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable(returns_graph=True) def all_triads(G): """A generator of all possible triads in G. @@ -386,7 +385,7 @@ def all_triads(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def triads_by_type(G): """Returns a list of all triads for each triad type in a directed graph. There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three @@ -427,9 +426,9 @@ def triads_by_type(G): -------- >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)]) >>> dict = nx.triads_by_type(G) - >>> dict['120C'][0].edges() + >>> dict["120C"][0].edges() OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)]) - >>> dict['012'][0].edges() + >>> dict["012"][0].edges() OutEdgeView([(1, 2)]) References @@ -449,7 +448,7 @@ def triads_by_type(G): @not_implemented_for("undirected") -@nx._dispatch +@nx._dispatchable def triad_type(G): """Returns the sociological triad type for a triad. 
@@ -548,7 +547,7 @@ def triad_type(G): @not_implemented_for("undirected") @py_random_state(1) -@nx._dispatch(preserve_all_attrs=True) +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) def random_triad(G, seed=None): """Returns a random triad from a directed graph. diff --git a/networkx/algorithms/vitality.py b/networkx/algorithms/vitality.py index c41efd13f2c..29f98fd1bae 100644 --- a/networkx/algorithms/vitality.py +++ b/networkx/algorithms/vitality.py @@ -8,7 +8,7 @@ __all__ = ["closeness_vitality"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def closeness_vitality(G, node=None, weight=None, wiener_index=None): """Returns the closeness vitality for nodes in the graph. diff --git a/networkx/algorithms/voronoi.py b/networkx/algorithms/voronoi.py index af17f013ec8..60c45332339 100644 --- a/networkx/algorithms/voronoi.py +++ b/networkx/algorithms/voronoi.py @@ -5,7 +5,7 @@ __all__ = ["voronoi_cells"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def voronoi_cells(G, center_nodes, weight="weight"): """Returns the Voronoi cells centered at `center_nodes` with respect to the shortest-path distance metric. 
diff --git a/networkx/algorithms/walks.py b/networkx/algorithms/walks.py index 6f357ce1d42..fe341757750 100644 --- a/networkx/algorithms/walks.py +++ b/networkx/algorithms/walks.py @@ -6,7 +6,7 @@ __all__ = ["number_of_walks"] -@nx._dispatch +@nx._dispatchable def number_of_walks(G, walk_length): """Returns the number of walks connecting each pair of nodes in `G` @@ -74,7 +74,7 @@ def number_of_walks(G, walk_length): # power = sp.sparse.linalg.matrix_power(A, walk_length) power = np.linalg.matrix_power(A.toarray(), walk_length) result = { - u: {v: power[u_idx, v_idx] for v_idx, v in enumerate(G)} + u: {v: power.item(u_idx, v_idx) for v_idx, v in enumerate(G)} for u_idx, u in enumerate(G) } return result diff --git a/networkx/algorithms/wiener.py b/networkx/algorithms/wiener.py index 9e81cdc72ca..cb55d609f7d 100644 --- a/networkx/algorithms/wiener.py +++ b/networkx/algorithms/wiener.py @@ -1,39 +1,49 @@ -"""Functions related to the Wiener index of a graph.""" +"""Functions related to the Wiener Index of a graph. -from itertools import chain +The Wiener Index is a topological measure of a graph +related to the distance between nodes and their degree. +The Schultz Index and Gutman Index are similar measures. +They are used to categorize molecules via the network of +atoms connected by chemical bonds. The indices are +correlated with functional aspects of the molecules. -import networkx as nx +References +---------- +.. [1] `Wikipedia: Wiener Index `_ +.. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices, + Croatica Chemica Acta, 71 (1998), 21-51. + https://hrcak.srce.hr/132323 +""" -from .components import is_connected, is_strongly_connected -from .shortest_paths import shortest_path_length as spl +import itertools as it -__all__ = ["wiener_index"] +import networkx as nx -#: Rename the :func:`chain.from_iterable` function for the sake of -#: brevity. 
-chaini = chain.from_iterable +__all__ = ["wiener_index", "schultz_index", "gutman_index"] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def wiener_index(G, weight=None): """Returns the Wiener index of the given graph. The *Wiener index* of a graph is the sum of the shortest-path - distances between each pair of reachable nodes. For pairs of nodes - in undirected graphs, only one orientation of the pair is counted. + (weighted) distances between each pair of reachable nodes. + For pairs of nodes in undirected graphs, only one orientation + of the pair is counted. Parameters ---------- G : NetworkX graph - weight : object - The edge attribute to use as distance when computing - shortest-path distances. This is passed directly to the - :func:`networkx.shortest_path_length` function. + weight : string or None, optional (default: None) + If None, every edge has weight 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + The edge weights are used to compute shortest-path distances. Returns ------- - float + number The Wiener index of the graph `G`. Raises @@ -68,12 +78,149 @@ def wiener_index(G, weight=None): >>> nx.wiener_index(G) inf + References + ---------- + .. [1] `Wikipedia: Wiener Index `_ """ - is_directed = G.is_directed() - if (is_directed and not is_strongly_connected(G)) or ( - not is_directed and not is_connected(G) - ): + connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G) + if not connected: return float("inf") - total = sum(chaini(p.values() for v, p in spl(G, weight=weight))) + + spl = nx.shortest_path_length(G, weight=weight) + total = sum(it.chain.from_iterable(nbrs.values() for node, nbrs in spl)) # Need to account for double counting pairs of nodes in undirected graphs. 
- return total if is_directed else total / 2 + return total if G.is_directed() else total / 2 + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def schultz_index(G, weight=None): + r"""Returns the Schultz Index (of the first kind) of `G` + + The *Schultz Index* [3]_ of a graph is the sum over all node pairs of + distances times the sum of degrees. Consider an undirected graph `G`. + For each node pair ``(u, v)`` compute ``dist(u, v) * (deg(u) + deg(v))`` + where ``dist`` is the shortest path length between two nodes and ``deg`` + is the degree of a node. + + The Schultz Index is the sum of these quantities over all (unordered) + pairs of nodes. + + Parameters + ---------- + G : NetworkX graph + The undirected graph of interest. + weight : string or None, optional (default: None) + If None, every edge has weight 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + The edge weights are used to compute shortest-path distances. + + Returns + ------- + number + The first kind of Schultz Index of the graph `G`. + + Examples + -------- + The Schultz Index of the (unweighted) complete graph on *n* nodes + equals the number of pairs of the *n* nodes times ``2 * (n - 1)``, + since each pair of nodes is at distance one and the sum of degree + of two nodes is ``2 * (n - 1)``. + + >>> n = 10 + >>> G = nx.complete_graph(n) + >>> nx.schultz_index(G) == (n * (n - 1) / 2) * (2 * (n - 1)) + True + + Graph that is disconnected + + >>> nx.schultz_index(nx.empty_graph(2)) + inf + + References + ---------- + .. [1] I. Gutman, Selected properties of the Schultz molecular topological index, + J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089. + https://doi.org/10.1021/ci00021a009 + .. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices, + Croatica Chemica Acta, 71 (1998), 21-51. + https://hrcak.srce.hr/132323 + .. [3] H. P. 
Schultz, Topological organic chemistry. 1. + Graph theory and topological indices of alkanes,i + J. Chem. Inf. Comput. Sci. 29 (1989), 239–257. + + """ + if not nx.is_connected(G): + return float("inf") + + spl = nx.shortest_path_length(G, weight=weight) + d = dict(G.degree, weight=weight) + return sum(dist * (d[u] + d[v]) for u, info in spl for v, dist in info.items()) / 2 + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def gutman_index(G, weight=None): + r"""Returns the Gutman Index for the graph `G`. + + The *Gutman Index* measures the topology of networks, especially for molecule + networks of atoms connected by bonds [1]_. It is also called the Schultz Index + of the second kind [2]_. + + Consider an undirected graph `G` with node set ``V``. + The Gutman Index of a graph is the sum over all (unordered) pairs of nodes + of nodes ``(u, v)``, with distance ``dist(u, v)`` and degrees ``deg(u)`` + and ``deg(v)``, of ``dist(u, v) * deg(u) * deg(v)`` + + Parameters + ---------- + G : NetworkX graph + + weight : string or None, optional (default: None) + If None, every edge has weight 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + The edge weights are used to computing shortest-path distances. + + Returns + ------- + number + The Gutman Index of the graph `G`. + + Examples + -------- + The Gutman Index of the (unweighted) complete graph on *n* nodes + equals the number of pairs of the *n* nodes times ``(n - 1) * (n - 1)``, + since each pair of nodes is at distance one and the product of degree of two + vertices is ``(n - 1) * (n - 1)``. + + >>> n = 10 + >>> G = nx.complete_graph(n) + >>> nx.gutman_index(G) == (n * (n - 1) / 2) * ((n - 1) * (n - 1)) + True + + Graphs that are disconnected + + >>> G = nx.empty_graph(2) + >>> nx.gutman_index(G) + inf + + References + ---------- + .. [1] M.V. Diudeaa and I. 
Gutman, Wiener-Type Topological Indices, + Croatica Chemica Acta, 71 (1998), 21-51. + https://hrcak.srce.hr/132323 + .. [2] I. Gutman, Selected properties of the Schultz molecular topological index, + J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089. + https://doi.org/10.1021/ci00021a009 + + """ + if not nx.is_connected(G): + return float("inf") + + spl = nx.shortest_path_length(G, weight=weight) + d = dict(G.degree, weight=weight) + return sum(dist * d[u] * d[v] for u, vinfo in spl for v, dist in vinfo.items()) / 2 diff --git a/networkx/classes/digraph.py b/networkx/classes/digraph.py index 988ed599892..945643776b4 100644 --- a/networkx/classes/digraph.py +++ b/networkx/classes/digraph.py @@ -1026,7 +1026,7 @@ def in_edges(self): Examples -------- >>> G = nx.DiGraph() - >>> G.add_edge(1, 2, color='blue') + >>> G.add_edge(1, 2, color="blue") >>> G.in_edges() InEdgeView([(1, 2)]) >>> G.in_edges(nbunch=2) diff --git a/networkx/classes/function.py b/networkx/classes/function.py index 3f40eab1583..726b3e23b26 100644 --- a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -80,7 +80,7 @@ def degree(G, nbunch=None, weight=None): def neighbors(G, n): """Returns an iterator over all neighbors of node n. - This function wraps the :func:``G.neighbors ` function. + This function wraps the :func:`G.neighbors ` function. 
""" return G.neighbors(n) @@ -156,7 +156,7 @@ def degree_histogram(G): (Order(number_of_edges)) """ counts = Counter(d for n, d in G.degree()) - return [counts.get(i, 0) for i in range(max(counts) + 1)] + return [counts.get(i, 0) for i in range(max(counts) + 1 if counts else 0)] def is_directed(G): @@ -809,13 +809,13 @@ def set_edge_attributes(G, values, name=None): if G.is_multigraph(): for (u, v, key), value in values.items(): try: - G[u][v][key][name] = value + G._adj[u][v][key][name] = value except KeyError: pass else: for (u, v), value in values.items(): try: - G[u][v][name] = value + G._adj[u][v][name] = value except KeyError: pass except AttributeError: @@ -827,13 +827,13 @@ def set_edge_attributes(G, values, name=None): if G.is_multigraph(): for (u, v, key), d in values.items(): try: - G[u][v][key].update(d) + G._adj[u][v][key].update(d) except KeyError: pass else: for (u, v), d in values.items(): try: - G[u][v].update(d) + G._adj[u][v].update(d) except KeyError: pass @@ -918,11 +918,10 @@ def non_neighbors(graph, node): Returns ------- - non_neighbors : iterator - Iterator of nodes in the graph that are not neighbors of the node. + non_neighbors : set + Set of nodes in the graph that are not neighbors of the node. """ - nbors = set(neighbors(graph, node)) | {node} - return (nnode for nnode in graph if nnode not in nbors) + return graph._adj.keys() - graph._adj[node].keys() - {node} def non_edges(graph): @@ -964,8 +963,8 @@ def common_neighbors(G, u, v): Returns ------- - cnbors : iterator - Iterator of common neighbors of u and v in the graph. + cnbors : set + Set of common neighbors of u and v in the graph. Raises ------ @@ -983,9 +982,7 @@ def common_neighbors(G, u, v): if v not in G: raise nx.NetworkXError("v is not in the graph.") - # Return a generator explicitly instead of yielding so that the above - # checks are executed eagerly. 
- return (w for w in G[u] if w in G[v] and w not in (u, v)) + return G._adj[u].keys() & G._adj[v].keys() - {u, v} def is_weighted(G, edge=None, weight="weight"): @@ -1114,7 +1111,7 @@ def is_empty(G): is the number of nodes in the graph. """ - return not any(G.adj.values()) + return not any(G._adj.values()) def nodes_with_selfloops(G): @@ -1141,7 +1138,7 @@ def nodes_with_selfloops(G): [1] """ - return (n for n, nbrs in G.adj.items() if n in nbrs) + return (n for n, nbrs in G._adj.items() if n in nbrs) def selfloop_edges(G, data=False, keys=False, default=None): @@ -1191,56 +1188,59 @@ def selfloop_edges(G, data=False, keys=False, default=None): if keys is True: return ( (n, n, k, d) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for k, d in nbrs[n].items() ) else: return ( (n, n, d) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for d in nbrs[n].values() ) else: - return ((n, n, nbrs[n]) for n, nbrs in G.adj.items() if n in nbrs) + return ((n, n, nbrs[n]) for n, nbrs in G._adj.items() if n in nbrs) elif data is not False: if G.is_multigraph(): if keys is True: return ( (n, n, k, d.get(data, default)) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for k, d in nbrs[n].items() ) else: return ( (n, n, d.get(data, default)) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for d in nbrs[n].values() ) else: return ( (n, n, nbrs[n].get(data, default)) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs ) else: if G.is_multigraph(): if keys is True: return ( - (n, n, k) for n, nbrs in G.adj.items() if n in nbrs for k in nbrs[n] + (n, n, k) + for n, nbrs in G._adj.items() + if n in nbrs + for k in nbrs[n] ) else: return ( (n, n) - for n, nbrs in G.adj.items() + for n, nbrs in G._adj.items() if n in nbrs for i in range(len(nbrs[n])) # for easy edge removal (#4068) ) else: - return ((n, n) for n, nbrs in G.adj.items() if n in nbrs) + return 
((n, n) for n, nbrs in G._adj.items() if n in nbrs) def number_of_selfloops(G): @@ -1288,7 +1288,10 @@ def is_path(G, path): True if `path` is a valid path in `G` """ - return all((node in G and nbr in G[node]) for node, nbr in nx.utils.pairwise(path)) + try: + return all(nbr in G._adj[node] for node, nbr in nx.utils.pairwise(path)) + except (KeyError, TypeError): + return False def path_weight(G, path, weight): @@ -1323,7 +1326,7 @@ def path_weight(G, path, weight): raise nx.NetworkXNoPath("path does not exist") for node, nbr in nx.utils.pairwise(path): if multigraph: - cost += min(v[weight] for v in G[node][nbr].values()) + cost += min(v[weight] for v in G._adj[node][nbr].values()) else: - cost += G[node][nbr][weight] + cost += G._adj[node][nbr][weight] return cost diff --git a/networkx/classes/graph.py b/networkx/classes/graph.py index 5bbf079a47b..02d332be1a6 100644 --- a/networkx/classes/graph.py +++ b/networkx/classes/graph.py @@ -1211,20 +1211,12 @@ def update(self, edges=None, nodes=None): >>> DG = nx.DiGraph() >>> # dict-of-dict-of-attribute >>> adj = {1: {2: 1.3, 3: 0.7}, 2: {1: 1.4}, 3: {1: 0.7}} - >>> e = [ - ... (u, v, {"weight": d}) - ... for u, nbrs in adj.items() - ... for v, d in nbrs.items() - ... ] + >>> e = [(u, v, {"weight": d}) for u, nbrs in adj.items() for v, d in nbrs.items()] >>> DG.update(edges=e, nodes=adj) >>> # dict-of-dict-of-dict >>> adj = {1: {2: {"weight": 1.3}, 3: {"color": 0.7, "weight": 1.2}}} - >>> e = [ - ... (u, v, {"weight": d}) - ... for u, nbrs in adj.items() - ... for v, d in nbrs.items() - ... 
] + >>> e = [(u, v, {"weight": d}) for u, nbrs in adj.items() for v, d in nbrs.items()] >>> DG.update(edges=e, nodes=adj) >>> # predecessor adjacency (dict-of-set) @@ -1547,8 +1539,8 @@ def clear_edges(self): >>> list(G.edges) [] """ - for neighbours_dict in self._adj.values(): - neighbours_dict.clear() + for nbr_dict in self._adj.values(): + nbr_dict.clear() def is_multigraph(self): """Returns True if graph is a multigraph, False otherwise.""" @@ -1800,14 +1792,22 @@ def subgraph(self, nodes): SG = G.__class__() SG.add_nodes_from((n, G.nodes[n]) for n in largest_wcc) if SG.is_multigraph(): - SG.add_edges_from((n, nbr, key, d) - for n, nbrs in G.adj.items() if n in largest_wcc - for nbr, keydict in nbrs.items() if nbr in largest_wcc - for key, d in keydict.items()) + SG.add_edges_from( + (n, nbr, key, d) + for n, nbrs in G.adj.items() + if n in largest_wcc + for nbr, keydict in nbrs.items() + if nbr in largest_wcc + for key, d in keydict.items() + ) else: - SG.add_edges_from((n, nbr, d) - for n, nbrs in G.adj.items() if n in largest_wcc - for nbr, d in nbrs.items() if nbr in largest_wcc) + SG.add_edges_from( + (n, nbr, d) + for n, nbrs in G.adj.items() + if n in largest_wcc + for nbr, d in nbrs.items() + if nbr in largest_wcc + ) SG.graph.update(G.graph) Examples diff --git a/networkx/classes/graphviews.py b/networkx/classes/graphviews.py index 3fb08df08e3..275bbd71c24 100644 --- a/networkx/classes/graphviews.py +++ b/networkx/classes/graphviews.py @@ -180,7 +180,6 @@ def subgraph_view(G, *, filter_node=no_filter, filter_edge=no_filter): >>> def filter_node(n1): ... return n1 != 5 - ... >>> view = nx.subgraph_view(G, filter_node=filter_node) >>> view.nodes() NodeView((0, 1, 2, 3, 4)) @@ -191,12 +190,15 @@ def subgraph_view(G, *, filter_node=no_filter, filter_edge=no_filter): >>> G[3][4]["cross_me"] = False >>> def filter_edge(n1, n2): ... return G[n1][n2].get("cross_me", True) - ... 
>>> view = nx.subgraph_view(G, filter_edge=filter_edge) >>> view.edges() EdgeView([(0, 1), (1, 2), (2, 3), (4, 5)]) - >>> view = nx.subgraph_view(G, filter_node=filter_node, filter_edge=filter_edge,) + >>> view = nx.subgraph_view( + ... G, + ... filter_node=filter_node, + ... filter_edge=filter_edge, + ... ) >>> view.nodes() NodeView((0, 1, 2, 3, 4)) >>> view.edges() diff --git a/networkx/classes/multidigraph.py b/networkx/classes/multidigraph.py index fb8b1a35c40..5a278aa967f 100644 --- a/networkx/classes/multidigraph.py +++ b/networkx/classes/multidigraph.py @@ -639,7 +639,7 @@ def edges(self): >>> G = nx.MultiDiGraph() >>> nx.add_path(G, [0, 1, 2]) >>> key = G.add_edge(2, 3, weight=5) - >>> key2 = G.add_edge(1, 2) # second edge between these nodes + >>> key2 = G.add_edge(1, 2) # second edge between these nodes >>> [e for e in G.edges()] [(0, 1), (1, 2), (1, 2), (2, 3)] >>> list(G.edges(data=True)) # default data is {} (empty dict) @@ -746,9 +746,9 @@ def degree(self): 1 >>> list(G.degree([0, 1, 2])) [(0, 1), (1, 2), (2, 2)] - >>> G.add_edge(0, 1) # parallel edge + >>> G.add_edge(0, 1) # parallel edge 1 - >>> list(G.degree([0, 1, 2])) # parallel edges are counted + >>> list(G.degree([0, 1, 2])) # parallel edges are counted [(0, 2), (1, 3), (2, 2)] """ @@ -797,9 +797,9 @@ def in_degree(self): 0 >>> list(G.in_degree([0, 1, 2])) [(0, 0), (1, 1), (2, 1)] - >>> G.add_edge(0, 1) # parallel edge + >>> G.add_edge(0, 1) # parallel edge 1 - >>> list(G.in_degree([0, 1, 2])) # parallel edges counted + >>> list(G.in_degree([0, 1, 2])) # parallel edges counted [(0, 0), (1, 2), (2, 1)] """ @@ -847,9 +847,9 @@ def out_degree(self): 1 >>> list(G.out_degree([0, 1, 2])) [(0, 1), (1, 1), (2, 1)] - >>> G.add_edge(0, 1) # parallel edge + >>> G.add_edge(0, 1) # parallel edge 1 - >>> list(G.out_degree([0, 1, 2])) # counts parallel edges + >>> list(G.out_degree([0, 1, 2])) # counts parallel edges [(0, 2), (1, 1), (2, 1)] """ diff --git a/networkx/classes/multigraph.py 
b/networkx/classes/multigraph.py index bbf2ce24136..b21968000da 100644 --- a/networkx/classes/multigraph.py +++ b/networkx/classes/multigraph.py @@ -390,7 +390,7 @@ def adj(self): >>> G.edges[1, 2, 0]["weight"] = 3 >>> result = set() >>> for edgekey, data in G[1][2].items(): - ... result.add(data.get('weight', 1)) + ... result.add(data.get("weight", 1)) >>> result {1, 3} diff --git a/networkx/classes/reportviews.py b/networkx/classes/reportviews.py index 59a16243f98..264823539a6 100644 --- a/networkx/classes/reportviews.py +++ b/networkx/classes/reportviews.py @@ -239,11 +239,13 @@ def data(self, data=True, default=None): Examples -------- >>> G = nx.Graph() - >>> G.add_nodes_from([ - ... (0, {"color": "red", "weight": 10}), - ... (1, {"color": "blue"}), - ... (2, {"color": "yellow", "weight": 2}) - ... ]) + >>> G.add_nodes_from( + ... [ + ... (0, {"color": "red", "weight": 10}), + ... (1, {"color": "blue"}), + ... (2, {"color": "yellow", "weight": 2}), + ... ] + ... ) Accessing node data with ``data=True`` (the default) returns a NodeDataView mapping each node to all of its attributes: @@ -1129,11 +1131,13 @@ def data(self, data=True, default=None, nbunch=None): Examples -------- >>> G = nx.Graph() - >>> G.add_edges_from([ - ... (0, 1, {"dist": 3, "capacity": 20}), - ... (1, 2, {"dist": 4}), - ... (2, 0, {"dist": 5}) - ... ]) + >>> G.add_edges_from( + ... [ + ... (0, 1, {"dist": 3, "capacity": 20}), + ... (1, 2, {"dist": 4}), + ... (2, 0, {"dist": 5}), + ... ] + ... ) Accessing edge data with ``data=True`` (the default) returns an edge data view object listing each edge with all of its attributes: diff --git a/networkx/classes/tests/dispatch_interface.py b/networkx/classes/tests/dispatch_interface.py index 5cef755241b..96c363b93bb 100644 --- a/networkx/classes/tests/dispatch_interface.py +++ b/networkx/classes/tests/dispatch_interface.py @@ -6,7 +6,7 @@ # This is comprehensive, but only tests the `test_override_dispatch` # function in networkx.classes.backends. 
-# To test the `_dispatch` function directly, several tests scattered throughout +# To test the `_dispatchable` function directly, several tests scattered throughout # NetworkX have been augmented to test normal and dispatch mode. # Searching for `dispatch_interface` should locate the specific tests. diff --git a/networkx/classes/tests/test_backends.py b/networkx/classes/tests/test_backends.py index 7adb7009aef..cc171cf5be1 100644 --- a/networkx/classes/tests/test_backends.py +++ b/networkx/classes/tests/test_backends.py @@ -17,14 +17,22 @@ def test_dispatch_kwds_vs_args(): def test_pickle(): + count = 0 for name, func in nx.utils.backends._registered_algorithms.items(): - assert pickle.loads(pickle.dumps(func)) is func + try: + # Some functions can't be pickled, but it's not b/c of _dispatchable + pickled = pickle.dumps(func) + except pickle.PicklingError: + continue + assert pickle.loads(pickled) is func + count += 1 + assert count > 0 assert pickle.loads(pickle.dumps(nx.inverse_line_graph)) is nx.inverse_line_graph @pytest.mark.skipif( - "not nx._dispatch._automatic_backends " - "or nx._dispatch._automatic_backends[0] != 'nx-loopback'" + "not nx._dispatchable._automatic_backends " + "or nx._dispatchable._automatic_backends[0] != 'nx-loopback'" ) def test_graph_converter_needs_backend(): # When testing, `nx.from_scipy_sparse_array` will *always* call the backend @@ -74,3 +82,7 @@ def convert_to_nx(obj, *, name=None): del LoopbackDispatcher.from_scipy_sparse_array with pytest.raises(ImportError, match="Unable to load"): nx.from_scipy_sparse_array(A, backend="bad-backend-name") + + +def test_dispatchable_are_functions(): + assert type(nx.pagerank) is type(nx.pagerank.orig_func) diff --git a/networkx/classes/tests/test_function.py b/networkx/classes/tests/test_function.py index 61b73c2d256..ee4cca8372c 100644 --- a/networkx/classes/tests/test_function.py +++ b/networkx/classes/tests/test_function.py @@ -6,6 +6,11 @@ from networkx.utils import edges_equal, 
nodes_equal +def test_degree_histogram_empty(): + G = nx.Graph() + assert nx.degree_histogram(G) == [] + + class TestFunction: def setup_method(self): self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test") @@ -297,13 +302,13 @@ def test_neighbors_complete_graph(self): def test_non_neighbors(self): graph = nx.complete_graph(100) pop = random.sample(list(graph), 1) - nbors = list(nx.non_neighbors(graph, pop[0])) + nbors = nx.non_neighbors(graph, pop[0]) # should be all the other vertices in the graph assert len(nbors) == 0 graph = nx.path_graph(100) node = random.sample(list(graph), 1)[0] - nbors = list(nx.non_neighbors(graph, node)) + nbors = nx.non_neighbors(graph, node) # should be all the other vertices in the graph if node != 0 and node != 99: assert len(nbors) == 97 @@ -312,13 +317,13 @@ def test_non_neighbors(self): # create a star graph with 99 outer nodes graph = nx.star_graph(99) - nbors = list(nx.non_neighbors(graph, 0)) + nbors = nx.non_neighbors(graph, 0) assert len(nbors) == 0 # disconnected graph graph = nx.Graph() graph.add_nodes_from(range(10)) - nbors = list(nx.non_neighbors(graph, 0)) + nbors = nx.non_neighbors(graph, 0) assert len(nbors) == 9 def test_non_edges(self): diff --git a/networkx/conftest.py b/networkx/conftest.py index ef3551f4156..2f5f6cfdaf7 100644 --- a/networkx/conftest.py +++ b/networkx/conftest.py @@ -46,11 +46,11 @@ def pytest_configure(config): if backend is None: backend = os.environ.get("NETWORKX_TEST_BACKEND") if backend: - networkx.utils.backends._dispatch._automatic_backends = [backend] + networkx.utils.backends._dispatchable._automatic_backends = [backend] fallback_to_nx = config.getoption("--fallback-to-nx") if not fallback_to_nx: fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX") - networkx.utils.backends._dispatch._fallback_to_nx = bool(fallback_to_nx) + networkx.utils.backends._dispatchable._fallback_to_nx = bool(fallback_to_nx) # nx-loopback backend is only available when testing backends = 
entry_points(name="nx-loopback", group="networkx.backends") if backends: @@ -69,8 +69,8 @@ def pytest_configure(config): def pytest_collection_modifyitems(config, items): # Setting this to True here allows tests to be set up before dispatching # any function call to a backend. - networkx.utils.backends._dispatch._is_testing = True - if automatic_backends := networkx.utils.backends._dispatch._automatic_backends: + networkx.utils.backends._dispatchable._is_testing = True + if automatic_backends := networkx.utils.backends._dispatchable._automatic_backends: # Allow pluggable backends to add markers to tests (such as skip or xfail) # when running in auto-conversion test mode backend = networkx.utils.backends.backends[automatic_backends[0]].load() @@ -89,18 +89,15 @@ def pytest_collection_modifyitems(config, items): # TODO: The warnings below need to be dealt with, but for now we silence them. @pytest.fixture(autouse=True) def set_warnings(): - warnings.filterwarnings( - "ignore", category=DeprecationWarning, message="nx.nx_pydot" - ) warnings.filterwarnings( "ignore", - category=DeprecationWarning, - message="single_target_shortest_path_length will", + category=FutureWarning, + message="\n\nsingle_target_shortest_path_length", ) warnings.filterwarnings( "ignore", - category=DeprecationWarning, - message="shortest_path for all_pairs", + category=FutureWarning, + message="\n\nshortest_path", ) warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\nforest_str is deprecated" @@ -120,7 +117,9 @@ def set_warnings(): "ignore", category=DeprecationWarning, message="\n\nThe `normalized`" ) warnings.filterwarnings( - "ignore", category=DeprecationWarning, message="function `join` is deprecated" + "ignore", + category=DeprecationWarning, + message="The function `join` is deprecated", ) warnings.filterwarnings( "ignore", @@ -133,6 +132,12 @@ def set_warnings(): warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nrandom_triad" ) + 
warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="minimal_d_separator" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="d_separated" + ) warnings.filterwarnings("ignore", category=DeprecationWarning, message="\n\nk_core") warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nk_shell" @@ -143,18 +148,17 @@ def set_warnings(): warnings.filterwarnings( "ignore", category=DeprecationWarning, message="\n\nk_corona" ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\ntotal_spanning_tree_weight" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message=r"\n\nThe 'create=matrix'" + ) @pytest.fixture(autouse=True) def add_nx(doctest_namespace): doctest_namespace["nx"] = networkx - # TODO: remove the try-except block when we require numpy >= 2 - try: - import numpy as np - - np.set_printoptions(legacy="1.21") - except ImportError: - pass # What dependencies are installed? @@ -219,6 +223,7 @@ def add_nx(doctest_namespace): "algorithms/node_classification.py", "algorithms/non_randomness.py", "algorithms/shortest_paths/dense.py", + "algorithms/tree/mst.py", "generators/expanders.py", "linalg/bethehessianmatrix.py", "linalg/laplacianmatrix.py", diff --git a/networkx/convert.py b/networkx/convert.py index 14598024684..7cc8fe40126 100644 --- a/networkx/convert.py +++ b/networkx/convert.py @@ -176,7 +176,7 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False): raise nx.NetworkXError("Input is not a known data type for conversion.") -@nx._dispatch +@nx._dispatchable def to_dict_of_lists(G, nodelist=None): """Returns adjacency representation of graph as a dictionary of lists. @@ -202,7 +202,7 @@ def to_dict_of_lists(G, nodelist=None): return d -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_dict_of_lists(d, create_using=None): """Returns a graph from a dictionary of lists. 
@@ -278,9 +278,7 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): For a more custom approach to handling edge data, try:: dod = { - n: { - nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items() - } + n: {nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items()} for n, nbrdict in G.adj.items() } @@ -300,9 +298,9 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): >>> G = nx.Graph() >>> G.add_edges_from( ... [ - ... (0, 1, {'weight': 1.0}), - ... (1, 2, {'weight': 2.0}), - ... (2, 0, {'weight': 1.0}), + ... (0, 1, {"weight": 1.0}), + ... (1, 2, {"weight": 2.0}), + ... (2, 0, {"weight": 1.0}), ... ] ... ) >>> d = nx.to_dict_of_dicts(G) @@ -310,7 +308,7 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): {0: {1: {'weight': 1.0}, 2: {'weight': 1.0}}, 1: {0: {'weight': 1.0}, 2: {'weight': 2.0}}, 2: {1: {'weight': 2.0}, 0: {'weight': 1.0}}} - >>> d[1][2]['weight'] + >>> d[1][2]["weight"] 2.0 If `edge_data` is not `None`, edge data in the original graph (if any) is @@ -325,15 +323,15 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): This also applies to MultiGraphs: edge data is preserved by default: >>> G = nx.MultiGraph() - >>> G.add_edge(0, 1, key='a', weight=1.0) + >>> G.add_edge(0, 1, key="a", weight=1.0) 'a' - >>> G.add_edge(0, 1, key='b', weight=5.0) + >>> G.add_edge(0, 1, key="b", weight=5.0) 'b' >>> d = nx.to_dict_of_dicts(G) >>> d # doctest: +SKIP {0: {1: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}, 1: {0: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}} - >>> d[0][1]['b']['weight'] + >>> d[0][1]["b"]["weight"] 5.0 But multi edge data is lost if `edge_data` is not `None`: @@ -364,7 +362,7 @@ def to_dict_of_dicts(G, nodelist=None, edge_data=None): return dod -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_dict_of_dicts(d, create_using=None, multigraph_input=False): """Returns a graph from a dictionary of dictionaries. 
@@ -451,7 +449,7 @@ def from_dict_of_dicts(d, create_using=None, multigraph_input=False): return G -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def to_edgelist(G, nodelist=None): """Returns a list of edges in the graph. @@ -469,7 +467,7 @@ def to_edgelist(G, nodelist=None): return G.edges(nodelist, data=True) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_edgelist(edgelist, create_using=None): """Returns a graph from a list of edges. diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py index c731985bb9f..6165ac18e31 100644 --- a/networkx/convert_matrix.py +++ b/networkx/convert_matrix.py @@ -43,7 +43,7 @@ ] -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def to_pandas_adjacency( G, nodelist=None, @@ -101,20 +101,21 @@ def to_pandas_adjacency( diagonal matrix entry value to the weight attribute of the edge (or the number 1 if the edge has no weight attribute). If the alternate convention of doubling the edge weight is desired the - resulting Pandas DataFrame can be modified as follows: - - >>> import pandas as pd - >>> pd.options.display.max_columns = 20 - >>> import numpy as np - >>> G = nx.Graph([(1, 1)]) - >>> df = nx.to_pandas_adjacency(G, dtype=int) - >>> df - 1 - 1 1 - >>> df.values[np.diag_indices_from(df)] *= 2 - >>> df - 1 - 1 2 + resulting Pandas DataFrame can be modified as follows:: + + >>> import pandas as pd + >>> G = nx.Graph([(1, 1), (2, 2)]) + >>> df = nx.to_pandas_adjacency(G) + >>> df + 1 2 + 1 1.0 0.0 + 2 0.0 1.0 + >>> diag_idx = list(range(len(df))) + >>> df.iloc[diag_idx, diag_idx] *= 2 + >>> df + 1 2 + 1 2.0 0.0 + 2 0.0 2.0 Examples -------- @@ -150,7 +151,7 @@ def to_pandas_adjacency( return pd.DataFrame(data=M, index=nodelist, columns=nodelist) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_pandas_adjacency(df, create_using=None): r"""Returns a graph from Pandas DataFrame. 
@@ -219,7 +220,7 @@ def from_pandas_adjacency(df, create_using=None): return G -@nx._dispatch(preserve_edge_attrs=True) +@nx._dispatchable(preserve_edge_attrs=True) def to_pandas_edgelist( G, source="source", @@ -273,9 +274,9 @@ def to_pandas_edgelist( 0 A B 1 7 1 C E 9 10 - >>> G = nx.MultiGraph([('A', 'B', {'cost': 1}), ('A', 'B', {'cost': 9})]) - >>> df = nx.to_pandas_edgelist(G, nodelist=['A', 'C'], edge_key='ekey') - >>> df[['source', 'target', 'cost', 'ekey']] + >>> G = nx.MultiGraph([("A", "B", {"cost": 1}), ("A", "B", {"cost": 9})]) + >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"], edge_key="ekey") + >>> df[["source", "target", "cost", "ekey"]] source target cost ekey 0 A B 1 0 1 A B 9 1 @@ -311,7 +312,7 @@ def to_pandas_edgelist( return pd.DataFrame(edgelistdict, dtype=dtype) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_pandas_edgelist( df, source="source", @@ -483,7 +484,7 @@ def from_pandas_edgelist( return g -@nx._dispatch(edge_attrs="weight") +@nx._dispatchable(edge_attrs="weight") def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"): """Returns the graph adjacency matrix as a SciPy sparse array. 
@@ -623,10 +624,11 @@ def _csr_gen_triples(A): """ nrows = A.shape[0] - data, indices, indptr = A.data, A.indices, A.indptr - for i in range(nrows): - for j in range(indptr[i], indptr[i + 1]): - yield i, int(indices[j]), data[j] + indptr, dst_indices, data = A.indptr, A.indices, A.data + import numpy as np + + src_indices = np.repeat(np.arange(nrows), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) def _csc_gen_triples(A): @@ -635,10 +637,11 @@ def _csc_gen_triples(A): """ ncols = A.shape[1] - data, indices, indptr = A.data, A.indices, A.indptr - for i in range(ncols): - for j in range(indptr[i], indptr[i + 1]): - yield int(indices[j]), i, data[j] + indptr, src_indices, data = A.indptr, A.indices, A.data + import numpy as np + + dst_indices = np.repeat(np.arange(ncols), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) def _coo_gen_triples(A): @@ -646,7 +649,7 @@ def _coo_gen_triples(A): of weighted edge triples. """ - return ((int(i), int(j), d) for i, j, d in zip(A.row, A.col, A.data)) + return zip(A.row.tolist(), A.col.tolist(), A.data.tolist()) def _dok_gen_triples(A): @@ -655,7 +658,8 @@ def _dok_gen_triples(A): """ for (r, c), v in A.items(): - yield r, c, v + # Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar + yield int(r), int(c), v.item() def _generate_weighted_edges(A): @@ -675,7 +679,7 @@ def _generate_weighted_edges(A): return _coo_gen_triples(A.tocoo()) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_scipy_sparse_array( A, parallel_edges=False, create_using=None, edge_attribute="weight" ): @@ -736,9 +740,7 @@ def from_scipy_sparse_array( as the number of parallel edges joining those two vertices: >>> A = sp.sparse.csr_array([[1, 1], [1, 2]]) - >>> G = nx.from_scipy_sparse_array( - ... A, parallel_edges=True, create_using=nx.MultiGraph - ... 
) + >>> G = nx.from_scipy_sparse_array(A, parallel_edges=True, create_using=nx.MultiGraph) >>> G[1][1] AtlasView({0: {'weight': 1}, 1: {'weight': 1}}) @@ -780,7 +782,7 @@ def from_scipy_sparse_array( return G -@nx._dispatch(edge_attrs="weight") # edge attrs may also be obtained from `dtype` +@nx._dispatchable(edge_attrs="weight") # edge attrs may also be obtained from `dtype` def to_numpy_array( G, nodelist=None, @@ -934,7 +936,7 @@ def to_numpy_array( >>> G.add_edge(2, 0, weight=0) >>> G.add_edge(2, 1, weight=0) >>> G.add_edge(3, 0, weight=1) - >>> nx.to_numpy_array(G, nonedge=-1.) + >>> nx.to_numpy_array(G, nonedge=-1.0) array([[-1., 2., -1., 1.], [ 2., -1., 0., -1.], [-1., 0., -1., 0.], @@ -1019,7 +1021,7 @@ def to_numpy_array( return A -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_numpy_array(A, parallel_edges=False, create_using=None, edge_attr="weight"): """Returns a graph from a 2D NumPy array. diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py index fa120d67074..0167df56836 100644 --- a/networkx/drawing/layout.py +++ b/networkx/drawing/layout.py @@ -32,6 +32,7 @@ "fruchterman_reingold_layout", "spiral_layout", "multipartite_layout", + "bfs_layout", "arf_layout", ] @@ -1030,8 +1031,9 @@ def multipartite_layout(G, subset_key="subset", align="vertical", scale=1, cente G : NetworkX graph or list of nodes A position will be assigned to every node in G. - subset_key : string (default='subset') - Key of node data to be used as layer subset. + subset_key : string or dict (default='subset') + If a string, the key of node data in G that holds the node subset. + If a dict, keyed by layer number to the nodes in that layer/subset. align : string (default='vertical') The alignment of nodes. Vertical or horizontal. 
@@ -1052,6 +1054,12 @@ def multipartite_layout(G, subset_key="subset", align="vertical", scale=1, cente >>> G = nx.complete_multipartite_graph(28, 16, 10) >>> pos = nx.multipartite_layout(G) + or use a dict to provide the layers of the layout + + >>> G = nx.Graph([(0, 1), (1, 2), (1, 3), (3, 4)]) + >>> layers = {"a": [0], "b": [1], "c": [2, 3], "d": [4]} + >>> pos = nx.multipartite_layout(G, subset_key=layers) + Notes ----- This algorithm currently only works in two dimensions and does not @@ -1071,25 +1079,31 @@ def multipartite_layout(G, subset_key="subset", align="vertical", scale=1, cente if len(G) == 0: return {} - layers = {} - for v, data in G.nodes(data=True): - try: - layer = data[subset_key] - except KeyError: - msg = "all nodes must have subset_key (default='subset') as data" - raise ValueError(msg) - layers[layer] = [v] + layers.get(layer, []) + try: + # check if subset_key is dict-like + if len(G) != sum(len(nodes) for nodes in subset_key.values()): + raise nx.NetworkXError( + "all nodes must be in one subset of `subset_key` dict" + ) + except AttributeError: + # subset_key is not a dict, hence a string + node_to_subset = nx.get_node_attributes(G, subset_key) + if len(node_to_subset) != len(G): + raise nx.NetworkXError( + f"all nodes need a subset_key attribute: {subset_key}" + ) + subset_key = nx.utils.groups(node_to_subset) # Sort by layer, if possible try: - layers = sorted(layers.items()) + layers = dict(sorted(subset_key.items())) except TypeError: - layers = list(layers.items()) + layers = subset_key pos = None nodes = [] width = len(layers) - for i, (_, layer) in enumerate(layers): + for i, layer in enumerate(layers.values()): height = len(layer) xs = np.repeat(i, height) ys = np.arange(0, height, dtype=float) @@ -1295,3 +1309,50 @@ def rescale_layout_dict(pos, scale=1): pos_v = np.array(list(pos.values())) pos_v = rescale_layout(pos_v, scale=scale) return dict(zip(pos, pos_v)) + + +def bfs_layout(G, start, *, align="vertical", scale=1, 
center=None): + """Position nodes according to breadth-first search algorithm. + + Parameters + ---------- + G : NetworkX graph + A position will be assigned to every node in G. + + start : node in `G` + Starting node for bfs + + center : array-like or None + Coordinate pair around which to center the layout. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> pos = nx.bfs_layout(G, 0) + + Notes + ----- + This algorithm currently only works in two dimensions and does not + try to minimize edge crossings. + + """ + G, center = _process_params(G, center, 2) + + # Compute layers with BFS + layers = dict(enumerate(nx.bfs_layers(G, start))) + + if len(G) != sum(len(nodes) for nodes in layers.values()): + raise nx.NetworkXError( + "bfs_layout didn't include all nodes. Perhaps use input graph:\n" + " G.subgraph(nx.node_connected_component(G, start))" + ) + + # Compute node positions with multipartite_layout + return multipartite_layout( + G, subset_key=layers, align=align, scale=scale, center=center + ) diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py index da0758493f3..f91031fcae6 100644 --- a/networkx/drawing/nx_agraph.py +++ b/networkx/drawing/nx_agraph.py @@ -33,7 +33,7 @@ ] -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_agraph(A, create_using=None): """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph. @@ -203,7 +203,7 @@ def write_dot(G, path): return -@nx._dispatch(name="agraph_read_dot", graphs=None) +@nx._dispatchable(name="agraph_read_dot", graphs=None, returns_graph=True) def read_dot(path): """Returns a NetworkX graph from a dot file on path. 
diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py index 1cd17818373..92c5f333e1c 100644 --- a/networkx/drawing/nx_pydot.py +++ b/networkx/drawing/nx_pydot.py @@ -19,7 +19,6 @@ - Graphviz: https://www.graphviz.org - DOT Language: http://www.graphviz.org/doc/info/lang.html """ -import warnings from locale import getpreferredencoding import networkx as nx @@ -41,20 +40,13 @@ def write_dot(G, path): Path can be a string or a file handle. """ - msg = ( - "nx.nx_pydot.write_dot depends on the pydot package, which has " - "known issues and is not actively maintained. Consider using " - "nx.nx_agraph.write_dot instead.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) P = to_pydot(G) path.write(P.to_string()) return @open_file(0, mode="r") -@nx._dispatch(name="pydot_read_dot", graphs=None) +@nx._dispatchable(name="pydot_read_dot", graphs=None, returns_graph=True) def read_dot(path): """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the dot file with the passed path. @@ -79,14 +71,6 @@ def read_dot(path): """ import pydot - msg = ( - "nx.nx_pydot.read_dot depends on the pydot package, which has " - "known issues and is not actively maintained. Consider using " - "nx.nx_agraph.read_dot instead.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - data = path.read() # List of one or more "pydot.Dot" instances deserialized from this file. @@ -96,7 +80,7 @@ def read_dot(path): return from_pydot(P_list[0]) -@nx._dispatch(graphs=None) +@nx._dispatchable(graphs=None, returns_graph=True) def from_pydot(P): """Returns a NetworkX graph from a Pydot graph. 
@@ -120,12 +104,6 @@ def from_pydot(P): >>> G = nx.Graph(nx.nx_pydot.from_pydot(A)) """ - msg = ( - "nx.nx_pydot.from_pydot depends on the pydot package, which has " - "known issues and is not actively maintained.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) if P.get_strict(None): # pydot bug: get_strict() shouldn't take argument multiedges = False @@ -220,13 +198,6 @@ def to_pydot(N): """ import pydot - msg = ( - "nx.nx_pydot.to_pydot depends on the pydot package, which has " - "known issues and is not actively maintained.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - # set Graphviz graph type if N.is_directed(): graph_type = "digraph" @@ -348,14 +319,6 @@ def graphviz_layout(G, prog="neato", root=None): ----- This is a wrapper for pydot_layout. """ - msg = ( - "nx.nx_pydot.graphviz_layout depends on the pydot package, which has " - "known issues and is not actively maintained. 
Consider using " - "nx.nx_agraph.graphviz_layout instead.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - return pydot_layout(G=G, prog=prog, root=root) @@ -392,19 +355,13 @@ def pydot_layout(G, prog="neato", root=None): If this occurs in your case, consider relabeling the nodes just for the layout computation using something similar to:: - H = nx.convert_node_labels_to_integers(G, label_attribute='node_label') - H_layout = nx.nx_pydot.pydot_layout(G, prog='dot') - G_layout = {H.nodes[n]['node_label']: p for n, p in H_layout.items()} + H = nx.convert_node_labels_to_integers(G, label_attribute="node_label") + H_layout = nx.nx_pydot.pydot_layout(G, prog="dot") + G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()} """ import pydot - msg = ( - "nx.nx_pydot.pydot_layout depends on the pydot package, which has " - "known issues and is not actively maintained.\n\n" - "See https://github.com/networkx/networkx/issues/5723" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) P = to_pydot(G) if root is not None: P.set("root", str(root)) diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py index 096e7b01d05..6171c0f7d9a 100644 --- a/networkx/drawing/nx_pylab.py +++ b/networkx/drawing/nx_pylab.py @@ -16,6 +16,8 @@ - :func:`matplotlib.pyplot.scatter` - :obj:`matplotlib.patches.FancyArrowPatch` """ +import collections +import itertools from numbers import Number import networkx as nx @@ -467,6 +469,197 @@ def draw_networkx_nodes( return node_collection +class FancyArrowFactory: + """Draw arrows with `matplotlib.patches.FancyarrowPatch`""" + + class ConnectionStyleFactory: + def __init__(self, connectionstyles, selfloop_height, ax=None): + import matplotlib as mpl + import matplotlib.path # call as mpl.path + import numpy as np + + self.ax = ax + self.mpl = mpl + self.np = np + self.base_connection_styles = [ + mpl.patches.ConnectionStyle(cs) for cs in 
connectionstyles + ] + self.n = len(self.base_connection_styles) + self.selfloop_height = selfloop_height + + def curved(self, edge_index): + return self.base_connection_styles[edge_index % self.n] + + def self_loop(self, edge_index): + def self_loop_connection(posA, posB, *args, **kwargs): + if not self.np.all(posA == posB): + raise nx.NetworkXError( + "`self_loop` connection style method" + "is only to be used for self-loops" + ) + # this is called with _screen space_ values + # so convert back to data space + data_loc = self.ax.transData.inverted().transform(posA) + v_shift = 0.1 * self.selfloop_height + h_shift = v_shift * 0.5 + # put the top of the loop first so arrow is not hidden by node + path = self.np.asarray( + [ + # 1 + [0, v_shift], + # 4 4 4 + [h_shift, v_shift], + [h_shift, 0], + [0, 0], + # 4 4 4 + [-h_shift, 0], + [-h_shift, v_shift], + [0, v_shift], + ] + ) + # Rotate self loop 90 deg. if more than 1 + # This will allow for maximum of 4 visible self loops + if edge_index % 4: + x, y = path.T + for _ in range(edge_index % 4): + x, y = y, -x + path = self.np.array([x, y]).T + return self.mpl.path.Path( + self.ax.transData.transform(data_loc + path), [1, 4, 4, 4, 4, 4, 4] + ) + + return self_loop_connection + + def __init__( + self, + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle="arc3", + node_shape="o", + arrowstyle="-", + arrowsize=10, + edge_color="k", + alpha=None, + linewidth=1.0, + style="solid", + min_source_margin=0, + min_target_margin=0, + ax=None, + ): + import matplotlib as mpl + import matplotlib.patches # call as mpl.patches + import matplotlib.pyplot as plt + import numpy as np + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + msg = "ConnectionStyleFactory arg `connectionstyle` must be str or iterable" + raise nx.NetworkXError(msg) + self.ax = ax + self.mpl = mpl + 
self.np = np + self.edge_pos = edge_pos + self.edgelist = edgelist + self.nodelist = nodelist + self.node_shape = node_shape + self.min_source_margin = min_source_margin + self.min_target_margin = min_target_margin + self.edge_indices = edge_indices + self.node_size = node_size + self.connectionstyle_factory = self.ConnectionStyleFactory( + connectionstyle, selfloop_height, ax + ) + self.arrowstyle = arrowstyle + self.arrowsize = arrowsize + self.arrow_colors = mpl.colors.colorConverter.to_rgba_array(edge_color, alpha) + self.linewidth = linewidth + self.style = style + if isinstance(arrowsize, list) and len(arrowsize) != len(edge_pos): + raise ValueError("arrowsize should have the same length as edgelist") + + def __call__(self, i): + (x1, y1), (x2, y2) = self.edge_pos[i] + shrink_source = 0 # space from source to tail + shrink_target = 0 # space from head to target + if self.np.iterable(self.node_size): # many node sizes + source, target = self.edgelist[i][:2] + source_node_size = self.node_size[self.nodelist.index(source)] + target_node_size = self.node_size[self.nodelist.index(target)] + shrink_source = self.to_marker_edge(source_node_size, self.node_shape) + shrink_target = self.to_marker_edge(target_node_size, self.node_shape) + else: + shrink_source = self.to_marker_edge(self.node_size, self.node_shape) + shrink_target = shrink_source + shrink_source = max(shrink_source, self.min_source_margin) + shrink_target = max(shrink_target, self.min_target_margin) + + # scale factor of arrow head + if isinstance(self.arrowsize, list): + mutation_scale = self.arrowsize[i] + else: + mutation_scale = self.arrowsize + + if len(self.arrow_colors) > i: + arrow_color = self.arrow_colors[i] + elif len(self.arrow_colors) == 1: + arrow_color = self.arrow_colors[0] + else: # Cycle through colors + arrow_color = self.arrow_colors[i % len(self.arrow_colors)] + + if self.np.iterable(self.linewidth): + if len(self.linewidth) > i: + linewidth = self.linewidth[i] + else: + linewidth = 
self.linewidth[i % len(self.linewidth)] + else: + linewidth = self.linewidth + + if ( + self.np.iterable(self.style) + and not isinstance(self.style, str) + and not isinstance(self.style, tuple) + ): + if len(self.style) > i: + linestyle = self.style[i] + else: # Cycle through styles + linestyle = self.style[i % len(self.style)] + else: + linestyle = self.style + + if x1 == x2 and y1 == y2: + connectionstyle = self.connectionstyle_factory.self_loop( + self.edge_indices[i] + ) + else: + connectionstyle = self.connectionstyle_factory.curved(self.edge_indices[i]) + return self.mpl.patches.FancyArrowPatch( + (x1, y1), + (x2, y2), + arrowstyle=self.arrowstyle, + shrinkA=shrink_source, + shrinkB=shrink_target, + mutation_scale=mutation_scale, + color=arrow_color, + linewidth=linewidth, + connectionstyle=connectionstyle, + linestyle=linestyle, + zorder=1, # arrows go behind nodes + ) + + def to_marker_edge(self, marker_size, marker): + if marker in "s^>vv