├── docs ├── api │ └── api.md ├── index.md ├── Makefile ├── make.bat └── conf.py ├── README.md ├── .github ├── dependabot.yml └── workflows │ ├── build-docs.yml │ ├── deploy-docs.yml │ └── test_and_deploy.yml ├── .coveragerc ├── src └── napari_graph │ ├── __init__.py │ ├── interops.py │ ├── numba.py │ ├── _tests │ ├── test_interops.py │ └── test_graph.py │ ├── undirected_graph.py │ ├── directed_graph.py │ └── base_graph.py ├── pyproject.toml ├── tox.ini ├── setup.cfg ├── LICENSE ├── .pre-commit-config.yaml └── .gitignore /docs/api/api.md: -------------------------------------------------------------------------------- 1 | # napari_graph API Reference 2 | 3 | ```{toctree} 4 | napari_graph 5 | ``` 6 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Fast editable graphs for napari 2 | 3 | napari-graph is a Python implementation of the Mastodon graph structure. 4 | 5 | ```{toctree} 6 | api/api 7 | ``` 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # napari-graph 2 | 3 | Efficient graph data structures and algorithms for fast slicing, visualization, and editing. 4 | 5 | Implementation based on the [trackmate-graph](https://github.com/mastodon-sc/mastodon/blob/master/doc/trackmate-graph.pdf). 
6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | commit-message: 8 | prefix: "ci(dependabot):" 9 | groups: 10 | github-actions: 11 | patterns: 12 | - "*" 13 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | # .coveragerc to control coverage.py 2 | [run] 3 | branch = True 4 | source = 5 | napari_graph 6 | omit = 7 | */_version.py 8 | parallel = True 9 | 10 | [report] 11 | # Regexes for lines to exclude from consideration 12 | exclude_lines = 13 | except ImportError: 14 | @(abc\.)?abstractmethod 15 | 16 | 17 | [paths] 18 | source = 19 | src/ 20 | */site-packages/ 21 | -------------------------------------------------------------------------------- /src/napari_graph/__init__.py: -------------------------------------------------------------------------------- 1 | from napari_graph.base_graph import BaseGraph 2 | from napari_graph.directed_graph import DirectedGraph 3 | from napari_graph.interops import to_napari_graph 4 | from napari_graph.undirected_graph import UndirectedGraph 5 | 6 | try: 7 | from napari_graph._version import version as __version__ 8 | except ImportError: 9 | __version__ = "not-installed" 10 | 11 | __all__ = [ 12 | "BaseGraph", 13 | "DirectedGraph", 14 | "UndirectedGraph", 15 | "to_napari_graph", 16 | ] 17 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel", "setuptools_scm"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.setuptools_scm] 6 | write_to = 
"src/napari_graph/_version.py" 7 | 8 | [tool.black] 9 | target-version = ['py38', 'py39', 'py310'] 10 | skip-string-normalization = true 11 | line-length = 79 12 | exclude = ''' 13 | ( 14 | /( 15 | \.eggs 16 | | \.git 17 | | \.hg 18 | | \.mypy_cache 19 | | \.tox 20 | | \.venv 21 | | _build 22 | | buck-out 23 | | build 24 | | dist 25 | | examples 26 | | vendored 27 | | _vendor 28 | )/ 29 | | napari/resources/qt.py 30 | | tools/minreq.py 31 | ) 32 | ''' 33 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = napari-graph 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @NAPARI_APPLICATION_IPY_INTERACTIVE=0 $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=Skan 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. 
Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | 2 | [tox] 3 | envlist = py{39,310,311,312}-{linux,macos,windows} 4 | isolated_build = true 5 | 6 | [gh-actions] 7 | python = 8 | 3.9: py39 9 | 3.10: py310 10 | 3.11: py311 11 | 3.12: py312 12 | 13 | # This section turns environment variables from github actions 14 | # into tox environment factors. This, combined with the [gh-actions] 15 | # section above would mean that a test running python 3.9 on ubuntu-latest 16 | # tox env of `py39-linux` 17 | [gh-actions:env] 18 | PLATFORM = 19 | ubuntu-latest: linux 20 | ubuntu-16.04: linux 21 | ubuntu-18.04: linux 22 | ubuntu-20.04: linux 23 | windows-latest: windows 24 | macos-latest: macos 25 | macos-13: macos 26 | 27 | [testenv] 28 | passenv = 29 | CI 30 | GITHUB_ACTIONS 31 | AZURE_PIPELINES 32 | 33 | setenv = 34 | TOX_ENV_NAME = {envname} 35 | 36 | deps = 37 | pytest 38 | coverage 39 | commands = 40 | coverage run --parallel-mode --source=napari_graph --branch -m pytest . 
41 | 42 | [testenv:linting] 43 | deps = pre-commit 44 | commands = pre-commit run --all-files --show-diff-on-failure 45 | -------------------------------------------------------------------------------- /.github/workflows/build-docs.yml: -------------------------------------------------------------------------------- 1 | name: Build docs 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - docs 10 | workflow_dispatch: 11 | 12 | jobs: 13 | 14 | build-and-upload-artifact: 15 | name: build 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 20 | 21 | - name: Set up Python 3.9 22 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 23 | with: 24 | python-version: 3.9 25 | 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | python -m pip install 'setuptools<50.0' 30 | python -m pip install .[testing,docs] 31 | 32 | - name: Build docs 33 | run: | 34 | cd docs 35 | make html 36 | cd .. 
37 | 38 | - name: Upload artifact 39 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 40 | with: 41 | name: docs 42 | path: docs/_build/html 43 | -------------------------------------------------------------------------------- /src/napari_graph/interops.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | import networkx as nx 4 | import numpy as np 5 | import pandas as pd 6 | 7 | from napari_graph.base_graph import BaseGraph 8 | from napari_graph.undirected_graph import UndirectedGraph 9 | 10 | COMPATIBLE_CLASSES = { 11 | BaseGraph: lambda x: x, 12 | nx.Graph: BaseGraph.from_networkx, 13 | pd.DataFrame: lambda x: UndirectedGraph(coords=x), 14 | np.ndarray: lambda x: UndirectedGraph(coords=x), 15 | } 16 | 17 | 18 | def to_napari_graph(graph: Any) -> BaseGraph: 19 | """Generic function to convert "any" graph to a napari-graph. 20 | 21 | Supported formats: 22 | - napari-graph (itself) 23 | - NetworkX 24 | 25 | Parameters 26 | ---------- 27 | graph : Any 28 | Any kind of graph to a napari-graph. See supported formats above. 29 | 30 | Returns 31 | ------- 32 | BaseGraph 33 | A napari-graph graph. 34 | """ 35 | for cls, conversion_func in COMPATIBLE_CLASSES.items(): 36 | if isinstance(graph, cls): 37 | return conversion_func(graph) 38 | 39 | raise NotImplementedError( 40 | f"Conversion from {type(graph)} to napari-graph does not exist." 41 | ) 42 | -------------------------------------------------------------------------------- /.github/workflows/deploy-docs.yml: -------------------------------------------------------------------------------- 1 | name: Deploy docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - "v*" # Push events to matching v*, i.e. 
v1.0, v20.15.10 9 | # Allows you to run this workflow manually from the Actions tab 10 | workflow_dispatch: 11 | 12 | jobs: 13 | 14 | build-and-deploy: 15 | name: deploy 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 20 | 21 | - name: Set up Python 3.9 22 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 23 | with: 24 | python-version: 3.9 25 | 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | python -m pip install 'setuptools<50.0' 30 | python -m pip install .[testing,docs] 31 | 32 | - name: Build docs 33 | run: | 34 | cd docs 35 | make html 36 | cd .. 37 | 38 | - name: Deploy docs 🚀 39 | uses: JamesIves/github-pages-deploy-action@6c2d9db40f9296374acc17b90404b6e8864128c8 # v4.7.3 40 | with: 41 | branch: gh-pages 42 | folder: docs/_build/html 43 | -------------------------------------------------------------------------------- /src/napari_graph/numba.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import warnings 3 | from typing import Callable, Optional 4 | 5 | try: 6 | from numba import njit, typed, types 7 | 8 | except ImportError: 9 | 10 | warnings.warn( 11 | "numba not installed, falling back to stubs. " 12 | "Install numba for better napari-graph performance." 
13 | ) 14 | 15 | def njit(func: Optional[Callable] = None, **kwargs) -> Callable: 16 | """Immitate numba.njit decorator""" 17 | 18 | def _decorator(f: Callable) -> Callable: 19 | @functools.wraps(f) 20 | def wrapper(*args, **kwargs): 21 | return f(*args, **kwargs) 22 | 23 | return wrapper 24 | 25 | if func: 26 | return _decorator(func) 27 | 28 | return _decorator 29 | 30 | class StubList(list): 31 | @staticmethod 32 | def empty_list(type) -> list: 33 | return [] 34 | 35 | class StubDict(dict): 36 | @staticmethod 37 | def empty(key_type, value_type) -> dict: 38 | return {} 39 | 40 | class typed: # type: ignore[no-redef] 41 | List = StubList 42 | Dict = StubDict 43 | 44 | class types: # type: ignore[no-redef] 45 | int64 = int 46 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = napari-graph 3 | author = Jordão Bragantini, Jonas Windhager & Juan Nunez-Iglesias 4 | author_email = jordao.bragantini@czbiohub.org 5 | license = BSD-3 6 | description = Fast editable graphs in Python and numba 7 | url = https://github.com/napari/napari-graph 8 | long_description = file: README.md 9 | long_description_content_type = text/markdown 10 | classifiers = 11 | Development Status :: 3 - Alpha 12 | Intended Audience :: Developers 13 | Framework :: napari 14 | Topic :: Software Development :: Testing 15 | Programming Language :: Python 16 | Programming Language :: Python :: 3 17 | Programming Language :: Python :: 3.8 18 | Programming Language :: Python :: 3.9 19 | Programming Language :: Python :: 3.10 20 | Operating System :: OS Independent 21 | License :: OSI Approved :: BSD License 22 | 23 | [options] 24 | package_dir = 25 | =src 26 | include_package_data = True 27 | packages = find: 28 | setup_requires = setuptools_scm 29 | install_requires = 30 | networkx 31 | numpy 32 | pandas 33 | python_requires = >=3.8 34 | 35 | 
[options.packages.find] 36 | where = src 37 | 38 | [options.extras_require] 39 | fast = 40 | numba 41 | testing = 42 | coverage 43 | pytest 44 | pytest-cov 45 | docs = 46 | sphinx-material 47 | sphinx 48 | jupyter 49 | notebook 50 | sphinx-toggleprompt 51 | sphinx-copybutton 52 | sphinxcontrib-bibtex 53 | myst-nb 54 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, napari 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # basic pre-commit 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.6.0 5 | hooks: 6 | - id: trailing-whitespace 7 | - id: end-of-file-fixer 8 | - id: check-added-large-files 9 | - id: check-yaml 10 | # make every import absolute 11 | - repo: https://github.com/MarcoGorelli/absolufy-imports 12 | rev: v0.3.1 13 | hooks: 14 | - id: absolufy-imports 15 | # sorting imports 16 | - repo: https://github.com/pycqa/isort 17 | rev: 5.13.2 18 | hooks: 19 | - id: isort 20 | args: ["--profile", "black", "--filter-files"] 21 | # automatic upgrade to newer python versions syntax 22 | - repo: https://github.com/asottile/pyupgrade 23 | rev: v3.15.2 24 | hooks: 25 | - id: pyupgrade 26 | args: ["--py37-plus", "--keep-runtime-typing"] 27 | # syntax linting and formatting 28 | - repo: https://github.com/myint/autoflake 29 | rev: v2.3.1 30 | hooks: 31 | - id: autoflake 32 | args: [--in-place, --remove-all-unused-imports, 33 | --ignore-init-module-imports] 34 | 35 | - repo: https://github.com/PyCQA/flake8 36 | rev: 7.0.0 37 | hooks: 38 | - id: flake8 39 | additional_dependencies: [flake8-typing-imports==1.12.0] 40 | args: [--max-line-length, '120', 41 | --ignore, 'E741,W503,E203', 42 | --per-file-ignores, 43 | 
'__init__.py:F401'] 44 | 45 | - repo: https://github.com/psf/black 46 | rev: 24.4.2 47 | hooks: 48 | - id: black 49 | 50 | - repo: https://github.com/pre-commit/mirrors-mypy 51 | rev: v1.10.0 52 | hooks: 53 | - id: mypy 54 | args: [--ignore-missing-imports] 55 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # ignoring setuptools_scm _version 132 | _version.py 133 | -------------------------------------------------------------------------------- /src/napari_graph/_tests/test_interops.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | from typing import Any, Callable, List 3 | 4 | import networkx as nx 5 | import numpy as np 6 | import pandas as pd 7 | import pytest 8 | 9 | from napari_graph import ( # noqa 10 | BaseGraph, 11 | DirectedGraph, 12 | UndirectedGraph, 13 | to_napari_graph, 14 | ) 15 | 16 | 17 | def _graph_list() -> List[BaseGraph]: 18 | 19 | coords = pd.DataFrame( 20 | [ 21 | [0, 2.5], 22 | [4, 2.5], 23 | [1, 0], 24 | [2, 3.5], 25 | [3, 0], 26 | ], 27 | columns=["y", "x"], 28 | ) 29 | 30 | edges = np.asarray([[0, 1], [1, 2], [2, 3], [3, 4], [4, 0]]) 31 | 32 | empty_graph = UndirectedGraph() 33 | 34 | graph = UndirectedGraph(edges) 35 | 36 | digraph = DirectedGraph(edges) 37 | 38 | spatial_graph = UndirectedGraph( 39 | edges=edges, 40 | coords=coords, 41 | ) 42 | 43 | spatial_digraph = DirectedGraph( 44 | edges=edges, 45 | 
coords=coords, 46 | ) 47 | 48 | only_coords = UndirectedGraph(coords=coords) 49 | 50 | only_coords_di = DirectedGraph(coords=coords) 51 | 52 | return [ 53 | empty_graph, 54 | graph, 55 | digraph, 56 | spatial_graph, 57 | spatial_digraph, 58 | only_coords, 59 | only_coords_di, 60 | ] 61 | 62 | 63 | @pytest.mark.parametrize( 64 | "in_graph,to_class", 65 | itertools.product(_graph_list(), [BaseGraph.to_networkx]), 66 | ) 67 | def test_conversion( 68 | in_graph: BaseGraph, to_class: Callable[[BaseGraph], Any] 69 | ) -> None: 70 | 71 | nxgraph = to_class(in_graph) 72 | out_graph = to_napari_graph(nxgraph) 73 | 74 | assert np.array_equal(in_graph.get_nodes(), out_graph.get_nodes()) 75 | 76 | if in_graph.is_spatial(): 77 | assert np.array_equal( 78 | in_graph.get_coordinates(), out_graph.get_coordinates() 79 | ) 80 | 81 | if in_graph.n_edges == 0: 82 | assert out_graph.n_edges == 0 83 | 84 | else: 85 | in_graph_edges = np.concatenate(in_graph.get_edges(), axis=0).sort() 86 | out_graph_edges = np.concatenate(out_graph.get_edges(), axis=0).sort() 87 | assert np.array_equal(in_graph_edges, out_graph_edges) 88 | 89 | 90 | def test_weighted_networkx_graph() -> None: 91 | 92 | nxgraph = nx.DiGraph() 93 | nxgraph.add_edge(10, 11, weight=0.1) 94 | nxgraph.add_edge(11, 12, weight=0.2) 95 | nxgraph.add_edge(12, 10, weight=0.3) 96 | 97 | nxgraph_edges = np.asarray(nxgraph.edges).sort() 98 | 99 | graph = to_napari_graph(nxgraph) 100 | graph_edges = np.concatenate(graph.get_edges(), axis=0).sort() 101 | 102 | assert np.array_equal(nxgraph_edges, graph_edges) 103 | 104 | 105 | def test_table_like_graphs() -> None: 106 | 107 | coords = pd.DataFrame( 108 | [ 109 | [0, 2.5], 110 | [4, 2.5], 111 | [1, 0], 112 | [2, 3.5], 113 | [3, 0], 114 | ], 115 | columns=["y", "x"], 116 | ) 117 | coords.index = np.arange( 118 | 10, 5, -1 119 | ) # testing index that don't start with zero 120 | 121 | # testing pandas dataframe 122 | graph = to_napari_graph(coords) 123 | assert 
np.allclose(graph.get_coordinates(), coords) 124 | 125 | # testing numpy array 126 | graph = to_napari_graph(coords.to_numpy()) 127 | assert np.allclose(graph.get_coordinates(), coords.to_numpy()) 128 | 129 | # testing bad table 130 | not_a_table = np.ones((5, 5, 5)) 131 | with pytest.raises(ValueError): 132 | graph = to_napari_graph(not_a_table) 133 | 134 | 135 | def test_networkx_non_integer_ids(): 136 | """Check that passing nx graph with non-integer IDs doesn't crash.""" 137 | g = nx.hexagonal_lattice_graph(5, 5, with_positions=True) 138 | with pytest.warns(UserWarning, match='Node IDs must be integers.'): 139 | BaseGraph.from_networkx(g) 140 | 141 | 142 | def test_networkx_basic_roundtrip(): 143 | g = nx.hexagonal_lattice_graph(5, 5, with_positions=True) 144 | gint = nx.convert_node_labels_to_integers(g) 145 | ng = BaseGraph.from_networkx(gint) 146 | g2 = ng.to_networkx() 147 | # convert positions to tuples because nx comparison tools don't like arrays 148 | for node in g2.nodes(): 149 | g2.nodes[node]['pos'] = tuple(g2.nodes[node]['pos']) 150 | assert nx.utils.edges_equal(gint.edges, g2.edges) 151 | assert set(gint.nodes) == set(g2.nodes) 152 | for node in gint.nodes: 153 | np.testing.assert_allclose( 154 | gint.nodes[node]['pos'], g2.nodes[node]['pos'] 155 | ) 156 | -------------------------------------------------------------------------------- /.github/workflows/test_and_deploy.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: tests 5 | 6 | on: 7 | push: 8 | branches: 9 | - master 10 | - main 11 | tags: 12 | - "v*" # Push events to matching v*, i.e. 
v1.0, v20.15.10 13 | pull_request: 14 | branches: 15 | - master 16 | - main 17 | workflow_dispatch: 18 | 19 | jobs: 20 | linting: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 24 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 25 | with: 26 | python-version: "3.10" 27 | - name: Install tox 28 | run: | 29 | pip install tox 30 | 31 | - name: Run pre-commit to check linting and typing 32 | run: tox -e linting 33 | 34 | test: 35 | needs: [linting] 36 | name: ${{ matrix.platform }} py${{ matrix.python-version }} 37 | runs-on: ${{ matrix.platform }} 38 | strategy: 39 | matrix: 40 | platform: [ubuntu-latest, windows-latest, macos-latest] 41 | python-version: ['3.9', '3.10', '3.11', '3.12'] 42 | 43 | steps: 44 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 45 | - name: Set up Python ${{ matrix.python-version }} 46 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 47 | with: 48 | python-version: ${{ matrix.python-version }} 49 | 50 | # these libraries enable testing on Qt on linux 51 | - uses: tlambert03/setup-qt-libs@19e4ef2d781d81f5f067182e228b54ec90d23b76 # v1.8 52 | 53 | # strategy borrowed from vispy for installing opengl libs on windows 54 | - name: Install Windows OpenGL 55 | if: runner.os == 'Windows' 56 | run: | 57 | git clone --depth 1 https://github.com/pyvista/gl-ci-helpers.git 58 | powershell gl-ci-helpers/appveyor/install_opengl.ps1 59 | 60 | # note: if you need dependencies from conda, considering using 61 | # setup-miniconda: https://github.com/conda-incubator/setup-miniconda 62 | # and 63 | # tox-conda: https://github.com/tox-dev/tox-conda 64 | - name: Install dependencies 65 | run: | 66 | python -m pip install --upgrade pip 67 | pip install setuptools tox tox-gh-actions 68 | python -m pip install .[testing] 69 | 70 | # this runs the platform-specific tests declared in tox.ini 71 | - name: Test 
without numba 72 | run: | 73 | python -m tox 74 | env: 75 | PLATFORM: ${{ matrix.platform }} 76 | 77 | - name: Install numba 78 | run: | 79 | pip install .[fast] 80 | 81 | - name: Test with numba 82 | run: | 83 | python -m tox 84 | env: 85 | PLATFORM: ${{ matrix.platform }} 86 | 87 | - name: Upload coverage data 88 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 89 | with: 90 | name: cov-reports-${{ matrix.platform }}-py-${{ matrix.python-version }} 91 | include-hidden-files: true 92 | path: | 93 | ./.coverage* 94 | 95 | 96 | coverage_prepare: 97 | name: Prepare coverage 98 | runs-on: ubuntu-latest 99 | needs: [test] 100 | steps: 101 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 102 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 103 | with: 104 | python-version: "3.x" 105 | cache-dependency-path: pyproject.toml 106 | cache: 'pip' 107 | - name: Install Dependencies 108 | run: | 109 | pip install --upgrade pip 110 | pip install codecov 111 | 112 | - name: Download coverage data 113 | uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 114 | with: 115 | pattern: cov-reports-* 116 | path: coverage 117 | merge-multiple: true 118 | - name: Upload coverage input 119 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 120 | with: 121 | name: coverage_part 122 | path: coverage 123 | retention-days: 5 124 | 125 | - name: combine coverage data 126 | run: | 127 | python -Im coverage combine --debug=pathmap,config coverage 128 | python -Im coverage xml -o coverage.xml 129 | # Report and write to summary. 
130 | python -Im coverage report --format=markdown --skip-empty --skip-covered >> $GITHUB_STEP_SUMMARY 131 | coverage report -m --fail-under 80 132 | 133 | - name: Upload coverage artifact 134 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 135 | with: 136 | name: coverage_xml 137 | path: coverage.xml 138 | retention-days: 5 139 | 140 | - name: Upload coverage data 141 | uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 142 | with: 143 | fail_ci_if_error: true 144 | token: ${{ secrets.CODECOV_TOKEN }} 145 | 146 | 147 | deploy: 148 | # this will run when you have tagged a commit, starting with "v*" 149 | # and requires that you have put your twine API key in your 150 | # github secrets (see readme for details) 151 | needs: [test] 152 | runs-on: ubuntu-latest 153 | if: contains(github.ref, 'tags') 154 | steps: 155 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 156 | - name: Set up Python 157 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 158 | with: 159 | python-version: "3.x" 160 | - name: Install dependencies 161 | run: | 162 | python -m pip install --upgrade pip 163 | pip install -U setuptools setuptools_scm wheel twine build 164 | - name: Build and publish 165 | env: 166 | TWINE_USERNAME: __token__ 167 | TWINE_PASSWORD: ${{ secrets.TWINE_API_KEY }} 168 | run: | 169 | git tag 170 | python -m build --sdist --wheel 171 | twine upload dist/* 172 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # napari-graph documentation build configuration file 2 | # 3 | # This file is execfile()d with the current directory set to its 4 | # containing dir. 5 | # 6 | # Note that not all possible configuration values are present in this 7 | # autogenerated file. 
8 | # 9 | # All configuration values have a default; values that are commented out 10 | # serve to show the default. 11 | 12 | # If extensions (or modules to document with autodoc) are in another directory, 13 | # add these directories to sys.path here. If the directory is relative to the 14 | # documentation root, use os.path.abspath to make it absolute, like shown here. 15 | # Add the local code to the Python path, so docs are generated for 16 | # current working copy 17 | 18 | from typing import Any, Dict 19 | 20 | import napari_graph 21 | 22 | # -- General configuration ------------------------------------------------ 23 | 24 | # If your documentation needs a minimal Sphinx version, state it here. 25 | # needs_sphinx = '1.0' 26 | 27 | # Execution options 28 | execution_timeout = 300 # in seconds? 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [ 34 | 'sphinx.ext.autodoc', 35 | 'sphinx.ext.mathjax', 36 | 'sphinx.ext.viewcode', 37 | 'sphinx.ext.githubpages', 38 | 'sphinx.ext.napoleon', 39 | 'sphinxcontrib.bibtex', 40 | 'sphinx_copybutton', 41 | 'myst_nb', 42 | ] 43 | 44 | # sphinx-copybutton prompt-stripping configuration 45 | copybutton_prompt_text = ( 46 | r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " 47 | ) 48 | copybutton_prompt_is_regexp = True 49 | 50 | # Add any paths that contain templates here, relative to this directory. 51 | templates_path = ['_templates'] 52 | 53 | # The suffix(es) of source filenames. 54 | # You can specify multiple suffix as a list of string: 55 | # 56 | # source_suffix = ['.rst', '.md'] 57 | source_suffix = '.rst' 58 | 59 | # The master toctree document. 60 | master_doc = 'index' 61 | 62 | # General information about the project. 
63 | project = 'napari-graph' 64 | copyright = '2022, Jordão Bragantini & Juan Nunez-Iglesias' 65 | author = 'Jordão Bragantini & Juan Nunez-Iglesias' 66 | 67 | # The version info for the project you're documenting, acts as replacement for 68 | # |version| and |release|, also used in various other places throughout the 69 | # built documents. 70 | # 71 | # The short X.Y version. 72 | 73 | version = napari_graph.__version__ 74 | # The full version, including alpha/beta/rc tags. 75 | release = napari_graph.__version__ 76 | 77 | # The language for content autogenerated by Sphinx. Refer to documentation 78 | # for a list of supported languages. 79 | # 80 | # This is also used if you do content translation via gettext catalogs. 81 | # Usually you set "language" from the command line for these cases. 82 | language = None 83 | 84 | # List of patterns, relative to source directory, that match files and 85 | # directories to ignore when looking for source files. 86 | # This patterns also effect to html_static_path and html_extra_path 87 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 88 | 89 | # The name of the Pygments (syntax highlighting) style to use. 90 | pygments_style = 'sphinx' 91 | 92 | # If true, `todo` and `todoList` produce output, else they produce nothing. 93 | todo_include_todos = False 94 | 95 | # List of publications 96 | bibtex_bibfiles = ['publications.bib'] 97 | 98 | # -- Options for HTML output ---------------------------------------------- 99 | 100 | # The theme to use for HTML and HTML Help pages. See the documentation for 101 | # a list of builtin themes. 102 | 103 | html_theme = 'sphinx_material' 104 | 105 | # Theme options are theme-specific and customize the look and feel of a theme 106 | # further. For a list of options available for each theme, see the 107 | # documentation. 
108 | # 109 | 110 | html_theme = 'sphinx_material' 111 | 112 | # Material theme options (see theme.conf for more information) 113 | html_theme_options = { 114 | # Set the name of the project to appear in the navigation. 115 | 'nav_title': 'napari-graph', 116 | # Set your GA account ID to enable tracking 117 | # 'google_analytics_account': 'UA-XXXXX', 118 | # Specify a base_url used to generate sitemap.xml. If not 119 | # specified, then no sitemap will be built. 120 | 'base_url': 'https://napari.org/napari-graph', 121 | # Set the color and the accent color 122 | 'color_primary': 'blue', 123 | 'color_accent': 'light-blue', 124 | # Set the repo location to get a badge with stats 125 | 'repo_url': 'https://github.com/napari/napari-graph', 126 | 'repo_name': 'napari-graph', 127 | # Visible levels of the global TOC; -1 means unlimited 128 | 'globaltoc_depth': 1, 129 | # If False, expand all TOC entries 130 | 'globaltoc_collapse': False, 131 | # If True, show hidden TOC entries 132 | 'globaltoc_includehidden': False, 133 | } 134 | 135 | # Add any paths that contain custom static files (such as style sheets) here, 136 | # relative to this directory. They are copied after the builtin static files, 137 | # so a file named "default.css" will overwrite the builtin "default.css". 138 | html_static_path = ['_static'] 139 | 140 | # Custom sidebar templates, must be a dictionary that maps document names 141 | # to template names. 142 | # 143 | # This is required for the alabaster theme 144 | # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars 145 | html_sidebars = { 146 | "**": [ 147 | "logo-text.html", 148 | "globaltoc.html", 149 | "localtoc.html", 150 | "searchbox.html", 151 | ] 152 | } 153 | 154 | # -- Options for HTMLHelp output ------------------------------------------ 155 | 156 | # Output file base name for HTML help builder.
157 | htmlhelp_basename = 'napari-graph-docs' 158 | 159 | # -- Options for LaTeX output --------------------------------------------- 160 | 161 | latex_elements: Dict[str, Any] = { 162 | # The paper size ('letterpaper' or 'a4paper'). 163 | # 164 | # 'papersize': 'letterpaper', 165 | # The font size ('10pt', '11pt' or '12pt'). 166 | # 167 | # 'pointsize': '10pt', 168 | # Additional stuff for the LaTeX preamble. 169 | # 170 | # 'preamble': '', 171 | # Latex figure (float) alignment 172 | # 173 | # 'figure_align': 'htbp', 174 | } 175 | 176 | # Grouping the document tree into LaTeX files. List of tuples 177 | # (source start file, target name, title, 178 | # author, documentclass [howto, manual, or own class]). 179 | latex_documents = [ 180 | ( 181 | master_doc, 182 | 'napari-graph.tex', 183 | 'napari-graph documentation', 184 | author, 185 | 'manual', 186 | ), 187 | ] 188 | 189 | # -- Options for manual page output --------------------------------------- 190 | 191 | # One entry per manual page. List of tuples 192 | # (source start file, name, description, authors, manual section). 193 | man_pages = [(master_doc, 'napari-graph-documentation', [author], 1)] 194 | 195 | # -- Options for Texinfo output ------------------------------------------- 196 | 197 | # Grouping the document tree into Texinfo files. 
List of tuples 198 | # (source start file, target name, title, author, 199 | # dir menu entry, description, category) 200 | texinfo_documents = [ 201 | ( 202 | master_doc, 203 | 'napari-graph', 204 | 'napari-graph documentation', 205 | author, 206 | 'napari-graph', 207 | 'Fast editable graphs in Python', 208 | 'Miscellaneous', 209 | ), 210 | ] 211 | -------------------------------------------------------------------------------- /src/napari_graph/undirected_graph.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional, Tuple, Union 2 | 3 | import numpy as np 4 | from numpy.typing import ArrayLike 5 | 6 | from napari_graph.base_graph import ( 7 | _EDGE_EMPTY_PTR, 8 | BaseGraph, 9 | _iterate_edges, 10 | _remove_edge, 11 | ) 12 | from napari_graph.numba import njit, typed 13 | 14 | """ 15 | Undirected edge constants for accessing the directed graph buffer data. 16 | Each edge occupies _UN_EDGE_SIZE spaces on the graph buffer. 17 | _LL_UN_EDGE_POS indicates the displacement between the edge initial index and 18 | the edge undirected linked list position. 19 | 20 | Example of a directed graph edge buffer: 21 | [ 22 | source_node_buffer_id_0, 23 | target_node_buffer_id_0, 24 | edge_linked_list_0, 25 | source_node_buffer_id_1, 26 | target_node_buffer_id_1, 27 | edge_linked_list_1, 28 | ... 29 | ] 30 | """ 31 | _UN_EDGE_SIZE = 3 32 | _LL_UN_EDGE_POS = 2 33 | 34 | 35 | @njit 36 | def _add_undirected_edge( 37 | buffer: np.ndarray, 38 | node2edges: np.ndarray, 39 | empty_idx: int, 40 | src_node: int, 41 | tgt_node: int, 42 | ) -> int: 43 | """Add a single edge (`src_idx`, `tgt_idx`) to `buffer`. 44 | 45 | Update the edge linked list (present in the buffer) and the `node2edges` 46 | mapping (head of linked list). 47 | 48 | NOTE: Edges are added at the beginning of the linked list so we don't have 49 | to track its tail and the operation can be done in O(1). 
This might 50 | decrease cache hits because they're sorted in memory in the opposite 51 | direction we iterate it. 52 | 53 | Parameters 54 | ---------- 55 | buffer : np.ndarray 56 | Edges buffer. 57 | node2edges : np.ndarray 58 | Mapping from node indices to edge buffer indices -- head of edges linked list. 59 | empty_idx : int 60 | First index of empty edges linked list. 61 | src_node : int 62 | Source node of added edge. 63 | tgt_node : int 64 | Target node of added edge. 65 | 66 | Returns 67 | ------- 68 | int 69 | New first index of empty edges linked list. 70 | """ 71 | 72 | if empty_idx == _EDGE_EMPTY_PTR: 73 | raise ValueError("Edge buffer is full.") 74 | 75 | elif empty_idx < 0: 76 | raise ValueError("Invalid empty index.") 77 | 78 | next_edge = node2edges[src_node] 79 | node2edges[src_node] = empty_idx 80 | 81 | buffer_index = empty_idx * _UN_EDGE_SIZE 82 | next_empty = buffer[buffer_index + _LL_UN_EDGE_POS] 83 | 84 | buffer[buffer_index] = src_node 85 | buffer[buffer_index + 1] = tgt_node 86 | buffer[buffer_index + _LL_UN_EDGE_POS] = next_edge 87 | 88 | return next_empty 89 | 90 | 91 | @njit 92 | def _add_undirected_edges( 93 | buffer: np.ndarray, 94 | edges: np.ndarray, 95 | empty_idx: int, 96 | n_edges: int, 97 | node2edges: np.ndarray, 98 | ) -> Tuple[int, int]: 99 | """Add an array of edges into the `buffer`. 100 | 101 | Edges are duplicated so both directions are available for fast graph 102 | traversal.
103 | """ 104 | size = edges.shape[0] 105 | for i in range(size): 106 | 107 | # adding (u, v) 108 | empty_idx = _add_undirected_edge( 109 | buffer, node2edges, empty_idx, edges[i, 0], edges[i, 1] 110 | ) 111 | # adding (v, u) 112 | empty_idx = _add_undirected_edge( 113 | buffer, node2edges, empty_idx, edges[i, 1], edges[i, 0] 114 | ) 115 | 116 | n_edges += 1 117 | 118 | return empty_idx, n_edges 119 | 120 | 121 | @njit 122 | def _remove_undirected_edge( 123 | src_node: int, 124 | tgt_node: int, 125 | empty_idx: int, 126 | edges_buffer: np.ndarray, 127 | node2edges: np.ndarray, 128 | ) -> int: 129 | """Remove a single edge (and its duplicated sibling edge) from the buffer. 130 | 131 | NOTE: Edges are removed such that empty pairs are consecutive in memory. 132 | """ 133 | empty_idx = _remove_edge( 134 | tgt_node, 135 | src_node, 136 | empty_idx, 137 | edges_buffer, 138 | node2edges, 139 | _UN_EDGE_SIZE, 140 | _LL_UN_EDGE_POS, 141 | ) 142 | 143 | empty_idx = _remove_edge( 144 | src_node, 145 | tgt_node, 146 | empty_idx, 147 | edges_buffer, 148 | node2edges, 149 | _UN_EDGE_SIZE, 150 | _LL_UN_EDGE_POS, 151 | ) 152 | 153 | return empty_idx 154 | 155 | 156 | @njit 157 | def _remove_undirected_edges( 158 | edges: np.ndarray, 159 | empty_idx: int, 160 | n_edges: int, 161 | edges_buffer: np.ndarray, 162 | node2edges: np.ndarray, 163 | ) -> Tuple[int, int]: 164 | """Remove an array of edges (and their duplicated siblings) from buffer.""" 165 | 166 | for i in range(edges.shape[0]): 167 | empty_idx = _remove_undirected_edge( 168 | edges[i, 0], edges[i, 1], empty_idx, edges_buffer, node2edges 169 | ) 170 | n_edges -= 1 171 | return empty_idx, n_edges 172 | 173 | 174 | @njit 175 | def _remove_undirected_incident_edges( 176 | node: int, 177 | empty_idx: int, 178 | n_edges: int, 179 | edges_buffer: np.ndarray, 180 | node2edges: np.ndarray, 181 | ) -> Tuple[int, int]: 182 | """Remove every edge that contains `node_idx`.
183 | 184 | NOTE: Edges are removed such that empty pairs are consecutive in memory. 185 | 186 | Parameters 187 | ---------- 188 | node : int 189 | Node index in the buffer domain. 190 | empty_idx : int 191 | Index first edge (head of) linked list 192 | n_edges : int 193 | Current number of total edges 194 | edges_buffer : np.ndarray 195 | Buffer containing the edges data 196 | node2edges : np.ndarray 197 | Mapping from node indices to edge buffer indices -- head of edges linked list. 198 | 199 | Returns 200 | ------- 201 | Tuple[int, int] 202 | New empty linked list head, new number of edges 203 | """ 204 | 205 | # the edges are removed such that the empty edges linked list contains 206 | # two positions adjacent in memory so we can serialize the edges using 207 | # numpy vectorization 208 | idx = node2edges[node] 209 | 210 | for _ in range(edges_buffer.shape[0] // _UN_EDGE_SIZE): 211 | if idx == _EDGE_EMPTY_PTR: 212 | break # no edges left at the given node 213 | 214 | buffer_idx = idx * _UN_EDGE_SIZE 215 | next_idx = edges_buffer[buffer_idx + _LL_UN_EDGE_POS] 216 | # checking if sibling edges is before or after current node 217 | if ( 218 | buffer_idx > 0 219 | and edges_buffer[buffer_idx - _UN_EDGE_SIZE + 1] 220 | == edges_buffer[buffer_idx] 221 | and edges_buffer[buffer_idx - _UN_EDGE_SIZE] 222 | == edges_buffer[buffer_idx + 1] 223 | ): 224 | 225 | src_node = edges_buffer[buffer_idx + 1] 226 | tgt_node = edges_buffer[buffer_idx] 227 | else: 228 | src_node = edges_buffer[buffer_idx] 229 | tgt_node = edges_buffer[buffer_idx + 1] 230 | 231 | empty_idx = _remove_edge( 232 | tgt_node, 233 | src_node, 234 | empty_idx, 235 | edges_buffer, 236 | node2edges, 237 | _UN_EDGE_SIZE, 238 | _LL_UN_EDGE_POS, 239 | ) 240 | 241 | empty_idx = _remove_edge( 242 | src_node, 243 | tgt_node, 244 | empty_idx, 245 | edges_buffer, 246 | node2edges, 247 | _UN_EDGE_SIZE, 248 | _LL_UN_EDGE_POS, 249 | ) 250 | 251 | idx = next_idx 252 | n_edges -= 1 253 | else: 254 | if idx != 
_EDGE_EMPTY_PTR: 255 | raise ValueError( 256 | "Infinite loop detected at undirected graph node removal, edges buffer must be corrupted." 257 | ) 258 | 259 | return empty_idx, n_edges 260 | 261 | 262 | @njit 263 | def _iterate_undirected_edges( 264 | edge_ptr_indices: np.ndarray, edges_buffer: np.ndarray 265 | ) -> typed.List: 266 | """Inline the edges size and linked list position shift.""" 267 | return _iterate_edges( 268 | edge_ptr_indices, edges_buffer, _UN_EDGE_SIZE, _LL_UN_EDGE_POS 269 | ) 270 | 271 | 272 | class UndirectedGraph(BaseGraph): 273 | """Undirected graph class. 274 | 275 | Parameters 276 | ---------- 277 | n_nodes : int 278 | Number of nodes to allocate to graph. 279 | ndim : int 280 | Number of spatial dimensions of graph. 281 | n_edges : int 282 | Number of edges of the graph. 283 | """ 284 | 285 | _EDGE_DUPLICATION = 2 286 | _EDGE_SIZE = _UN_EDGE_SIZE 287 | _LL_EDGE_POS = _LL_UN_EDGE_POS 288 | 289 | def _add_edges(self, edges: np.ndarray) -> None: 290 | self._empty_edge_idx, self._n_edges = _add_undirected_edges( 291 | self._edges_buffer, 292 | edges, 293 | self._empty_edge_idx, 294 | self._n_edges, 295 | self._node2edges, 296 | ) 297 | 298 | def get_edges( 299 | self, nodes: Optional[ArrayLike] = None, mode: str = 'indices' 300 | ) -> Union[List[np.ndarray], np.ndarray]: 301 | """Return the edges data of the given nodes. 302 | 303 | If no nodes are provided, all edges are returned. 304 | 305 | NOTE: when `nodes` is None, the returned edges are duplicated, so that 306 | if (u, v) was inserted this function will return both (u, v) and 307 | (v, u). 308 | 309 | Parameters 310 | ---------- 311 | nodes : Optional[ArrayLike], optional 312 | Node indices, by default None 313 | mode : str 314 | Type of data queried from the edges. For example, `indices` or 315 | `coords`. 316 | 317 | Returns 318 | ------- 319 | List[np.ndarray] 320 | List of (N_i) x 2 x D arrays, where N_i is the number of edges at 321 | the ith node. 
D is the dimensionality of `coords` when mode == 322 | `coords` and it's ignored when mode == `indices`. N_i dimension is 323 | ignored when N_i is 1. 324 | """ 325 | return self._iterate_edges_generic( 326 | nodes, 327 | node2edges=self._node2edges, 328 | iterate_edges_func=_iterate_undirected_edges, 329 | mode=mode, 330 | ) 331 | 332 | def _remove_edges(self, edges: np.ndarray) -> None: 333 | self._empty_edge_idx, self._n_edges = _remove_undirected_edges( 334 | edges, 335 | self._empty_edge_idx, 336 | self._n_edges, 337 | self._edges_buffer, 338 | self._node2edges, 339 | ) 340 | 341 | def _remove_incident_edges(self, node_buffer_index: int) -> None: 342 | ( 343 | self._empty_edge_idx, 344 | self._n_edges, 345 | ) = _remove_undirected_incident_edges( 346 | node_buffer_index, 347 | self._empty_edge_idx, 348 | self._n_edges, 349 | self._edges_buffer, 350 | self._node2edges, 351 | ) 352 | -------------------------------------------------------------------------------- /src/napari_graph/_tests/test_graph.py: -------------------------------------------------------------------------------- 1 | from itertools import product 2 | from typing import Any, Callable, List, Optional, Protocol, Type 3 | 4 | import numpy as np 5 | import pandas as pd 6 | import pytest 7 | from numpy.typing import ArrayLike 8 | 9 | from napari_graph import DirectedGraph, UndirectedGraph 10 | from napari_graph.base_graph import _EDGE_EMPTY_PTR, BaseGraph 11 | from napari_graph.undirected_graph import _LL_UN_EDGE_POS, _UN_EDGE_SIZE 12 | 13 | 14 | @pytest.mark.parametrize("n_prealloc_edges", [0, 2, 5]) 15 | def test_undirected_edge_addition(n_prealloc_edges: int) -> None: 16 | coords = pd.DataFrame( 17 | [ 18 | [0, 2.5], 19 | [4, 2.5], 20 | [1, 0], 21 | [2, 3.5], 22 | [3, 0], 23 | ], 24 | columns=["y", "x"], 25 | ) 26 | 27 | edges = [[0, 1], [1, 2], [2, 3], [3, 4], [0, 4]] 28 | 29 | graph = UndirectedGraph( 30 | coords=coords, 31 | n_edges=n_prealloc_edges, 32 | ) 33 | graph.add_edges(edges) 34 | 35 
| for node_idx, node_edges in zip( 36 | coords.index, graph.get_edges(coords.index) 37 | ): 38 | # checking if two edges per node and connecting only two nodes 39 | assert node_edges.shape == (2, 2) 40 | 41 | # checking if the given index is the source 42 | assert np.all(node_edges[:, 0] == node_idx) 43 | 44 | # checking if the edges are corrected 45 | for edge in node_edges: 46 | assert sorted(edge) in edges 47 | 48 | 49 | @pytest.mark.parametrize("n_prealloc_edges", [0, 2, 5]) 50 | def test_directed_edge_addition(n_prealloc_edges: int) -> None: 51 | coords = pd.DataFrame( 52 | [ 53 | [0, 2.5], 54 | [4, 2.5], 55 | [1, 0], 56 | [2, 3.5], 57 | [3, 0], 58 | ], 59 | columns=["y", "x"], 60 | ) 61 | 62 | edges = np.asarray([[0, 1], [1, 2], [2, 3], [3, 4], [4, 0]]) 63 | 64 | graph = DirectedGraph( 65 | coords=coords, 66 | n_edges=n_prealloc_edges, 67 | ) 68 | graph.add_edges(edges) 69 | 70 | source_edges = np.asarray(graph.get_source_edges(coords.index)) 71 | target_edges = np.asarray( 72 | graph.get_target_edges(np.roll(coords.index, -1)) 73 | ) 74 | assert np.all(source_edges == edges[:, np.newaxis, :]) 75 | assert np.all(target_edges == edges[:, np.newaxis, :]) 76 | 77 | 78 | @pytest.mark.parametrize("n_prealloc_nodes", [0, 3, 6, 12]) 79 | def test_node_addition_indices_coords(n_prealloc_nodes: int) -> None: 80 | # test node addition with indices and coords and different pre-allocation size 81 | size = 6 82 | ndim = 3 83 | 84 | indices = np.random.choice(range(100), size=size, replace=False) 85 | coords = np.random.randn(size, ndim) 86 | 87 | graph = DirectedGraph(ndim=ndim, n_nodes=n_prealloc_nodes) 88 | for i in range(size): 89 | graph.add_nodes(indices=indices[i], coords=coords[i]) 90 | assert len(graph) == i + 1 91 | 92 | np.testing.assert_allclose(graph._coords[: len(graph)], coords) 93 | np.testing.assert_array_equal(graph._buffer2world[: len(graph)], indices) 94 | np.testing.assert_array_equal( 95 | graph._map_world2buffer(indices), range(size) 96 | ) 97 | 98 | 
99 | def test_node_addition_non_spatial() -> None: 100 | graph = DirectedGraph() 101 | 102 | size = 5 103 | new_indices = graph.add_nodes(count=size) 104 | 105 | np.testing.assert_array_equal(new_indices, np.arange(size)) 106 | 107 | new_indices = graph.add_nodes(indices=[10, 11]) 108 | nodes = graph.get_nodes() 109 | 110 | np.testing.assert_equal(new_indices, [10, 11]) 111 | np.testing.assert_array_equal(nodes[:size], np.arange(size)) 112 | np.testing.assert_array_equal(nodes[size:], [10, 11]) 113 | 114 | with pytest.raises(ValueError): 115 | graph.add_nodes(coords=[[0, 1], [2, 3]]) 116 | 117 | graph.add_nodes(indices=15) 118 | assert len(graph) == 8 119 | 120 | with pytest.raises(ValueError): 121 | graph.add_nodes(indices=15) 122 | 123 | 124 | def test_node_addition_spatial() -> None: 125 | graph = DirectedGraph(ndim=2) 126 | 127 | with pytest.raises(ValueError): 128 | graph.add_nodes(indices=5, coords=[0, 0], count=1) 129 | 130 | new_index = graph.add_nodes(coords=[2, 2]) 131 | assert len(graph) == 1 132 | assert new_index == 0 133 | np.testing.assert_equal(graph.get_coordinates(), [[2, 2]]) 134 | 135 | new_index = graph.add_nodes(indices=[10, 11], coords=[[0, 0], [1, 1]]) 136 | assert len(graph) == 3 137 | np.testing.assert_equal(new_index, [10, 11]) 138 | 139 | with pytest.raises(ValueError): 140 | graph.add_nodes(indices=13) 141 | 142 | with pytest.raises(ValueError): 143 | graph.add_nodes(count=2) 144 | 145 | with pytest.raises(ValueError): 146 | graph.add_nodes(indices=[2, 3], coords=[[1, 2]]) 147 | 148 | assert len(graph) == 3 149 | 150 | 151 | @pytest.mark.parametrize( 152 | "graph_type,ndim", 153 | product([UndirectedGraph, DirectedGraph], [None, 2, 3]), 154 | ) 155 | def test_empty_graph(graph_type: Type[BaseGraph], ndim: Optional[int]) -> None: 156 | _ = graph_type(ndim=ndim) 157 | 158 | 159 | class TestGraph: 160 | _GRAPH_CLASS: Type[BaseGraph] 161 | __test__ = False # ignored for testing 162 | _index_shift = 0 # shift used to test special indexing 
163 | 164 | def setup_method(self, method: Callable) -> None: 165 | self.coords = pd.DataFrame( 166 | [ 167 | [0, 2.5], 168 | [4, 2.5], 169 | [1, 0], 170 | [2, 3.5], 171 | [3, 0], 172 | ], 173 | index=np.arange(5) + self._index_shift, 174 | columns=["y", "x"], 175 | ) 176 | 177 | self.edges = ( 178 | np.asarray([[0, 1], [1, 2], [2, 3], [3, 4], [4, 0]], dtype=int) 179 | + self._index_shift 180 | ) 181 | 182 | self.graph = self._GRAPH_CLASS( 183 | edges=self.edges, 184 | coords=self.coords, 185 | ) 186 | 187 | @staticmethod 188 | def contains(edge: ArrayLike, edges: List[ArrayLike]) -> bool: 189 | return any( 190 | np.allclose(e, edge) if len(e) > 0 else False for e in edges 191 | ) 192 | 193 | def teardown_method(self, method: Callable) -> None: 194 | del self.edges, self.graph 195 | if hasattr(self, "coords"): 196 | del self.coords 197 | 198 | def test_edge_buffers(self) -> None: 199 | # testing buffer correctness on a non-trivial case when a node was removed 200 | node_id = 3 + self._index_shift 201 | self.graph.remove_node(node_id) 202 | 203 | valid_edges = np.logical_not(np.any(self.edges == node_id, axis=1)) 204 | 205 | expected_edges = self.edges[valid_edges, :] 206 | (expected_indices,) = np.nonzero(valid_edges) 207 | expected_indices *= ( 208 | self.graph._EDGE_SIZE * self.graph._EDGE_DUPLICATION 209 | ) 210 | 211 | indices, edges = self.graph.get_edges_buffers() 212 | 213 | assert np.allclose(expected_indices, indices) 214 | assert np.allclose(expected_edges, edges) 215 | 216 | def test_subgraph_edges(self) -> None: 217 | # 3 is disconnected in subgraph 218 | nodes_ids = np.asarray([0, 1, 3]) + self._index_shift 219 | 220 | subgraph = self.graph.subgraph_edges(nodes_ids) 221 | expected_subgraph = np.asarray([[0, 1]]) + self._index_shift 222 | 223 | np.testing.assert_array_equal(subgraph, expected_subgraph) 224 | 225 | buffer_node_ids = np.asarray([0, 1, 3]) 226 | buffer_subgraph = self.graph.subgraph_edges( 227 | buffer_node_ids, is_buffer_domain=True 228 | 
) 229 | 230 | np.testing.assert_equal(buffer_subgraph, [[0, 1]]) 231 | 232 | 233 | class TestDirectedGraph(TestGraph): 234 | graph: DirectedGraph # required by typing 235 | _GRAPH_CLASS = DirectedGraph 236 | __test__ = True 237 | 238 | def test_edge_removal(self) -> None: 239 | edge = np.asarray([0, 1]) + self._index_shift 240 | self.graph.remove_edges(edge) 241 | assert self.graph.n_edges == 4 242 | assert self.graph.n_empty_edges == self.graph._ALLOC_MIN - 4 243 | assert not self.contains(edge, self.graph.get_source_edges()) 244 | assert not self.contains(np.flip(edge), self.graph.get_target_edges()) 245 | 246 | edges = np.asarray([[1, 2], [2, 3]]) + self._index_shift 247 | self.graph.remove_edges(edges) 248 | assert self.graph.n_edges == 2 249 | assert self.graph.n_empty_edges == self.graph._ALLOC_MIN - 2 250 | assert not self.contains(edges[0], self.graph.get_source_edges()) 251 | assert not self.contains(edges[1], self.graph.get_source_edges()) 252 | 253 | assert self.graph.n_allocated_edges == self.graph._ALLOC_MIN 254 | 255 | def test_node_removal(self) -> None: 256 | nodes = np.asarray([3, 4, 1]) + self._index_shift 257 | original_size = len(self.graph) 258 | 259 | for i in range(len(nodes)): 260 | node = nodes[i] 261 | self.graph.remove_node(node) 262 | 263 | for edge in self.graph.get_source_edges(): 264 | assert node not in edge 265 | 266 | for edge in self.graph.get_target_edges(): 267 | assert node not in edge 268 | 269 | assert node not in self.graph.get_nodes() 270 | assert len(self.graph) == original_size - i - 1 271 | 272 | def test_edge_coordinates(self) -> None: 273 | coords = self.coords.loc[self.edges.ravel()].to_numpy() 274 | coords = coords.reshape(self.edges.shape + (-1,)) 275 | 276 | source_edge_coords = np.concatenate( 277 | self.graph.get_source_edges(mode='coords'), axis=0 278 | ) 279 | assert np.allclose(coords, source_edge_coords) 280 | 281 | target_edges_coords = np.concatenate( 282 | self.graph.get_target_edges(mode='coords'), axis=0 
283 | ) 284 | rolled_coords = np.roll(coords, shift=1, axis=0) 285 | assert np.allclose(rolled_coords, target_edges_coords) 286 | 287 | 288 | class TestUndirectedGraph(TestGraph): 289 | _GRAPH_CLASS = UndirectedGraph 290 | __test__ = True 291 | 292 | def test_edge_removal(self) -> None: 293 | edge = np.asarray([0, 1]) + self._index_shift 294 | self.graph.remove_edges(edge) 295 | assert self.graph.n_edges == 4 296 | assert self.graph.n_empty_edges == self.graph._ALLOC_MIN - 4 297 | assert not self.contains(edge, self.graph.get_edges()) 298 | assert not self.contains(np.flip(edge), self.graph.get_edges()) 299 | 300 | edges = np.asarray([[1, 2], [2, 3]]) + self._index_shift 301 | self.graph.remove_edges(edges) 302 | assert self.graph.n_edges == 2 303 | assert self.graph.n_empty_edges == self.graph._ALLOC_MIN - 2 304 | assert not self.contains(edges[0], self.graph.get_edges()) 305 | assert not self.contains(edges[1], self.graph.get_edges()) 306 | 307 | assert self.graph.n_allocated_edges == self.graph._ALLOC_MIN 308 | 309 | self.assert_empty_linked_list_pairs_are_neighbors() 310 | 311 | def test_node_removal(self) -> None: 312 | nodes = np.asarray([3, 4, 1]) + self._index_shift 313 | original_size = len(self.graph) 314 | 315 | for i in range(len(nodes)): 316 | node = nodes[i] 317 | self.graph.remove_node(node) 318 | 319 | for edge in self.graph.get_edges(): 320 | assert node not in edge 321 | 322 | assert node not in self.graph.get_nodes() 323 | assert len(self.graph) == original_size - i - 1 324 | 325 | self.assert_empty_linked_list_pairs_are_neighbors() 326 | 327 | def test_edge_coordinates(self) -> None: 328 | edge_coords = self.graph.get_edges(mode='coords') 329 | 330 | for node, coords in zip(self.graph.get_nodes(), edge_coords): 331 | for i, edge in enumerate(self.graph.get_edges(node)): 332 | assert np.allclose( 333 | self.coords.loc[edge, ["y", "x"]].to_numpy(), coords[i] 334 | ) 335 | 336 | def assert_empty_linked_list_pairs_are_neighbors(self) -> None: 337 | 
# testing if empty edges linked list pairs are neighbors 338 | empty_idx = self.graph._empty_edge_idx 339 | while empty_idx != _EDGE_EMPTY_PTR: 340 | next_empty_idx = self.graph._edges_buffer[ 341 | empty_idx * _UN_EDGE_SIZE + _LL_UN_EDGE_POS 342 | ] 343 | assert empty_idx + 1 == next_empty_idx 344 | # skipping one 345 | empty_idx = self.graph._edges_buffer[ 346 | next_empty_idx * _UN_EDGE_SIZE + _LL_UN_EDGE_POS 347 | ] 348 | 349 | 350 | class NonSpatialMixin(Protocol): 351 | # required by typing 352 | _GRAPH_CLASS: Any 353 | graph: BaseGraph 354 | edges: np.ndarray 355 | _index_shift: int 356 | 357 | def setup_method(self, method: Callable) -> None: 358 | self.edges = ( 359 | np.asarray([[0, 1], [1, 2], [2, 3], [3, 4], [4, 0]], dtype=int) 360 | + self._index_shift 361 | ) 362 | 363 | self.graph = self._GRAPH_CLASS(edges=self.edges) 364 | 365 | def test_edge_coordinates(self) -> None: 366 | pytest.skip("Non-spatial graph has no coordinates.") 367 | 368 | 369 | class TestNonSpatialDirectedGraph(NonSpatialMixin, TestDirectedGraph): # type: ignore[misc] 370 | pass 371 | 372 | 373 | class TestNonSpatialUndirectedGraph(NonSpatialMixin, TestUndirectedGraph): 374 | pass 375 | 376 | 377 | class TestDirectedGraphSpecialIndex(TestDirectedGraph): 378 | _index_shift = 10 379 | 380 | 381 | class TestUndirectedGraphSpecialIndex(TestUndirectedGraph): 382 | _index_shift = 10 383 | 384 | 385 | @pytest.mark.parametrize( 386 | "node_id", 387 | [0, 1], 388 | ) 389 | def test_removing_last_edge_from_digraph(node_id: int) -> None: 390 | # regression test from bug reported on Zulip: 391 | # https://napari.zulipchat.com/#narrow/stream/360030-working-group-graph/topic/Node.20removal.20bug/near/411290388 392 | coords = np.asarray([[0, 0], [20, 20], [100, 100], [120, 120]]) 393 | 394 | graph = DirectedGraph(edges=[[0, 1]], coords=coords) 395 | 396 | # removing node 3 is fine since it is not connected to the sole edge 397 | graph.remove_node(3) 398 | 399 | # removing either 0 or 1 (source or
destination) caused error 400 | graph.remove_node(node_id) 401 | 402 | assert graph.n_edges == 0 403 | assert graph.n_nodes == 2 404 | -------------------------------------------------------------------------------- /src/napari_graph/directed_graph.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional, Tuple, Union 2 | 3 | import numpy as np 4 | from numpy.typing import ArrayLike 5 | 6 | from napari_graph.base_graph import ( 7 | _EDGE_EMPTY_PTR, 8 | _NODE_EMPTY_PTR, 9 | BaseGraph, 10 | _iterate_edges, 11 | _remove_edge, 12 | ) 13 | from napari_graph.numba import njit, typed 14 | from napari_graph.undirected_graph import _UN_EDGE_SIZE 15 | 16 | """ 17 | Directed edge constants for accessing the directed graph buffer data. 18 | Each edge occupies _DI_EDGE_SIZE spaces on the graph buffer. 19 | _LL_DI_EDGE_POS indicates the displacement between the edge initial index and 20 | the **target** edge directed linked list position. 21 | 22 | Example of a directed graph edge buffer: 23 | [ 24 | source_node_buffer_id_0, 25 | target_node_buffer_id_0, 26 | source_edge_linked_list_0, 27 | target_edge_linked_List_0, 28 | source_node_buffer_id_1, 29 | target_node_buffer_id_1, 30 | source_edge_linked_list_1, 31 | target_edge_linked_List_1, 32 | ... 33 | ] 34 | """ 35 | _DI_EDGE_SIZE = _UN_EDGE_SIZE + 1 36 | _LL_DI_EDGE_POS = 2 37 | 38 | 39 | @njit 40 | def _add_directed_edge( 41 | buffer: np.ndarray, 42 | node2src_edges: np.ndarray, 43 | node2tgt_edges: np.ndarray, 44 | empty_idx: int, 45 | src_node: int, 46 | tgt_node: int, 47 | ) -> int: 48 | """Add a single directed edge to `buffer`. 49 | 50 | This updates the `buffer`'s source and target linked list 51 | and the nodes to edges mappings. 52 | 53 | NOTE: see `_add_undirected_edge` docs for comment about cache misses. 54 | 55 | Parameters 56 | ---------- 57 | buffer : np.ndarray 58 | Edges buffer. 
    node2src_edges : np.ndarray
        Mapping from node indices to source edge buffer indices -- head of edges linked list.
    node2tgt_edges : np.ndarray
        Mapping from node indices to target edge buffer indices -- head of edges linked list.
    empty_idx : int
        First index of empty edges linked list.
    src_node : int
        Source node of added edge.
    tgt_node : int
        Target node of added edge.

    Returns
    -------
    int
        New first index of empty edges linked list.
    """
    if empty_idx == _EDGE_EMPTY_PTR:
        raise ValueError("Edge buffer is full.")

    elif empty_idx < 0:
        raise ValueError("Invalid empty index.")

    # the new edge becomes the head of both the source and the target
    # linked lists of its endpoints
    next_src_edge = node2src_edges[src_node]
    next_tgt_edge = node2tgt_edges[tgt_node]
    node2src_edges[src_node] = empty_idx
    node2tgt_edges[tgt_node] = empty_idx

    buffer_index = empty_idx * _DI_EDGE_SIZE
    # the free slot's link field stores the next free slot index
    next_empty = buffer[buffer_index + _LL_DI_EDGE_POS]

    buffer[buffer_index] = src_node
    buffer[buffer_index + 1] = tgt_node
    # slot layout: [src, tgt, next-src-edge link, next-tgt-edge link]
    buffer[buffer_index + _LL_DI_EDGE_POS] = next_src_edge
    buffer[buffer_index + _LL_DI_EDGE_POS + 1] = next_tgt_edge

    return next_empty


@njit
def _add_directed_edges(
    buffer: np.ndarray,
    edges: np.ndarray,
    empty_idx: int,
    n_edges: int,
    node2src_edges: np.ndarray,
    node2tgt_edges: np.ndarray,
) -> Tuple[int, int]:
    """Add an array of edges into the `buffer`.

    Directed edges contains two linked lists, outgoing (source) and incoming
    (target) edges.

    Returns
    -------
    Tuple[int, int]
        New head of the empty edges linked list and new number of edges.
    """
    size = edges.shape[0]
    for i in range(size):

        empty_idx = _add_directed_edge(
            buffer,
            node2src_edges,
            node2tgt_edges,
            empty_idx,
            edges[i, 0],
            edges[i, 1],
        )
        n_edges += 1

    return empty_idx, n_edges


@njit(inline='always')
def _remove_target_edge(
    src_node: int,
    tgt_node: int,
    edges_buffer: np.ndarray,
    node2tgt_edges: np.ndarray,
) -> None:
    """Remove edge from target edges linked list.

    It doesn't clean the buffer, because it'll be used later.

    Parameters
    ----------
    src_node : int
        Source node of added edge.
    tgt_node : int
        Target node of added edge.
    edges_buffer : np.ndarray
        Edges buffer.
    node2tgt_edges : np.ndarray
        Mapping from node indices to target edge buffer indices -- head of edges linked list.
    """
    idx = node2tgt_edges[tgt_node]  # different indexing from source edge
    prev_buffer_idx = _EDGE_EMPTY_PTR

    # bounded loop guards against a corrupted buffer causing an infinite loop
    for _ in range(edges_buffer.shape[0] // _DI_EDGE_SIZE):
        if idx == _EDGE_EMPTY_PTR:
            raise ValueError(
                "Could not find target node at directed edge removal."
            )

        buffer_idx = idx * _DI_EDGE_SIZE
        next_edge_idx = edges_buffer[buffer_idx + _LL_DI_EDGE_POS + 1]

        # edge found
        if (
            edges_buffer[buffer_idx] == src_node
        ):  # different indexing from source edge
            # skipping found edge from linked list
            if prev_buffer_idx == _EDGE_EMPTY_PTR:
                node2tgt_edges[tgt_node] = (
                    next_edge_idx  # different indexing from source edge
                )
            else:
                edges_buffer[prev_buffer_idx + _LL_DI_EDGE_POS + 1] = (
                    next_edge_idx
                )

            # unlink only the target link; source link and payload are
            # cleaned later by _remove_edge
            edges_buffer[buffer_idx + _LL_DI_EDGE_POS + 1] = _EDGE_EMPTY_PTR
            break

        # moving to next edge
        idx = next_edge_idx
        prev_buffer_idx = buffer_idx
    else:
        raise ValueError(
            "Infinite loop detected at target edge removal, edges buffer must be corrupted."
        )


@njit
def _remove_directed_edge(
    src_node: int,
    tgt_node: int,
    empty_idx: int,
    edges_buffer: np.ndarray,
    node2src_edges: np.ndarray,
    node2tgt_edges: np.ndarray,
) -> int:
    """Remove a single directed edge from the edges buffer.

    Returns the new head of the empty edges linked list.
    """

    # must be executed before default edge removal and cleanup
    _remove_target_edge(src_node, tgt_node, edges_buffer, node2tgt_edges)

    empty_idx = _remove_edge(
        src_node,
        tgt_node,
        empty_idx,
        edges_buffer,
        node2src_edges,
        _DI_EDGE_SIZE,
        _LL_DI_EDGE_POS,
    )

    return empty_idx


@njit
def _remove_directed_edges(
    edges: np.ndarray,
    empty_idx: int,
    n_edges: int,
    edges_buffer: np.ndarray,
    node2src_edges: np.ndarray,
    node2tgt_edges: np.ndarray,
) -> Tuple[int, int]:
    """Remove an array of edges from the edges buffer.

    Returns the new empty-list head and the new number of edges.
    """

    for i in range(edges.shape[0]):
        empty_idx = _remove_directed_edge(
            edges[i, 0],
            edges[i, 1],
            empty_idx,
            edges_buffer,
            node2src_edges,
            node2tgt_edges,
        )
        n_edges -= 1
    return empty_idx, n_edges


@njit
def _remove_directed_incident_edges(
    node: int,
    empty_idx: int,
    n_edges: int,
    edges_buffer: np.ndarray,
    node2src_edges: np.ndarray,
    node2tgt_edges: np.ndarray,
    is_target: int,
) -> Tuple[int, int]:
    """Remove directed edges from the buffer that contain the given `node`.

    The parameter `is_target` should be zero to remove edges where `node` is
    the source node, and 1 for the target node.

    Parameters
    ----------
    node : int
        Node index in the buffer domain.
    empty_idx : int
        Index first edge (head of) linked list
    n_edges : int
        Current number of total edges
    edges_buffer : np.ndarray
        Buffer containing the edges data
    node2src_edges : np.ndarray
        Mapping from node indices to source edge buffer indices -- head of edges linked list.
    node2tgt_edges : np.ndarray
        Mapping from node indices to target edge buffer indices -- head of edges linked list.
    is_target : int
        Binary integer flag indicating if it is a target or not, used to shift linked list position.

    Returns
    -------
    Tuple[int, int]
        New empty linked list head, new number of edges
    """
    if is_target:
        idx = node2tgt_edges[node]
    else:
        idx = node2src_edges[node]

    # safe guard against a corrupted buffer causing an infinite loop
    for _ in range(edges_buffer.shape[0] // _DI_EDGE_SIZE):
        if idx == _EDGE_EMPTY_PTR:
            break  # no edges left at the given node

        buffer_idx = idx * _DI_EDGE_SIZE
        # `is_target` (0 or 1) selects the source or the target link slot
        next_idx = edges_buffer[buffer_idx + _LL_DI_EDGE_POS + is_target]

        src_node = edges_buffer[buffer_idx]
        tgt_node = edges_buffer[buffer_idx + 1]

        # must be removed before source edge, due to information cleanup
        _remove_target_edge(src_node, tgt_node, edges_buffer, node2tgt_edges)
        empty_idx = _remove_edge(
            src_node,
            tgt_node,
            empty_idx,
            edges_buffer,
            node2src_edges,
            _DI_EDGE_SIZE,
            _LL_DI_EDGE_POS,
        )

        idx = next_idx
        n_edges -= 1
    else:
        # loop exhausted without reaching the list tail
        if idx != _EDGE_EMPTY_PTR:
            raise ValueError(
                "Infinite loop detected at directed graph node removal, edges buffer must be corrupted."
            )

    return empty_idx, n_edges


@njit
def _iterate_directed_source_edges(
    edge_ptr_indices: np.ndarray, edges_buffer: np.ndarray
) -> typed.List:
    """Inline the edges size and linked list position shift."""
    return _iterate_edges(
        edge_ptr_indices, edges_buffer, _DI_EDGE_SIZE, _LL_DI_EDGE_POS
    )


@njit
def _iterate_directed_target_edges(
    edge_ptr_indices: np.ndarray, edges_buffer: np.ndarray
) -> typed.List:
    """Inline the edges size and linked list position shift."""
    return _iterate_edges(
        edge_ptr_indices, edges_buffer, _DI_EDGE_SIZE, _LL_DI_EDGE_POS + 1
    )


class DirectedGraph(BaseGraph):
    """Directed graph class.
    Parameters
    ----------
    edges : ArrayLike
        Nx2 array of pair of nodes (edges).
    coords :
        Optional array of spatial coordinates of nodes.
    ndim : int
        Number of spatial dimensions of graph.
    n_nodes : int
        Optional number of nodes to pre-allocate in the graph.
    n_edges : int
        Optional number of edges to pre-allocate in the graph.
    """

    _EDGE_DUPLICATION = 1
    _EDGE_SIZE = _DI_EDGE_SIZE
    _LL_EDGE_POS = _LL_DI_EDGE_POS

    def _init_node_buffers(self, n_nodes: int) -> None:
        # adds the incoming (target) linked-list heads on top of the
        # base-class node buffers
        super()._init_node_buffers(n_nodes)
        self._node2tgt_edges = np.full(
            n_nodes, fill_value=_EDGE_EMPTY_PTR, dtype=int
        )

    def _realloc_nodes_buffers(self, size: int) -> None:
        diff_size = size - self.n_allocated_nodes
        super()._realloc_nodes_buffers(size)
        # NOTE(review): filled with _NODE_EMPTY_PTR while _init_node_buffers
        # uses _EDGE_EMPTY_PTR; both are -1, so behavior is unchanged, but
        # the constants should be unified.
        self._node2tgt_edges = np.append(
            self._node2tgt_edges,
            np.full(diff_size, fill_value=_NODE_EMPTY_PTR, dtype=np.int64),
        )

    def _add_edges(self, edges: np.ndarray) -> None:
        self._empty_edge_idx, self._n_edges = _add_directed_edges(
            self._edges_buffer,
            edges,
            self._empty_edge_idx,
            self._n_edges,
            self._node2edges,
            self._node2tgt_edges,
        )

    def get_edges(
        self, nodes: Optional[ArrayLike] = None, mode: str = 'indices'
    ) -> Union[List[np.ndarray], np.ndarray]:
        """`get_target_edges` alias.

        NOTE(review): original docstring said "`source_edges` alias", but
        this delegates to `get_target_edges` (incoming edges).
        """
        return self.get_target_edges(nodes, mode)

    def out_edges(
        self, nodes: Optional[ArrayLike] = None, mode: str = 'indices'
    ) -> Union[List[np.ndarray], np.ndarray]:
        """`get_source_edges` alias (outgoing edges)."""
        return self.get_source_edges(nodes, mode)

    def get_source_edges(
        self, nodes: Optional[ArrayLike] = None, mode: str = 'indices'
    ) -> Union[List[np.ndarray], np.ndarray]:
        """Return the source edges (outgoing) of the given nodes.

        If no nodes are provided, all source edges are returned.

        Parameters
        ----------
        nodes : Optional[ArrayLike], optional
            Node indices, by default None
        mode : str
            Type of data queried from the edges. For example, `indices` or
            `coords`.

        Returns
        -------
        List[np.ndarray]
            List of (N_i) x 2 x D arrays, where N_i is the number of edges at
            the ith node. D is the dimensionality of `coords` when
            mode == `coords` and is ignored when mode == `indices`. N_i
            dimension is ignored when N_i is 1.
        """
        return self._iterate_edges_generic(
            nodes,
            node2edges=self._node2edges,
            iterate_edges_func=_iterate_directed_source_edges,
            mode=mode,
        )

    def in_edges(
        self, nodes: Optional[ArrayLike] = None, mode: str = 'indices'
    ) -> Union[List[np.ndarray], np.ndarray]:
        """`get_target_edges` alias (incoming edges)."""
        return self.get_target_edges(nodes, mode)

    def get_target_edges(
        self, nodes: Optional[ArrayLike] = None, mode: str = 'indices'
    ) -> Union[List[np.ndarray], np.ndarray]:
        """Return the target edges (incoming) of the given nodes.

        If no nodes are provided, all target edges are returned.

        Parameters
        ----------
        nodes : Optional[ArrayLike], optional
            Node indices, by default None
        mode : str
            Type of data queried from the edges. For example, `indices` or
            `coords`.

        Returns
        -------
        List[np.ndarray]
            List of (N_i) x 2 x D arrays, where N_i is the number of edges at
            the ith node. D is the dimensionality of `coords` when
            mode == `coords` and it's ignored when mode == `indices`. N_i
            dimension is ignored when N_i is 1.
        """
        return self._iterate_edges_generic(
            nodes,
            node2edges=self._node2tgt_edges,
            iterate_edges_func=_iterate_directed_target_edges,
            mode=mode,
        )

    def _remove_edges(self, edges: np.ndarray) -> None:
        self._empty_edge_idx, self._n_edges = _remove_directed_edges(
            edges,
            self._empty_edge_idx,
            self._n_edges,
            self._edges_buffer,
            self._node2edges,
            self._node2tgt_edges,
        )

    def _remove_incident_edges(self, node_buffer_index: int) -> None:
        """Remove directed edges that contain `node` in either direction."""
        # target edges first, matching the removal-order requirement of
        # _remove_directed_incident_edges
        for is_target in (1, 0):
            (
                self._empty_edge_idx,
                self._n_edges,
            ) = _remove_directed_incident_edges(
                node_buffer_index,
                self._empty_edge_idx,
                self._n_edges,
                self._edges_buffer,
                self._node2edges,  # node2src_edges
                self._node2tgt_edges,
                is_target,
            )
--------------------------------------------------------------------------------
/src/napari_graph/base_graph.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import warnings
from abc import abstractmethod
from typing import Callable, List, Optional, Tuple, Union

import networkx as nx
import numpy as np
import pandas as pd
from numpy.typing import ArrayLike

from napari_graph.numba import njit, typed, types

"""
_NODE_EMPTY_PTR is used to fill the values of uninitialized/empty/removed nodes
"""
_NODE_EMPTY_PTR = -1

"""
_EDGE_EMPTY_PTR is used to fill the values of uninitialized/empty/removed edges
"""
_EDGE_EMPTY_PTR = -1


@njit(inline='always')
def _remove_edge(
    src_node: int,
    tgt_node: int,
    empty_idx: int,
    edges_buffer: np.ndarray,
    node2edges: np.ndarray,
    edge_size: int,
    ll_edge_pos: int,
) -> int:
    """Generic function to remove a directed or undirected edge.

    (NOTE(review): original summary said "nodes"; this removes an *edge*
    from `src_node`'s linked list.)

    An additional removal of the target edges linked list is necessary for
    directed edges.

    Parameters
    ----------
    src_node : int
        Source node buffer index.
    tgt_node : int
        Target node buffer index.
    empty_idx : int
        First index of empty edges linked list.
    edges_buffer : np.ndarray
        Buffer of edges data.
    node2edges : np.ndarray
        Mapping from node indices to edge buffer indices -- head of edges linked list.
    edge_size : int
        Size of the edges on the buffer. It should be inlined when compiled.
    ll_edge_pos : int
        Position (shift) of the edge linked list on the edge buffer. It should
        be inlined when compiled.

    Returns
    -------
    int
        New first index of empty edges linked list.
    """

    idx = node2edges[src_node]
    prev_buffer_idx = _EDGE_EMPTY_PTR

    # safe guard against a corrupted buffer causing an infinite loop
    for _ in range(edges_buffer.shape[0] // edge_size):
        if idx == _EDGE_EMPTY_PTR:
            raise ValueError("Could not find/remove edge.")

        buffer_idx = idx * edge_size
        next_edge_idx = edges_buffer[buffer_idx + ll_edge_pos]

        # edge found
        if edges_buffer[buffer_idx + 1] == tgt_node:
            # skipping found edge from linked list
            if prev_buffer_idx == _EDGE_EMPTY_PTR:
                node2edges[src_node] = next_edge_idx
            else:
                edges_buffer[prev_buffer_idx + ll_edge_pos] = next_edge_idx

            # clean up not necessary but good practice
            edges_buffer[buffer_idx : buffer_idx + edge_size] = _EDGE_EMPTY_PTR
            # removed slot is pushed onto the empty edges linked list
            edges_buffer[buffer_idx + ll_edge_pos] = empty_idx

            break

        # moving to next edge
        idx = next_edge_idx
        prev_buffer_idx = buffer_idx
    else:
        raise ValueError(
            "Infinite loop detected at edge removal, edges buffer must be corrupted."
        )

    # `idx` is the removed edge's index -- the new empty-list head
    return idx


@njit(inline='always')
def _iterate_edges(
    edge_ptr_indices: np.ndarray,
    edges_buffer: np.ndarray,
    edge_size: int,
    ll_edge_pos: int,
) -> typed.List:
    """Iterate over the edges linked lists given their starting edges.

    It returns list of multiplicity 2, where each pair is an edge.

    Parameters
    ----------
    edge_ptr_indices : np.ndarray
        Array of starting indices.
    edges_buffer : np.ndarray
        Edges buffer.
    edge_size : int
        Size of the edges on the buffer. It should be inlined when compiled.
    ll_edge_pos : int
        Position (shift) of the edge linked list on the edge buffer. It should
        be inlined when compiled.

    Returns
    -------
    typed.List
        List of lists edges, adjacent nodes are at indices (k, k+1) such that k
        is even.
    """
    edges_list = typed.List()

    for idx in edge_ptr_indices:
        edges = typed.List.empty_list(types.int64)
        edges_list.append(edges)

        while idx != _EDGE_EMPTY_PTR:
            buffer_idx = idx * edge_size
            edges.append(edges_buffer[buffer_idx])  # src
            edges.append(edges_buffer[buffer_idx + 1])  # tgt
            idx = edges_buffer[buffer_idx + ll_edge_pos]

    return edges_list


@njit
def _contains_keys(
    map: typed.Dict,
    keys: np.ndarray,
) -> bool:
    """Returns true if at least one `key` is present on `map`."""
    for k in keys:
        if k in map:
            return True
    return False


@njit
def _update_world2buffer(
    world2buffer: typed.Dict,
    world_idx: np.ndarray,
    buffer_idx: np.ndarray,
) -> None:
    """Updates `world_idx` (keys) and `buffer_idx` (values) to `world2buffer` mapping."""
    for w, b in zip(world_idx, buffer_idx):
        world2buffer[w] = b


@njit
def _vmap_world2buffer(
    world2buffer: typed.Dict, world_idx: np.ndarray
) -> np.ndarray:
    # NOTE(review): original annotation was `-> typed.Dict`, but the
    # function returns an index array.
    """Maps world indices to buffer indices."""
    buffer_idx = np.empty(world_idx.shape[0], dtype=types.int64)
    for i in range(world_idx.shape[0]):
        buffer_idx[i] = world2buffer[world_idx[i]]
    return buffer_idx


class BaseGraph:
    """Abstract base graph class.

    Parameters
    ----------
    edges : ArrayLike
        Nx2 array of pair of nodes (edges).
    coords :
        Optional array of spatial coordinates of nodes.
    ndim : int
        Number of spatial dimensions of graph.
    n_nodes : int
        Optional number of nodes to pre-allocate in the graph.
    n_edges : int
        Optional number of edges to pre-allocate in the graph.
    """

    # abstract constants
    _EDGE_DUPLICATION: int
    _EDGE_SIZE: int
    _LL_EDGE_POS: int

    # allocation constants
    _ALLOC_MULTIPLIER = 1.1
    _ALLOC_MIN = 25

    def __init__(
        self,
        edges: ArrayLike = (),
        coords: Optional[Union[pd.DataFrame, ArrayLike]] = None,
        ndim: Optional[int] = None,
        n_nodes: Optional[int] = None,
        n_edges: Optional[int] = None,
    ):
        # validate nodes
        if coords is not None:
            if not isinstance(coords, pd.DataFrame):
                coords = pd.DataFrame(coords)
            if not np.issubdtype(coords.index.dtype, np.integer):
                raise ValueError(
                    f"The index of `coords` (data type: {coords.index.dtype}) must be an integer."
                )

            # validate nodes: ndim
            if len(coords.index) > 0:
                if ndim is not None:
                    if ndim != len(coords.columns):
                        raise ValueError(
                            f"`ndim` ({ndim}) does not match the number of columns in `coords` ({len(coords.columns)})."
                        )
                else:
                    ndim = len(coords.columns)

            # validate nodes: n_nodes
            if n_nodes is not None:
                if n_nodes < len(coords.index):
                    raise ValueError(
                        f"`n_nodes` ({n_nodes}) must be greater or equal than `coords` length ({len(coords.index)})."
                    )
            else:
                n_nodes = len(coords.index)

        # initialize nodes
        if n_nodes is None:
            n_nodes = self._ALLOC_MIN
        else:
            n_nodes = max(n_nodes, self._ALLOC_MIN)

        self._init_node_buffers(n_nodes)

        # `ndim` present => spatial graph, allocate coordinates buffer
        if ndim is not None:
            self._coords = np.empty((n_nodes, ndim), dtype=np.float32)
        else:
            self._coords = None

        if coords is not None:
            assert self._coords is not None
            self.add_nodes(indices=coords.index, coords=coords)

        # validate edges
        edges = np.asarray(edges)
        if len(edges) > 0:
            if edges.ndim != 2:
                raise ValueError(
                    f"`edges` ({edges.ndim} dimensions) must have 2 dimensions."
                )
            if edges.shape[1] != 2:
                raise ValueError(
                    f"`edges` (shape: {edges.shape}) must have shape E x 2."
                )

        # validate edges: n_edges
        if n_edges is not None:
            if n_edges < len(edges):
                raise ValueError(
                    f"`n_edges` ({n_edges}) must be greater or equal than `edges` length ({len(edges)}."
                )
        else:
            n_edges = len(edges)

        n_edges = max(n_edges, self._ALLOC_MIN)

        # initialize edges
        self._init_edge_buffers(n_edges)
        if len(edges) > 0:
            # nodes implied by the edges when no coordinates were given
            if coords is None:
                self.add_nodes(indices=np.unique(edges))
            self.add_edges(edges)

    def _init_node_buffers(self, n_nodes: int) -> None:
        # stack of free buffer slots (popped from the end)
        self._empty_nodes: List[int] = list(reversed(range(n_nodes)))
        self._node2edges = np.full(
            n_nodes, fill_value=_EDGE_EMPTY_PTR, dtype=int
        )
        self._world2buffer = typed.Dict.empty(types.int64, types.int64)
        self._buffer2world = np.full(
            n_nodes, fill_value=_NODE_EMPTY_PTR, dtype=int
        )

    def _init_edge_buffers(self, n_edges: int) -> None:
        # if condition just to be safe, in case MIN_ALLOC is set to 0
        self._empty_edge_idx = 0 if n_edges > 0 else _EDGE_EMPTY_PTR
        self._n_edges = 0
        self._edges_buffer = np.full(
            n_edges * self._EDGE_DUPLICATION * self._EDGE_SIZE,
            fill_value=_EDGE_EMPTY_PTR,
            dtype=int,
        )
        # chain every pre-allocated slot into the empty edges linked list
        self._edges_buffer[
            self._LL_EDGE_POS : -self._EDGE_SIZE : self._EDGE_SIZE
        ] = np.arange(1, self._EDGE_DUPLICATION * n_edges)

    @property
    def ndim(self) -> int:
        return self._coords.shape[1]

    @property
    def n_nodes(self) -> int:
        """Number of nodes in use."""
        return self.n_allocated_nodes - self.n_empty_nodes

    @property
    def n_allocated_nodes(self) -> int:
        """Number of total allocated nodes."""
        return len(self._buffer2world)

    @property
    def n_empty_nodes(self) -> int:
        """Number of nodes allocated but not used."""
        return len(self._empty_nodes)

    def get_nodes(self) -> np.ndarray:
        """Indices of graph nodes."""
        return self._buffer2world[self._buffer2world != _NODE_EMPTY_PTR]

    def get_coordinates(
        self, node_indices: Optional[ArrayLike] = None
    ) -> np.ndarray:
        """Coordinates of the given nodes.

        If none is provided it returns the coordinates of every node.
        """
        if self._coords is None:
            raise ValueError(
                "`get_coordinates` is only available for spatial graphs."
            )

        node_indices = self._validate_nodes(node_indices)
        node_indices = self._map_world2buffer(node_indices)
        return self._coords[node_indices]

    def _realloc_nodes_buffers(self, size: int) -> None:
        """Reallocs the nodes buffers and copies existing data.

        NOTE: Currently, only increasing the buffers' size is implemented.

        Parameters
        ----------
        size : int
            New buffer size.
        """
        prev_size = self.n_allocated_nodes
        size_diff = size - prev_size

        if size_diff < 0:
            raise NotImplementedError(
                "Node buffers size decrease not implemented."
            )

        elif size_diff == 0:
            raise ValueError("Tried to realloc to current buffer size.")

        if self._coords is not None:
            self._coords.resize(
                (size, self._coords.shape[1]), refcheck=False
            )  # zero-filled
        self._node2edges = np.append(
            self._node2edges,
            np.full(size_diff, fill_value=_EDGE_EMPTY_PTR, dtype=int),
        )
        self._buffer2world = np.append(
            self._buffer2world,
            np.full(size_diff, fill_value=_NODE_EMPTY_PTR, dtype=int),
        )
        self._empty_nodes = list(reversed(range(prev_size, size)))

    def get_next_valid_indices(self, count: int) -> ArrayLike:
        """Return `count` fresh world indices after the current maximum."""
        if count <= 0:
            raise ValueError(
                f"`count` must be a positive integer. Found {count}"
            )

        next_indices = self._buffer2world.max() + 1
        return np.arange(next_indices, next_indices + count)

    def add_nodes(
        self,
        *,
        indices: Optional[ArrayLike] = None,
        coords: Optional[ArrayLike] = None,
        count: Optional[int] = None,
    ) -> ArrayLike:
        """
        Add nodes to graph, at least one of the arguments must be supplied.
        `count` cannot be supplied with other arguments.

        Parameters
        ----------
        indices : ArrayLike
            Node indices.
        coords : np.ndarray
            Node coordinates, optional for non-spatial graph.
        count : int
            Number of nodes to be added.

        Returns
        -------
        ArrayLike
            Added nodes indices.
        """
        if count is not None and (indices is not None or coords is not None):
            raise ValueError(
                "`count` cannot be supplied with `indices` and `coords`."
            )

        if count is None and indices is None and coords is None:
            raise ValueError(
                "One of `indices`, `coords` or `count` must be supplied."
            )

        if coords is not None:
            coords = np.atleast_2d(coords)

        if indices is None:
            if count is None:
                count = coords.shape[0]
            indices = self.get_next_valid_indices(count)

        indices = np.atleast_1d(indices)
        if indices.ndim > 1:
            raise ValueError(
                f"`indices` must be 1-dimensional. Found {indices.ndim}."
            )

        # spatial graphs require coords; non-spatial graphs forbid them
        if (self._coords is None) != (coords is None):
            if coords is None:
                raise ValueError(
                    "`coords` must be provided for spatial graphs."
                )
            else:
                raise ValueError(
                    "`coords` cannot be provided for non-spatial graphs."
                )

        if _contains_keys(self._world2buffer, indices):
            raise ValueError(
                f"One of the nodes {indices} are already present in the buffer."
            )

        if self.n_empty_nodes < len(indices):
            self._realloc_nodes_buffers(
                self._get_alloc_size(self.n_nodes + len(indices))
            )

        # flipping since _empty_nodes is a stack
        buffer_indices = np.flip(self._empty_nodes[-len(indices) :])

        if coords is not None:
            if indices.shape[0] != coords.shape[0]:
                raise ValueError(
                    f"`indices` and `coords` must be equal. Found {len(indices)} and {len(coords)}."
                )

            self._coords[buffer_indices] = coords

        _update_world2buffer(self._world2buffer, indices, buffer_indices)
        self._empty_nodes = self._empty_nodes[: -len(indices)]
        self._buffer2world[buffer_indices] = indices

        return indices

    def _get_alloc_size(self, size: int) -> int:
        # grow geometrically to amortize reallocation cost
        return int(max(size * self._ALLOC_MULTIPLIER, self._ALLOC_MIN))

    def remove_node(self, index: int, is_buffer_domain: bool = False) -> None:
        """Remove node of given `index`, by default it's the world index.

        Parameters
        ----------
        index : int
            node index
        is_buffer_domain : bool, optional
            indicates if the index is on the buffer domain, by default False
        """
        if is_buffer_domain:
            index = self._buffer2world[index]
        buffer_index = self._world2buffer.pop(index)
        self._remove_incident_edges(buffer_index)
        self._buffer2world[buffer_index] = _NODE_EMPTY_PTR
        self._empty_nodes.append(buffer_index)

    @abstractmethod
    def _remove_incident_edges(self, node_buffer_index: int) -> None:
        """Abstract method, removes edges of node at given buffer index."""
        raise NotImplementedError

    def _realloc_edges_buffers(self, n_edges: int) -> None:
        """Reallocs the edges buffer and copies existing data.

        NOTE: Currently, only increasing the buffers' size is implemented.

        Parameters
        ----------
        n_edges : int
            New number of edges.
        """

        # augmenting size to match dummy edges
        size = n_edges * self._EDGE_DUPLICATION
        prev_size = self.n_allocated_edges * self._EDGE_DUPLICATION
        diff_size = size - prev_size

        if diff_size < 0:
            raise NotImplementedError(
                "Edge buffer size decrease not implemented."
            )
        elif diff_size == 0:
            raise ValueError(
                f"Tried to realloc to current buffer size ({self.n_allocated_edges})."
            )

        prev_buffer_size = len(self._edges_buffer)

        self._edges_buffer = np.append(
            self._edges_buffer,
            np.full(
                diff_size * self._EDGE_SIZE,
                fill_value=_EDGE_EMPTY_PTR,
                dtype=int,
            ),
        )

        # fills empty edges ptr
        self._edges_buffer[
            prev_buffer_size
            + self._LL_EDGE_POS : -self._EDGE_SIZE : self._EDGE_SIZE
        ] = np.arange(prev_size + 1, size)

        # appends existing empty edges linked list to the end of the new list
        # (negative index: link field of the last allocated slot)
        self._edges_buffer[self._LL_EDGE_POS - self._EDGE_SIZE] = (
            self._empty_edge_idx
        )
        self._empty_edge_idx = prev_size

    @property
    def n_allocated_edges(self) -> int:
        """Number of total allocated edges."""
        return len(self._edges_buffer) // (
            self._EDGE_DUPLICATION * self._EDGE_SIZE
        )

    @property
    def n_empty_edges(self) -> int:
        """Number of allocated edges but not used."""
        return self.n_allocated_edges - self.n_edges

    @property
    def n_edges(self) -> int:
        """Number of edges in use."""
        return self._n_edges

    def _validate_nodes(
        self, node_indices: Optional[ArrayLike] = None
    ) -> np.ndarray:
        """Converts and validates the nodes indices."""

        # NOTE: maybe the nodes could be mapped inside this function
        if node_indices is None:
            return self.get_nodes()

        node_indices = np.atleast_1d(node_indices)

        if not np.issubdtype(node_indices.dtype, np.integer):
            raise ValueError(
                f"Node indices must be integer. Found {node_indices.dtype}."
            )

        if node_indices.ndim != 1:
            raise ValueError(
                "Node indices must be 1-dimensional. "
                f"Found {node_indices.ndim}-dimensional."
            )

        return node_indices

    def _validate_edges(self, edges: ArrayLike) -> np.ndarray:
        """Converts and validates the edges."""
        edges = np.atleast_2d(edges)

        if not np.issubdtype(edges.dtype, np.integer):
            raise ValueError(f"Edges must be integer. Found {edges.dtype}.")

        if edges.ndim != 2:
            raise ValueError(
                "Edges must be 1- or 2-dimensional. "
                f"Found {edges.ndim}-dimensional."
            )

        if edges.shape[1] != 2:
            raise ValueError(
                f"Edges must be a sequence of length 2 arrays. "
                f"Found length {edges.shape[1]}"
            )

        return edges

    @abstractmethod
    def _add_edges(self, edges: np.ndarray) -> None:
        """Abstract method.

        Requires different implementation for undirected and directed graphs.
        """
        raise NotImplementedError

    def add_edges(self, edges: ArrayLike) -> None:
        """Add edges into the graph.

        TODO : add parameter for edges features

        Parameters
        ----------
        edges : ArrayLike
            A list of 2-dimensional tuples or an Nx2 array with a pair of
            node indices.
        """
        edges = self._validate_edges(edges)

        if self.n_empty_edges < len(edges):
            self._realloc_edges_buffers(
                self._get_alloc_size(self.n_edges + len(edges))
            )

        self._add_edges(self._map_world2buffer(edges))

    @abstractmethod
    def _remove_edges(self, edges: np.ndarray) -> None:
        raise NotImplementedError

    def remove_edges(self, edges: ArrayLike) -> None:
        """Remove edges from the graph.

        Parameters
        ----------
        edges : ArrayLike
            A list of 2-dimensional tuples or an Nx2 array with a pair of
            node indices.
        """
        edges = self._validate_edges(edges)
        edges = self._map_world2buffer(edges)
        self._remove_edges(edges)

    def _map_world2buffer(self, world_idx: np.ndarray) -> np.ndarray:
        """Flatten the world indices buffer maps into buffer coordinates.

        ... and reshape back to original space.
        """
        shape = world_idx.shape
        buffer_idx = _vmap_world2buffer(
            self._world2buffer, world_idx.reshape(-1)
        )
        return buffer_idx.reshape(shape)

    def _iterate_edges(
        self,
        node_world_indices: ArrayLike,
        node2edges: np.ndarray,
        iterate_edges_func: Callable[
            [np.ndarray, np.ndarray], List[np.ndarray]
        ],
    ) -> List[List]:
        """Helper function to iterate over edges and return buffer indices.

        Parameters
        ----------
        node_world_indices : ArrayLike
            Nodes world indices.
        node2edges : np.ndarray
            Mapping from node indices to edge buffer indices -- head of edges linked list.
        iterate_edges_func : [np.ndarray, np.ndarray] -> List[np.ndarray]
            Function that iterates the edges from `edges_ptr_indices` and
            `edges_buffer`.

        Returns
        -------
        List[List]
            List of Lists of length 2 * N_i, where N_i is the number of edges
            at the ith node.
        """
        node_world_indices = self._validate_nodes(node_world_indices)

        flat_edges = iterate_edges_func(
            node2edges[self._map_world2buffer(node_world_indices)],
            self._edges_buffer,
        )
        return flat_edges

    def _iterate_edges_generic(
        self,
        node_world_indices: ArrayLike,
        node2edges: np.ndarray,
        iterate_edges_func: Callable[
            [np.ndarray, np.ndarray], List[np.ndarray]
        ],
        mode: str,
    ) -> Union[List[np.ndarray], np.ndarray]:
        """Iterate over any kind of edges and return their world indices.

        Parameters
        ----------
        node_world_indices : ArrayLike
            Nodes world indices.
        node2edges : np.ndarray
            Mapping from node indices to edge buffer indices -- head of edges linked list.
        iterate_edges_func : [np.ndarray, np.ndarray] -> List[np.ndarray]
            Function that iterates the edges from `edges_ptr_indices` and
            `edges_buffer`.
        mode : str
            Type of data queried from the edges. For example, `indices` or
            `coords`.

        Returns
        -------
        List[np.ndarray]
            List of N_i x 2 x D arrays, where N_i is the number of edges at
            the ith node. D is the dimensionality of `coords` when
            mode == `coords` and it's ignored when mode == `indices`.
        """
        if mode.lower() == 'coords' and self._coords is None:
            raise ValueError(
                "`coords` mode only available for spatial graphs."
            )

        flat_edges = self._iterate_edges(
            node_world_indices, node2edges, iterate_edges_func
        )

        if mode.lower() == 'indices':
            edges_data = [
                (
                    self._buffer2world[e].reshape(-1, 2)
                    if len(e) > 0
                    else np.empty((0, 2))
                )
                for e in flat_edges
            ]
        elif mode.lower() == 'coords':
            ndim = self._coords.shape[1]
            edges_data = [
                (
                    self._coords[e].reshape(-1, 2, ndim)
                    if len(e) > 0
                    else np.empty((0, 2, ndim))
                )
                for e in flat_edges
            ]
        # NOTE: here `mode` could also query the edges features.
        # Not implemented yet.
        else:
            modes = ('indices', 'coords')
            raise ValueError(
                f"Edge iteration mode not found. Received {mode}, "
                f"expected {modes}."
            )

        # single queried node: unwrap the outer list
        if len(edges_data) == 1:
            return edges_data[0]
        else:
            return edges_data

    @abstractmethod
    def get_edges(
        self, nodes: Optional[ArrayLike] = None, mode: str = 'indices'
    ) -> Union[List[np.ndarray], np.ndarray]:
        raise NotImplementedError

    def get_edges_buffers(
        self, is_buffer_domain: bool = False
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Return valid edges in buffer or world domain.

        Return the indices (buffer domain) and the (source, target) values
        (world domain) of all valid edges.

        Undirected edges are not duplicated.

        This function is useful for loading the data for visualization.

        Parameters
        ----------
        is_buffer_domain : bool
            flag indicating if it should return `world` or `buffer` domain.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray]
            Buffer indices (buffer domain) and (source, target) (world domain by default).
        """
        unique_edge_size = self._EDGE_SIZE * self._EDGE_DUPLICATION
        buffer_size = len(self._edges_buffer)
        indices = np.arange(0, buffer_size, unique_edge_size)

        # reshaping such that each row is (source id, target id, ...)
        buffer = self._edges_buffer.reshape((-1, unique_edge_size))
        edges = buffer[:, :2]  # (source, target)

        # empty slots have source == _EDGE_EMPTY_PTR
        valid = edges[:, 0] != _EDGE_EMPTY_PTR

        indices = indices[valid]
        edges = edges[valid]
        if not is_buffer_domain:
            edges = self._buffer2world[edges]

        return indices, edges

    def __len__(self) -> int:
        """Number of nodes in use."""
        return self.n_nodes

    def initialized_buffer_mask(self) -> np.ndarray:
        """Compute mask of nodes that have already been initialized.

        Returns
        -------
        np.ndarray
            Boolean array of valid node, it has the same length the buffer.
        """
        return self._buffer2world != _NODE_EMPTY_PTR

    @property
    def coords_buffer(self) -> np.ndarray:
        """Returns the actual coordinates buffer. It's not a copy."""
        if self._coords is None:
            raise ValueError(
                "graph does not have a `coords` attribute. "
                "It is not a spatial graph."
            )
        return self._coords

    def is_spatial(self) -> bool:
        """True if self is a spatial graph (has coordinates attribute)."""
        return self._coords is not None

    @staticmethod
    def from_networkx(graph: nx.Graph) -> BaseGraph:
        """Loads a Directed or Undirected napari-graph from a NetworkX graph.

        Parameters
        ----------
        graph : nx.Graph
            The NetworkX graph to be converted.
859 | """ 860 | from napari_graph.directed_graph import DirectedGraph 861 | from napari_graph.undirected_graph import UndirectedGraph 862 | 863 | nodes = np.array(list(graph.nodes())) 864 | if not np.issubdtype(nodes.dtype, np.integer) or nodes.ndim > 1: 865 | graph_int_nodes = nx.convert_node_labels_to_integers( 866 | graph, label_attribute='_node_id' 867 | ) 868 | warnings.warn( 869 | 'Node IDs must be integers. They have been converted ' 870 | 'automatically.' 871 | ) 872 | else: 873 | graph_int_nodes = graph 874 | coords_dict = nx.get_node_attributes(graph_int_nodes, "pos") 875 | if len(coords_dict) > 0: 876 | coords_df = pd.DataFrame.from_dict(coords_dict, orient="index") 877 | else: 878 | coords_df = None 879 | 880 | edges = graph_int_nodes.edges 881 | if len(edges) > 0: 882 | edges = np.atleast_2d(edges) 883 | 884 | return ( 885 | DirectedGraph(edges, coords_df) 886 | if graph_int_nodes.is_directed() 887 | else UndirectedGraph(edges, coords_df) 888 | ) 889 | 890 | def to_networkx(self) -> nx.Graph: 891 | """Convert it self into NetworkX graph. 892 | 893 | Parameters 894 | ---------- 895 | graph : BaseGraph 896 | napari-graph Graph 897 | 898 | Returns 899 | ------- 900 | nx.Graph 901 | An equivalent NetworkX graph. 902 | """ 903 | from napari_graph.directed_graph import DirectedGraph 904 | 905 | if isinstance(self, DirectedGraph): 906 | out_graph = nx.DiGraph() 907 | else: 908 | out_graph = nx.Graph() 909 | 910 | if self.is_spatial(): 911 | for node_id, pos in zip(self.get_nodes(), self.get_coordinates()): 912 | # note: some nx functions are unhappy with arrays in node 913 | # attributes because you can't compare arrays with ==. 914 | # So one day we might want to cast to tuple. 
915 | out_graph.add_node(node_id, pos=pos) 916 | else: 917 | out_graph.add_nodes_from(self.get_nodes()) 918 | 919 | edges = self.get_edges() 920 | if isinstance(edges, list) and len(edges) > 0: 921 | edges = np.concatenate(edges, axis=0) 922 | 923 | edges_as_tuples = list(map(tuple, edges)) 924 | out_graph.add_edges_from(edges_as_tuples) 925 | 926 | return out_graph 927 | 928 | def subgraph_edges( 929 | self, 930 | node_indices: ArrayLike, 931 | is_buffer_domain: bool = False, 932 | ) -> ArrayLike: 933 | """Returns edges (node pair) where both nodes are presents. 934 | 935 | Parameters 936 | ---------- 937 | nodes_indices : np.ndarray 938 | Subset of nodes used for selection. 939 | is_buffer_domain : bool 940 | When true `node_indices` and returned edges are on buffer domain. 941 | 942 | Returns 943 | ------- 944 | np.ndarray 945 | (N x 2) array of nodes indices, where N is the number of valid edges from the induced subgraph. 946 | """ 947 | _, edges = self.get_edges_buffers(is_buffer_domain) 948 | 949 | if is_buffer_domain: 950 | mask = np.zeros(self._buffer2world.shape[0], dtype=bool) 951 | mask[node_indices] = True 952 | subgraph_edges = edges[mask[edges[:, 0]] & mask[edges[:, 1]]] 953 | 954 | else: 955 | mask = np.isin(edges, node_indices).all(axis=1) 956 | assert mask.shape[0] == edges.shape[0] 957 | subgraph_edges = edges[mask] 958 | 959 | return subgraph_edges 960 | --------------------------------------------------------------------------------