├── .devcontainer
├── Dockerfile
└── devcontainer.json
├── .gitattributes
├── .github
├── actions
│ └── setup
│ │ └── action.yml
└── workflows
│ ├── docs-dev.yml
│ ├── docs-stable.yml
│ ├── full_testing.yml
│ ├── linting.yml
│ └── testing.yml
├── .gitignore
├── .vscode
└── settings.json
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── SECURITY.md
├── SUPPORT.md
├── docs
├── about.md
├── contributing.md
├── data
│ ├── ants_1_1.tedges
│ ├── ants_1_2_val.csv
│ ├── ants_1_2_val_small.csv
│ ├── manufacturing_email.tedges
│ ├── temporal_clusters.ngram
│ ├── temporal_clusters.tedges
│ └── tube_paths_train.ngram
├── docker_installation.md
├── gen_ref_pages.py
├── getting_started.md
├── img
│ ├── favicon.ico
│ ├── pathpy_logo.png
│ └── pathpy_logo_new.png
├── index.md
├── overrides
│ ├── assets
│ │ ├── icons
│ │ │ ├── conda_logo.svg
│ │ │ ├── docker_logo.svg
│ │ │ ├── jupyter_logo.svg
│ │ │ ├── pyg_logo.svg
│ │ │ ├── python_logo.svg
│ │ │ ├── pytorch_logo.svg
│ │ │ └── vscode_logo.svg
│ │ ├── images
│ │ │ ├── CAIDAS_logo.png
│ │ │ ├── JMU_logo.png
│ │ │ ├── Princeton_logo.png
│ │ │ ├── UZH_logo.png
│ │ │ └── network.jpg
│ │ ├── scripts
│ │ │ ├── home.js
│ │ │ ├── katex.js
│ │ │ └── mathjax.js
│ │ └── stylesheets
│ │ │ ├── code_select.css
│ │ │ └── home.css
│ ├── home.html
│ └── main.html
├── plot_tutorial.md
├── tutorial.md
└── tutorial
│ ├── _higher_order_scalability.ipynb
│ ├── _lift_order.ipynb
│ ├── _multi_order_concepts.ipynb
│ ├── _new_pathData_test.ipynb
│ ├── _new_pathData_working.ipynb
│ ├── _new_paths.ipynb
│ ├── _scalability_analysis.ipynb
│ ├── _time_respecting_paths_gpu.ipynb
│ ├── _xx_test.ipynb
│ ├── basic_concepts.ipynb
│ ├── dbgnn.ipynb
│ ├── generative_models.ipynb
│ ├── netzschleuder.ipynb
│ ├── paths_higher_order.ipynb
│ ├── temporal_betweenness.ipynb
│ ├── temporal_clusters.html
│ ├── temporal_graphs.ipynb
│ ├── temporal_shortest_paths.ipynb
│ ├── trp_higher_order.ipynb
│ ├── visualisation.ipynb
│ ├── wl.ipynb
│ ├── xx_temporal_centralities.ipynb
│ └── xx_test_random_walks.ipynb
├── mkdocs.yml
├── pyproject.toml
├── src
├── README.md
└── pathpyG
│ ├── __init__.py
│ ├── algorithms
│ ├── __init__.py
│ ├── centrality.py
│ ├── components.py
│ ├── generative_models.py
│ ├── lift_order.py
│ ├── rolling_time_window.py
│ ├── shortest_paths.py
│ ├── temporal.py
│ └── weisfeiler_leman.py
│ ├── core
│ ├── __init__.py
│ ├── graph.py
│ ├── index_map.py
│ ├── multi_order_model.py
│ ├── path_data.py
│ └── temporal_graph.py
│ ├── io
│ ├── __init__.py
│ ├── graphtool.py
│ ├── netzschleuder.py
│ └── pandas.py
│ ├── logging.toml
│ ├── nn
│ ├── __init__.py
│ └── dbgnn.py
│ ├── pathpyG.toml
│ ├── processes
│ ├── __init__.py
│ ├── process.py
│ ├── random_walk.py
│ └── sampling.py
│ ├── statistics
│ ├── __init__.py
│ ├── clustering.py
│ ├── degrees.py
│ └── node_similarities.py
│ ├── utils
│ ├── __init__.py
│ ├── config.py
│ ├── convert.py
│ ├── dbgnn.py
│ ├── logger.py
│ └── progress.py
│ └── visualisations
│ ├── __init__.py
│ ├── _d3js
│ ├── __init__.py
│ ├── core.py
│ ├── network_plots.py
│ └── templates
│ │ ├── d3.v5.min.js
│ │ ├── network.js
│ │ ├── setup.html
│ │ ├── setup.js
│ │ ├── static.js
│ │ ├── styles.css
│ │ └── temporal.js
│ ├── _matplotlib
│ ├── __init__.py
│ ├── core.py
│ └── network_plots.py
│ ├── _tikz
│ ├── __init__.py
│ ├── core.py
│ ├── network_plots.py
│ └── templates
│ │ ├── network.tex
│ │ ├── static.tex
│ │ └── temporal.tex
│ ├── hist_plots.py
│ ├── layout.py
│ ├── network_plots.py
│ ├── plot.py
│ └── utils.py
└── tests
├── __init__.py
├── algorithms
├── __init__.py
├── conftest.py
├── test_centrality.py
├── test_components.py
├── test_generative_models.py
├── test_lift_order.py
├── test_random_graph.py
├── test_rolling_time_window.py
├── test_shortest_paths.py
├── test_similarities.py
├── test_temporal.py
└── test_wl.py
├── benchmark
├── conftest.py
└── test_benchmark.py
├── core
├── __init__.py
├── conftest.py
├── test_graph.py
├── test_index_map.py
├── test_multi_order_model.py
├── test_path_data.py
└── test_temporal_graph.py
├── io
├── __init__.py
├── conftest.py
├── test_netzschleuder.py
└── test_pandas.py
├── nn
├── __init__.py
├── conftest.py
└── test_dbgnn.py
├── processes
├── __init__.py
├── conftest.py
└── test_random_walk.py
├── statistics
├── __init__.py
├── conftest.py
└── test_statistics.py
├── utils
├── __init__.py
└── test_convert.py
└── visualizations
├── __init__.py
├── test_hist.py
└── test_plot.py
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM pytorch/pytorch:2.4.1-cuda12.1-cudnn9-runtime
2 | WORKDIR /workspaces/pathpyG
3 | RUN apt-get update
4 | RUN apt-get -y install git
5 |
6 | # For signed commits: https://code.visualstudio.com/remote/advancedcontainers/sharing-git-credentials#_sharing-gpg-keys
7 | RUN apt install gnupg2 -y
8 |
9 | # Install torch
10 | RUN pip install torch==2.4.1+cu121 --index-url https://download.pytorch.org/whl/cu121
11 | # pip install torch==2.4.1+cpu --index-url https://download.pytorch.org/whl/cpu # CPU only
12 |
13 | # Install torch_geometric and dependencies
14 | RUN pip install torch_geometric>=2.5.0
15 | RUN pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.4.0+cu121.html
16 | # RUN pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.4.0+cpu.html # CPU only
17 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "build": {
3 | "dockerfile": "Dockerfile"
4 | },
5 | "name": "pathpyG-torch2.4.1-cuda12.1",
6 | "customizations": {
7 | "vscode": {
8 | "extensions": [
9 | "ms-python.python",
10 | "ms-python.black-formatter",
11 | "ms-python.isort",
12 | "ms-python.pylint",
13 | "ms-python.flake8",
14 | "ms-python.mypy-type-checker",
15 | "ms-toolsai.jupyter"
16 | ],
17 | "python.defaultInterpreterPath": "/opt/conda/bin/python",
18 | "python.linting.enabled": true,
19 | "python.linting.pylintEnabled": true,
20 | "python.analysis.typeCheckingMode": "basic"
21 | },
22 | // Add IDs of extensions you want to be installed when container is created.
23 | "extensions": [
24 | "ms-python.python",
25 | "ms-python.black-formatter",
26 | "ms-python.isort",
27 | "ms-python.pylint",
28 | "ms-python.flake8",
29 | "ms-python.mypy-type-checker",
30 | "ms-toolsai.jupyter"
31 | ]
32 | },
33 | // Use GPU for CUDA
34 | "runArgs": [
35 | "-it",
36 | "--gpus",
37 | "all"
38 | ],
39 | // Install pathpyG as editable python package
40 | "postCreateCommand": "pip install -e '.[dev,test,doc]' && git config --global --add safe.directory /workspaces/pathpyG"
41 | }
42 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
2 | *.{cmd,[cC][mM][dD]} text eol=crlf
3 | *.{bat,[bB][aA][tT]} text eol=crlf
--------------------------------------------------------------------------------
/.github/actions/setup/action.yml:
--------------------------------------------------------------------------------
1 | name: Setup # Inspired by https://github.com/pyg-team/pytorch_geometric/blob/737707c37fc2bd712a2289b683ec14549926ff49/.github/actions/setup/action.yml
2 |
3 | description: Set up Python, PyTorch and PyTorch Geometric.
4 |
5 | inputs: # defaults are set to the version used in the dev container
6 | python-version:
7 | required: false
8 | default: "3.10"
9 | description: "The Python version to use."
10 | torch-version:
11 | required: false
12 | default: "2.4.0"
13 | description: "The PyTorch version to use."
14 | cuda-version:
15 | required: false
16 | default: cpu # For GPU support a self-hosted runner is required.
17 | description: "If CUDA should be used or not."
18 | full_install:
19 | required: false
20 | default: true
21 | description: "If all side packages of PyTorch Geometric should be installed."
22 |
23 | runs:
24 | using: composite
25 |
26 | steps:
27 | - name: Set up Python ${{ inputs.python-version }}
28 | uses: actions/setup-python@v5
29 | with:
30 | python-version: ${{ inputs.python-version }}
31 | check-latest: true
32 | cache: pip
33 | cache-dependency-path: |
34 | pyproject.toml
35 |
36 | - name: Install PyTorch ${{ inputs.torch-version }}+${{ inputs.cuda-version }}
37 | run: |
38 | pip install torch==${{ inputs.torch-version }} --extra-index-url https://download.pytorch.org/whl/${{ inputs.cuda-version }}
39 | python -c "import torch; print('PyTorch:', torch.__version__)"
40 | python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
41 | python -c "import torch; print('CUDA:', torch.version.cuda)"
42 | shell: bash
43 |
44 | - name: Install pyg-lib # pyg-lib is currently only available on Linux.
45 | if: ${{runner.os == 'Linux' }}
46 | run: |
47 | pip uninstall -y pyg-lib
48 | pip install --no-index pyg-lib -f https://data.pyg.org/whl/torch-${{ inputs.torch-version }}+${{ inputs.cuda-version }}.html
49 | shell: bash
50 |
51 | - name: Install extension packages
52 | if: ${{ inputs.full_install == 'true' }}
53 | run: | # scipy installation before torch-scatter is required to solve dependency conflicts
54 | pip install torch_geometric
55 | pip install scipy
56 | pip install --no-index --upgrade torch-scatter torch-sparse torch-cluster torch-spline-conv -f https://data.pyg.org/whl/torch-${{ inputs.torch-version }}+${{ inputs.cuda-version }}.html
57 | shell: bash
58 |
--------------------------------------------------------------------------------
/.github/workflows/docs-dev.yml:
--------------------------------------------------------------------------------
1 | name: Publish Dev Docs
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | publish:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout repository
13 | uses: actions/checkout@v4
14 | with:
15 | fetch-depth: 0
16 |
17 | - name: Setup packages
18 | uses: ./.github/actions/setup
19 |
20 | - name: Install dependencies
21 | run: |
22 | pip install -e .[doc]
23 |
24 | - name: Get variables
25 | run: |
26 | python -c 'import pathpyG; print(f"package_version={pathpyG.__version__}")' >> "$GITHUB_ENV"
27 | echo "sha_short=$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV"
28 |
29 | - name: Publish docs
30 | # First configure git correctly
31 | # Then use the alias of the current dev version to delete it based on its identifier
32 | # Then deploy the new version so that there is only one dev version at a time
33 | run: |
34 | git config user.name github-actions
35 | git config user.email github-actions@github.com
36 | git fetch origin gh-pages --depth=1
37 | mike delete --push $(mike list | grep dev | awk '{print $1}' | tr -d '()')
38 | mike deploy --push --update-aliases "${{ env.package_version }}-dev" dev
39 | mike set-default --push --allow-empty dev
40 |
--------------------------------------------------------------------------------
/.github/workflows/docs-stable.yml:
--------------------------------------------------------------------------------
1 | name: Publish Stable Docs
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | jobs:
7 | publish:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - name: Checkout repository
11 | uses: actions/checkout@v4
12 | with:
13 | fetch-depth: 0
14 |
15 | - name: Setup packages
16 | uses: ./.github/actions/setup
17 |
18 | - name: Install dependencies
19 | run: |
20 | pip install -e .[doc]
21 |
22 | - name: Get package version
23 | run: |
24 | python -c 'import pathpyG; print(f"package_version={pathpyG.__version__}")' >> "$GITHUB_ENV"
25 |
26 | - name: Publish docs
27 | run: |
28 | git config user.name github-actions
29 | git config user.email github-actions@github.com
30 | git fetch origin gh-pages --depth=1
31 | mike deploy --push --update-aliases "${{ env.package_version }}" stable
32 | mike set-default --push stable
33 |
--------------------------------------------------------------------------------
/.github/workflows/full_testing.yml:
--------------------------------------------------------------------------------
1 | # Inspired by https://github.com/pyg-team/pytorch_geometric/blob/ee30973ed0957a7f29f345d4eeaf9cfd70805109/.github/workflows/full_testing.yml
2 | name: Full Testing CPU
3 |
4 | on:
5 | workflow_dispatch:
6 |
7 | jobs:
8 |
9 | test_all_versions:
10 | runs-on: ${{ matrix.os }}
11 |
12 | strategy:
13 | fail-fast: false
14 | matrix:
15 | os: [ubuntu-latest, windows-latest]
16 | python-version: ['3.10']
17 | torch-version: [1.13.0, 2.0.0, 2.1.0]
18 |
19 | steps:
20 | - name: Checkout repository
21 | uses: actions/checkout@v4
22 |
23 | - name: Setup packages
24 | uses: ./.github/actions/setup
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 | torch-version: ${{ matrix.torch-version }}
28 |
29 | - name: Install main package
30 | run: |
31 | pip install -e .[test]
32 |
33 | - name: Run tests
34 | run: |
35 | pytest
36 | shell: bash
37 |
--------------------------------------------------------------------------------
/.github/workflows/linting.yml:
--------------------------------------------------------------------------------
1 | # Inspired by https://github.com/pyg-team/pytorch_geometric/blob/ee30973ed0957a7f29f345d4eeaf9cfd70805109/.github/workflows/linting.yml
2 | name: Linting
3 |
4 | on:
5 | push:
6 | branches:
7 | - main
8 | pull_request:
9 |
10 | jobs:
11 | pylint: # linter: tool that checks for errors in Python code, tries to enforce a coding standard and looks for bad code smells
12 | runs-on: ubuntu-latest
13 |
14 | steps:
15 | - name: Checkout repository
16 | uses: actions/checkout@v4
17 |
18 | - name: Setup packages
19 | uses: ./.github/actions/setup
20 | with:
21 | full_install: false
22 |
23 | - name: Install dependencies
24 | run: pip install pylint
25 |
26 | - name: Run linting
27 | continue-on-error: true
28 | run: pylint $(git ls-files '*.py')
29 |
30 | flake8: # Another linter: Mostly checks if code is PEP8 conform but has some additional plugins.
31 | runs-on: ubuntu-latest
32 |
33 | steps:
34 | - name: Checkout repository
35 | uses: actions/checkout@v4
36 |
37 | - name: Set up Python
38 | uses: actions/setup-python@v5
39 | with:
40 | python-version: "3.10.13" # As in docker image (pytorch/pytorch:2.4.1-cuda12.1-cudnn9-runtime) used as dev container
41 |
42 | - name: Install dependencies
43 | run: pip install flake8 flake8-pyproject flake8-bugbear
44 |
45 | - name: Run linting
46 | continue-on-error: true
47 | run: flake8 $(git ls-files '*.py')
48 |
49 | mypy: # stricter static type checker
50 | runs-on: ubuntu-latest
51 |
52 | steps:
53 | - name: Checkout repository
54 | uses: actions/checkout@v4
55 |
56 | - name: Setup packages
57 | uses: ./.github/actions/setup
58 | with:
59 | full_install: false
60 |
61 | - name: Install dependencies
62 | run: pip install mypy
63 |
64 | - name: Check type hints
65 | continue-on-error: true
66 | run: |
67 | mypy src/pathpyG
68 |
--------------------------------------------------------------------------------
/.github/workflows/testing.yml:
--------------------------------------------------------------------------------
1 | # Inspired by https://github.com/pyg-team/pytorch_geometric/blob/ee30973ed0957a7f29f345d4eeaf9cfd70805109/.github/workflows/testing.yml
2 | name: Testing
3 |
4 | on:
5 | push:
6 | branches:
7 | - main
8 | pull_request:
9 |
10 | jobs:
11 |
12 | pytest:
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - name: Checkout repository
17 | uses: actions/checkout@v4
18 |
19 | # Only run workflow if certain files have been changed.
20 | - name: Get changed files
21 | id: changed-files
22 | uses: tj-actions/changed-files@v42
23 | with:
24 | files: |
25 | .github/workflows/testing.yml
26 | src/**
27 | tests/**
28 | pyproject.toml
29 |
30 | - name: Setup packages
31 | if: steps.changed-files.outputs.any_changed == 'true'
32 | uses: ./.github/actions/setup
33 |
34 | - name: Install main package
35 | if: steps.changed-files.outputs.any_changed == 'true'
36 | run: |
37 | pip install -e .[test]
38 |
39 | - name: Run tests
40 | if: steps.changed-files.outputs.any_changed == 'true'
41 | run: |
42 | pytest
43 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Benchmarks
55 | .benchmarks/
56 |
57 | # Translations
58 | *.mo
59 | *.pot
60 |
61 | # Django stuff:
62 | *.log
63 | local_settings.py
64 | db.sqlite3
65 | db.sqlite3-journal
66 |
67 | # Flask stuff:
68 | instance/
69 | .webassets-cache
70 |
71 | # Scrapy stuff:
72 | .scrapy
73 |
74 | # Sphinx documentation
75 | docs/_build/
76 |
77 | # PyBuilder
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "files.autoSave": "onFocusChange",
3 | "git.autofetch": true,
4 | "[jsonc]": {
5 | "editor.defaultFormatter": "vscode.json-language-features"
6 | },
7 | "[python]": {
8 | "editor.defaultFormatter": "ms-python.black-formatter",
9 | },
10 | "python.defaultInterpreterPath": "/opt/conda/bin/python",
11 | "python.formatting.provider": "none", // disable for now. Will be deprecated in the future
12 | "python.testing.unittestEnabled": false,
13 | "python.testing.pytestEnabled": true,
14 | "black-formatter.importStrategy": "fromEnvironment",
15 | "black-formatter.args": [
16 | "--config=pyproject.toml"
17 | ],
18 | "pylint.importStrategy": "fromEnvironment",
19 | "pylint.args": [
20 | "--rcfile=pyproject.toml"
21 | ],
22 | "flake8.importStrategy": "fromEnvironment",
23 | "flake8.args": [
24 | "--toml-config=pyproject.toml"
25 | ],
26 | "isort.importStrategy": "fromEnvironment",
27 | "isort.args": [
28 | "--settings-path=pyproject.toml"
29 | ],
30 | "mypy-type-checker.importStrategy": "fromEnvironment",
31 | "mypy-type-checker.args": [
32 | "--config-file=pyproject.toml"
33 | ],
34 | "python.testing.pytestArgs": [
35 | "tests"
36 | ],
37 | "mypy-type-checker.severity": {"error": "Warning"}
38 | }
39 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 |
5 | Resources:
6 |
7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
10 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [testing-image]: https://github.com/pathpy/pathpyG/actions/workflows/testing.yml/badge.svg
2 | [testing-url]: https://github.com/pathpy/pathpyG/actions/workflows/testing.yml
3 | [linting-image]: https://github.com/pathpy/pathpyG/actions/workflows/linting.yml/badge.svg
4 | [linting-url]: https://github.com/pathpy/pathpyG/actions/workflows/linting.yml
5 |
6 |
7 | pathpyG
8 | =======
9 |
10 | 
11 |
12 | [![Testing Status][testing-image]][testing-url]
13 | [![Linting Status][linting-image]][linting-url]
14 |
15 | pathpyG provides methods for GPU-accelerated Next-Generation Network Analytics and Graph Learning in Time Series Data on Temporal Networks.
16 |
17 | pathpyG builds on recent research on the modelling of causal structures in time series data based on higher-order De Bruijn graph models that generalize common graphs. This perspective has been developed at ETH Zürich, University of Zürich, Princeton University and Julius-Maximilians-Universität Würzburg. Recently published works include:
18 |
19 | - F Heeg, I Scholtes: [Using Time-Aware Graph Neural Networks to Predict Temporal Centralities in Dynamic Graphs](https://arxiv.org/abs/2310.15865), NeurIPS 2024, December 2024
20 | - L Qarkaxhija, V Perri, I Scholtes: [De Bruijn goes Neural: Causality-Aware Graph Neural Networks for Time Series Data on Dynamic Graphs](https://proceedings.mlr.press/v198/qarkaxhija22a.html), Proceedings of the First Learning on Graphs Conference, PMLR 198:51:1-51:21, December 2022
21 | - L Petrovic, I Scholtes: [Learning the Markov order of paths in graphs](https://doi.org/10.1145/3485447.3512091), Proceedings of WWW '22: The Web Conference 2022, Lyon, France, April 2022
22 | - V Perri, I Scholtes: [HOTVis: Higher-Order Time-Aware Visualisation of Dynamic Graphs](https://doi.org/10.1007/978-3-030-68766-3_8), Proceedings of the 28th International Symposium on Graph Drawing and Network Visualization (GD 2020), Vancouver, BC, Canada, September 15-18, 2020
23 | - R Lambiotte, M Rosvall, I Scholtes: [From Networks to Optimal Higher-Order Models of Complex Systems](https://www.nature.com/articles/s41567-019-0459-y), Nature Physics, Vol. 15, p. 313-320, March 25 2019
24 | - I Scholtes: [When is a network a network? Multi-Order Graphical Model Selection in Pathways and Temporal Networks](http://dl.acm.org/citation.cfm?id=3098145), KDD'17 - Proceedings of the 23rd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining, Halifax, Nova Scotia, Canada, August 13-17, 2017
25 |
26 |
27 | Documentation
28 | -------------
29 |
30 | Online documentation is available at [pathpy.net](https://www.pathpy.net).
31 |
32 | The documentation includes multiple tutorials that introduce the use of pathpyG to model temporal graph and path data. You will also find an API reference and other useful information that will help you to get started.
33 |
34 |
35 | Dependencies
36 | ------------
37 |
38 | pathpyG supports Python 3.10+.
39 |
40 | Installation requires [numpy](http://www.numpy.org/), [scipy](https://www.scipy.org/), [torch](https://pytorch.org/), and [torch-geometric](https://pytorch-geometric.readthedocs.io/en/latest/).
41 |
42 |
43 | Installation
44 | ------------
45 |
46 | The latest development version can be installed from Github as follows:
47 |
48 | pip install git+https://github.com/pathpy/pathpyg.git
49 |
50 |
51 | Testing
52 | -------
53 |
54 | To test pathpyG, run `pytest` in the root directory.
55 |
56 | This will exercise both the unit tests and docstring examples (using `pytest`).
57 |
58 |
59 | Development
60 | -----------
61 |
62 | pathpyG development takes place on Github: https://github.com/pathpy/pathpyG
63 |
64 | Please submit any reproducible bugs you encounter to the [issue tracker](https://github.com/pathpy/pathpyG/issues).
65 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Security
4 |
5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
6 |
7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.
8 |
9 | ## Reporting Security Issues
10 |
11 | **Please do not report security vulnerabilities through public GitHub issues.**
12 |
13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
14 |
15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
16 |
17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
18 |
19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
20 |
21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
22 | * Full paths of source file(s) related to the manifestation of the issue
23 | * The location of the affected source code (tag/branch/commit or direct URL)
24 | * Any special configuration required to reproduce the issue
25 | * Step-by-step instructions to reproduce the issue
26 | * Proof-of-concept or exploit code (if possible)
27 | * Impact of the issue, including how an attacker might exploit the issue
28 |
29 | This information will help us triage your report more quickly.
30 |
31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
32 |
33 | ## Preferred Languages
34 |
35 | We prefer all communications to be in English.
36 |
37 | ## Policy
38 |
39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
40 |
41 |
42 |
--------------------------------------------------------------------------------
/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # TODO: The maintainer of this repo has not yet edited this file
2 |
3 | **REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project?
4 |
5 | - **No CSS support:** Fill out this template with information about how to file issues and get help.
6 | - **Yes CSS support:** Fill out an intake form at [aka.ms/spot](https://aka.ms/spot). CSS will work with/help you to determine next steps. More details also available at [aka.ms/onboardsupport](https://aka.ms/onboardsupport).
7 | - **Not sure?** Fill out a SPOT intake as though the answer were "Yes". CSS will help you decide.
8 |
9 | *Then remove this first heading from this SUPPORT.MD file before publishing your repo.*
10 |
11 | # Support
12 |
13 | ## How to file issues and get help
14 |
15 | This project uses GitHub Issues to track bugs and feature requests. Please search the existing
16 | issues before filing new issues to avoid duplicates. For new issues, file your bug or
17 | feature request as a new Issue.
18 |
19 | For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE
20 | FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER
21 | CHANNEL. WHERE WILL YOU HELP PEOPLE?**.
22 |
23 | ## Microsoft Support Policy
24 |
25 | Support for this **PROJECT or PRODUCT** is limited to the resources listed above.
26 |
--------------------------------------------------------------------------------
/docs/about.md:
--------------------------------------------------------------------------------
1 | # About
2 |
3 | ## What is pathpyG?
4 |
5 | pathpyG is an Open Source package facilitating GPU-accelerated next-generation network analytics and graph learning for time series data on graphs.
6 |
7 | pathpyG is tailored to analyse time-stamped network data as well as sequential data that capture multiple short walks or paths observed in a graph or network. Examples for data that can be analysed with pathpyG include high-resolution time-stamped network data, dynamic social networks, user click streams on the Web, biological pathway data, directed acyclic graphs like citation networks, passenger trajectories in transportation networks, or trajectories of information propagation in social networks.
8 |
9 | pathpyG is fully integrated with jupyter, providing rich interactive visualisations of networks, temporal networks, and higher-order models. Visualisations can be exported to HTML5 files that can be shared and published on the Web.
10 |
11 | ## What is the science behind pathpyG?
12 |
13 | The theoretical foundation of this package, higher- and multi-order network models, was developed in the following peer-reviewed research articles:
14 |
15 | 1. L Qarkaxhija, V Perri, I Scholtes: [De Bruijn goes Neural: Causality-Aware Graph Neural Networks for Time Series Data on Dynamic Graphs](https://proceedings.mlr.press/v198/qarkaxhija22a.html), In Proceedings of the First Learning on Graphs Conference, PMLR 198:51:1-51:21, December 2022
16 | 2. L Petrovic, I Scholtes: [Learning the Markov order of paths in graphs](https://doi.org/10.1145/3485447.3512091), In Proceedings of WWW '22: The Web Conference 2022, Lyon, France, April 2022
17 | 3. V Perri, I Scholtes: [HOTVis: Higher-Order Time-Aware Visualisation of Dynamic Graphs](https://doi.org/10.1007/978-3-030-68766-3_8), In Proceedings of the 28th International Symposium on Graph Drawing and Network Visualization (GD 2020), Vancouver, BC, Canada, September 15-18, 2020
18 | 4. I Scholtes: [When is a network a network? Multi-Order Graphical Model Selection in Pathways and Temporal Networks](http://dl.acm.org/citation.cfm?id=3098145), In KDD'17 - Proceedings of the 23rd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining, Halifax, Nova Scotia, Canada, August 13-17, 2017
19 | 5. I Scholtes, N Wider, A Garas: [Higher-Order Aggregate Networks in the Analysis of Temporal Networks: Path structures and centralities](https://link.springer.com/article/10.1140/epjb/e2016-60663-0), The European Physical Journal B, 89:61, March 2016
20 | 6. I Scholtes, N Wider, R Pfitzner, A Garas, CJ Tessone, F Schweitzer: [Causality-driven slow-down and speed-up of diffusion in non-Markovian temporal networks](https://www.nature.com/articles/ncomms6024), Nature Communications, 5, September 2014
21 | 7. R Pfitzner, I Scholtes, A Garas, CJ Tessone, F Schweitzer: [Betweenness preference: Quantifying correlations in the topological dynamics of temporal networks](https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.110.198701), Phys Rev Lett, 110(19), 198701, May 2013
22 |
23 | A broader view on the importance of higher-order graph models for complex systems can be found in [this overview article](https://www.nature.com/articles/s41567-019-0459-y).
--------------------------------------------------------------------------------
/docs/data/ants_1_2_val_small.csv:
--------------------------------------------------------------------------------
1 | _GWY,GY__,1697
2 | YYYY,WR__,1697
3 | W_WR,YY_W,1698
4 | _WYG,Y_BW,1698
5 | GY__,_GWY,1698
6 | G___right,_GWY,1698
7 | WRR_,Y_BW,1699
8 | ____still,YYBR,1699
9 | _GYW,_W__curl,1701
10 | ____hidden,Y___,1702
11 | YYGG,GRY_,1702
12 | GBGR,YYRR,1702
13 | YY_W,_WRW,1702
14 | _GWY,YYGW,1702
15 | GY__,_W__curl,1702
16 | GRGY,YYGG,1702
17 | GR_Y,YGWY,1703
18 | _RWG,Y_BW,1703
19 | YYBR,YYGW,1703
20 | WB__,WRR_,1704
21 | _R__right,G___right,1704
22 | ____new,Q,1704
23 | _W__curl,G___right,1704
24 | _GYW,YGWR,1704
25 | YYBR,_GWY,1704
26 | GGGR,GRGY,1705
27 | _GWY,Y_B_,1705
28 | GY__,GBG_,1705
29 | _GWY,WR__,1706
30 | G__Y,____mid,1706
31 | _GYW,GY__,1706
32 | YYGW,YYBR,1707
33 | ____mid,G__Y,1707
34 | Q,GR_Y,1707
35 | _WYG,WRR_,1707
36 | WRR_,_WWY,1707
37 | WRRY,GB__,1708
38 | _WWY,WRR_,1708
39 | Q,YYGW,1709
40 | WRR_,_WWY,1709
41 | GY__,_GYW,1709
42 | _RWG,WB__,1710
43 | GBG_,GY__,1710
44 | YYBR,Q,1710
45 | GB__,WRRY,1711
46 | YGWY,____new,1711
47 | ____new,YGWY,1711
48 | Q,GBG_,1712
49 | YGWR,GY__,1713
50 | GY__,YGWR,1714
51 | _W__,_WYG,1715
52 | G___right,_R__right,1715
53 | YYGW,GGW_,1717
54 | YYBR,Q,1718
55 | GR_W,_WWY,1719
56 | GGGR,____mid,1719
57 | GGRG,W_WR,1719
58 | _WRW,YY__,1720
59 | YY__,_WRW,1720
60 | ____still,Q,1720
61 | YYBR,YYGW,1720
62 | YYGW,YYBR,1721
63 | _W__curl,GY__,1721
64 | Q,GGRG,1723
65 | YYGW,GGRG,1724
66 | _GWY,YYYY,1724
67 | _WYG,Y_BW,1726
68 | WB__,_WYG,1726
69 | WRR_,YGWY,1727
70 | GR_W,YGWY,1728
71 | Y_BW,_W__,1728
72 | YGWY,WRR_,1728
73 | YYBR,GRGY,1728
74 | _W__,_RWG,1729
75 | GRGY,YYBR,1729
76 | Y_B_,_GWY,1730
77 | YYGG,GRY_,1731
78 | G__Y,GGGG,1731
79 | ____still,Q,1732
80 | GGGR,GGW_,1733
81 | GRGY,YYGG,1734
82 | _W__,G_RG,1735
83 | _GWY,_YGG,1735
84 | GGGG,GBGR,1735
85 | GBGR,GGGG,1736
86 | G_RG,_W__,1736
87 | __W_,WRR_,1737
88 | Y_BW,YY__,1737
89 | YYYY,WR__,1737
90 | _WRW,____hidden,1738
91 | _YGG,_GWY,1738
92 | ____hidden,_WRW,1739
93 | YY_W,_WRW,1742
94 | G___right,YWWW,1742
95 | G_RG,_WYG,1743
96 | YGWR,GBG_,1743
97 | _GYW,YYGW,1743
98 | YYBR,YYGG,1743
99 | _WYG,G_RG,1744
100 | _W__,_WYG,1744
101 | YYGG,GRY_,1745
102 | YYYY,WR__,1745
103 | _GWY,_YGG,1746
104 | _GYW,GBG_,1746
105 | WRR_,GY_W,1747
106 | GBG_,_GYW,1748
107 | YYYY,_GWY,1749
108 |
--------------------------------------------------------------------------------
/docs/docker_installation.md:
--------------------------------------------------------------------------------
1 | # Docker Installation {#docker_installation}
2 |
3 | :pytorch_logo: PyTorch provides a :docker_logo: [Docker image](https://hub.docker.com/r/pytorch/pytorch) with PyTorch preinstalled. Using this image, the Dockerfile below creates a Docker image with PathpyG installed.
4 |
5 | === "GPU"
6 | ```dockerfile
7 | FROM pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime
8 | WORKDIR /workspaces/pathpyG
9 | RUN apt-get update
10 | RUN apt-get -y install git
11 |
12 | RUN pip install torch==2.1.0+cu121 --index-url https://download.pytorch.org/whl/cu121
13 |
14 | RUN pip install "torch_geometric>=2.4.0"
15 | RUN pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.1.0+cu121.html
16 | RUN pip install git+https://github.com/pathpy/pathpyG.git
17 | ```
18 | === "CPU"
19 | ```dockerfile
20 | FROM pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime
21 | WORKDIR /workspaces/pathpyG
22 | RUN apt-get update
23 | RUN apt-get -y install git
24 |
25 | RUN pip install torch==2.1.0+cpu --index-url https://download.pytorch.org/whl/cpu # CPU only
26 |
27 | RUN pip install "torch_geometric>=2.4.0"
28 | RUN pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.1.0+cpu.html # CPU only
29 | RUN pip install git+https://github.com/pathpy/pathpyG.git
30 | ```
31 |
--------------------------------------------------------------------------------
/docs/gen_ref_pages.py:
--------------------------------------------------------------------------------
1 | """Generate the code reference pages and navigation."""
2 | # See for more detail: https://mkdocstrings.github.io/recipes/
3 | 
4 | from pathlib import Path
5 | 
6 | import mkdocs_gen_files
7 | 
8 | # Literate-nav object collected while walking the source tree.
9 | nav = mkdocs_gen_files.Nav()
10 | 
11 | # Walk every Python module under src/ and generate one reference page per module.
12 | for path in sorted(Path("src").rglob("*.py")):
13 |     module_path = path.relative_to("src").with_suffix("")
14 |     doc_path = path.relative_to("src").with_suffix(".md")
15 |     full_doc_path = Path("reference", doc_path)
16 | 
17 |     parts = tuple(module_path.parts)
18 | 
19 |     # Packages: document `pkg/__init__.py` as the package's index page.
20 |     if parts[-1] == "__init__":
21 |         parts = parts[:-1]
22 |         doc_path = doc_path.with_name("index.md")
23 |         full_doc_path = full_doc_path.with_name("index.md")
24 |     # Skip executable entry points; they are not part of the API reference.
25 |     elif parts[-1] == "__main__":
26 |         continue
27 | 
28 |     nav[parts] = doc_path.as_posix()
29 | 
30 |     # Each generated page holds a single mkdocstrings directive for the module.
31 |     with mkdocs_gen_files.open(full_doc_path, "w") as fd:
32 |         ident = ".".join(parts)
33 |         fd.write(f"::: {ident}")
34 | 
35 |     # Point the "edit this page" link at the real source file, not the generated page.
36 |     mkdocs_gen_files.set_edit_path(full_doc_path, Path("../") / path)
37 | 
38 | with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
39 |     nav_file.writelines(nav.build_literate_nav())
34 |
--------------------------------------------------------------------------------
/docs/getting_started.md:
--------------------------------------------------------------------------------
1 | # Getting Started
2 |
3 | The following will guide you through the installation of the package and the first steps to use it.
4 |
5 | ## Prerequisites
6 |
7 | PathpyG is available for :python_logo: Python versions 3.10 and above. It is not recommended to install it on your system Python. Instead, we recommend using a virtual environment such as [:conda_logo: conda](https://docs.conda.io/en/latest/) or [virtualenv](https://virtualenv.pypa.io/en/latest/). You can also set up a :docker_logo: Docker image as described in the [next section](docker_installation.md).
8 |
9 | ## Installation
10 |
11 | Once you have an environment up and running, you can install the package simply via pip. But first make sure that you installed the necessary dependencies.
12 |
13 | ### Dependencies
14 |
15 | This package is based on [:pytorch_logo: PyTorch](https://pytorch.org/) and [:pyg_logo: PyTorch Geometric](https://pytorch-geometric.readthedocs.io/). Please install both libraries before installing PathpyG. You can follow the installation instructions in their respective documentation ([:pytorch_logo: PyTorch](https://pytorch.org/get-started/locally/) and [:pyg_logo: PyG](https://pytorch-geometric.readthedocs.io/en/stable/install/installation.html)).
16 |
17 | !!! warning
18 | We currently only support PyG version 2.5.0 and above.
19 |
20 | ### Install Stable Release
21 |
22 | You can install the latest stable release of PathpyG via pip:
23 |
24 | !!! warning "TODO"
25 | This is not yet available. We will release the first stable version soon.
26 |
27 | ```bash
28 | pip install pathpyg
29 | ```
30 |
31 | ### Install Latest Development Version
32 |
33 | If you want to install the latest development version, you can do so via pip directly from the GitHub repository:
34 |
35 | ```bash
36 | pip install git+https://github.com/pathpy/pathpyG.git
37 | ```
38 |
--------------------------------------------------------------------------------
/docs/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/img/favicon.ico
--------------------------------------------------------------------------------
/docs/img/pathpy_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/img/pathpy_logo.png
--------------------------------------------------------------------------------
/docs/img/pathpy_logo_new.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/img/pathpy_logo_new.png
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | template: home.html
3 | title: pathpyG
4 | hide:
5 | - navigation
6 | - toc
7 | ---
8 |
9 | This is the index page of the pathpyG documentation.
10 |
--------------------------------------------------------------------------------
/docs/overrides/assets/icons/conda_logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/docs/overrides/assets/icons/docker_logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
15 |
--------------------------------------------------------------------------------
/docs/overrides/assets/icons/pyg_logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/docs/overrides/assets/icons/python_logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
79 |
--------------------------------------------------------------------------------
/docs/overrides/assets/icons/pytorch_logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
--------------------------------------------------------------------------------
/docs/overrides/assets/icons/vscode_logo.svg:
--------------------------------------------------------------------------------
1 |
42 |
--------------------------------------------------------------------------------
/docs/overrides/assets/images/CAIDAS_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/overrides/assets/images/CAIDAS_logo.png
--------------------------------------------------------------------------------
/docs/overrides/assets/images/JMU_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/overrides/assets/images/JMU_logo.png
--------------------------------------------------------------------------------
/docs/overrides/assets/images/Princeton_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/overrides/assets/images/Princeton_logo.png
--------------------------------------------------------------------------------
/docs/overrides/assets/images/UZH_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/overrides/assets/images/UZH_logo.png
--------------------------------------------------------------------------------
/docs/overrides/assets/images/network.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/docs/overrides/assets/images/network.jpg
--------------------------------------------------------------------------------
/docs/overrides/assets/scripts/home.js:
--------------------------------------------------------------------------------
1 | window.addEventListener('scroll', function () {
2 |     var header = document.querySelector('.md-header');
3 |     var firstSection = document.getElementById('firstSection');
4 | 
5 |     if (window.scrollY > firstSection.offsetHeight) {
6 |         header.classList.add('md-bg-color');
7 |     } else {
8 |         header.classList.remove('md-bg-color');
9 |     }
10 | });
11 | 
12 | window.addEventListener('scroll', function () {
13 |     var items = document.querySelectorAll('.landing_second__item');
14 |     var windowHeight = window.innerHeight;
15 | 
16 |     items.forEach(function (item, index) {
17 |         var itemPosition = item.getBoundingClientRect();
18 | 
19 |         if (itemPosition.top < windowHeight && itemPosition.bottom >= 0) {
20 |             if (index % 2 === 1) { // Odd-indexed items fly in from the right
21 |                 item.style.animation = 'flyInRight 2s ease-in-out 1';
22 |             } else { // Even-indexed items fly in from the left
23 |                 item.style.animation = 'flyInLeft 2s ease-in-out 1';
24 |             }
25 |             item.style.animationFillMode = 'forwards';
26 |         }
27 |     });
28 | });
29 | 
30 | var hasScrolledPast = false;
31 | 
32 | window.addEventListener('scroll', function() {
33 |     var heroContent = document.getElementById('hero-content');
34 |     var imageElement = document.querySelector('.parallax__image');
35 |     var imageHeight = imageElement ? imageElement.offsetHeight : 0;
36 |     var scrollPosition = window.scrollY;
37 | 
38 |     if (scrollPosition > imageHeight / 2) {
39 |         heroContent.style.animation = "fadeOutUp 1s forwards";
40 |         hasScrolledPast = true;
41 |     } else if (hasScrolledPast) {
42 |         heroContent.style.animation = "fadeInDown 1s forwards";
43 |     }
44 | });
45 | 
46 | /* Affiliations */
47 | 
48 | $(document).ready(function(){
49 |     $("#affiliation-slider").owlCarousel({
50 |         loop:true,
51 |         nav: true,
52 |         autoplay:true,
53 |         autoplayHoverPause:true,
54 |         dotsEach: 1,
55 |         responsive:{
56 |             0:{
57 |                 items:1
58 |             },
59 |             768:{
60 |                 items:2
61 |             },
62 |             1220:{
63 |                 items:3
64 |             }
65 |         }
66 |     });
67 | });
68 |
--------------------------------------------------------------------------------
/docs/overrides/assets/scripts/katex.js:
--------------------------------------------------------------------------------
1 | // https://squidfunk.github.io/mkdocs-material/reference/math/#katex
2 |
3 | // Re-render KaTeX math whenever mkdocs-material swaps in new page content.
4 | document$.subscribe(({ body }) => {
5 |     renderMathInElement(body, {
6 |         delimiters: [
7 |             { left: "$$", right: "$$", display: true },
8 |             { left: "$", right: "$", display: false },
9 |             { left: "\\(", right: "\\)", display: false },
10 |             { left: "\\[", right: "\\]", display: true }
11 |         ],
12 |     })
13 | })
--------------------------------------------------------------------------------
/docs/overrides/assets/scripts/mathjax.js:
--------------------------------------------------------------------------------
1 | // https://squidfunk.github.io/mkdocs-material/reference/math/#mathjax
2 | window.MathJax = {
3 |     tex: {
4 |         inlineMath: [["\\(", "\\)"]],
5 |         displayMath: [["\\[", "\\]"]],
6 |         processEscapes: true,
7 |         processEnvironments: true
8 |     },
9 |     options: {
10 |         ignoreHtmlClass: ".*|",  // ignore everything by default...
11 |         processHtmlClass: "arithmatex"  // ...except elements marked as math
12 |     }
13 | };
14 | 
15 | // Typeset math again after each (instant-navigation) page load.
16 | document$.subscribe(() => {
17 |     MathJax.typesetPromise()
18 | })
--------------------------------------------------------------------------------
/docs/overrides/assets/stylesheets/code_select.css:
--------------------------------------------------------------------------------
1 | /* https://mkdocstrings.github.io/recipes/#prevent-selection-of-prompts-and-output-in-python-code-blocks */
2 |
3 | .highlight .gp, .highlight .go { /* Generic.Prompt, Generic.Output */
4 | user-select: none;
5 | }
6 |
7 | .language-pycon .md-clipboard { display: none; }
--------------------------------------------------------------------------------
/docs/overrides/main.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 |
4 |
5 |
6 | {% block content %}
7 | {% if page.nb_url %}
8 |
9 | {% include ".icons/material/download.svg" %}
10 |
11 |
12 | {% include ".icons/simple/googlecolab.svg" %}
13 |
14 | {% endif %}
15 | {{ super() }}
16 | {% endblock %}
17 |
18 |
19 | {% block outdated %}
20 | You're not viewing the latest version.
21 |
22 | Click here to go to latest.
23 |
24 | {% endblock %}
25 |
--------------------------------------------------------------------------------
/docs/tutorial.md:
--------------------------------------------------------------------------------
1 | # Overview
2 |
3 | In this tutorial, we will introduce basic concepts of pathpyG. pathpyG can be used as a wrapper around pytorch-geometric that facilitates network analysis, graph learning, and interactive data visualization. However, its real power comes into play when modelling causal path structures in time series data on networks, such as trajectories on graphs or temporal graphs with time-stamped interactions. pathpyG allows to compute causal paths in temporal graphs and model them based on [higher-order De Bruijn graphs](https://doi.org/10.1145/3097983.3098145), a higher-dimensional generalization of standard graph models for relational data.
4 |
5 | The following introductory video explains the basic idea of higher-order De Bruijn graph models for causal path structures in time series data:
6 |
7 |
8 |
29 |
30 |
31 |
32 |
33 |
34 | The science behind pathpyG has been published in outlets like SIGKDD, WWW, Learning on Graphs, Nature Communications, Nature Physics, and Physical Review Letters. Please [check here](about.md) for more details on key scientific works that have laid the foundations for this package.
35 |
36 | Different from previous versions of pathpy, the latest version pathpyG fully utilizes the power of torch and tensor-based representations of sparse graph models to facilitate the use of higher-order De Bruijn graph models. pathpyG's data structures naturally generalize the concepts of [pytorch-geometric](https://pytorch-geometric.readthedocs.io/en/latest/), which makes it easy to apply it in (temporal) graph learning tasks.
37 |
38 | Finally, pathpyG comes with an implementation of [De Bruijn Graph Neural Networks (DBGNN)](https://proceedings.mlr.press/v198/qarkaxhija22a.html), a causality-aware deep learning architecture for temporal graph data. In the tutorial, we illustrate this temporal graph learning approach in a simple toy example.
39 |
40 |
--------------------------------------------------------------------------------
/docs/tutorial/_multi_order_concepts.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# TODO: Create a Notebook that explains the new concepts for order lifting for DAGs and temporal graphs."
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "def lift_order_edge_index(edge_index: torch.Tensor, num_nodes: int, edge_weights: torch.Tensor) -> torch.Tensor:\n",
17 | " \"\"\"\n",
18 | " Do a line graph transformation on the edge index to lift the order of the graph by one.\n",
19 | "\n",
20 | " Args:\n",
21 | " edge_index: A **sorted** edge index tensor of shape (2, num_edges).\n",
22 | " num_nodes: The number of nodes in the graph.\n",
23 | " \"\"\"\n",
24 | "\n",
25 | " # Since this is a complicated function, we will use the following example to explain the steps:\n",
26 | " # Example:\n",
27 | " # edge_index = [[0, 0, 1, 1, 1, 3, 4, 5, 6],\n",
28 | " # [1, 3, 2, 3, 6, 4, 5, 7, 5]]\n",
29 | "\n",
30 | " # Compute the outdegree of each node used to get all the edge combinations leading to a higher-order edge\n",
31 | " # Example:\n",
32 | " # outdegree = [2, 3, 0, 1, 1, 1, 1, 0]\n",
33 | " outdegree = degree(edge_index[0], dtype=torch.long, num_nodes=num_nodes)\n",
34 | "\n",
35 | " # For each center node, we need to combine each outgoing edge with each incoming edge\n",
36 | " # We achieve this by creating `outdegree` number of edges for each destination node of the old edge index\n",
37 | " # Example:\n",
38 | " # outdegree_per_dst = [3, 1, 0, 1, 1, 1, 1, 0, 1]\n",
39 | " # num_new_edges = 9\n",
40 | " outdegree_per_dst = outdegree[edge_index[1]]\n",
41 | " num_new_edges = outdegree_per_dst.sum()\n",
42 | "\n",
43 | " # Use each edge from the edge index as node and assign the new indices in the order of the original edge index\n",
44 | " # Each higher order node has one outgoing edge for each outgoing edge of the original destination node\n",
45 | " # Since we keep the ordering, we can just repeat each node using the outdegree_per_dst tensor\n",
46 | " # Example:\n",
47 | " # ho_edge_srcs = [0, 0, 0, 1, 3, 4, 5, 6, 8]\n",
48 | " ho_edge_srcs = torch.repeat_interleave(outdegree_per_dst)\n",
49 | "\n",
50 | " # For each node, we calculate pointers of shape (num_nodes,) that indicate the start of the original edges\n",
51 | " # (new higher-order nodes) that have the node as source node\n",
52 | " # (Note we use PyG's cumsum function because it adds a 0 at the beginning of the tensor and\n",
53 | " # we want the `left` boundaries of the intervals, so we also remove the last element of the result with [:-1])\n",
54 | " # Example:\n",
55 | " # ptrs = [0, 2, 5, 5, 6, 7, 8, 9]\n",
56 | " ptrs = cumsum(outdegree, dim=0)[:-1]\n",
57 | "\n",
58 | " # Use these pointers to get the start of the edges for each higher-order src and repeat it `outdegree` times\n",
59 | " # Since we keep the ordering, all new higher-order edges that have the same src are indexed consecutively\n",
60 | " # Example:\n",
61 | " # ho_edge_dsts = [2, 2, 2, 5, 5, 8, 6, 7, 7]\n",
62 | " ho_edge_dsts = torch.repeat_interleave(ptrs[edge_index[1]], outdegree_per_dst)\n",
63 | "\n",
64 | " # Since the above only repeats the start of the edges, we need to add (0, 1, 2, 3, ...)\n",
65 | " # for all `outdegree` number of edges consecutively to get the correct destination nodes\n",
66 | " # We can achieve this by starting with a range from (0, 1, ..., num_new_edges)\n",
67 | " # Example:\n",
68 | " # idx_correction = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n",
69 | " idx_correction = torch.arange(num_new_edges, dtype=torch.long, device=edge_index.device)\n",
70 | " # Then, we subtract the cumulative sum of the outdegree for each destination node to get a tensor.\n",
71 | " # Example:\n",
72 | " # idx_correction = [0, 1, 2, 0, 0, 0, 0, 0, 0]\n",
73 | " idx_correction -= cumsum(outdegree_per_dst, dim=0)[ho_edge_srcs]\n",
74 | " # Add this tensor to the destination nodes to get the correct destination nodes for each higher-order edge\n",
75 | " # Example:\n",
76 | " # ho_edge_dsts = [2, 3, 4, 5, 5, 8, 6, 7, 7]\n",
77 | " ho_edge_dsts += idx_correction\n",
78 | " # tensor([[0, 0, 0, 1, 3, 4, 5, 6, 8],\n",
79 | " # [2, 3, 4, 5, 5, 8, 6, 7, 7]])\n",
80 | " return torch.stack([ho_edge_srcs, ho_edge_dsts], dim=0)"
81 | ]
82 | }
83 | ],
84 | "metadata": {
85 | "language_info": {
86 | "name": "python"
87 | }
88 | },
89 | "nbformat": 4,
90 | "nbformat_minor": 2
91 | }
92 |
--------------------------------------------------------------------------------
/docs/tutorial/_new_pathData_working.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from typing import Optional\n",
10 | "\n",
11 | "from tqdm import trange\n",
12 | "import torch\n",
13 | "from torch import Tensor\n",
14 | "from torch_geometric.data import Data\n",
15 | "from torch_geometric.loader import DataLoader\n",
16 | "from torch_geometric.nn import MessagePassing\n",
17 | "from torch_geometric.experimental import disable_dynamic_shapes\n",
18 | "from torch_geometric.nn.aggr import Aggregation\n",
19 | "from torch_geometric.utils import coalesce, degree, cumsum\n",
20 | "from torch_geometric import EdgeIndex\n",
21 | "\n",
22 | "import pathpyG as pp\n",
23 | "pp.config['torch']['device'] = 'cuda'"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 2,
29 | "metadata": {},
30 | "outputs": [
31 | {
32 | "name": "stdout",
33 | "output_type": "stream",
34 | "text": [
35 | "DAGData with 2 dags with total weight 3.0\n"
36 | ]
37 | }
38 | ],
39 | "source": [
40 | "# Example with walks as node sequences\n",
41 | "g = pp.Graph.from_edge_list([('a', 'b'), ('b', 'c'), ('a', 'c')])\n",
42 | "dags = pp.DAGData(mapping = g.mapping)\n",
43 | "\n",
44 | "dags.append_walk(('a', 'b', 'c', 'b'), weight=1.0)\n",
45 | "dags.append_walk(('a', 'c'), weight = 2.0)\n",
46 | "print(dags)"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": 5,
52 | "metadata": {},
53 | "outputs": [
54 | {
55 | "name": "stdout",
56 | "output_type": "stream",
57 | "text": [
58 | "DAGData with 3 dags with total weight 3.0\n"
59 | ]
60 | }
61 | ],
62 | "source": [
63 | "# Example with walks as edge indices (with no mapping)\n",
64 | "dags = pp.DAGData()\n",
65 | "dags.append_dag(torch.tensor([[3,0,1],[0,1,2]]))\n",
66 | "dags.append_dag(torch.tensor([[1,0,2],[0,2,0]]))\n",
67 | "dags.append_dag(torch.tensor([[0,1],[1,2]]))\n",
68 | "print(dags)"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": null,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "# Example with mix of walks or dags\n",
78 | "dags = pp.DAGData(mapping = g.mapping)\n",
79 | "\n",
80 | "dags.append_dag(torch.tensor([[0,0,1],[1,2,2]]))\n",
81 | "dags.append_walk(('a', 'b', 'c'))\n",
82 | "print(dags)"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "m = pp.MultiOrderModel.from_DAGs(dags, max_order=2)"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "metadata": {},
98 | "outputs": [],
99 | "source": [
100 | "print(m.layers[1].data.edge_index)\n",
101 | "print(m.layers[1].data.node_sequences)\n",
102 | "print(m.layers[1].mapping)"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": null,
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "print(m.layers[2].data.edge_index)\n",
112 | "print(m.layers[2].data.node_sequences)\n",
113 | "print(m.layers[2].mapping)"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": null,
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "# Real-world example\n",
123 | "dags = pp.DAGData.from_ngram('../data/tube_paths_train.ngram')\n",
124 | "print(dags)"
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": null,
130 | "metadata": {},
131 | "outputs": [],
132 | "source": [
133 | "m = pp.MultiOrderModel.from_DAGs(dags, max_order=10)"
134 | ]
135 | },
136 | {
137 | "cell_type": "code",
138 | "execution_count": null,
139 | "metadata": {},
140 | "outputs": [],
141 | "source": [
142 | "print(m.layers[3].mapping)"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "metadata": {},
149 | "outputs": [],
150 | "source": [
151 | "pp.plot(m.layers[10], node_label=list(map(str, m.layers[1].data.node_sequences.tolist())))"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "dags.map_node_seq(m.layers[10].data.node_sequences[5].tolist())"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "metadata": {},
167 | "outputs": [],
168 | "source": [
169 | "print(m.layers[2].data.edge_index)"
170 | ]
171 | },
172 | {
173 | "cell_type": "code",
174 | "execution_count": null,
175 | "metadata": {},
176 | "outputs": [],
177 | "source": [
178 | "print(m.layers[2].data.edge_weights)"
179 | ]
180 | },
181 | {
182 | "cell_type": "code",
183 | "execution_count": null,
184 | "metadata": {},
185 | "outputs": [],
186 | "source": [
187 | "print(m.layers[2].data.node_sequences)"
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": null,
193 | "metadata": {},
194 | "outputs": [],
195 | "source": []
196 | }
197 | ],
198 | "metadata": {
199 | "kernelspec": {
200 | "display_name": "Python 3 (ipykernel)",
201 | "language": "python",
202 | "name": "python3"
203 | },
204 | "language_info": {
205 | "codemirror_mode": {
206 | "name": "ipython",
207 | "version": 3
208 | },
209 | "file_extension": ".py",
210 | "mimetype": "text/x-python",
211 | "name": "python",
212 | "nbconvert_exporter": "python",
213 | "pygments_lexer": "ipython3",
214 | "version": "3.10.13"
215 | }
216 | },
217 | "nbformat": 4,
218 | "nbformat_minor": 4
219 | }
220 |
--------------------------------------------------------------------------------
/src/README.md:
--------------------------------------------------------------------------------
1 | This directory stores each Python package.
2 |
--------------------------------------------------------------------------------
/src/pathpyG/__init__.py:
--------------------------------------------------------------------------------
1 | """pathpyG is an Open Source package facilitating next-generation network analytics and
2 | graph learning for time series data on graphs.
3 |
4 | Building on the industry-proven data structures and concepts of `pytorch`
5 | and `torch_geometric`, pathpyG makes it easier than ever to apply machine learning
6 | to temporal graph data.
7 |
8 | pathpyG is jointly developed at University of Wuerzburg, Princeton University,
9 | and University of Zurich. The research behind pathpyG has been funded by the
10 | Swiss National Science Foundation via
11 | [grant 176938](https://data.snf.ch/grants/grant/176938).
12 | """
13 |
14 | import torch
15 |
16 | __version__ = "0.2.0"
17 |
18 | from pathpyG.utils.config import config
19 | from pathpyG.utils.progress import tqdm
20 | from pathpyG.utils.logger import logger
21 |
22 | from pathpyG.core.graph import Graph
23 | from pathpyG.core.index_map import IndexMap
24 | from pathpyG.core.temporal_graph import TemporalGraph
25 | from pathpyG.core.path_data import PathData
26 | from pathpyG.core.multi_order_model import MultiOrderModel
27 | from pathpyG import io
28 | from pathpyG import nn
29 | from pathpyG import algorithms
30 | from pathpyG import processes
31 | from pathpyG import statistics
32 | from pathpyG.visualisations import plot, layout
33 |
34 |
35 | if config["device"] == "cuda":
36 | config["device"] = "cuda" if torch.cuda.is_available() else "cpu"
37 |
--------------------------------------------------------------------------------
/src/pathpyG/algorithms/__init__.py:
--------------------------------------------------------------------------------
1 | """Algorithms for temporal path calculation and graph metrics.
2 |
3 | The functions and submodules in this module allow to compute
4 | time-respecting or causal paths in temporal graphs and to
5 | calculate (temporal) and higher-order graph metrics like centralities.
6 |
7 | Example:
8 | ```py
9 | # Import pathpyG and configure your torch device if you want to use GPU .
10 | import pathpyG as pp
11 | pp.config['torch']['device'] = 'cuda'
12 |
13 | # Generate a toy example for a temporal graph.
14 | g = pp.TemporalGraph.from_edge_list([
15 | ('b', 'c', 2),
16 | ('a', 'b', 1),
17 | ('c', 'd', 3),
18 | ('d', 'a', 4),
19 | ('b', 'd', 2),
20 | ('d', 'a', 6),
21 | ('a', 'b', 7)
22 | ])
23 |
24 | # Extract DAG capturing causal interaction sequences in temporal graph.
25 | e_i = pp.algorithms.lift_order_temporal(g, delta=1)
26 | dag = pp.Graph.from_edge_index(e_i)
27 | print(dag)
28 |
29 |     # Calculate shortest time-respecting paths
30 | dist, pred = pp.algorithms.temporal.temporal_shortest_paths(g, delta=1)
31 | ```
32 | """
33 |
34 | from pathpyG.algorithms.temporal import *
35 | from pathpyG.algorithms import centrality
36 | from pathpyG.algorithms import generative_models
37 | from pathpyG.algorithms import shortest_paths
38 | from pathpyG.algorithms.components import connected_components, largest_connected_component
39 | from pathpyG.algorithms.rolling_time_window import RollingTimeWindow
40 | from pathpyG.algorithms.weisfeiler_leman import WeisfeilerLeman_test
41 |
--------------------------------------------------------------------------------
/src/pathpyG/algorithms/components.py:
--------------------------------------------------------------------------------
1 | """Algorithms to calculate connected components"""
2 |
3 | from __future__ import annotations
4 | from typing import (
5 | TYPE_CHECKING,
6 | Any,
7 | Dict,
8 | Tuple,
9 | )
10 |
11 | from collections import Counter
12 |
13 | import numpy as _np
14 | from scipy.sparse.csgraph import connected_components as _cc
15 |
16 | from pathpyG.core.graph import Graph
17 |
18 |
def connected_components(graph: Graph, connection="weak") -> Tuple[int, _np.ndarray]:
    """Compute the connected components of a graph.

    Args:
        graph: Graph whose connected components are computed.
        connection: 'weak' or 'strong'; only relevant for directed graphs.

    Returns:
        Tuple `(n, labels)` with the number of components and an array
        assigning a component label to each node.
    """
    adj = graph.sparse_adj_matrix()
    return _cc(adj, directed=graph.is_directed(), connection=connection, return_labels=True)
24 |
25 |
def largest_connected_component(graph: Graph, connection="weak") -> Graph:
    """Return the subgraph induced by the largest connected component.

    Args:
        graph: Graph to extract the largest component from.
        connection: 'weak' or 'strong'; only relevant for directed graphs.

    Returns:
        New Graph containing only edges whose endpoints both lie in the
        largest connected component.
    """
    _, labels = _cc(
        graph.sparse_adj_matrix(), directed=graph.is_directed(), connection=connection, return_labels=True
    )

    # identify label and size of the largest component
    largest_label, size = Counter(labels.tolist()).most_common(1)[0]

    # keep only edges whose source and target both belong to that component
    component_edges = [
        (v, w)
        for v, w in graph.edges
        if labels[graph.mapping.to_idx(v)] == largest_label and labels[graph.mapping.to_idx(w)] == largest_label
    ]
    return Graph.from_edge_list(component_edges, is_undirected=graph.is_undirected(), num_nodes=size)
39 |
--------------------------------------------------------------------------------
/src/pathpyG/algorithms/rolling_time_window.py:
--------------------------------------------------------------------------------
1 | """Iterator interface for rolling time window analysis in temporal graphs."""
2 |
3 | from __future__ import annotations
4 | from typing import TYPE_CHECKING, Dict, Union, List
5 | from collections import defaultdict
6 |
7 | import numpy as np
8 | import torch
9 |
10 | from pathpyG import Graph
11 | from pathpyG import TemporalGraph
12 | from pathpyG import config
13 |
14 |
class RollingTimeWindow:
    """An iterable rolling time window that can be used to perform time slice analysis of temporal graphs."""

    def __init__(self, temporal_graph, window_size, step_size=1, return_window=False, weighted=True):
        """Create a rolling time window iterator over a temporal graph.

        Each iteration yields a static time-slice network generated from all
        temporal edges inside the current window, after which the window start
        is advanced by `step_size` time units.

        Args:
            temporal_graph: TemporalGraph instance that will be used to generate the
                sequence of time-slice networks.
            window_size: The width of the rolling time window used to create time-slice networks.
            step_size: The step size in time units by which the starting
                time of the rolling window will be incremented on each iteration.
            return_window: Whether or not the iterator shall return the current time window as a second return value. Default is False.
            weighted: Whether or not to return a weighted graph

        Example:
            ```py
            tedges = [('a', 'b', 1), ('b', 'c', 5), ('c', 'd', 9), ('c', 'e', 9),
                      ('c', 'f', 11), ('f', 'a', 13), ('a', 'g', 18), ('b', 'f', 21),
                      ('a', 'g', 26), ('c', 'f', 27), ('h', 'f', 27), ('g', 'h', 28),
                      ('a', 'c', 30), ('a', 'b', 31), ('c', 'h', 32), ('f', 'h', 33),
                      ('b', 'i', 42), ('i', 'b', 42), ('c', 'i', 47), ('h', 'i', 50)]
            t = pp.TemporalGraph.from_edge_list(tedges)
            r = pp.algorithms.RollingTimeWindow(t, 10, 10, return_window=True)
            for g, w in r:
                print('Time window ', w)
                print(g)
                print(g.data.edge_index)
                print('---')
            ```
        """
        self.g = temporal_graph
        self.window_size = window_size
        self.step_size = step_size
        self.current_time = self.g.start_time
        self.return_window = return_window
        self.weighted = weighted

    def __iter__(self):
        return self

    def __next__(self):
        # stop once the window start has moved past the last time stamp
        if self.current_time > self.g.end_time:
            raise StopIteration()
        window = (self.current_time, self.current_time + self.window_size)
        slice_graph = self.g.to_static_graph(weighted=self.weighted, time_window=window)
        self.current_time += self.step_size
        if self.return_window:
            return slice_graph, window
        return slice_graph
69 |
--------------------------------------------------------------------------------
/src/pathpyG/algorithms/shortest_paths.py:
--------------------------------------------------------------------------------
1 | """Algorithms to calculate shortest paths in static networks
2 |
3 | The functions in this module allow to compute shortest paths
4 | in static networks."""
5 |
6 | from __future__ import annotations
7 | from typing import (
8 | TYPE_CHECKING,
9 | Any,
10 | Dict,
11 | )
12 |
13 | import numpy as _np
14 | from scipy.sparse.csgraph import dijkstra
15 |
16 | from pathpyG.core.graph import Graph
17 |
18 |
def shortest_paths_dijkstra(graph: Graph) -> tuple[_np.ndarray, _np.ndarray]:
    """Compute all-pairs shortest paths using Dijkstra's algorithm.

    All edges are treated as unweighted, regardless of edge attributes.

    Note: the original return annotation `(_np.ndarray, _np.ndarray)` was a
    tuple literal, not a valid type annotation; it is fixed to `tuple[...]`.

    Args:
        graph: Graph for which shortest paths are computed.

    Returns:
        Tuple `(dist, pred)` where `dist[i, j]` is the number of hops on a
        shortest path from node `i` to node `j` and `pred[i, j]` is the
        predecessor of `j` on such a path (scipy uses -9999 where no path or
        predecessor exists).
    """
    m = graph.sparse_adj_matrix()

    dist, pred = dijkstra(m, directed=graph.is_directed(), return_predecessors=True, unweighted=True)

    return dist, pred
26 |
27 |
def diameter(graph: Graph) -> float:
    """Return the diameter of the graph, i.e. the largest shortest-path
    distance between any pair of nodes (infinite for disconnected graphs)."""
    dist_matrix = dijkstra(
        graph.sparse_adj_matrix(),
        directed=graph.is_directed(),
        return_predecessors=False,
        unweighted=True,
    )
    return _np.max(dist_matrix)
34 |
35 |
def avg_path_length(graph: Graph) -> float:
    """Return the average shortest-path length over all ordered node pairs
    (self-distances are zero and do not contribute to the sum)."""
    dist_matrix = dijkstra(
        graph.sparse_adj_matrix(),
        directed=graph.is_directed(),
        return_predecessors=False,
        unweighted=True,
    )
    n = graph.n
    return _np.sum(dist_matrix) / (n * (n - 1))
42 |
--------------------------------------------------------------------------------
/src/pathpyG/algorithms/temporal.py:
--------------------------------------------------------------------------------
1 | """Algorithms for the analysis of time-respecting paths in temporal graphs."""
2 |
3 | from __future__ import annotations
4 | from typing import TYPE_CHECKING, Dict, Union, List, Tuple
5 |
6 | import numpy as np
7 | from tqdm import tqdm
8 | import torch
9 | from scipy.sparse.csgraph import dijkstra
10 |
11 | from pathpyG import Graph
12 | from pathpyG.utils import to_numpy
13 | from pathpyG.core.temporal_graph import TemporalGraph
14 |
15 |
def lift_order_temporal(g: TemporalGraph, delta: int = 1):
    """Compute the second-order (event graph) edge index of a temporal graph.

    Each first-order edge (an event) becomes a node; a second-order edge
    connects event `e1 = (u, v, t)` to event `e2 = (v, w, t')` whenever
    `t < t' <= t + delta`, i.e. whenever `e2` can causally continue `e1`.

    Args:
        g: Temporal graph whose time-respecting edge continuations are lifted.
        delta: Maximum time difference for which two consecutive events are
            considered to form a time-respecting path.

    Returns:
        Edge index tensor of shape (2, k) connecting indices of first-order
        edges; an empty (2, 0) tensor if no time-respecting continuation
        exists (the previous implementation raised a RuntimeError in
        `torch.cat` in that case).
    """
    # first-order edge index and event time stamps
    edge_index, timestamps = g.data.edge_index, g.data.time

    delta = torch.tensor(delta, device=edge_index.device)
    indices = torch.arange(0, edge_index.size(1), device=edge_index.device)

    unique_t = torch.unique(timestamps, sorted=True)
    second_order = []

    # lift order: find possible continuations for edges in each time stamp
    for t in tqdm(unique_t):

        # find indices of all source edges that occur at unique timestamp t
        src_time_mask = timestamps == t
        src_edge_idx = indices[src_time_mask]

        # find indices of all edges that can possibly continue edges occurring at time t for the given delta
        dst_time_mask = (timestamps > t) & (timestamps <= t + delta)
        dst_edge_idx = indices[dst_time_mask]

        if dst_edge_idx.size(0) > 0 and src_edge_idx.size(0) > 0:

            # compute second-order edges between src and dst idx
            # for all edges where dst in src_edges (edge_index[1, x[:, 0]]) matches src in dst_edges (edge_index[0, x[:, 1]])
            x = torch.cartesian_prod(src_edge_idx, dst_edge_idx)
            ho_edge_index = x[edge_index[1, x[:, 0]] == edge_index[0, x[:, 1]]]
            second_order.append(ho_edge_index)

    # guard against graphs without any time-respecting continuation:
    # torch.cat on an empty list would raise a RuntimeError
    if not second_order:
        return torch.empty((2, 0), dtype=torch.long, device=edge_index.device)

    return torch.cat(second_order, dim=0).t().contiguous()
48 |
49 |
def temporal_shortest_paths(g: TemporalGraph, delta: int) -> Tuple[np.ndarray, np.ndarray]:
    """Compute shortest time-respecting paths in a temporal graph.

    Args:
        g: Temporal graph to compute shortest paths on.
        delta: Maximum time difference between events in a path.

    Returns:
        Tuple of two numpy arrays:
            - dist: Shortest time-respecting path distances between all first-order nodes.
            - pred: Predecessor matrix for shortest time-respecting paths between all first-order nodes.
    """
    # generate temporal event DAG, where nodes are first-order edges (events)
    # and edges connect events that can causally follow each other within delta
    edge_index = lift_order_temporal(g, delta)

    # Add indices of first-order nodes as src and dst of paths in augmented
    # temporal event DAG. Node index layout of the augmented DAG:
    #   [0, g.m)            -> events (first-order edges)
    #   [g.m, g.m + g.n)    -> first-order source nodes
    #   [g.m + g.n, g.m + 2 * g.n) -> first-order destination nodes
    src_edges_src = g.data.edge_index[0] + g.m
    src_edges_dst = torch.arange(0, g.data.edge_index.size(1), device=g.data.edge_index.device)

    dst_edges_src = torch.arange(0, g.data.edge_index.size(1), device=g.data.edge_index.device)
    dst_edges_dst = g.data.edge_index[1] + g.m + g.n

    # add edges from source to edges and from edges to destinations
    src_edges = torch.stack([src_edges_src, src_edges_dst])
    dst_edges = torch.stack([dst_edges_src, dst_edges_dst])
    edge_index = torch.cat([edge_index, src_edges, dst_edges], dim=1)

    # create sparse scipy matrix
    event_graph = Graph.from_edge_index(edge_index, num_nodes=g.m + 2 * g.n)
    m = event_graph.sparse_adj_matrix()

    # print(f"Created temporal event DAG with {event_graph.n} nodes and {event_graph.m} edges")

    # run dijkstra from all first-order source nodes (indices g.m .. g.m + g.n)
    dist, pred = dijkstra(
        m, directed=True, indices=np.arange(g.m, g.m + g.n), return_predecessors=True, unweighted=True
    )

    # limit to first-order destinations and correct distances: subtract 1
    # because paths in the augmented DAG contain one extra hop from the
    # source node to its first event
    dist_fo = dist[:, g.m + g.n :] - 1
    np.fill_diagonal(dist_fo, 0)

    # limit to first-order destinations and correct predecessors:
    # -9999 is scipy's sentinel for "no predecessor", which we map to -1
    pred_fo = pred[:, g.n + g.m :]
    pred_fo[pred_fo == -9999] = -1
    # map event indices back to first-order source nodes; the appended -1
    # entry makes index -1 map to -1 (no predecessor)
    idx_map = np.concatenate([to_numpy(g.data.edge_index[0].cpu()), [-1]])
    pred_fo = idx_map[pred_fo]
    np.fill_diagonal(pred_fo, np.arange(g.n))

    return dist_fo, pred_fo
101 |
--------------------------------------------------------------------------------
/src/pathpyG/algorithms/weisfeiler_leman.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import Tuple, List, Dict
3 |
4 | from pathpyG.core.graph import Graph
5 |
6 |
def WeisfeilerLeman_test(
    g1: Graph, g2: Graph, features_g1: dict = None, features_g2: dict = None
) -> Tuple[bool, List[str], List[str]]:
    """Run Weisfeiler-Leman isomorphism test on two graphs.

    The algorithm heuristically checks whether two graphs are isomorphic. If it returns False,
    we can be sure that the graphs are non-isomorphic. If the test returns True we did not find
    conclusive evidence that they are not isomorphic, i.e. the graphs may or may not be isomorphic.

    The two graphs must have IndexMap mappings that assign different node IDs to the nodes
    in both graphs. The function will raise an error if the node labels of both graphs overlap.

    The function returns a tuple (bool, list, list), where the first entry is the result of the test
    and the two lists represent the fingerprints of the two graphs. If the test yields true the fingerprints
    are identical. If the test fails, the fingerprints do not correspond.

    Args:
        g1: pp.Graph
        g2: pp.Graph
        features_g1: optional initial node labels for the nodes of g1
        features_g2: optional initial node labels for the nodes of g2
    """
    if g1.mapping is None or g2.mapping is None:
        raise Exception("Graphs must contain IndexMap that assigns node IDs")
    if len(set(g1.mapping.node_ids).intersection(g2.mapping.node_ids)) > 0:
        raise Exception("node identifiers of graphs must not overlap")
    # run the label refinement on the disjoint union of both graphs so labels
    # are assigned consistently across g1 and g2
    g_combined = g1 + g2
    # initialize labels of all nodes to zero, unless initial features are given
    if features_g1 is None or features_g2 is None:
        fingerprint: Dict[str | int, str] = {v: "0" for v in g_combined.nodes}
    else:
        fingerprint = features_g1.copy()
        fingerprint.update(features_g2)
    # maps compound labels to consecutive integer label IDs
    labels = {}
    label_count = 1
    stop = False
    while not stop:
        new_fingerprint = {}
        for node in g_combined.nodes:
            # create new label based on own label and sorted labels of all neighbors
            n_label = [fingerprint[x] for x in g_combined.successors(node)]
            n_label.sort()
            label = str(fingerprint[node]) + str(n_label)
            # previously unknown label
            if label not in labels:
                # create a new label based on next consecutive number
                labels[label] = label_count
                label_count += 1
            new_fingerprint[node] = labels[label]
        if len(set(fingerprint.values())) == len(set(new_fingerprint.values())):
            # the number of distinct labels did not change, i.e. the
            # refinement has stabilized, so we stop
            stop = True
        else:
            # update fingerprint and continue
            fingerprint = new_fingerprint.copy()

    # Reduce fingerprints to nodes of g1 and g2 respectively
    fingerprint_1 = [fingerprint[v] for v in g1.nodes]
    fingerprint_1_sorted = fingerprint_1.copy()
    fingerprint_1_sorted.sort()
    fingerprint_2 = [fingerprint[v] for v in g2.nodes]
    fingerprint_2_sorted = fingerprint_2.copy()
    fingerprint_2_sorted.sort()

    # perform WL-test: the sorted multi-sets of stable labels must match for
    # the graphs to be possibly isomorphic
    if fingerprint_1_sorted == fingerprint_2_sorted:
        return True, fingerprint_1, fingerprint_2
    return False, fingerprint_1, fingerprint_2
73 |
--------------------------------------------------------------------------------
/src/pathpyG/core/__init__.py:
--------------------------------------------------------------------------------
1 | """Core classes for (temporal) graphs, paths, and higher-order De Bruijn graphs.
2 |
3 | The classes in the `core` module can be used to implement integrated pipelines to
4 | preprocess time-stamped network data, do inference and model selection of higher-order
5 | De Bruijn graph models and address temporal graph learning tasks based on time-aware
6 | graph neural networks.
7 |
8 | Example:
9 | ```py
10 | import pathpyG as pp
11 | pp.config['torch']['device'] = 'cuda'
12 |
13 | # Generate toy example temporal graph
14 | g = pp.TemporalGraph.from_edge_list([
15 | ('b', 'c', 2),
16 | ('a', 'b', 1),
17 | ('c', 'd', 3),
18 | ('d', 'a', 4),
19 | ('b', 'd', 2),
20 | ('d', 'a', 6),
21 | ('a', 'b', 7)])
22 |
23 | # Create Multi-Order model that models time-respecting paths
24 | m = pp.MultiOrderModel.from_temporal_graph(g, delta=1, max_order=3)
25 | print(m.layers[1])
26 | print(m.layers[2])
27 | print(m.layers[3])
28 | ```
29 | """
30 |
--------------------------------------------------------------------------------
/src/pathpyG/io/__init__.py:
--------------------------------------------------------------------------------
1 | from pathpyG.io.netzschleuder import read_netzschleuder_graph, read_netzschleuder_record, list_netzschleuder_records
2 | from pathpyG.io.graphtool import read_graphtool
3 | from pathpyG.io.pandas import (
4 | df_to_graph,
5 | df_to_temporal_graph,
6 | read_csv_graph,
7 | read_csv_temporal_graph,
8 | write_csv,
9 | add_node_attributes,
10 | add_edge_attributes,
11 | graph_to_df,
12 | temporal_graph_to_df,
13 | )
14 |
--------------------------------------------------------------------------------
/src/pathpyG/logging.toml:
--------------------------------------------------------------------------------
1 | [loggers]
2 | keys=root,pathpyg
3 |
4 | [handlers]
5 | keys=consoleHandler,fileHandler
6 |
7 | [formatters]
8 | keys=simpleFormatter, customFormatter
9 |
10 | [logger_root]
11 | level=ERROR
12 | handlers=consoleHandler,fileHandler
13 |
14 | [logger_pathpyg]
15 | level=INFO
16 | handlers=consoleHandler,fileHandler
17 | qualname=pathpyg
18 | propagate=0
19 |
20 | [handler_fileHandler]
21 | class=FileHandler
22 | level=DEBUG
23 | formatter=customFormatter
24 | args=('pathpyG.log', )
25 |
26 | [handler_consoleHandler]
27 | class=StreamHandler
28 | level=DEBUG
29 | formatter=customFormatter
30 | args=(sys.stdout,)
31 |
32 | [formatter_simpleFormatter]
33 | format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
34 |
35 | [formatter_customFormatter]
36 | format=%(asctime)s - %(message)s
37 | datefmt=%Y-%m-%d %H:%M:%S
38 |
--------------------------------------------------------------------------------
/src/pathpyG/nn/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/src/pathpyG/nn/__init__.py
--------------------------------------------------------------------------------
/src/pathpyG/nn/dbgnn.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import torch
4 | from torch.nn import Linear, ModuleList, Module
5 | import torch.nn.functional as F
6 | from torch_geometric.nn import MessagePassing, GCNConv
7 |
8 |
class BipartiteGraphOperator(MessagePassing):
    """Message passing operator on a bipartite graph.

    Aggregates messages with 'add'; each message is the sum of linearly
    transformed source-side and target-side features.
    """

    def __init__(self, in_ch, out_ch):
        # in_ch: input feature dimension, out_ch: output feature dimension
        super(BipartiteGraphOperator, self).__init__("add")
        # separate linear transformations for the two sides of the bipartite graph
        self.lin1 = Linear(in_ch, out_ch)
        self.lin2 = Linear(in_ch, out_ch)

    def forward(self, x, bipartite_index, N, M):
        # x: tuple (source-side features, target-side features)
        # N, M: number of source-side / target-side nodes
        x = (self.lin1(x[0]), self.lin2(x[1]))
        return self.propagate(bipartite_index, size=(N, M), x=x)

    def message(self, x_i, x_j):
        # message is the sum of transformed target (x_i) and source (x_j) features
        return x_i + x_j
21 |
22 |
class DBGNN(Module):
    """Implementation of time-aware graph neural network DBGNN ([Reference paper](https://openreview.net/pdf?id=Dbkqs1EhTr)).

    Args:
        num_classes: number of classes
        num_features: number of features for first order and higher order nodes, e.g. [first_order_num_features, second_order_num_features]
        hidden_dims: number of hidden dimensions per each layer in the first/higher order network
        p_dropout: drop-out probability
    """

    def __init__(self, num_classes: int, num_features: list[int], hidden_dims: list[int], p_dropout: float = 0.0):
        super().__init__()

        self.num_features = num_features
        self.num_classes = num_classes
        self.hidden_dims = hidden_dims
        self.p_dropout = p_dropout

        # higher-order layers: GCN stack on the higher-order topology
        self.higher_order_layers = ModuleList()
        self.higher_order_layers.append(GCNConv(self.num_features[1], self.hidden_dims[0]))

        # first-order layers: GCN stack on the first-order topology
        self.first_order_layers = ModuleList()
        self.first_order_layers.append(GCNConv(self.num_features[0], self.hidden_dims[0]))

        # stack additional GCN layers up to the second-to-last hidden dimension;
        # the last hidden dimension is produced by the bipartite layer below
        for dim in range(1, len(self.hidden_dims) - 1):
            # higher-order layers
            self.higher_order_layers.append(GCNConv(self.hidden_dims[dim - 1], self.hidden_dims[dim]))
            # first-order layers
            self.first_order_layers.append(GCNConv(self.hidden_dims[dim - 1], self.hidden_dims[dim]))

        # maps higher-order node representations to first-order nodes
        self.bipartite_layer = BipartiteGraphOperator(self.hidden_dims[-2], self.hidden_dims[-1])

        # Linear layer producing class scores
        self.lin = torch.nn.Linear(self.hidden_dims[-1], num_classes)

    def forward(self, data):
        # data is expected to provide: x, x_h, edge_index, edge_weights,
        # edge_index_higher_order, edge_weights_higher_order,
        # bipartite_edge_index, num_ho_nodes and num_nodes
        x = data.x
        x_h = data.x_h

        # First-order convolutions
        for layer in self.first_order_layers:
            x = F.dropout(x, p=self.p_dropout, training=self.training)
            x = F.elu(layer(x, data.edge_index, data.edge_weights))
            x = F.dropout(x, p=self.p_dropout, training=self.training)

        # Second-order convolutions
        for layer in self.higher_order_layers:
            x_h = F.dropout(x_h, p=self.p_dropout, training=self.training)
            x_h = F.elu(layer(x_h, data.edge_index_higher_order, data.edge_weights_higher_order))
            x_h = F.dropout(x_h, p=self.p_dropout, training=self.training)

        # Bipartite message passing: project higher-order representations
        # onto the first-order nodes and combine
        x = torch.nn.functional.elu(
            self.bipartite_layer((x_h, x), data.bipartite_edge_index, N=data.num_ho_nodes, M=data.num_nodes)
        )
        x = F.dropout(x, p=self.p_dropout, training=self.training)

        # Linear layer
        x = self.lin(x)

        return x
87 |
--------------------------------------------------------------------------------
/src/pathpyG/pathpyG.toml:
--------------------------------------------------------------------------------
1 | device = "cuda"
2 |
3 | [environment]
4 | IDE = "vs code"
5 | interactive = true
6 |
7 | [progress]
8 | enabled = true
9 | leave = false
10 | min_iter = 1000
11 |
12 | [torch]
13 | device = "cuda"
14 |
15 | [attributes]
16 | history = true
17 | multiple = false
18 | frequency = "frequency"
19 |
20 | [object]
21 | separator = ","
22 |
23 | [node]
24 | color = "CornFlowerBlue"
25 | size = 15
26 |
27 | [edge]
28 | color = "darkgray"
29 | width = 2
30 | curved = true
31 |
32 | [path]
33 | separator = "|"
34 | replace = "_"
35 | max_name_length = 5
36 |
37 | [hon]
38 | separator = "="
39 | replace = "_"
40 |
41 | [temporal]
42 | start = "start"
43 | end = "end"
44 | timestamp = "timestamp"
45 | duration = "duration"
46 | duration_value = 1
47 | start_synonyms = ["beginning", "begin"]
48 | end_synonyms = ["finished", "ending"]
49 | timestamp_synonyms = ['time', "t"]
50 | duration_synonyms = ["delta", "dt"]
51 | active = "active"
52 | is_active = true
53 | unit = "s"
--------------------------------------------------------------------------------
/src/pathpyG/processes/__init__.py:
--------------------------------------------------------------------------------
1 | """Module for pathpy processes."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : __init__.py -- initialisation of processes
7 | # Author : Ingo Scholtes
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2020 Pathpy Developers
11 | # =============================================================================
12 | # flake8: noqa
13 | # pylint: disable=unused-import
14 |
15 | from pathpyG.processes.random_walk import RandomWalk
16 |
17 | # from pathpyG.processes.epidemic_spreading import EpidemicSIR
18 | from pathpyG.processes.sampling import VoseAliasSampling
19 | from pathpyG.processes.random_walk import HigherOrderRandomWalk
20 |
--------------------------------------------------------------------------------
/src/pathpyG/processes/sampling.py:
--------------------------------------------------------------------------------
1 | """Classes for efficient random sampling from discrete distributions
2 | """
3 |
4 | # !/usr/bin/python -tt
5 | # -*- coding: utf-8 -*-
6 | # =============================================================================
7 | # File : vose_sampling.py -- Class to sample from discrete distributions
8 | # Author : Ingo Scholtes
9 | # Time-stamp:
10 | #
11 | # Copyright (c) 2016-2021 Pathpy Developers
12 | # =============================================================================
13 | from __future__ import annotations
14 | from typing import Union
15 |
16 | import numpy as np
17 |
18 |
class VoseAliasSampling:
    """
    Implementation of fast biased sampling of discrete values [0, ..., n-1]

    For a concise explanation see https://www.keithschwarz.com/darts-dice-coins/

    Args:
        weights: relative weights of the n events, where weights[i] is the relative
            statistical weight of event i. The weights do not need to be
            normalized.

            For an array with length n, generated random values
            will be from range(n).

    Examples:
        Create a VoseAliasSampling instance

        >>> from pathpyG.processes import VoseAliasSampling
        >>> sampler = VoseAliasSampling([1,1,2])

        Fast biased sampling in O(1)

        >>> [ sampler.sample() for i in range(10) ]
        [ 0 2 0 1 2 1 2 1 2 0 ]
    """

    def __init__(self, weights: Union[np.ndarray, list]) -> None:
        """Initialize the probability and alias tables of the alias method.

        Args:
            weights: relative (not necessarily normalized) weights of the n events.

        Raises:
            ValueError: if weights is empty or does not have a positive sum.
        """
        self.n = len(weights)
        if self.n == 0:
            raise ValueError("weights must not be empty")

        # BUG FIX: the alias method requires scaled *probabilities* n * p_i.
        # The previous implementation used n * weights[i] directly, producing
        # wrong tables whenever the weights were not already normalized,
        # despite the docstring promising that normalization is not required.
        total = float(sum(weights))
        if total <= 0:
            raise ValueError("sum of weights must be positive")

        self.probs = dict()
        self.scaled_probs = dict()
        self.aliases = dict()

        # tables are indexed 1..n, the weights array is indexed 0..n-1
        small = list()
        large = list()

        for i in range(1, self.n + 1):
            p = weights[i - 1] / total
            self.probs[i] = p
            self.scaled_probs[i] = self.n * p
            if self.scaled_probs[i] > 1:
                large.append(i)
            else:
                small.append(i)

        # repeatedly pair an under-full column (small) with an over-full one
        # (large), filling the remainder of the small column with an alias
        while small and large:
            l = small.pop()
            g = large.pop()

            self.probs[l] = self.scaled_probs[l]
            self.aliases[l] = g
            self.scaled_probs[g] = self.scaled_probs[l] + self.scaled_probs[g] - 1

            if self.scaled_probs[g] < 1:
                small.append(g)
            else:
                large.append(g)
        # remaining columns are exactly full (up to floating point error)
        while large:
            g = large.pop()
            self.probs[g] = 1
        while small:
            l = small.pop()
            self.probs[l] = 1

    def sample(self) -> int:
        """
        Biased sampling of discrete value in O(1)

        Returns: integer value from range(n), where n is the length
            of the weight array used to create the instance.
        """
        # pick a uniformly random column, then return it or its alias
        i = np.random.randint(1, self.n + 1)
        x = np.random.rand()
        if x < self.probs[i]:
            return i - 1
        else:
            return self.aliases[i] - 1
97 |
--------------------------------------------------------------------------------
/src/pathpyG/statistics/__init__.py:
--------------------------------------------------------------------------------
1 | """Functions to compute various graph statistics.
2 |
3 | The functions in this module allow the computation of
4 | various statistics on graphs.
5 |
6 | Example:
7 | ```py
8 | import pathpyG as pp
9 |
10 | # Generate a toy example graph.
11 | g = pp.Graph.from_edge_list([
12 | ('b', 'c'),
13 | ('a', 'b'),
14 | ('c', 'd'),
15 | ('d', 'a'),
16 | ('b', 'd')
17 | ])
18 |
19 | # Calculate degree distribution and raw moments
20 | d_dist = pp.statistics.degree_distribution(g)
21 | k_1 = pp.statistics.degree_raw_moment(g, k=1)
22 | k_2 = pp.statistics.degree_raw_moment(g, k=2)
23 | ```
24 | """
25 |
26 | from pathpyG.statistics.degrees import *
27 | from pathpyG.statistics.clustering import *
28 | from pathpyG.statistics import node_similarities
29 |
--------------------------------------------------------------------------------
/src/pathpyG/statistics/clustering.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import TYPE_CHECKING, Set
3 |
4 | from pathpyG.core.graph import Graph
5 | import numpy as _np
6 |
7 |
def local_clustering_coefficient(g: Graph, u: str) -> float:
    """Calculate the local clustering coefficient of node u in graph g.

    This is the fraction of pairs of neighbours of u that are themselves
    connected by an edge; 0.0 for nodes with degree smaller than two.
    """
    # number of connected neighbour pairs of u (counted per direction)
    k_u = len(closed_triads(g, u))

    if g.is_directed():
        d = g.out_degrees[u]
        # normalise by the number of possible directed edges between neighbours
        return k_u / (d * (d - 1)) if d > 1 else 0.0

    # undirected case: each connected pair was counted in both directions
    k_u /= 2
    d = g.degrees()[u]
    return 2 * k_u / (d * (d - 1)) if d > 1 else 0.0
23 |
24 |
def avg_clustering_coefficient(g: Graph) -> float:
    """Return the mean local clustering coefficient over all nodes of g."""
    coefficients = [local_clustering_coefficient(g, node) for node in g.nodes]
    return _np.mean(coefficients)
27 |
28 |
def closed_triads(g: Graph, v: str) -> Set:
    """Calculate the set of edges that represent a closed triad
    around a given node v.

    Args:
        g: graph in which to calculate the closed triads
        v: node around which closed triads are calculated

    Returns:
        Set of edges (x, y) connecting two successors of v.
    """
    neighbours = set(g.successors(v))

    # collect all edges that start at a successor of v
    successor_edges = set()
    for x in neighbours:
        for y in g.successors(x):
            successor_edges.add((x, y))

    # keep only edges that close a triad, i.e. also end at a successor of v
    return {(x, y) for (x, y) in successor_edges if y in neighbours}
53 |
--------------------------------------------------------------------------------
/src/pathpyG/statistics/node_similarities.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import TYPE_CHECKING, Any, Dict
3 |
4 | import numpy as _np
5 | import scipy as _sp
6 |
7 | from pathpyG.core.graph import Graph
8 | from pathpyG.algorithms.shortest_paths import shortest_paths_dijkstra
9 | from pathpyG.statistics.degrees import degree_sequence
10 |
11 |
def inverse_path_length(graph: Graph, v, w) -> float:
    """Return the inverse shortest-path distance between nodes v and w
    (infinity if the distance is zero, i.e. for v == w)."""
    dist, _ = shortest_paths_dijkstra(graph)
    d = dist[graph.mapping.to_idx(v), graph.mapping.to_idx(w)]
    return _np.inf if d == 0 else 1 / d
18 | return 1 / d
19 |
20 |
def common_neighbors(graph: Graph, v, w) -> float:
    """Return the number of common neighbors (successors) of nodes v and w."""
    neighbors_v = set(graph.successors(v))
    neighbors_w = set(graph.successors(w))
    return len(neighbors_v & neighbors_w)
25 |
26 |
def overlap_coefficient(graph: Graph, v, w) -> float:
    """Return the overlap coefficient of v and w: the number of common
    successors divided by the smaller of the two successor-set sizes."""
    neighbors_v = set(graph.successors(v))
    neighbors_w = set(graph.successors(w))
    smaller_size = min(len(neighbors_v), len(neighbors_w))
    return len(neighbors_v & neighbors_w) / smaller_size
31 |
32 |
def jaccard_similarity(graph: Graph, v, w) -> float:
    """Return the Jaccard similarity of the successor sets of v and w.

    Two nodes that both have no successors are defined to have similarity 1.
    """
    neighbors_v = set(graph.successors(v))
    neighbors_w = set(graph.successors(w))
    if not neighbors_v and not neighbors_w:
        return 1
    # |intersection| / |union|
    return len(neighbors_v & neighbors_w) / len(neighbors_v | neighbors_w)
40 | return len(intersection) / (len(N_v) + len(N_w) - len(intersection))
41 |
42 |
def adamic_adar_index(graph: Graph, v, w) -> float:
    """Return the Adamic-Adar index of v and w: the sum of 1/log(out-degree)
    over all common successors of the two nodes."""
    common = set(graph.successors(v)) & set(graph.successors(w))
    return sum(1 / _np.log(graph.out_degrees[u]) for u in common)
50 |
51 |
def cosine_similarity(graph: Graph, v, w) -> float:
    """Return the cosine similarity between the adjacency vectors of v and w.

    Returns 0 if either node has degree zero.
    """
    if graph.degrees()[v] == 0 or graph.degrees()[w] == 0:
        return 0
    # use toarray() instead of the deprecated np.matrix API (todense() + .A1)
    A = graph.sparse_adj_matrix().toarray()
    v_v = A[graph.mapping.to_idx(v)]
    v_w = A[graph.mapping.to_idx(w)]
    return _np.dot(v_v, v_w) / (_np.linalg.norm(v_v) * _np.linalg.norm(v_w))
60 |
61 |
def katz_index(graph: Graph, v, w, beta) -> float:
    """Return the Katz index of nodes v and w for damping factor beta.

    Computes S = (I - beta * A)^-1 - I and returns the entry S[v, w].
    """
    A = graph.sparse_adj_matrix()
    identity = _sp.sparse.identity(graph.n)
    S = _sp.sparse.linalg.inv(identity - beta * A) - identity
    return S[graph.mapping.to_idx(v), graph.mapping.to_idx(w)]
67 |
68 |
def LeichtHolmeNewman_index(graph: Graph, v, w, alpha) -> float:
    """Return the Leicht-Holme-Newman similarity of nodes v and w.

    Computes S = 2 m lambda_1 D^-1 (I - alpha A / lambda_1)^-1 D^-1, where
    A is the adjacency matrix, lambda_1 its leading eigenvalue magnitude,
    D the diagonal degree matrix and m the number of edges.

    Args:
        graph: graph in which to compute the similarity
        v: first node
        w: second node
        alpha: damping parameter
    """
    A = graph.sparse_adj_matrix()
    # two largest-magnitude eigenvalues of A (eigs requires k >= 2 distinct
    # eigenvalues; only the largest magnitude is used below)
    ev = _sp.sparse.linalg.eigs(A, which="LM", k=2, return_eigenvectors=False)
    if graph.is_directed():
        m = graph.m
    else:
        # undirected graphs store each edge in both directions, so halve m
        m = graph.m / 2
    eigenvalues_sorted = _np.sort(_np.absolute(ev))
    # largest eigenvalue magnitude (last entry of the ascending sort)
    lambda_1 = eigenvalues_sorted[1]
    D = _sp.sparse.diags(degree_sequence(graph)).tocsc()
    I = _sp.sparse.identity(graph.n).tocsc()
    S = (
        2
        * m
        * lambda_1
        * _sp.sparse.linalg.inv(D)
        * _sp.sparse.linalg.inv(I - alpha * A / lambda_1)
        * _sp.sparse.linalg.inv(D)
    )
    return S[graph.mapping.to_idx(v), graph.mapping.to_idx(w)]
89 |
--------------------------------------------------------------------------------
/src/pathpyG/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .convert import to_numpy
2 |
--------------------------------------------------------------------------------
/src/pathpyG/utils/config.py:
--------------------------------------------------------------------------------
1 | """Config reader."""
2 |
3 | import os
4 | import sys
5 |
6 | # Official workaround: https://github.com/hukkin/tomli
7 | if sys.version_info >= (3, 11):
8 | import tomllib
9 | else:
10 | import tomli as tomllib
11 |
12 | from pathlib import Path
13 |
14 |
15 | # path to the module
16 | path = Path(sys.modules[__name__].__file__).resolve().parents[1]
17 |
18 | # default config file name
19 | configfile_name = "pathpyG.toml"
20 |
21 | # load default config
22 | configfile_path = os.path.join(path, configfile_name)
23 |
24 | # load config file
25 | with open(configfile_path, "rb") as f:
26 | config = tomllib.load(f)
27 |
28 | # check if local config file is defined
29 | if os.path.exists(os.path.join(os.getcwd(), configfile_name)):
30 | # get location of local config file
31 | configfile_path = os.path.join(os.getcwd(), configfile_name)
32 |
33 | # load local config file
34 | with open(configfile_path, "rb") as f:
35 | _config = tomllib.load(f)
36 |
37 | # update default config file
38 | config.update(_config)
39 |
--------------------------------------------------------------------------------
/src/pathpyG/utils/convert.py:
--------------------------------------------------------------------------------
1 | """
2 | Utility functions for converting between different data types.
3 | """
4 |
5 | import torch
6 | import numpy as np
7 | from torch_geometric import EdgeIndex
8 |
# Ensure backward compatibility with torch_geometric==2.5, which does not
# yet provide `torch_geometric.Index`.
try:
    from torch_geometric import Index
except ImportError:

    class Index:
        # Fallback placeholder so `isinstance(x, Index)` checks in this
        # module still work; it can never be instantiated.
        def __init__(self) -> None:
            raise NotImplementedError("torch_geometric.Index is not available. Please upgrade to torch_geometric>=2.6.")
17 |
18 |
def to_numpy(tensor: torch.Tensor) -> np.ndarray:
    """Convert a tensor, or a tensor subclass such as
    `torch_geometric.EdgeIndex`, to a numpy array.

    Args:
        tensor: Tensor or tensor subclass (`EdgeIndex`/`Index`).

    Returns:
        Numpy array with the tensor's values.
    """
    if isinstance(tensor, (EdgeIndex, Index)):
        # Subclasses must be unwrapped before numpy conversion.
        tensor = tensor.as_tensor()
    return tensor.numpy()
32 |
--------------------------------------------------------------------------------
/src/pathpyG/utils/dbgnn.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import torch
4 |
5 | from pathpyG.core.graph import Graph
6 |
7 |
def generate_bipartite_edge_index(g: Graph, g2: Graph, mapping: str = "last") -> torch.Tensor:
    """Generate edge_index for a bipartite graph that connects each node of a
    second-order graph `g2` to first-order nodes.

    Args:
        g: First-order graph (currently unused, kept for interface stability).
        g2: Second-order graph whose `data.node_sequence` holds, per node,
            the pair of first-order node indices it represents.
        mapping: 'last' connects to the second entry of each pair, 'first'
            to the first entry, anything else to both.

    Returns:
        A 2 x E tensor with second-order node indices in row 0 and
        first-order node indices in row 1.
    """
    higher_order = list(range(g2.n))
    if mapping == "last":
        first_order = [v[1] for v in g2.data.node_sequence]
    elif mapping == "first":
        first_order = [v[0] for v in g2.data.node_sequence]
    else:
        # connect each higher-order node to both of its first-order nodes
        higher_order = higher_order + higher_order
        first_order = [v[0] for v in g2.data.node_sequence] + [v[1] for v in g2.data.node_sequence]

    return torch.tensor([higher_order, first_order])
25 |
--------------------------------------------------------------------------------
/src/pathpyG/utils/logger.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python -tt
2 | # -*- coding: utf-8 -*-
3 | # =============================================================================
4 | # File : logger.py -- Module to log comments
5 | # Author : Jürgen Hackl
6 | # Time-stamp:
7 | #
8 | # Copyright (c) 2016-2019 Pathpy Developers
9 | # =============================================================================
import os
import sys
import logging.config
from pathlib import Path


# package root: one directory above this utils module
path = Path(sys.modules[__name__].__file__).resolve().parents[1]

# default logging config file name
# NOTE(review): logging.config.fileConfig expects configparser (INI) syntax,
# while the ".toml" extension suggests TOML — confirm the file's format.
loggingfile_name = "logging.toml"

# check if local logging config file is defined
if os.path.exists(os.path.join(os.getcwd(), loggingfile_name)):
    # a config file in the current working directory takes precedence
    loggingfile_path = os.path.join(os.getcwd(), loggingfile_name)
else:
    # otherwise fall back to the config file shipped with the package
    loggingfile_path = os.path.join(path, loggingfile_name)

# update logging confing (applies the file's handler/formatter settings)
logging.config.fileConfig(loggingfile_path)

# create logger used throughout the package
logger = logging.getLogger("pathpyg")

# Status message of the logger
logger.debug("Logger successful initialized.")
38 |
--------------------------------------------------------------------------------
/src/pathpyG/utils/progress.py:
--------------------------------------------------------------------------------
1 | """Progressbar for pathpy."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : progress.py -- A progress bar for pathpy
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2019 Pathpy Developers
11 | # =============================================================================
12 | from typing import Any
13 | from tqdm import tqdm as tq # pylint: disable=import-error
14 | from tqdm.notebook import tqdm as tqn # pylint: disable=import-error
15 | from pathpyG import config
16 |
17 |
def tqdm_disabled(it, *args, **kwargs):
    """No-op stand-in for tqdm: return the given iterable unchanged,
    ignoring all progress-bar arguments."""
    return it
21 |
22 |
def tqdm_console(*args, **kwargs):
    """Progressbar for a console environment.

    Shows a tqdm bar only when the iterable is longer than
    `config["progress"]["min_iter"]`; shorter iterables are returned
    unchanged to avoid progress-bar noise.
    """
    iterable = args[0]
    try:
        length = len(iterable)
    except TypeError:
        # Generators and other unsized iterables have no len();
        # let tqdm handle them directly instead of crashing.
        return tq(*args, **kwargs)
    if length > config["progress"]["min_iter"]:
        return tq(*args, **kwargs)
    return iterable
29 |
30 |
def tqdm_notebook(*args, **kwargs):
    """Progressbar for a notebook environment.

    Shows a tqdm widget bar only when the iterable is longer than
    `config["progress"]["min_iter"]`; shorter iterables are returned
    unchanged to avoid progress-bar noise.
    """
    iterable = args[0]
    try:
        length = len(iterable)
    except TypeError:
        # Generators and other unsized iterables have no len();
        # let tqdm handle them directly instead of crashing.
        return tqn(*args, **kwargs)
    if length > config["progress"]["min_iter"]:
        return tqn(*args, **kwargs)
    return iterable
37 |
38 |
# overwrite original tqdm typing: resolved to one of the wrappers below
tqdm: Any

# if progress is enabled show bar
if config["progress"]["enabled"]:
    # notebook widget bar in interactive sessions, except in VS Code
    # where the console-style bar is used instead
    if config["environment"]["interactive"] and config["environment"]["IDE"] != "vs code":
        tqdm = tqdm_notebook
    else:
        tqdm = tqdm_console
else:
    # progress reporting disabled: iterables pass through unchanged
    tqdm = tqdm_disabled
50 |
51 | # =============================================================================
52 | # eof
53 | #
54 | # Local Variables:
55 | # mode: python
56 | # mode: linum
57 | # mode: auto-fill
58 | # fill-column: 79
59 | # End:
60 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/__init__.py:
--------------------------------------------------------------------------------
1 | """PathpyG visualizations."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : __init__.py -- plotting functions
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2023 Pathpy Developers
11 | # =============================================================================
12 | # flake8: noqa
13 | # pylint: disable=unused-import
14 | from typing import Optional, Any
15 |
16 | from pathpyG.core.graph import Graph
17 | from pathpyG.core.temporal_graph import TemporalGraph
18 |
19 | from pathpyG.visualisations.plot import PathPyPlot
20 | from pathpyG.visualisations.network_plots import NetworkPlot
21 | from pathpyG.visualisations.network_plots import StaticNetworkPlot
22 | from pathpyG.visualisations.network_plots import TemporalNetworkPlot
23 |
24 | from pathpyG.visualisations.layout import layout
25 |
# Mapping from plot-kind keyword to the class that renders it.
PLOT_CLASSES: dict = {
    "network": NetworkPlot,
    "static": StaticNetworkPlot,
    "temporal": TemporalNetworkPlot,
}
31 |
32 |
def plot(data: Graph, kind: Optional[str] = None, **kwargs: Any) -> PathPyPlot:
    """Create and display a plot of a pathpyG object.

    The plot type is taken from `kind`, or inferred from the type of `data`
    when `kind` is None: `TemporalGraph` instances yield a 'temporal' plot,
    other `Graph` instances a 'static' one.

    Args:
        data: The `pathpyG` object to visualise, e.g. a `Graph` or
            `TemporalGraph`.
        kind: Keyword selecting the plot type ('network', 'static' or
            'temporal'). Defaults to None, i.e. inference from `data`.
        **kwargs: Options forwarded to the plot class and to `save`/`show`,
            e.g. layout parameters, colour schemes or plot size. If a
            'filename' is given, the plot is saved to that file; otherwise
            it is shown directly.

    Returns:
        The generated `PathPyPlot` instance.

    Raises:
        NotImplementedError: If `kind` is None and the plot type cannot be
            inferred from the type of `data`.

    Example:
        >>> import pathpyG as pp
        >>> graph = Graph.from_edge_list([["a", "b"], ["b", "c"], ["a", "c"]])
        >>> plot(graph, kind="static", filename="graph.png")

    Todo:
        - Cleanup the file and use `plt.show()` only in an interactive environment.
    """
    if kind is None:
        # infer the plot type from the data type; check the more specific
        # TemporalGraph before its base class Graph
        if isinstance(data, TemporalGraph):
            kind = "temporal"
        elif isinstance(data, Graph):
            kind = "static"
        else:
            raise NotImplementedError

    # 'filename' decides between saving to disk and showing on screen
    filename = kwargs.pop("filename", None)

    figure = PLOT_CLASSES[kind](data, **kwargs)
    if filename:
        figure.save(filename, **kwargs)
    else:
        figure.show(**kwargs)
    return figure
117 |
118 |
119 | # =============================================================================
120 | # eof
121 | #
122 | # Local Variables:
123 | # mode: python
124 | # mode: linum
125 | # mode: auto-fill
126 | # fill-column: 79
127 | # End:
128 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/__init__.py:
--------------------------------------------------------------------------------
1 | """Initialize d3js plotting functions."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : __init__.py -- d3js plotting cunctions
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2023 Pathpy Developers
11 | # =============================================================================
12 | # flake8: noqa
13 | # pylint: disable=unused-import
14 | from typing import Any
15 | from pathpyG.visualisations._d3js.network_plots import NetworkPlot
16 | from pathpyG.visualisations._d3js.network_plots import StaticNetworkPlot
17 | from pathpyG.visualisations._d3js.network_plots import TemporalNetworkPlot
18 |
# Mapping from plot-kind keyword to the d3js class that renders it.
PLOT_CLASSES: dict = {
    "network": NetworkPlot,
    "static": StaticNetworkPlot,
    "temporal": TemporalNetworkPlot,
}
24 |
25 |
def plot(data: dict, kind: str = "network", **kwargs: Any) -> Any:
    """Create a d3js plot of the given kind from prepared plot data."""
    plot_class = PLOT_CLASSES[kind]
    return plot_class(data, **kwargs)
29 |
30 |
31 | # =============================================================================
32 | # eof
33 | #
34 | # Local Variables:
35 | # mode: python
36 | # mode: linum
37 | # mode: auto-fill
38 | # fill-column: 79
39 | # End:
40 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/core.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python -tt
2 | # -*- coding: utf-8 -*-
3 | # =============================================================================
4 | # File : core.py -- Plots with d3js
5 | # Author : Jürgen Hackl
6 | # Time-stamp:
7 | #
8 | # Copyright (c) 2016-2021 Pathpy Developers
9 | # =============================================================================
10 | from __future__ import annotations
11 |
12 | import os
13 | import json
14 | import uuid
15 | import logging
16 | import tempfile
17 | import webbrowser
18 |
19 | from typing import Any
20 | from string import Template
21 |
22 | from pathpyG.utils.config import config
23 | from pathpyG.visualisations.plot import PathPyPlot
24 |
25 | # create logger
26 | logger = logging.getLogger("root")
27 |
28 |
29 | class D3jsPlot(PathPyPlot):
30 | """Base class for plotting d3js objects."""
31 |
    def generate(self) -> None:
        """Generate the plot (must be implemented by subclasses)."""
        raise NotImplementedError
35 |
36 | def save(self, filename: str, **kwargs: Any) -> None:
37 | """Save the plot to the hard drive."""
38 | with open(filename, "w+") as new:
39 | new.write(self.to_html())
40 |
    def show(self, **kwargs: Any) -> None:
        """Show the plot on the device.

        In an interactive environment (e.g. a notebook) the HTML is
        displayed inline; otherwise it is written to a temporary file
        and opened in the default web browser.
        """
        if config["environment"]["interactive"]:
            from IPython.display import display_html, HTML

            display_html(HTML(self.to_html()))
        else:
            # create temporal file (delete=False so the browser can read it
            # after the handle is closed)
            with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                # save html
                self.save(temp_file.name)
                # open the file
                webbrowser.open(r"file:///" + temp_file.name)
54 |
    def to_json(self) -> str:
        """Convert plot data to a JSON string (must be implemented by subclasses)."""
        raise NotImplementedError
58 |
59 | def to_html(self) -> str:
60 | """Convert data to html."""
61 | # generate unique dom uids
62 | dom_id = "#x" + uuid.uuid4().hex
63 |
64 | # get path to the pathpy templates
65 | template_dir = os.path.join(
66 | os.path.dirname(os.path.dirname(__file__)),
67 | os.path.normpath("_d3js/templates"),
68 | )
69 |
70 | # get d3js version
71 | local = self.config.get("d3js_local", False)
72 | if local:
73 | d3js = os.path.join(template_dir, "d3.v5.min.js")
74 | else:
75 | d3js = "https://d3js.org/d3.v5.min.js"
76 |
77 | # get template files
78 | with open(os.path.join(template_dir, f"{self._kind}.js")) as template:
79 | js_template = template.read()
80 |
81 | with open(os.path.join(template_dir, "setup.js")) as template:
82 | setup_template = template.read()
83 |
84 | with open(os.path.join(template_dir, "styles.css")) as template:
85 | css_template = template.read()
86 |
87 | # load custom template
88 | _template = self.config.get("template", None)
89 | if _template and os.path.isfile(_template):
90 | with open(_template) as template:
91 | js_template = template.read()
92 |
93 | # load custom css template
94 | _template = self.config.get("css", None)
95 | if _template and os.path.isfile(_template):
96 | with open(_template) as template:
97 | css_template += template.read()
98 |
99 | # update config
100 | self.config["selector"] = dom_id
101 | data = self.to_json()
102 |
103 | # generate html file
104 | html = "\n"
105 |
106 | # div environment for the plot object
107 | html += f'\n
\n'
108 |
109 | # add d3js library
110 | html += f'\n'
111 |
112 | # start JavaScript
113 | html += '"
133 |
134 | return html
135 |
136 |
137 | # =============================================================================
138 | # eof
139 | #
140 | # Local Variables:
141 | # mode: python
142 | # mode: linum
143 | # mode: auto-fill
144 | # fill-column: 79
145 | # End:
146 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/network_plots.py:
--------------------------------------------------------------------------------
1 | """Network plots with d3js."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : network_plots.py -- Network plots with d3js
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2023 Pathpy Developers
11 | # =============================================================================
12 | from __future__ import annotations
13 |
14 | import json
15 | import numpy as np
16 |
17 | # import logging
18 |
19 | from typing import Any
20 |
21 | from pathpyG.visualisations._d3js.core import D3jsPlot
22 |
23 | # create logger
24 | # logger = logging.getLogger("root")
25 |
class NpEncoder(json.JSONEncoder):
    """JSON encoder that converts numpy scalars and arrays to native Python
    types, which the standard encoder cannot serialize."""

    def default(self, obj):
        """Return a JSON-serializable representation of numpy objects,
        deferring to the base class for anything else (which raises
        TypeError for unsupported types)."""
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.bool_):
            # np.bool_ is not a subclass of bool, so it must be handled
            # explicitly or json.dumps raises TypeError.
            return bool(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)
36 |
class NetworkPlot(D3jsPlot):
    """Static network plot rendered with the d3js backend."""

    _kind = "network"

    def __init__(self, data: dict, **kwargs: Any) -> None:
        """Store plot data and configuration, then pre-process the data."""
        super().__init__()
        self.data = data
        self.config = kwargs
        self.generate()

    def generate(self) -> None:
        """Clean up the data: drop colormap options and node coordinates,
        since d3js computes the layout itself."""
        for option in ("node_cmap", "edge_cmap"):
            self.config.pop(option, None)
        for node in self.data["nodes"]:
            for coordinate in ("x", "y"):
                node.pop(coordinate, None)

    def to_json(self) -> Any:
        """Serialize the plot data to a JSON string (numpy-aware)."""
        return json.dumps(self.data, cls=NpEncoder)
60 |
61 |
class StaticNetworkPlot(NetworkPlot):
    """Network plot class for a static network (kind: "static")."""

    _kind = "static"
66 |
67 |
class TemporalNetworkPlot(NetworkPlot):
    """Network plot class for a temporal network (kind: "temporal")."""

    _kind = "temporal"
72 |
73 |
74 | # =============================================================================
75 | # eof
76 | #
77 | # Local Variables:
78 | # mode: python
79 | # mode: linum
80 | # mode: auto-fill
81 | # fill-column: 79
82 | # End:
83 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/templates/network.js:
--------------------------------------------------------------------------------
console.log("Static Network Template");
/* Resources
https://bl.ocks.org/mapio/53fed7d84cd1812d6a6639ed7aa83868
https://codepen.io/smlo/pen/JdMOej
*/

// variables from the config file (injected by the Python backend)
const selector = config.selector;
const width = config.width || 400;
const height = config.height || 400;

/* Create a svg element to display the network */
var svg = d3.select(selector)
    .append('svg')
    .attr('width', width)
    .attr('height', height)

// add container to store the elements
var container = svg.append("g");

/*Add zoom function to the container */
// NOTE: d3.event is the d3 v5 API; this template is not compatible with d3 v6+.
svg.call(
    d3.zoom()
        .scaleExtent([.1, 4])
        .on("zoom", function() { container.attr("transform", d3.event.transform); })
);

/*Load nodes and links from the data (injected by the Python backend) */
var nodes = data.nodes
var links = data.edges

/*Link creation template */
var link = container.append("g").attr("class", "links")
    .selectAll(".link")
    .data(links)
    .enter()
    .append("line")
    .attr("class", "link")
    .style("stroke", function(d) { return d.color; })
    .style("stroke-opacity", function(d) { return d.opacity; })
    .style("stroke-width", function(d){ return d.size });

/*Node creation template */
// NOTE(review): "x"/"y" are not positioning attributes for circles (cx/cy
// would be); positions are applied via the transform in updateNode below.
var node = container.append("g").attr("class", "nodes")
    .selectAll("circle.node")
    .data(nodes)
    .enter().append("circle")
    .attr("class", "node")
    .attr("x", function(d) { return d.x; })
    .attr("y", function(d) { return d.y; })
    .style("r", function(d){ return d.size; })
    .style("fill", function(d) { return d.color; })
    .style("opacity", function(d) { return d.opacity; });


/*Simulation of the forces*/
var simulation = d3.forceSimulation(nodes)
    .force("charge", d3.forceManyBody().strength(-3000))
    .force("center", d3.forceCenter(width / 2, height / 2))
    .force("x", d3.forceX(width / 2).strength(1))
    .force("y", d3.forceY(height / 2).strength(1))
    .force("links", d3.forceLink(links)
        .id(function(d) {return d.uid; })
        .distance(50).strength(1))
    .on("tick", ticked);

/*Update of the node and edge objects on every simulation tick*/
function ticked() {
    node.call(updateNode);
    link.call(updateLink);
};

/*Update link positions from the simulated source/target coordinates */
function updateLink(link) {
    link
        .attr("x1", function(d) { return d.source.x; })
        .attr("y1", function(d) { return d.source.y; })
        .attr("x2", function(d) { return d.target.x; })
        .attr("y2", function(d) { return d.target.y; });
};

/*Update node positions via an SVG transform */
function updateNode(node) {
    node.attr("transform", function(d) {
        return "translate(" + d.x + "," + d.y + ")";
    });
};

/*Add drag functionality to the node objects*/
node.call(
    d3.drag()
        .on("start", dragstarted)
        .on("drag", dragged)
        .on("end", dragended)
);

// pin the dragged node (fx/fy) and reheat the simulation
function dragstarted(d) {
    d3.event.sourceEvent.stopPropagation();
    if (!d3.event.active) simulation.alphaTarget(0.3).restart();
    d.fx = d.x;
    d.fy = d.y;
};

// follow the pointer while dragging
function dragged(d) {
    d.fx = d3.event.x;
    d.fy = d3.event.y;
};

// release the node so the simulation controls it again
function dragended(d) {
    if (!d3.event.active) simulation.alphaTarget(0);
    d.fx = null;
    d.fy = null;
};
114 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/templates/setup.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
18 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/templates/setup.js:
--------------------------------------------------------------------------------
// Load via requireJS if available (jupyter notebook environment)
try {
    // Problem: require.config will raise an exception when called for the second time
    require.config({
        paths: {
            d3: "$d3js".replace(".js", "")
        }
    });
    console.log("OKAY: requireJS was detected.");
}
catch(err){
    // a reference error indicates that requireJS does not exist.
    // other errors may occur due to multiple calls to config
    if (err instanceof ReferenceError){
        console.log("WARNING: NO requireJS was detected!");

        // Helper function that waits for d3js to be loaded: polls every
        // `ms` milliseconds until the global symbol exists, then invokes
        // the callback with it.
        // NOTE(review): arguments.callee is forbidden in strict mode —
        // confirm this template is never executed under "use strict".
        require = function require(symbols, callback) {
            var ms = 10;
            window.setTimeout(function(t) {
                if (window[symbols[0]])
                    callback(window[symbols[0]]);
                else
                    window.setTimeout(arguments.callee, ms);
            }, ms);
        }
    }
};
29 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_d3js/templates/styles.css:
--------------------------------------------------------------------------------
/* Default node style: filled circle with a darker stroke. */
svg circle.node {
    fill: #3b5998;
    stroke: #1b3978;
    stroke-width: 2.5px;
    r: 15px;
    opacity: 1;
}

/* Default edge (link) style. */
.link {
    stroke: #969595;
    stroke-opacity: .75;
    stroke-width: 2.5px;
}

/* Arrow-head marker for directed edges. */
.arrow {
    fill: #969595;
}


/* Node/edge label text. */
.label-text {
    fill: #969595;
    font-size: 16px;
    font-family: sans-serif;
}
25 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_matplotlib/__init__.py:
--------------------------------------------------------------------------------
1 | """Initialize matplotlib plotting functions."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : __init__.py -- matplotlib plotting cunctions
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2023 Pathpy Developers
11 | # =============================================================================
12 | # flake8: noqa
13 | # pylint: disable=unused-import
14 | from typing import Any
15 | from pathpyG.visualisations._matplotlib.network_plots import NetworkPlot
16 | from pathpyG.visualisations._matplotlib.network_plots import StaticNetworkPlot
17 | from pathpyG.visualisations._matplotlib.network_plots import TemporalNetworkPlot
18 |
# Mapping from plot-kind keyword to the matplotlib class that renders it.
PLOT_CLASSES: dict = {
    "network": NetworkPlot,
    "static": StaticNetworkPlot,
    "temporal": TemporalNetworkPlot,
}
24 |
25 |
def plot(data: dict, kind: str = "network", **kwargs: Any) -> Any:
    """Create a matplotlib plot of the given kind from prepared plot data."""
    plot_class = PLOT_CLASSES[kind]
    return plot_class(data, **kwargs)
29 |
30 |
31 | # =============================================================================
32 | # eof
33 | #
34 | # Local Variables:
35 | # mode: python
36 | # mode: linum
37 | # mode: auto-fill
38 | # fill-column: 79
39 | # End:
40 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_matplotlib/core.py:
--------------------------------------------------------------------------------
1 | """Generic matplotlib plot class."""
2 |
3 | # =============================================================================
4 | # File : core.py -- Plots with matplotlib backend
5 | # Author : Jürgen Hackl
6 | # Time-stamp:
7 | #
8 | # Copyright (c) 2016-2023 Pathpy Developers
9 | # =============================================================================
10 | from __future__ import annotations
11 |
12 | import logging
13 |
14 | from typing import Any
15 |
16 | from pathpyG.visualisations.plot import PathPyPlot
17 |
18 | # create logger
19 | logger = logging.getLogger("root")
20 |
21 |
class MatplotlibPlot(PathPyPlot):
    """Base class for plots rendered with the matplotlib backend."""

    def generate(self) -> None:
        """Generate the plot (must be implemented by subclasses)."""
        raise NotImplementedError

    def save(self, filename: str, **kwargs: Any) -> None:  # type: ignore
        """Render the figure and write it to `filename`."""
        figure = self.to_fig()
        figure.savefig(filename)

    def show(self, **kwargs: Any) -> None:  # type: ignore
        """Render the figure and display it on screen."""
        self.to_fig().show()

    def to_fig(self) -> Any:  # type: ignore
        """Convert to a matplotlib figure (must be implemented by subclasses)."""
        raise NotImplementedError
40 |
41 |
42 | # =============================================================================
43 | # eof
44 | #
45 | # Local Variables:
46 | # mode: python
47 | # mode: linum
48 | # mode: auto-fill
49 | # fill-column: 79
50 | # End:
51 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_matplotlib/network_plots.py:
--------------------------------------------------------------------------------
1 | """Network plots with matplotlib."""
2 |
3 | # =============================================================================
4 | # File : network_plots.py -- Network plots with matplotlib
5 | # Author : Jürgen Hackl
6 | # Time-stamp:
7 | #
8 | # Copyright (c) 2016-2023 Pathpy Developers
9 | # =============================================================================
10 | from typing import Any
11 |
12 | import logging
13 | from pathpyG.visualisations._matplotlib.core import MatplotlibPlot
14 |
15 | # create logger
16 | logger = logging.getLogger("root")
17 |
18 |
class NetworkPlot(MatplotlibPlot):
    """Network plot class for a static network."""

    _kind = "network"

    def __init__(self, data: dict, **kwargs: Any) -> None:
        """Initialize network plot class.

        Args:
            data: dict with "nodes" and "edges" lists of attribute dicts.
            **kwargs: plot configuration options.
        """
        super().__init__()
        self.data = data
        self.config = kwargs
        self.generate()

    def generate(self) -> None:
        """Convert raw node/edge dicts into column-oriented plot data."""
        self._compute_node_data()
        self._compute_edge_data()

    def _compute_node_data(self) -> None:
        """Generate the column-oriented data structure for the nodes,
        filling missing attributes with defaults."""
        default = {
            "uid": None,
            "x": 0,
            "y": 0,
            "size": 30,
            "color": "blue",
            "opacity": 1.0,
        }

        nodes: dict = {key: [] for key in default}

        for node in self.data["nodes"]:
            for key, value in default.items():
                nodes[key].append(node.get(key, value))

        self.data["nodes"] = nodes

    def _compute_edge_data(self) -> None:
        """Generate the column-oriented data structure for the edges,
        resolving source/target uids to node indices."""
        default = {
            "uid": None,
            "size": 5,
            "color": "red",
            "opacity": 1.0,
        }

        edges: dict = {**{key: [] for key in default}, **{"line": []}}

        # Build a uid -> index map once instead of calling list.index()
        # per edge endpoint, which is O(n) per lookup and O(n*m) overall.
        # setdefault keeps the first occurrence, matching list.index().
        node_index: dict = {}
        for idx, uid in enumerate(self.data["nodes"]["uid"]):
            node_index.setdefault(uid, idx)

        for edge in self.data["edges"]:
            source = node_index[edge.get("source")]
            target = node_index[edge.get("target")]
            # line segment as ((x1, x2), (y1, y2)), the form ax.plot expects
            edges["line"].append(
                [
                    (self.data["nodes"]["x"][source], self.data["nodes"]["x"][target]),
                    (self.data["nodes"]["y"][source], self.data["nodes"]["y"][target]),
                ]
            )

            for key, value in default.items():
                edges[key].append(edge.get(key, value))

        self.data["edges"] = edges

    def to_fig(self) -> Any:
        """Convert the prepared data to a matplotlib figure: edges as line
        segments below, nodes as a scatter plot on top."""
        import matplotlib.pyplot as plt

        fig, ax = plt.subplots()
        ax.set_axis_off()

        # plot edges
        for i in range(len(self.data["edges"]["uid"])):
            ax.plot(
                *self.data["edges"]["line"][i],
                color=self.data["edges"]["color"][i],
                alpha=self.data["edges"]["opacity"][i],
                zorder=1,
            )

        # plot nodes
        ax.scatter(
            self.data["nodes"]["x"],
            self.data["nodes"]["y"],
            s=self.data["nodes"]["size"],
            c=self.data["nodes"]["color"],
            alpha=self.data["nodes"]["opacity"],
            zorder=2,
        )
        return plt
107 |
108 |
class StaticNetworkPlot(NetworkPlot):
    """Network plot class for a static network (kind: "static")."""

    _kind = "static"
113 |
114 |
class TemporalNetworkPlot(NetworkPlot):
    """Network plot class for a temporal network.

    Not yet supported by the matplotlib backend.
    """

    _kind = "temporal"

    def __init__(self, data: dict, **kwargs: Any) -> None:
        """Raise, since temporal plots are not implemented in matplotlib."""
        raise NotImplementedError
123 |
124 |
125 | # =============================================================================
126 | # eof
127 | #
128 | # Local Variables:
129 | # mode: python
130 | # mode: linum
131 | # mode: auto-fill
132 | # fill-column: 79
133 | # End:
134 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_tikz/__init__.py:
--------------------------------------------------------------------------------
1 | """Initialize tikz plotting functions."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : __init__.py -- tikz plotting cunctions
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2023 Pathpy Developers
11 | # =============================================================================
12 | # flake8: noqa
13 | # pylint: disable=unused-import
14 | from typing import Any
15 | from pathpyG.visualisations._tikz.network_plots import NetworkPlot
16 | from pathpyG.visualisations._tikz.network_plots import StaticNetworkPlot
17 | from pathpyG.visualisations._tikz.network_plots import TemporalNetworkPlot
18 |
# Mapping from plot-kind keyword to the tikz class that renders it.
PLOT_CLASSES: dict = {
    "network": NetworkPlot,
    "static": StaticNetworkPlot,
    "temporal": TemporalNetworkPlot,
}
24 |
25 |
26 | def plot(data: dict, kind: str = "network", **kwargs: Any) -> Any:
27 | """Plot function."""
28 | return PLOT_CLASSES[kind](data, **kwargs)
29 |
30 |
31 | # =============================================================================
32 | # eof
33 | #
34 | # Local Variables:
35 | # mode: python
36 | # mode: linum
37 | # mode: auto-fill
38 | # fill-column: 79
39 | # End:
40 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_tikz/core.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python -tt
2 | # -*- coding: utf-8 -*-
3 | # =============================================================================
4 | # File : core.py -- Plots with tikz
5 | # Author : Jürgen Hackl
6 | # Time-stamp:
7 | #
8 | # Copyright (c) 2016-2021 Pathpy Developers
9 | # =============================================================================
10 | from __future__ import annotations
11 |
12 | import os
13 | import time
14 | import shutil
15 | import logging
16 | import tempfile
17 | import subprocess
18 | import webbrowser
19 |
20 | from typing import Any
21 | from string import Template
22 |
23 | from pathpyG.utils.config import config
24 | from pathpyG.visualisations.plot import PathPyPlot
25 |
26 | # create logger
27 | logger = logging.getLogger("root")
28 |
29 |
class TikzPlot(PathPyPlot):
    """Base class for plots rendered with LaTeX and tikz-network.

    Subclasses provide `generate` and `to_tikz`; this class handles saving
    the tex source, compiling it to pdf with latexmk, and displaying it.
    """

    def __init__(self, **kwargs: Any) -> None:
        """Initialize plot class and store any given keyword configuration."""
        super().__init__()
        if kwargs:
            self.config = kwargs

    def generate(self) -> None:
        """Generate the plot. Must be implemented by subclasses."""
        raise NotImplementedError

    def save(self, filename: str, **kwargs: Any) -> None:
        """Save the plot to the hard drive as a .tex or compiled .pdf file.

        Raises:
            NotImplementedError: if the file extension is neither .tex nor .pdf.
        """
        # decide by the actual file extension; the previous suffix check
        # (`filename.endswith("tex")`) also matched names like "figure.latex"
        extension = os.path.splitext(filename)[1]
        if extension == ".tex" or filename.endswith("tex") and extension == "":
            with open(filename, "w+") as new:
                new.write(self.to_tex())
        elif extension == ".pdf":
            # compile temporary pdf
            temp_file, temp_dir = self.compile_pdf()
            try:
                # Copy a file with new name
                shutil.copy(temp_file, filename)
            finally:
                # remove the temporal directory even if the copy fails
                shutil.rmtree(temp_dir)
        else:
            raise NotImplementedError

    def show(self, **kwargs: Any) -> None:
        """Show the compiled pdf on the device (notebook iframe or browser)."""
        # compile temporary pdf
        temp_file, temp_dir = self.compile_pdf()

        if config["environment"]["interactive"]:
            from IPython.display import IFrame, display

            # open the file in the notebook
            display(IFrame(temp_file, width=600, height=300))
        else:
            # open the file in the webbrowser
            webbrowser.open(r"file:///" + temp_file)

        # Wait for .1 second before temp file is deleted
        time.sleep(0.1)

        # remove the temporal directory
        shutil.rmtree(temp_dir)

    def compile_pdf(self) -> tuple:
        """Compile a temporary pdf from the generated tex source.

        Returns:
            tuple: (path of the compiled pdf, path of the temporary
            directory). The caller is responsible for removing the directory.

        Raises:
            AttributeError: if the latexmk compiler is not available.
        """
        # basename of the generated files inside the temporary directory
        basename = "default"
        # remember the current directory; latexmk writes to the cwd
        current_dir = os.getcwd()

        # path to the bundled tikz-network style file
        tikz_dir = str(
            os.path.join(
                os.path.dirname(os.path.dirname(__file__)),
                os.path.normpath("templates"),
                "tikz-network.sty",
            )
        )

        # get temporal directory
        temp_dir = tempfile.mkdtemp()

        # copy tikz-network to temporal directory
        shutil.copy(tikz_dir, temp_dir)

        # change to output dir
        os.chdir(temp_dir)

        # save the tex file
        self.save(basename + ".tex")

        # latex compiler invocation
        command = [
            "latexmk",
            "--pdf",
            "-shell-escape",
            "--interaction=nonstopmode",
            basename + ".tex",
        ]

        try:
            subprocess.check_output(command, stderr=subprocess.STDOUT)
        except Exception as error:
            # keep AttributeError for backward compatibility with callers,
            # but attach a message and the original exception for debugging
            logger.error("No latexmk compiler found")
            raise AttributeError("No latexmk compiler found") from error
        finally:
            # change back to the current directory
            os.chdir(current_dir)

        # return the name of the temp pdf file and its folder
        return (os.path.join(temp_dir, basename + ".pdf"), temp_dir)

    def to_tex(self) -> str:
        """Render the tikz data into the LaTeX template for this plot kind."""
        # get path to the pathpy templates
        template_dir = os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            os.path.normpath("_tikz/templates"),
        )

        # get template file for the concrete plot kind
        with open(os.path.join(template_dir, f"{self._kind}.tex")) as template:
            tex_template = template.read()

        # generate tikz code from the plot data
        data = self.to_tikz()

        # fill template with data
        tex = Template(tex_template).substitute(
            classoptions=self.config.get("latex_class_options", ""),
            width=self.config.get("width", "6cm"),
            height=self.config.get("height", "6cm"),
            tikz=data,
        )

        return tex

    def to_tikz(self) -> str:
        """Convert plot data to tikz code. Must be implemented by subclasses."""
        raise NotImplementedError
157 |
158 |
159 | # =============================================================================
160 | # eof
161 | #
162 | # Local Variables:
163 | # mode: python
164 | # mode: linum
165 | # mode: auto-fill
166 | # fill-column: 79
167 | # End:
168 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_tikz/templates/network.tex:
--------------------------------------------------------------------------------
\documentclass[$classoptions]{standalone}
\usepackage[dvipsnames]{xcolor}
\usepackage{tikz-network}
\begin{document}
\begin{tikzpicture}
% set the font for all nodes; the old `\tikzset{every node}=[...]` used the
% deprecated \tikzstyle assignment syntax, which \tikzset does not accept
\tikzset{every node/.style={font=\sffamily\bfseries}}
\clip (0,0) rectangle ($width,$height);
$tikz
\end{tikzpicture}
\end{document}
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_tikz/templates/static.tex:
--------------------------------------------------------------------------------
\documentclass[$classoptions]{standalone}
\usepackage[dvipsnames]{xcolor}
\usepackage{tikz-network}
\begin{document}
\begin{tikzpicture}
% set the font for all nodes; the old `\tikzset{every node}=[...]` used the
% deprecated \tikzstyle assignment syntax, which \tikzset does not accept
\tikzset{every node/.style={font=\sffamily\bfseries}}
\clip (0,0) rectangle ($width,$height);
$tikz
\end{tikzpicture}
\end{document}
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/_tikz/templates/temporal.tex:
--------------------------------------------------------------------------------
\documentclass[$classoptions]{standalone}
\usepackage[dvipsnames]{xcolor}
\usepackage{tikz-network}
\begin{document}
\begin{tikzpicture}
% set the font for all nodes; the old `\tikzset{every node}=[...]` used the
% deprecated \tikzstyle assignment syntax, which \tikzset does not accept
\tikzset{every node/.style={font=\sffamily\bfseries}}
\clip (0,0) rectangle ($width,$height);
$tikz
\end{tikzpicture}
\end{document}
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/hist_plots.py:
--------------------------------------------------------------------------------
1 | """Histogram plot classes."""
2 |
3 | from __future__ import annotations
4 | import logging
5 |
6 | from typing import TYPE_CHECKING, Any
7 | from pathpyG.visualisations.plot import PathPyPlot
8 |
9 | # pseudo load class for type checking
10 | if TYPE_CHECKING:
11 | from pathpyG.core.graph import Graph
12 |
13 | # create logger
14 | logger = logging.getLogger("root")
15 |
16 |
def hist(network: Graph, key: str = "indegrees", bins: int = 10, **kwargs: Any) -> HistogramPlot:
    """Create a histogram plot of a node-level property of the given network."""
    histogram = HistogramPlot(network, key, bins, **kwargs)
    return histogram
20 |
21 |
22 | class HistogramPlot(PathPyPlot):
23 | """Histogram plot class for a network property."""
24 |
25 | _kind = "hist"
26 |
27 | def __init__(self, network: Graph, key: str = "indegrees", bins: int = 10, **kwargs: Any) -> None:
28 | """Initialize network plot class."""
29 | super().__init__()
30 | self.network = network
31 | self.config = kwargs
32 | self.config["bins"] = bins
33 | self.config["key"] = key
34 | self.generate()
35 |
36 | def generate(self) -> None:
37 | """Generate the plot."""
38 | logger.debug("Generate histogram.")
39 |
40 | data: dict = {}
41 |
42 | match self.config["key"]:
43 | case "indegrees":
44 | logger.debug("Generate data for in-degrees")
45 | data["values"] = list(self.network.degrees(mode="in").values())
46 | case "outdegrees":
47 | logger.debug("Generate data for out-degrees")
48 | data["values"] = list(self.network.degrees(mode="out").values())
49 | case _:
50 | logger.error(
51 | f"The <{self.config['key']}> property",
52 | "is currently not supported for hist plots.",
53 | )
54 | raise KeyError
55 |
56 | data["title"] = self.config["key"]
57 | self.data["data"] = data
58 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/plot.py:
--------------------------------------------------------------------------------
1 | """Class to plot pathpy networks."""
2 |
3 | # !/usr/bin/python -tt
4 | # -*- coding: utf-8 -*-
5 | # =============================================================================
6 | # File : plot.py -- Module to plot pathpyG networks
7 | # Author : Jürgen Hackl
8 | # Time-stamp:
9 | #
10 | # Copyright (c) 2016-2023 Pathpy Developers
11 | # =============================================================================
12 | import os
13 | import logging
14 | import importlib
15 |
16 | from copy import deepcopy
17 | from typing import Optional, Any
18 |
19 |
20 | # create logger
21 | logger = logging.getLogger("root")
22 |
23 | # supported backends
24 | BACKENDS: set = {"d3js", "tikz", "matplotlib"}
25 |
26 | # supported file formats
27 | FORMATS: dict = {
28 | ".html": "d3js",
29 | ".tex": "tikz",
30 | ".pdf": "tikz",
31 | ".png": "matplotlib",
32 | }
33 |
34 |
def _get_plot_backend(
    backend: Optional[str] = None,
    filename: Optional[str] = None,
    default: str = "d3js",
) -> Any:
    """Return the plotting backend module to use.

    The backend is inferred from the filename extension when a filename is
    given; an explicitly requested, supported backend overrides it.

    Raises:
        KeyError: if an explicitly requested backend is not supported.
        ImportError: if the backend module cannot be imported.
    """
    # use default backend per default
    _backend: str = default

    # Get file ending and infer the backend from it
    if isinstance(filename, str):
        _backend = FORMATS.get(os.path.splitext(filename)[1], default)

    # an explicitly requested but unsupported backend is an error regardless
    # of whether a filename was given (previously it was silently ignored
    # when filename was None)
    if backend is not None and backend not in BACKENDS:
        logger.error(f"The backend <{backend}> was not found.")
        raise KeyError(f"The backend <{backend}> was not found.")

    # a valid explicit backend overrides the file-format suggestion
    elif isinstance(backend, str) and backend in BACKENDS:
        _backend = backend

    # try to load backend or return error
    try:
        module = importlib.import_module(f"pathpyG.visualisations._{_backend}")
    except ImportError:
        logger.error(f"The <{_backend}> backend could not be imported.")
        raise ImportError(f"The <{_backend}> backend could not be imported.") from None

    return module
65 |
66 |
class PathPyPlot:
    """Abstract class for assembling plots.

    Attributes
    ----------
    data : dict
        data of the plot object
    config : dict
        configuration for the plot

    """

    def __init__(self) -> None:
        """Initialize plot class with empty data and config."""
        logger.debug("Initialize PathPyPlot class")
        self.data: dict = {}
        self.config: dict = {}

    @property
    def _kind(self) -> str:
        """Specify kind str. Must be overridden in child class."""
        raise NotImplementedError

    def generate(self) -> None:
        """Generate the plot. Must be overridden in child class."""
        raise NotImplementedError

    def save(self, filename: str, **kwargs: Any) -> None:
        """Save the plot to the hard drive.

        The backend is taken from the `backend` keyword argument, from the
        stored config, or inferred from the filename extension.
        """
        # may be None, in which case the backend is inferred from `filename`
        _backend: Optional[str] = kwargs.pop("backend", self.config.get("backend", None))

        plot_backend = _get_plot_backend(_backend, filename)
        plot_backend.plot(deepcopy(self.data), self._kind, **deepcopy(self.config)).save(filename, **kwargs)

    def show(self, **kwargs: Any) -> None:
        """Show the plot on the device."""
        # may be None, in which case the default backend is used
        _backend: Optional[str] = kwargs.pop("backend", self.config.get("backend", None))

        plot_backend = _get_plot_backend(_backend, None)
        plot_backend.plot(deepcopy(self.data), self._kind, **deepcopy(self.config)).show(**kwargs)
107 |
--------------------------------------------------------------------------------
/src/pathpyG/visualisations/utils.py:
--------------------------------------------------------------------------------
1 | """Helper functions for plotting."""
2 |
3 | # =============================================================================
4 | # File : utils.py -- Helpers for the plotting functions
5 | # Author : Jürgen Hackl
6 | # Time-stamp:
7 | #
8 | # Copyright (c) 2016-2023 Pathpy Developers
9 | # =============================================================================
10 | from typing import Optional
11 |
12 |
def rgb_to_hex(rgb: tuple) -> str:
    """Convert an (r, g, b) integer tuple into a "#rrggbb" hex string."""
    red, green, blue = rgb
    return f"#{red:02x}{green:02x}{blue:02x}"
16 |
17 |
def hex_to_rgb(value: str) -> tuple:
    """Convert a hex color string (with or without "#") into an rgb tuple."""
    digits = value.lstrip("#")
    step = len(digits) // 3
    # slice the string into three equally sized channel groups
    return tuple(int(digits[pos : pos + step], 16) for pos in range(0, len(digits), step))
23 |
24 |
class Colormap:
    """Very simple colormap mapping numeric values onto a green-to-red ramp."""

    def __call__(
        self,
        values: list,
        alpha: Optional[float] = None,
        bytes: bool = False,
    ) -> list:
        """Return a list of rgba tuples, one per input value."""
        vmin = min(values)
        vmax = max(values)
        # widen a degenerate range so the normalization below is well-defined
        if vmin == vmax:
            vmin -= 1
            vmax += 1
        span = vmax - vmin
        # normalize each value into [0, 100] and map it onto the ramp
        scaled = [(value - vmin) / span * 100 for value in values]
        return [self.color_tuple(position) for position in scaled]

    @staticmethod
    def color_tuple(n: float) -> tuple:
        """Return color ramp from green (n=0) to red (n=100) as rgba."""
        red = int((255 * n) * 0.01)
        green = int((255 * (100 - n)) * 0.01)
        return (red, green, 0, 255)
45 |
46 |
47 | # =============================================================================
48 | # eof
49 | #
50 | # Local Variables:
51 | # mode: python
52 | # mode: linum
53 | # mode: auto-fill
54 | # fill-column: 79
55 | # End:
56 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Necessary to make Python treat the tests directory as a module.
2 |
3 | This is required since mypy doesn't support the same file name otherwise
4 | It is also required to enable module specific overrides in pyproject.toml
5 | """
6 |
--------------------------------------------------------------------------------
/tests/algorithms/__init__.py:
--------------------------------------------------------------------------------
1 | """Necessary to make Python treat the tests directory as a module.
2 |
3 | This is required since mypy doesn't support the same file name otherwise
4 | It is also required to enable module specific overrides in pyproject.toml
5 | """
6 |
--------------------------------------------------------------------------------
/tests/algorithms/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.core.index_map import IndexMap
7 | from pathpyG.core.temporal_graph import TemporalGraph
8 | from pathpyG.core.path_data import PathData
9 |
10 |
@pytest.fixture
def simple_graph() -> Graph:
    """Return a simple undirected graph."""
    edges = [("a", "b"), ("b", "a"), ("b", "c"), ("c", "b"), ("a", "c"), ("c", "a")]
    return Graph.from_edge_list(edges, is_undirected=True)
17 |
18 |
@pytest.fixture
def simple_graph_sp() -> Graph:
    """Return a undirected graph."""
    edges = [("a", "b"), ("b", "c"), ("c", "e"), ("b", "d"), ("d", "e")]
    return Graph.from_edge_list(edges).to_undirected()
23 |
24 |
@pytest.fixture
def toy_example_graph() -> Graph:
    """Return an undirected toy example graph."""
    edges = [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f"), ("b", "d")]
    return Graph.from_edge_list(edges).to_undirected()
31 |
32 |
@pytest.fixture
def toy_example_graph_directed() -> Graph:
    """Return a directed toy example graph."""
    edges = [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f"), ("b", "d")]
    return Graph.from_edge_list(edges)
39 |
40 |
@pytest.fixture
def simple_temporal_graph() -> TemporalGraph:
    """Return a simple temporal graph with four time-stamped edges."""
    temporal_edges = [("a", "b", 1), ("b", "c", 5), ("c", "d", 9), ("c", "e", 9)]
    return TemporalGraph.from_edge_list(temporal_edges)
46 |
47 |
@pytest.fixture
def simple_walks() -> PathData:
    """Return a set of three unit-weight walks."""
    walks = PathData(mapping=IndexMap(["A", "B", "C", "D", "E", "F"]))
    for walk in (("C", "B", "D", "F"), ("A", "B", "D"), ("D", "E")):
        walks.append_walk(walk, weight=1.0)
    return walks
56 |
57 |
@pytest.fixture
def long_temporal_graph() -> TemporalGraph:
    """Return a temporal graph with 20 time-stamped edges."""
    # (source, target, timestamp) triples, ordered by non-decreasing timestamp
    tedges = [
        ("a", "b", 1),
        ("b", "c", 5),
        ("c", "d", 9),
        ("c", "e", 9),
        ("c", "f", 11),
        ("f", "a", 13),
        ("a", "g", 18),
        ("b", "f", 21),
        ("a", "g", 26),
        ("c", "f", 27),
        ("h", "f", 27),
        ("g", "h", 28),
        ("a", "c", 30),
        ("a", "b", 31),
        ("c", "h", 32),
        ("f", "h", 33),
        ("b", "i", 42),
        ("i", "b", 42),
        ("c", "i", 47),
        ("h", "i", 50),
    ]
    return TemporalGraph.from_edge_list(tedges)
84 |
--------------------------------------------------------------------------------
/tests/algorithms/test_centrality.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from pathpyG.algorithms import centrality
4 | from pathpyG.algorithms.centrality import (
5 | path_node_traversals,
6 | path_visitation_probabilities,
7 | temporal_betweenness_centrality,
8 | temporal_closeness_centrality,
9 | )
10 |
11 |
def test_closeness_centrality(simple_graph):
    # in the fully connected triangle fixture every node has closeness 1
    r = centrality.closeness_centrality(simple_graph)
    assert r == {"a": 1.0, "b": 1.0, "c": 1.0}


def test_betweenness_centrality(simple_graph):
    # in a triangle no shortest path passes through an intermediate node
    r = centrality.betweenness_centrality(simple_graph)
    assert r == {"a": 0.0, "b": 0.0, "c": 0.0}
20 |
21 |
def test_node_traversals(simple_walks):
    # count how often each node is traversed across all walks in the fixture
    # (leftover debug print removed)
    traversals_dict = path_node_traversals(simple_walks)
    assert set(traversals_dict.keys()) == {"A", "B", "C", "D", "E", "F"}
    assert traversals_dict["A"] == 1
    assert traversals_dict["B"] == 2
    assert traversals_dict["C"] == 1
    assert traversals_dict["D"] == 3
    assert traversals_dict["E"] == 1
    assert traversals_dict["F"] == 1
32 |
33 |
def test_visitation_probabilities(simple_walks):
    # the fixture walks traverse 9 nodes in total, so each probability is
    # the node's traversal count divided by 9
    visitations_dict = path_visitation_probabilities(simple_walks)
    assert set(visitations_dict.keys()) == {"A", "B", "C", "D", "E", "F"}
    assert visitations_dict["A"] == 1 / 9
    assert visitations_dict["B"] == 2 / 9
    assert visitations_dict["C"] == 1 / 9
    assert visitations_dict["D"] == 3 / 9
    assert visitations_dict["E"] == 1 / 9
    assert visitations_dict["F"] == 1 / 9
43 |
44 |
def test_temporal_betweenness(long_temporal_graph):
    # expected values for the 20-edge fixture with a maximum waiting time of 5
    bw = temporal_betweenness_centrality(long_temporal_graph, delta=5)
    assert bw["a"] == 2.0
    assert bw["b"] == 2.0
    assert bw["c"] == 4.5
    assert bw["d"] == 0
    assert bw["e"] == 0
    assert bw["f"] == 2.0
    assert bw["g"] == 0.5
    assert bw["h"] == 0
    assert bw["i"] == 0


def test_temporal_closeness(long_temporal_graph):
    # expected values for the 20-edge fixture with a maximum waiting time of 5
    c = temporal_closeness_centrality(long_temporal_graph, delta=5)
    assert c == {
        "a": 12.0,
        "b": 16.0,
        "c": 16.0,
        "d": 14.666666666666666,
        "e": 14.666666666666666,
        "f": 24.0,
        "g": 14.666666666666666,
        "h": 28.0,
        "i": 24.0,
    }
71 |
--------------------------------------------------------------------------------
/tests/algorithms/test_components.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import numpy as _np
4 |
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.algorithms import connected_components, largest_connected_component
7 |
8 |
def test_connected_components_undirected_1():
    # undirected graph with two connected components
    n = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f")]
    ).to_undirected()
    n, labels = connected_components(n)
    assert n == 2
    assert (labels == _np.array([0, 0, 0, 1, 1, 1, 1])).all()


def test_lcc_undirected_1():
    # undirected graph with two connected components
    n = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f")]
    ).to_undirected()
    lcc = largest_connected_component(n)
    assert lcc.n == 4
    assert set(lcc.mapping.node_ids) == set(["d", "e", "f", "g"])


def test_connected_components_undirected_2():
    # undirected graph with single connected component
    n = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f"), ("c", "d")]
    ).to_undirected()
    n, labels = connected_components(n)
    assert n == 1
    assert (labels == _np.array([0, 0, 0, 0, 0, 0, 0])).all()


def test_lcc_undirected_2():
    # undirected graph with single connected component
    n = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f"), ("c", "d")]
    ).to_undirected()
    lcc = largest_connected_component(n)
    assert lcc.n == 7
    assert set(lcc.mapping.node_ids) == set(["a", "b", "c", "d", "e", "f", "g"])


def test_connected_components_directed_1():
    # directed graph with single weak and two strongly connected components
    g = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f"), ("c", "d")]
    )
    n, labels = connected_components(g, connection="weak")
    assert n == 1
    assert (labels == _np.array([0, 0, 0, 0, 0, 0, 0])).all()

    n, labels = connected_components(g, connection="strong")
    assert n == 2
    assert (labels == _np.array([1, 1, 1, 0, 0, 0, 0])).all()


def test_lcc_directed_1():
    # directed graph with single weak and two strongly connected components
    g = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f"), ("c", "d")]
    )
    lcc = largest_connected_component(g, connection="weak")
    assert lcc.n == 7
    assert set(lcc.mapping.node_ids) == set(["a", "b", "c", "d", "e", "f", "g"])

    lcc = largest_connected_component(g, connection="strong")
    assert lcc.n == 4
    assert set(lcc.mapping.node_ids) == set(["d", "e", "f", "g"])


def test_connected_components_directed_2():
    # directed graph with two weak and two strongly connected components
    g = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f")]
    )
    n, labels = connected_components(g, connection="weak")
    assert n == 2
    assert (labels == _np.array([0, 0, 0, 1, 1, 1, 1])).all()

    n, labels = connected_components(g, connection="strong")
    assert n == 2
    assert (labels == _np.array([0, 0, 0, 1, 1, 1, 1])).all()


def test_lcc_directed_2():
    # directed graph with two weak and two strongly connected components
    g = Graph.from_edge_list(
        [("a", "b"), ("b", "c"), ("c", "a"), ("d", "e"), ("e", "f"), ("f", "g"), ("g", "d"), ("d", "f")]
    )
    lcc = largest_connected_component(g, connection="weak")
    assert lcc.n == 4
    assert set(lcc.mapping.node_ids) == set(["d", "e", "f", "g"])

    lcc = largest_connected_component(g, connection="strong")
    assert lcc.n == 4
    assert set(lcc.mapping.node_ids) == set(["d", "e", "f", "g"])
--------------------------------------------------------------------------------
/tests/algorithms/test_lift_order.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import torch
3 |
4 | from pathpyG.algorithms.lift_order import (
5 | aggregate_node_attributes,
6 | lift_order_edge_index,
7 | lift_order_edge_index_weighted,
8 | aggregate_edge_index,
9 | )
10 |
11 |
def test_aggregate_node_attributes():
    # check every supported aggregation mode and the error for unknown ones
    edge_index = torch.tensor(
        [
            [0, 1, 2, 2, 3],
            [1, 2, 0, 3, 0],
        ]
    )
    node_attribute = torch.tensor([1, 2, 3, 4])
    aggr_attributes = aggregate_node_attributes(edge_index=edge_index, node_attribute=node_attribute, aggr="src")
    assert aggr_attributes.tolist() == [1, 2, 3, 3, 4]
    aggr_attributes = aggregate_node_attributes(edge_index=edge_index, node_attribute=node_attribute, aggr="dst")
    assert aggr_attributes.tolist() == [2, 3, 1, 4, 1]
    aggr_attributes = aggregate_node_attributes(edge_index=edge_index, node_attribute=node_attribute, aggr="max")
    assert aggr_attributes.tolist() == [2, 3, 3, 4, 4]
    aggr_attributes = aggregate_node_attributes(edge_index=edge_index, node_attribute=node_attribute, aggr="mul")
    assert aggr_attributes.tolist() == [2, 6, 3, 12, 4]
    aggr_attributes = aggregate_node_attributes(edge_index=edge_index, node_attribute=node_attribute, aggr="add")
    assert aggr_attributes.tolist() == [3, 5, 4, 7, 5]
    with pytest.raises(ValueError):
        aggregate_node_attributes(edge_index=edge_index, node_attribute=node_attribute, aggr="unknown")


def test_lift_order_edge_index():
    # Inspired by https://github.com/pyg-team/pytorch_geometric/blob/master/test/transforms/test_line_graph.py
    # Directed.
    edge_index = torch.tensor(
        [
            [0, 1, 2, 2, 3],
            [1, 2, 0, 3, 0],
        ]
    )
    ho_index = lift_order_edge_index(edge_index=edge_index, num_nodes=4)
    assert ho_index.tolist() == [[0, 1, 1, 2, 3, 4], [1, 2, 3, 0, 4, 0]]


def test_lift_order_edge_index_weighted():
    # same edge index as above, now with edge weights carried along
    edge_index = torch.tensor(
        [
            [0, 1, 2, 2, 3],
            [1, 2, 0, 3, 0],
        ]
    )
    edge_weight = torch.tensor([1, 2, 3, 4, 5])
    ho_index, ho_weight = lift_order_edge_index_weighted(edge_index=edge_index, edge_weight=edge_weight, num_nodes=4)
    assert ho_index.tolist() == [[0, 1, 1, 2, 3, 4], [1, 2, 3, 0, 4, 0]]
    assert ho_weight.tolist() == [1, 2, 2, 3, 4, 5]


def test_aggregate_edge_index():
    # nodes 0 and 2 share the node sequence [1, 2] and should be merged,
    # with their parallel edges' weights summed
    edge_index = torch.tensor(
        [
            [0, 2, 2, 1],
            [1, 1, 3, 0],
        ]
    )
    edge_weight = torch.tensor([1, 2, 3, 4])
    node_sequence = torch.tensor(
        [
            [1, 2],  # Node 0
            [2, 3],  # Node 1
            [1, 2],  # Node 2
            [4, 5],  # Node 3
        ]
    )
    g = aggregate_edge_index(edge_index=edge_index, edge_weight=edge_weight, node_sequence=node_sequence)
    assert g.data.edge_index.as_tensor().tolist() == [[0, 0, 1], [1, 2, 0]]
    assert g.data.edge_weight.tolist() == [3, 3, 4]
    assert g.data.node_sequence.tolist() == [[1, 2], [2, 3], [4, 5]]
80 |
--------------------------------------------------------------------------------
/tests/algorithms/test_random_graph.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=missing-module-docstring,missing-function-docstring
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/tests/algorithms/test_rolling_time_window.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import torch
4 |
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.core.temporal_graph import TemporalGraph
7 | from pathpyG.algorithms.rolling_time_window import RollingTimeWindow
8 |
9 |
def test_rolling_time_window(long_temporal_graph):
    """Check node/edge counts of consecutive aggregate snapshots."""
    window = RollingTimeWindow(long_temporal_graph, 10, 10, False)
    snapshots = list(window)
    # expected (nodes, edges) for the aggregate networks covering
    # 1-10, 11-20, 21-30, 31-40 and 41-50
    expected = [(5, 4), (7, 3), (8, 6), (8, 3), (9, 4)]
    for snapshot, (nodes, edges) in zip(snapshots, expected):
        assert snapshot.n == nodes
        assert snapshot.m == edges
2 |
3 | import numpy as _np
4 |
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.visualisations.hist_plots import hist
7 | from pathpyG.algorithms import centrality
8 | from pathpyG.algorithms.shortest_paths import shortest_paths_dijkstra, diameter, avg_path_length
9 |
10 |
def test_shortest_paths_dijkstra(simple_graph_sp):
    # compare distance and predecessor matrices against known values;
    # np.matrix(...).A is deprecated, so plain ndarrays are used instead
    # (leftover debug print removed)
    dist, pred = shortest_paths_dijkstra(simple_graph_sp)
    expected_dist = _np.array(
        [
            [0, 1, 2, 2, 3],
            [1, 0, 1, 1, 2],
            [2, 1, 0, 2, 1],
            [2, 1, 2, 0, 1],
            [3, 2, 1, 1, 0],
        ]
    )
    expected_pred = _np.array(
        [
            [-9999, 0, 1, 1, 2],
            [1, -9999, 1, 1, 3],
            [1, 2, -9999, 1, 2],
            [1, 3, 1, -9999, 3],
            [1, 2, 4, 4, -9999],
        ]
    )
    assert (dist == expected_dist).all()
    assert (pred == expected_pred).all()
16 |
17 |
def test_diameter(simple_graph_sp):
    # the largest shortest-path distance in the fixture graph is 3
    assert diameter(simple_graph_sp) == 3


def test_avg_path_length(simple_graph_sp):
    # presumably the mean shortest-path length over node pairs — confirm
    # against the avg_path_length implementation
    assert avg_path_length(simple_graph_sp) == 1.6
24 |
--------------------------------------------------------------------------------
/tests/algorithms/test_similarities.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import numpy as _np
4 |
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.statistics.node_similarities import (
7 | overlap_coefficient,
8 | LeichtHolmeNewman_index,
9 | jaccard_similarity,
10 | common_neighbors,
11 | adamic_adar_index,
12 | cosine_similarity,
13 | katz_index,
14 | )
15 |
16 |
17 | def test_common_neighbors(toy_example_graph):
18 | assert common_neighbors(toy_example_graph, "c", "a") == 1
19 | assert common_neighbors(toy_example_graph, "a", "g") == 0
20 | assert common_neighbors(toy_example_graph, "d", "d") == 4
21 | assert common_neighbors(toy_example_graph, "f", "d") == 2
22 |
23 |
24 | def test_overlap_coefficient(toy_example_graph):
25 | assert overlap_coefficient(toy_example_graph, "a", "b") == 1 / 2
26 | assert overlap_coefficient(toy_example_graph, "d", "f") == 2 / 3
27 | assert overlap_coefficient(toy_example_graph, "a", "a") == 1
28 |
29 |
30 | def test_jaccard_similarity(toy_example_graph):
31 | assert jaccard_similarity(toy_example_graph, "a", "b") == 1 / 4
32 | assert jaccard_similarity(toy_example_graph, "a", "c") == 1 / 3
33 | assert jaccard_similarity(toy_example_graph, "d", "e") == 1 / 5
34 |
35 |
36 | def test_adamic_adar_index(toy_example_graph):
37 | assert adamic_adar_index(toy_example_graph, "e", "g") == 1.0 / _np.log(3) + 1.0 / _np.log(4)
38 |
39 |
40 | def test_cosine_similarity(toy_example_graph):
41 | assert _np.isclose(cosine_similarity(toy_example_graph, "c", "a"), 0.5)
42 | assert _np.isclose(cosine_similarity(toy_example_graph, "a", "g"), 0.0)
43 |
44 |
45 | def test_LeichtHolmeNewman_index(toy_example_graph):
46 | assert _np.isclose(LeichtHolmeNewman_index(toy_example_graph, "e", "g", alpha=0.02), 0.0013079553726262417)
47 | assert _np.isclose(LeichtHolmeNewman_index(toy_example_graph, "e", "g", alpha=0.2), 0.14353902083713282)
48 |
49 |
def test_katz_index(toy_example_graph):
    """Katz index of (e, g) for two different damping factors beta."""
    for beta, expected in [(0.02, 0.0008178287973506426), (0.2, 0.12958435772871946)]:
        result = katz_index(toy_example_graph, "e", "g", beta=beta)
        assert _np.isclose(result, expected)
53 |
--------------------------------------------------------------------------------
/tests/algorithms/test_temporal.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import numpy as np
4 | import torch
5 | from torch_geometric import EdgeIndex
6 |
7 | from pathpyG.core.graph import Graph
8 | from pathpyG.algorithms.temporal import temporal_shortest_paths, lift_order_temporal
9 |
10 |
def test_lift_order_temporal(simple_temporal_graph):
    """Lifting to the event graph yields one node per temporal edge and the
    expected time-respecting edge successions for delta=5."""
    edge_index = lift_order_temporal(simple_temporal_graph, delta=5)
    event_graph = Graph.from_edge_index(edge_index)
    assert event_graph.n == simple_temporal_graph.m
    # For delta=5 there are exactly three time-respecting successions:
    # (a,b,1) -> (b,c,5), (b,c,5) -> (c,d,9) and (b,c,5) -> (c,e,9)
    assert event_graph.m == 3
    assert torch.equal(event_graph.data.edge_index, EdgeIndex([[0, 1, 1], [1, 2, 3]]))
18 |
19 |
def test_temporal_shortest_paths(long_temporal_graph):
    """Temporal shortest-path distances and predecessors match the ground truth
    for delta=10 on the 9-node fixture graph."""
    n = long_temporal_graph.n
    dist, pred = temporal_shortest_paths(long_temporal_graph, delta=10)
    assert dist.shape == (n, n)
    assert pred.shape == (n, n)

    inf = float("inf")
    # Ground-truth distance matrix; inf marks unreachable node pairs.
    true_dist = np.array(
        [
            [0.0, 1.0, 1.0, 3.0, 3.0, 3.0, 1.0, 2.0, inf],
            [3.0, 0.0, 1.0, 2.0, 2.0, 1.0, 4.0, 5.0, 1.0],
            [2.0, inf, 0.0, 1.0, 1.0, 1.0, 3.0, 1.0, 1.0],
            [inf, inf, inf, 0.0, inf, inf, inf, inf, inf],
            [inf, inf, inf, inf, 0.0, inf, inf, inf, inf],
            [1.0, inf, inf, inf, inf, 0.0, 2.0, 1.0, inf],
            [inf, inf, inf, inf, inf, inf, 0.0, 1.0, inf],
            [inf, inf, inf, inf, inf, 1.0, inf, 0.0, 1.0],
            [inf, 1.0, inf, inf, inf, inf, inf, inf, 0.0],
        ]
    )
    assert np.allclose(dist, true_dist, equal_nan=True)

    # Ground-truth predecessor matrix; -1 marks "no predecessor".
    true_pred = np.array(
        [
            [0, 0, 0, 2, 2, 2, 0, 2, -1],
            [5, 1, 1, 2, 2, 1, 0, 6, 1],
            [5, -1, 2, 2, 2, 2, 0, 2, 2],
            [-1, -1, -1, 3, -1, -1, -1, -1, -1],
            [-1, -1, -1, -1, 4, -1, -1, -1, -1],
            [5, -1, -1, -1, -1, 5, 0, 5, -1],
            [-1, -1, -1, -1, -1, -1, 6, 6, -1],
            [-1, -1, -1, -1, -1, 7, -1, 7, 7],
            [-1, 8, -1, -1, -1, -1, -1, -1, 8],
        ]
    )
    assert np.allclose(pred, true_pred)
94 |
--------------------------------------------------------------------------------
/tests/algorithms/test_wl.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from pathpyG.core.graph import Graph
4 | from pathpyG.algorithms import WeisfeilerLeman_test
5 |
6 |
def test_WeisfeilerLeman_test_1():
    """WL test accepts two isomorphic three-node paths."""
    # isomorphic graphs: both are simple paths on three nodes
    path_one = Graph.from_edge_list([("a", "b"), ("b", "c")])
    path_two = Graph.from_edge_list([("y", "z"), ("x", "y")])
    result, colors_one, colors_two = WeisfeilerLeman_test(path_one, path_two)
    assert colors_one == colors_two
    assert result is True
14 |
15 |
def test_WeisfeilerLeman_test_2():
    """WL test rejects two structurally different three-node graphs."""
    # non-isomorphic graphs: a directed path vs. two edges into one node
    path_graph = Graph.from_edge_list([("a", "b"), ("b", "c")])
    star_graph = Graph.from_edge_list([("y", "z"), ("x", "z")])
    result, colors_one, colors_two = WeisfeilerLeman_test(path_graph, star_graph)
    assert colors_one != colors_two
    assert result is False
23 |
24 |
def test_WeisfeilerLeman_test_3():
    """WL test accepts two larger isomorphic undirected graphs.

    Fix: removed a leftover debug print of the color histograms.
    """
    # isomorphic graphs
    g1 = Graph.from_edge_list(
        [
            ("a", "g"),
            ("a", "h"),
            ("a", "i"),
            ("b", "g"),
            ("b", "h"),
            ("b", "j"),
            ("c", "g"),
            ("c", "i"),
            ("c", "j"),
            ("d", "h"),
            ("d", "i"),
            ("d", "j"),
        ]
    ).to_undirected()
    g2 = Graph.from_edge_list(
        [
            ("1", "2"),
            ("1", "5"),
            ("1", "4"),
            ("2", "6"),
            ("2", "3"),
            ("3", "7"),
            ("3", "4"),
            ("4", "8"),
            ("5", "6"),
            ("6", "7"),
            ("7", "8"),
            ("8", "5"),
        ]
    ).to_undirected()
    test, c1, c2 = WeisfeilerLeman_test(g1, g2)
    assert c1 == c2
    assert test is True
63 |
--------------------------------------------------------------------------------
/tests/benchmark/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 | import torch
5 |
6 | from pathpyG import Graph
7 | from pathpyG import TemporalGraph
8 |
9 | # from pathpyG import PathData
10 |
11 | # @pytest.fixture
12 | # def tube_paths() -> PathData:
13 | # return PathData.from_csv('docs/data/tube_paths_train.ngram')
14 |
--------------------------------------------------------------------------------
/tests/benchmark/test_benchmark.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 | import torch
5 |
6 | from pathpyG.core.graph import Graph
7 | from pathpyG.core.temporal_graph import TemporalGraph
8 | from pathpyG.core.path_data import PathData
9 | from pathpyG import config
10 | from pathpyG.core.multi_order_model import MultiOrderModel
11 |
12 | # to run benchmarks, do the following:
13 | # > pip install pytest-benchmark
14 | # > run pytest -m benchmark
15 |
16 |
def higher_order_paths(max_order):
    """Benchmark workload: build a multi-order model from the tube path data.

    Fix: the model was bound to an unused local (flake8 F841); it is now
    returned so callers can inspect it, which is backward-compatible since
    the benchmark ignores the return value.
    """
    paths = PathData.from_ngram("docs/data/tube_paths_train.ngram")
    return MultiOrderModel.from_PathData(paths, max_order=max_order)
20 |
21 |
def higher_order_temporal_graph(max_order):
    """Benchmark workload: build a multi-order model from a temporal graph.

    Fix: the model was bound to an unused local (flake8 F841); it is now
    returned, which is backward-compatible since the benchmark ignores it.
    """
    t = TemporalGraph.from_csv("docs/data/ants_1_1.tedges")
    return MultiOrderModel.from_temporal_graph(t, delta=30, max_order=max_order)
25 |
26 |
@pytest.mark.benchmark
def test_higher_order_paths_gpu(benchmark):
    """Benchmark multi-order model construction from path data on the GPU."""
    config["torch"]["device"] = "cuda"
    benchmark.pedantic(higher_order_paths, kwargs={"max_order": 10}, iterations=1, rounds=2)
32 |
33 |
@pytest.mark.benchmark
def test_higher_order_paths_cpu(benchmark):
    """Benchmark multi-order model construction from path data on the CPU."""
    config["torch"]["device"] = "cpu"
    benchmark.pedantic(higher_order_paths, kwargs={"max_order": 10}, iterations=1, rounds=2)
39 |
40 |
@pytest.mark.benchmark
def test_higher_order_temporal_graph_gpu(benchmark):
    """Benchmark multi-order model construction from a temporal graph on the GPU."""
    config["torch"]["device"] = "cuda"
    benchmark.pedantic(higher_order_temporal_graph, kwargs={"max_order": 5}, iterations=1, rounds=10)
46 |
47 |
@pytest.mark.benchmark
def test_higher_order_temporal_graph_cpu(benchmark):
    """Benchmark multi-order model construction from a temporal graph on the CPU."""
    config["torch"]["device"] = "cpu"
    benchmark.pedantic(higher_order_temporal_graph, kwargs={"max_order": 5}, iterations=1, rounds=10)
53 |
--------------------------------------------------------------------------------
/tests/core/__init__.py:
--------------------------------------------------------------------------------
"""Necessary to make Python treat the tests directory as a module.

This is required because mypy cannot otherwise handle identically named test
files in different directories. It also enables module-specific overrides in
pyproject.toml.
"""
6 |
--------------------------------------------------------------------------------
/tests/core/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.core.index_map import IndexMap
7 | from pathpyG.core.temporal_graph import TemporalGraph
8 | from pathpyG.core.path_data import PathData
9 |
10 |
@pytest.fixture
def simple_graph() -> Graph:
    """Return a simple directed graph with three nodes and three edges."""
    edges = [("a", "b"), ("b", "c"), ("a", "c")]
    return Graph.from_edge_list(edges)
15 |
16 |
@pytest.fixture
def simple_graph_multi_edges() -> Graph:
    """Return a directed graph where the edge (a, b) appears twice."""
    edges = [("a", "b"), ("b", "c"), ("a", "c"), ("a", "b")]
    return Graph.from_edge_list(edges)
21 |
22 |
@pytest.fixture
def simple_walks() -> PathData:
    """Return path data with two distinct walks, each observed twice."""
    paths = PathData(mapping=IndexMap(["A", "B", "C", "D", "E"]))
    for walk in (("A", "C", "D"), ("A", "C", "D"), ("B", "C", "E"), ("B", "C", "E")):
        paths.append_walk(walk)
    return paths
32 |
33 |
@pytest.fixture
def simple_walks_2() -> PathData:
    """Return path data with two walks, each carrying weight 2.0."""
    paths = PathData(mapping=IndexMap(["A", "B", "C", "D", "E"]))
    for walk in (("A", "C", "D"), ("B", "C", "E")):
        paths.append_walk(walk, weight=2.0)
    return paths
41 |
42 |
@pytest.fixture
def simple_temporal_graph() -> TemporalGraph:
    """Return a temporal graph with four time-stamped edges."""
    tedges = [("a", "b", 1), ("b", "c", 5), ("c", "d", 9), ("c", "e", 9)]
    return TemporalGraph.from_edge_list(tedges)
48 |
49 |
@pytest.fixture
def long_temporal_graph() -> TemporalGraph:
    """Return a temporal graph with 20 time-stamped edges on 9 nodes."""
    tedges = [
        ("a", "b", 1), ("b", "c", 5), ("c", "d", 9), ("c", "e", 9),
        ("c", "f", 11), ("f", "a", 13), ("a", "g", 18), ("b", "f", 21),
        ("a", "g", 26), ("c", "f", 27), ("h", "f", 27), ("g", "h", 28),
        ("a", "c", 30), ("a", "b", 31), ("c", "h", 32), ("f", "h", 33),
        ("b", "i", 42), ("i", "b", 42), ("c", "i", 47), ("h", "i", 50),
    ]
    return TemporalGraph.from_edge_list(tedges)
76 |
--------------------------------------------------------------------------------
/tests/core/test_index_map.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 | import torch
5 | import numpy as np
6 |
7 | from pathpyG.core.index_map import IndexMap
8 |
9 |
def test_index_mapping():
    """An empty IndexMap acts as the identity; added ids map by position."""
    mapping = IndexMap()

    # Without registered ids, indices and ids map to themselves.
    for value in (0, 42):
        assert mapping.to_idx(value) == value
        assert mapping.to_id(value) == value
    assert mapping.to_ids([0, 1, 2]) == [0, 1, 2]
    assert (mapping.to_idxs([0, 1, 2]) == torch.tensor([0, 1, 2])).all()

    mapping.add_id("a")
    assert mapping.to_idx("a") == 0
    assert mapping.to_id(0) == "a"
    assert mapping.num_ids() == 1
    assert mapping.node_ids == ["a"]

    # A duplicate id is rejected and leaves the mapping untouched.
    with pytest.raises(ValueError):
        mapping.add_id("a")
    assert mapping.num_ids() == 1
    assert mapping.node_ids == ["a"]

    mapping.add_id("c")
    assert mapping.to_idx("c") == 1
    assert mapping.to_id(1) == "c"
    assert mapping.num_ids() == 2
    assert (mapping.node_ids == ["a", "c"]).all()
41 |
42 |
def test_index_mapping_bulk():
    """Bulk insertion via add_ids preserves ordering and rejects duplicates."""
    mapping = IndexMap()

    first = ["a", "b", "c", "d", "e"]
    mapping.add_ids(first)
    assert mapping.num_ids() == 5
    assert (mapping.node_ids == first).all()
    assert mapping.to_idxs(first).tolist() == [0, 1, 2, 3, 4]
    assert mapping.to_ids([0, 1, 2, 3, 4]).tolist() == first

    # A batch containing an already-known id is rejected as a whole.
    with pytest.raises(ValueError):
        mapping.add_ids(("a", "a", "f", "f"))
    mapping.add_ids(["f"])
    extended = first + ["f"]
    assert mapping.num_ids() == 6
    assert (mapping.node_ids == extended).all()
    assert mapping.to_idxs(extended).tolist() == [0, 1, 2, 3, 4, 5]
    assert mapping.to_ids([0, 1, 2, 3, 4, 5]).tolist() == extended

    # Single duplicate ids are rejected too; lookups accept tuples,
    # torch tensors and numpy arrays alike.
    with pytest.raises(ValueError):
        mapping.add_id("a")
    assert mapping.num_ids() == 6
    assert (mapping.node_ids == extended).all()
    assert mapping.to_idxs(tuple(extended)).tolist() == [0, 1, 2, 3, 4, 5]
    assert mapping.to_ids(torch.tensor([0, 1, 2, 3, 4, 5])).tolist() == extended
    assert mapping.to_idxs(np.array(extended)).tolist() == [0, 1, 2, 3, 4, 5]

    mapping.add_ids(np.array(["h", "i", "g"]))
    all_ids = extended + ["h", "i", "g"]
    assert mapping.num_ids() == 9
    assert (mapping.node_ids == all_ids).all()
    assert mapping.to_idxs(all_ids).tolist() == [0, 1, 2, 3, 4, 5, 6, 7, 8]
72 |
73 |
def test_integer_ids():
    """Integer ids are mapped by their position in the id list, not by value.

    Fix: removed two leftover debug prints of the internal mapping state.
    """
    mapping = IndexMap([0, 2, 3, 1, 4])

    assert mapping.to_idx(0) == 0
    assert mapping.to_idx(1) == 3

    assert mapping.to_id(0) == 0
    assert mapping.to_id(3) == 1

    assert mapping.to_ids([0, 1, 2]).tolist() == [0, 2, 3]
    assert (mapping.to_idxs([0, 1, 2]) == torch.tensor([0, 3, 1])).all()
87 |
88 |
def test_tuple_ids():
    """Tuples can serve as node ids and round-trip through idx lookups."""
    mapping = IndexMap([(1, 2), (3, 4), (5, 6)])

    assert mapping.to_idx((1, 2)) == 0
    assert mapping.to_idx((3, 4)) == 1
    assert mapping.to_id(0) == (1, 2)
    assert mapping.to_id(1) == (3, 4)

    expected_ids = np.array([(1, 2), (3, 4)])
    assert (mapping.to_ids([0, 1]) == expected_ids).all()
    assert (mapping.to_idxs([(1, 2), (3, 4)]) == torch.tensor([0, 1])).all()

    # Nested index lists map to correspondingly nested id arrays.
    mapping = IndexMap([(1, 2, 3), (4, 5, 6)])
    nested_ids = np.array([[(1, 2, 3), (4, 5, 6)], [(1, 2, 3), (1, 2, 3)]])
    assert (mapping.to_ids([[0, 1], [0, 0]]) == nested_ids).all()
    nested_idxs = torch.tensor([[0, 1], [0, 0]])
    assert (mapping.to_idxs([[(1, 2, 3), (4, 5, 6)], [(1, 2, 3), (1, 2, 3)]]) == nested_idxs).all()
104 |
105 |
def test_float_ids():
    """Float ids work with both single and bulk insertion."""
    mapping = IndexMap([0.0, 2.0, 3.0, 1.0, 4.0])
    mapping.add_id(1.5)
    mapping.add_ids(np.array([8.0, 9.0]))

    assert mapping.to_idx(0.0) == 0
    assert mapping.to_idx(1.0) == 3

    assert mapping.to_id(0) == 0.0
    assert mapping.to_id(3) == 1.0

    assert mapping.to_ids([0, 1, 7]).tolist() == [0.0, 2.0, 9.0]
    assert (mapping.to_idxs([0.0, 1.0, 2.0]) == torch.tensor([0, 3, 1])).all()
119 |
120 |
def test_bulk_ids():
    """Constructing from an array with duplicates fails; unique ids succeed."""
    node_ids = np.array(["a", "c", "b", "d", "a", "e"])
    with pytest.raises(ValueError):
        mapping = IndexMap(node_ids)
    mapping = IndexMap(np.unique(node_ids))

    assert mapping.to_idx("a") == 0
    assert mapping.to_idx("c") == 2

    idx_matrix = torch.tensor([[0, 2], [1, 3], [0, 4]])
    id_matrix = [["a", "c"], ["b", "d"], ["a", "e"]]
    assert mapping.to_ids(idx_matrix).tolist() == id_matrix
    assert (mapping.to_idxs(id_matrix) == idx_matrix).all()
132 |
--------------------------------------------------------------------------------
/tests/core/test_path_data.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from torch import equal, tensor
4 |
5 | from pathpyG import config
6 |
7 | from pathpyG.core.path_data import PathData
8 | from pathpyG.core.index_map import IndexMap
9 |
10 |
def test_constructor():
    """A freshly constructed PathData contains no paths."""
    empty_paths = PathData()
    assert empty_paths.num_paths == 0
14 |
15 |
def test_num_walks(simple_walks):
    """The simple_walks fixture stores exactly four walks."""
    expected_paths = 4
    assert simple_walks.num_paths == expected_paths
18 |
19 |
def test_path_data_properties(simple_walks):
    """Aggregate statistics of the four walks stored in the fixture."""
    assert isinstance(simple_walks.mapping, IndexMap)
    data = simple_walks.data
    assert data.num_nodes == 12
    assert data.num_edges == 8
    assert data.dag_weight.mean().item() == 1.0
    assert data.dag_num_nodes.float().mean().item() == 3.0
    assert data.dag_num_edges.float().mean().item() == 2.0
    assert data.edge_index.shape == (2, 8)
    assert data.node_sequence.shape == (12, 1)
29 |
30 |
def test_mapping(simple_walks):
    """Node index sequences are mapped back to node ids."""
    mapped = simple_walks.map_node_seq([0, 1, 2])
    assert mapped == ["A", "B", "C"]
33 |
34 |
def test_get_walk(simple_walks):
    """Walks are returned as node-id tuples in insertion order."""
    expected = [("A", "C", "D"), ("A", "C", "D"), ("B", "C", "E"), ("B", "C", "E")]
    for idx, walk in enumerate(expected):
        assert simple_walks.get_walk(idx) == walk
40 |
41 |
def test_add_walk_seq():
    """Appending walks one by one records walks, weights and DAG statistics."""
    paths = PathData(IndexMap(["a", "c", "b", "d", "e"]))

    weighted_walks = [
        (("a", "c", "d"), 1.0),
        (("a", "c"), 1.0),
        (("b", "c", "d"), 1.5),
        (("b", "c", "e"), 1.0),
    ]
    for walk, weight in weighted_walks:
        paths.append_walk(walk, weight=weight)

    assert paths.num_paths == 4
    for idx, (walk, _) in enumerate(weighted_walks):
        assert paths.get_walk(idx) == walk

    assert equal(paths.data.dag_weight, tensor([1.0, 1.0, 1.5, 1.0]))
    assert paths.data.dag_weight.shape[0] == 4
    assert equal(paths.data.dag_num_nodes, tensor([3, 2, 3, 3]))
    assert equal(paths.data.dag_num_edges, tensor([2, 1, 2, 2]))
61 |
62 |
def test_add_walk_seqs():
    """Appending several walks at once behaves like repeated append_walk calls."""
    paths = PathData(IndexMap(["a", "c", "b", "d", "e"]))
    walks = [("a", "c", "d"), ("a", "c"), ("b", "c", "d"), ("b", "c", "e")]
    paths.append_walks(walks, weights=[1.0] * 4)

    assert paths.num_paths == 4
    for idx, walk in enumerate(walks):
        assert paths.get_walk(idx) == walk

    assert equal(paths.data.dag_weight, tensor([1.0] * 4))
    assert paths.data.dag_weight.shape[0] == 4
    assert equal(paths.data.dag_num_nodes, tensor([3, 2, 3, 3]))
    assert equal(paths.data.dag_num_edges, tensor([2, 1, 2, 2]))
77 |
78 |
def test_str(simple_walks):
    """The string representation of PathData is non-empty."""
    representation = str(simple_walks)
    assert representation
81 |
82 |
def test_from_ngram():
    """Loading the tube n-gram file yields the expected number of paths."""
    ngram_file = "docs/data/tube_paths_train.ngram"
    loaded = PathData.from_ngram(ngram_file)
    assert loaded.num_paths == 61748
87 |
--------------------------------------------------------------------------------
/tests/core/test_temporal_graph.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import numpy as np
4 | import torch
5 | from torch import equal
6 | from torch_geometric.data import Data
7 |
8 | from pathpyG.core.temporal_graph import TemporalGraph
9 | from pathpyG.utils import to_numpy
10 |
11 |
def test_init():
    """Constructing a TemporalGraph sorts edges (and edge attributes) by time."""
    data = Data(edge_index=torch.IntTensor([[1, 3, 2, 4], [2, 4, 3, 5]]), time=torch.Tensor([1000, 1100, 1010, 2000]))
    tg = TemporalGraph(data)
    # After ordering the edges by time
    assert (to_numpy(tg.data.edge_index) == np.array([[1, 2, 3, 4], [2, 3, 4, 5]])).all()
    assert equal(tg.data.time, torch.tensor([1000, 1010, 1100, 2000]))

    # Edge attributes are permuted together with the edges (here n == m).
    data = Data(
        edge_index=torch.IntTensor([[0, 1, 2, 3], [1, 2, 3, 2]]),
        time=torch.Tensor([1000, 1100, 1010, 2000]),
        edge_weight=torch.Tensor([1, 2, 3, 4]),
    )
    tg = TemporalGraph(data)
    assert (to_numpy(tg.data.edge_index) == np.array([[0, 2, 1, 3], [1, 3, 2, 2]])).all()
    assert equal(tg.data.time, torch.tensor([1000, 1010, 1100, 2000]))
    assert equal(tg.data.edge_weight, torch.tensor([1, 3, 2, 4]))
25 |
26 |
def test_from_edge_list():
    """Building from a time-stamped edge list sets sizes and time range."""
    tedges = [("a", "b", 1), ("b", "c", 5), ("c", "d", 9), ("c", "e", 9)]
    tg = TemporalGraph.from_edge_list(tedges)
    assert tg.n == 5
    assert tg.m == 4
    assert tg.start_time == 1
    assert tg.end_time == 9
    assert tg.data.edge_index.shape == (2, 4)
35 |
36 |
def test_N(long_temporal_graph):
    """The long temporal graph fixture has nine distinct nodes."""
    expected_nodes = 9
    assert long_temporal_graph.n == expected_nodes
39 |
40 |
def test_M(long_temporal_graph):
    """The long temporal graph fixture has twenty time-stamped edges."""
    expected_edges = 20
    assert long_temporal_graph.m == expected_edges
43 |
44 |
def test_temporal_edges(long_temporal_graph):
    """Iterating temporal edges yields (source, target, time) triples that
    match the underlying edge_index and time tensors."""
    g = long_temporal_graph
    for idx, (src, dst, ts) in enumerate(g.temporal_edges):
        assert src == g.mapping.to_id(g.data.edge_index[0, idx])
        assert dst == g.mapping.to_id(g.data.edge_index[1, idx])
        assert ts == g.data.time[idx]
50 |
51 |
def test_shuffle_time(long_temporal_graph):
    """Shuffling timestamps must keep node and edge counts unchanged.

    Fix: the final assertion compared g_2.m with itself (always true);
    it now compares the edge counts of the graphs before and after
    shuffling, as intended.
    """
    g_1 = long_temporal_graph.to_static_graph()
    long_temporal_graph.shuffle_time()
    assert long_temporal_graph.n == 9
    assert long_temporal_graph.m == 20

    g_2 = long_temporal_graph.to_static_graph()
    assert g_1.n == g_2.n
    assert g_1.m == g_2.m
61 |
62 |
def test_to_static_graph(long_temporal_graph):
    """Static projection preserves sizes; weighted projection counts multiplicities."""
    static = long_temporal_graph.to_static_graph()
    assert static.n == long_temporal_graph.n
    assert static.m == long_temporal_graph.m

    weighted = long_temporal_graph.to_static_graph(weighted=True)
    assert weighted.n == long_temporal_graph.n
    assert weighted.data.edge_weight[0].item() == 2.0  # A -> B is two times in the temporal graph
    assert weighted.data.edge_weight[1].item() == 1.0  # B -> C is one time in the temporal graph
72 |
73 |
def test_to_undirected(long_temporal_graph):
    """Undirected conversion keeps n and doubles the number of edges."""
    undirected = long_temporal_graph.to_undirected()
    assert undirected.n == long_temporal_graph.n
    assert undirected.m == 2 * long_temporal_graph.m
78 |
79 |
def test_get_batch(long_temporal_graph):
    """Edge-index batches keep all nodes but restrict the edges."""
    t_1 = long_temporal_graph.get_batch(1, 9)
    # The number of nodes stays the same
    assert t_1.n == 9
    assert t_1.m == 8

    t_2 = long_temporal_graph.get_batch(9, 13)
    assert t_2.n == 9
    assert t_2.m == 4
88 |
89 |
def test_get_window(long_temporal_graph):
    """Time windows select the edges whose timestamps fall inside the window."""
    for window, expected_edges in [((1, 9), 4), ((9, 13), 4)]:
        windowed = long_temporal_graph.get_window(*window)
        assert windowed.m == expected_edges
95 |
96 |
def test_str(simple_temporal_graph):
    """The string representation of a temporal graph is non-empty."""
    representation = str(simple_temporal_graph)
    assert representation
99 |
--------------------------------------------------------------------------------
/tests/io/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/tests/io/__init__.py
--------------------------------------------------------------------------------
/tests/io/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 | from pathpyG.core.graph import Graph
6 | import pandas as pd
7 |
8 |
@pytest.fixture
def df_graph() -> pd.DataFrame:
    """DataFrame for simple graph with header and no edge attributes."""
    return pd.DataFrame({"v": ["a", "b", "c"], "w": ["b", "c", "a"]})
14 |
15 |
@pytest.fixture
def df_graph_attribute() -> pd.DataFrame:
    """DataFrame for simple graph with edge attributes and header."""
    columns = {"v": ["a", "b", "c"], "w": ["b", "c", "a"], "edge_weight": [2.0, 1.0, 42.0]}
    return pd.DataFrame(columns)
21 |
22 |
@pytest.fixture
def df_graph_attribute_no_header() -> pd.DataFrame:
    """DataFrame for simple graph with edge attributes and no header."""
    rows = [["a", "b", 2.0], ["b", "c", 1.0], ["c", "a", 42.0]]
    return pd.DataFrame(rows)
28 |
29 |
@pytest.fixture
def df_temporal_graph() -> pd.DataFrame:
    """DataFrame for simple temporal graph with header."""
    columns = {"v": ["a", "b", "c"], "w": ["b", "c", "a"], "t": [1, 2, 3]}
    return pd.DataFrame(columns)
35 |
36 |
@pytest.fixture
def df_temporal_graph_no_header() -> pd.DataFrame:
    """DataFrame for simple temporal graph without header."""
    rows = [["a", "b", 1], ["b", "c", 2], ["c", "a", 3]]
    return pd.DataFrame(rows)
42 |
--------------------------------------------------------------------------------
/tests/io/test_netzschleuder.py:
--------------------------------------------------------------------------------
1 | """This module tests high-level functions of the netzschleuder module."""
2 |
3 | import pytest
4 |
5 | import torch
6 |
7 | from pathpyG import Graph, TemporalGraph
8 | from pathpyG.io import list_netzschleuder_records, read_netzschleuder_graph, read_netzschleuder_record
9 |
10 |
def test_list_netzschleuder_records():
    """Test the list_netzschleuder_records() function.

    Fix: removed a leftover debug print of the full record list.
    """
    # Test the function with a valid URL.
    records = list_netzschleuder_records()
    assert len(records) > 0

    # Test the function with an invalid URL.
    url = "https://networks.skewed.de/invalid-url"
    with pytest.raises(Exception, match="Could not connect to netzschleuder repository at"):
        records = list_netzschleuder_records(url)
23 |
24 |
def test_node_attrs():
    """Test the extraction of node attributes"""
    g = read_netzschleuder_graph("karate", "77")
    for attr in ("node__pos", "node_name", "node_groups"):
        assert attr in g.node_attrs()
31 |
32 |
def test_edge_attrs():
    """Test the extraction of edge attributes.

    Fix: removed a leftover debug print of the weight tensor and compacted
    the one-value-per-line expected tensor (same 38 values).
    """
    g = read_netzschleuder_graph("ambassador", "1985_1989", multiedges=True)
    assert "edge_weight" in g.edge_attrs()
    expected_weights = torch.tensor(
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 3, 1, 3, 3,
         1, 1, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 3, 1, 2]
    )
    assert torch.equal(g.data.edge_weight, expected_weights)
83 |
84 |
def test_graph_attrs():
    """Test the extraction of graph attributes"""
    g = read_netzschleuder_graph("karate", "77")
    assert "analyses_diameter" in g.data
    expected_diameter = 5
    assert g.data.analyses_diameter == expected_diameter
90 |
91 |
def test_read_netzschleuder_record():
    """Test the read_netzschleuder_record() function."""
    # Fetching the first listed record from the default URL yields a
    # non-empty dictionary.
    record_name = list_netzschleuder_records()[0]
    record = read_netzschleuder_record(record_name)
    assert isinstance(record, dict)
    assert record

    # An invalid repository URL raises a connection error.
    url = "https://networks.skewed.de/invalid-url"
    with pytest.raises(Exception, match="Could not connect to netzschleuder repository at"):
        record = read_netzschleuder_record(record_name, url)
105 |
106 |
def test_read_netzschleuder_graph():
    """Test the read_netzschleuder_graph() function for static data.

    Fix: the docstring wrongly said "for timestamped data" (copied from the
    temporal test below); this test reads the static email_company graph.
    """
    g = read_netzschleuder_graph(name="email_company")
    assert isinstance(g, Graph)
    assert g.n == 167
    assert g.m == 5784
114 |
115 |
def test_read_netzschleuder_graph_temporal():
    """Test the read_netzschleuder_graph() function for timestamped data."""
    g = read_netzschleuder_graph(name="email_company", time_attr="time", multiedges=True)
    assert isinstance(g, TemporalGraph)
    assert g.n == 167
    assert g.m == 82927
    assert g.start_time == 1262454010
    assert g.end_time == 1285884492
    assert "edge_weight" in g.edge_attrs()
126 |
--------------------------------------------------------------------------------
/tests/io/test_pandas.py:
--------------------------------------------------------------------------------
1 | """This module tests high-level functions of the pandas module."""
2 |
3 | import pytest
4 |
5 | from torch import tensor, equal
6 | import numpy as np
7 |
8 | from pathpyG.core.graph import Graph
9 | from pathpyG.core.temporal_graph import TemporalGraph
10 | from pathpyG.io.pandas import df_to_graph, df_to_temporal_graph
11 |
12 |
def test_df_to_graph(df_graph, df_graph_attribute, df_graph_attribute_no_header):
    """DataFrames convert to graphs; edge attributes are picked up from
    header names or auto-named when the frame has no header."""
    g: Graph = df_to_graph(df_graph)
    assert g.n == 3
    assert g.m == 3

    # Named attribute column keeps its name.
    g = df_to_graph(df_graph_attribute)
    assert g.n == 3
    assert g.m == 3
    assert "edge_weight" in g.edge_attrs()
    assert equal(g.data.edge_weight, tensor([2.0, 1.0, 42.0]))

    # Without a header the attribute column gets a generated name.
    g = df_to_graph(df_graph_attribute_no_header)
    assert g.n == 3
    assert g.m == 3
    assert "edge_attr_0" in g.edge_attrs()
    assert equal(g.data.edge_attr_0, tensor([2.0, 1.0, 42.0]))
29 |
30 |
def test_df_to_temporal_graph(df_temporal_graph, df_temporal_graph_no_header):
    """Temporal DataFrames convert to temporal graphs with or without header."""
    for frame in (df_temporal_graph, df_temporal_graph_no_header):
        g: TemporalGraph = df_to_temporal_graph(frame)
        assert g.n == 3
        assert g.m == 3
        assert equal(g.data.time, tensor([1.0, 2.0, 3.0]))
41 |
--------------------------------------------------------------------------------
/tests/nn/__init__.py:
--------------------------------------------------------------------------------
"""Necessary to make Python treat the tests directory as a module.

This is required because mypy cannot otherwise handle identically named test
files in different directories. It also enables module-specific overrides in
pyproject.toml.
"""
6 |
--------------------------------------------------------------------------------
/tests/nn/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 | from pathpyG.core.index_map import IndexMap
6 | from pathpyG.core.path_data import PathData
7 |
8 |
@pytest.fixture
def simple_walks() -> PathData:
    """Return path data with two walks, each carrying weight 2.0."""
    paths = PathData(mapping=IndexMap(["A", "B", "C", "D", "E"]))
    for walk in (("A", "C", "D"), ("B", "C", "E")):
        paths.append_walk(walk, weight=2.0)
    return paths
16 |
--------------------------------------------------------------------------------
/tests/nn/test_dbgnn.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import torch
4 | from torch import equal, tensor
5 |
6 | from pathpyG import config
7 | from pathpyG.nn.dbgnn import DBGNN
8 | from pathpyG.core.multi_order_model import MultiOrderModel
9 | from pathpyG.utils.dbgnn import generate_bipartite_edge_index
10 |
11 |
def test_bipartite_edge_index(simple_walks):
    """Bipartite edge index maps second-order nodes to the last or first
    first-order node of the pair.

    Fix: removed six leftover debug prints of layer data and mappings.
    """
    m = MultiOrderModel.from_PathData(simple_walks, max_order=2)
    g = m.layers[1]
    g2 = m.layers[2]

    # ensure that A,C and B,C are mapped to C, C,D is mapped to D and C,E is mapped to E
    bipartite_edge_index = generate_bipartite_edge_index(g, g2, mapping="last")
    assert equal(bipartite_edge_index, tensor([[0, 1, 2, 3], [2, 2, 3, 4]]))

    # ensure that A,C is mapped A, B,C is mapped to B, and C,D and C,E are mapped to C
    bipartite_edge_index = generate_bipartite_edge_index(g, g2, mapping="first")
    assert equal(bipartite_edge_index, tensor([[0, 1, 2, 3], [0, 1, 2, 2]]))
32 |
33 |
def test_dbgnn(simple_walks):
    """A DBGNN model built from a two-layer multi-order model runs forward."""
    m = MultiOrderModel.from_PathData(simple_walks, max_order=2)
    data = m.to_dbgnn_data()
    first_order = m.layers[1]
    second_order = m.layers[2]
    data.y = torch.tensor([first_order.mapping.to_idx(i) // 10 for i in first_order.mapping.node_ids])

    model = DBGNN(
        num_features=[first_order.n, second_order.n],
        num_classes=len(data.y.unique()),
        hidden_dims=[16, 32, 8],
        p_dropout=0.4,
    )

    out = model(data)
    assert out is not None
45 |
--------------------------------------------------------------------------------
/tests/processes/__init__.py:
--------------------------------------------------------------------------------
"""Necessary to make Python treat the tests directory as a module.

This is required because mypy cannot otherwise handle identically named test
files in different directories. It also enables module-specific overrides in
pyproject.toml.
"""
6 |
--------------------------------------------------------------------------------
/tests/processes/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import TYPE_CHECKING, Tuple
3 |
4 | import pytest
5 | import torch
6 |
7 | from pathpyG.core.graph import Graph
8 | from pathpyG.core.path_data import PathData
9 | from pathpyG.core.multi_order_model import MultiOrderModel
10 |
11 |
@pytest.fixture
def simple_graph() -> Graph:
    """Return a simple example for a graph with a ring topology."""
    # 15 nodes "a".."o" connected in a directed cycle a->b->...->o->a
    nodes = list("abcdefghijklmno")
    ring_edges = [(nodes[i], nodes[(i + 1) % len(nodes)]) for i in range(len(nodes))]
    return Graph.from_edge_list(ring_edges)
34 |
35 |
@pytest.fixture
def simple_second_order_graph() -> Tuple[Graph, Graph]:
    """Return a simple second-order graph."""
    g = Graph.from_edge_list([["a", "b"], ["b", "c"], ["c", "a"], ["c", "d"], ["d", "a"]])
    g.data["edge_weight"] = torch.tensor([[1], [1], [2], [1], [1]])

    # weighted walks used to build the second-order layer
    weighted_walks = [
        (["a", "b", "c"], 1),
        (["b", "c", "a"], 1),
        (["b", "c", "d"], 0.2),
        (["c", "a", "b"], 1),
        (["c", "d", "a"], 0.2),
        (["d", "a", "b"], 1),
    ]
    paths = PathData(g.mapping)
    for walk, weight in weighted_walks:
        paths.append_walk(walk, weight=weight)

    second_order = MultiOrderModel.from_PathData(paths, max_order=2).layers[2]
    return (g, second_order)
53 |
--------------------------------------------------------------------------------
/tests/processes/test_random_walk.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import TYPE_CHECKING, Tuple
3 |
4 | from torch import IntTensor, equal, tensor
5 |
6 | from pathpyG import config
7 | from pathpyG.processes.random_walk import RandomWalk, HigherOrderRandomWalk
8 | from pathpyG.core.path_data import PathData
9 | from pathpyG.core.graph import Graph
10 | from pathpyG.core.multi_order_model import MultiOrderModel
11 |
12 |
def check_transitions(g, paths: PathData):
    """Assert that every consecutive node pair of every walk is an edge of `g`."""
    for path_idx in range(paths.num_paths):
        walk = paths.get_walk(path_idx)
        for source, target in zip(walk[:-1], walk[1:]):
            assert g.is_edge(source, target)
18 |
19 |
def test_random_walk(simple_graph):
    """Run one random walk per node and validate record count and transitions."""
    rw = RandomWalk(simple_graph)
    steps = 20
    data = rw.run_experiment(steps=steps, runs=list(simple_graph.nodes))

    # expected record count: 2 records per step per run plus n records per run
    n = simple_graph.n
    assert len(data) == n * steps * 2 + n * n

    # every transition of the generated paths must follow an existing edge
    check_transitions(simple_graph, rw.get_paths(data))
31 |
32 |
def test_transition_matrix(simple_graph):
    """On the ring every node has one successor, so all probabilities are 1.0."""
    rw = RandomWalk(simple_graph)

    matrix = rw.transition_matrix
    assert (matrix.data == 1.0).all()
    probabilities = rw.transition_probabilities("a")
    assert probabilities[1] == 1.0
38 |
39 |
def test_higher_order_random_walk(simple_second_order_graph: Tuple[Graph, Graph]):
    """Run a weighted second-order random walk and validate counts and transitions."""
    first_order, second_order = simple_second_order_graph
    print(second_order.mapping)

    rw = HigherOrderRandomWalk(second_order, first_order, weight=True)
    steps = 100
    data = rw.run_experiment(steps=steps, runs=second_order.nodes)

    # expected record count: 2 records per step per run plus n records per run
    n2 = second_order.n
    assert len(data) == n2 * steps * 2 + n2 * n2

    # projected paths must only use edges of the first-order graph
    check_transitions(first_order, rw.get_paths(data))

    # rw.first_order_stationary_state()
53 |
--------------------------------------------------------------------------------
/tests/statistics/__init__.py:
--------------------------------------------------------------------------------
1 | """Necessary to make Python treat the tests directory as a module.
2 |
This is required since mypy doesn't support duplicate file names otherwise.
It is also required to enable module-specific overrides in pyproject.toml.
5 | """
6 |
--------------------------------------------------------------------------------
/tests/statistics/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 | import torch
5 |
6 | from pathpyG.core.graph import Graph
7 |
8 |
@pytest.fixture
def simple_graph() -> Graph:
    """Return a simple undirected graph."""
    # list every edge in both directions, mirroring an undirected edge index
    base_edges = [("a", "b"), ("b", "c"), ("b", "d"), ("d", "e"), ("c", "e")]
    edge_list = []
    for u, v in base_edges:
        edge_list.append((u, v))
        edge_list.append((v, u))
    return Graph.from_edge_list(edge_list, is_undirected=True)
27 |
28 |
@pytest.fixture
def toy_example_graph() -> Graph:
    """Return an undirected toy example graph."""
    edges = [
        ("a", "b"),
        ("b", "c"),
        ("c", "a"),
        ("d", "e"),
        ("e", "f"),
        ("f", "g"),
        ("g", "d"),
        ("d", "f"),
        ("b", "d"),
    ]
    return Graph.from_edge_list(edges).to_undirected()
35 |
36 |
@pytest.fixture
def toy_example_graph_directed() -> Graph:
    """Return a directed toy example graph (same edges as the undirected one)."""
    edges = [
        ("a", "b"),
        ("b", "c"),
        ("c", "a"),
        ("d", "e"),
        ("e", "f"),
        ("f", "g"),
        ("g", "d"),
        ("d", "f"),
        ("b", "d"),
    ]
    return Graph.from_edge_list(edges)
43 |
--------------------------------------------------------------------------------
/tests/statistics/test_statistics.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import numpy as _np
4 |
5 | from pathpyG.statistics import (
6 | degree_distribution,
7 | degree_raw_moment,
8 | degree_sequence,
9 | degree_central_moment,
10 | degree_generating_function,
11 | mean_degree,
12 | degree_assortativity,
13 | )
14 | from pathpyG.statistics.clustering import avg_clustering_coefficient, local_clustering_coefficient, closed_triads
15 |
16 |
def test_degree_distribution(simple_graph):
    """One node of degree 1, three of degree 2, one of degree 3 (out of 5)."""
    dist = degree_distribution(simple_graph)
    expected = {1: 1 / 5, 2: 3 / 5, 3: 1 / 5}
    for degree, probability in expected.items():
        assert _np.isclose(dist[degree], probability)
22 |
23 |
def test_degree_sequence(simple_graph):
    """Degree sequence lists each node's degree in node-index order."""
    expected = _np.array([1.0, 3.0, 2.0, 2.0, 2.0])
    assert (degree_sequence(simple_graph) == expected).all()
27 |
28 |
def test_degree_raw_moment(simple_graph):
    """First three raw moments of the degree distribution."""
    assert degree_raw_moment(simple_graph, k=1) == 2.0
    assert degree_raw_moment(simple_graph, k=2) == 4.4
    # third moment is float-sensitive, so compare approximately
    assert _np.isclose(degree_raw_moment(simple_graph, k=3), 10.4)
36 |
37 |
def test_degree_central_moment(simple_graph):
    """First three central moments of the degree distribution."""
    assert degree_central_moment(simple_graph, k=1) == 0.0
    assert degree_central_moment(simple_graph, k=2) == 0.4
    # third moment is float-sensitive, so compare approximately
    assert _np.isclose(degree_central_moment(simple_graph, k=3), 0.0)
45 |
46 |
def test_degree_generating_function(simple_graph):
    """Evaluate the degree generating function at a scalar and an array argument."""
    assert degree_generating_function(simple_graph, x=0.5) == 0.275
    values = degree_generating_function(simple_graph, x=_np.array([0, 0.5, 1.0]))
    assert (values == _np.array([0, 0.275, 1.0])).all()
52 |
53 |
def test_mean_degree(toy_example_graph):
    """Mean degree must equal the first raw moment of the degree distribution."""
    first_moment = degree_raw_moment(toy_example_graph, k=1)
    assert _np.isclose(first_moment, mean_degree(toy_example_graph), atol=1e-6)
56 |
57 |
def test_clustering_coefficient(toy_example_graph):
    """Local and average clustering coefficients of the toy graph."""
    expected_local = {"a": 1.0, "b": 1 / 3, "f": 2 / 3}
    for node, coefficient in expected_local.items():
        assert local_clustering_coefficient(toy_example_graph, node) == coefficient
    assert _np.isclose(avg_clustering_coefficient(toy_example_graph), 0.7619, atol=0.0001)
63 |
64 |
def test_closed_triads_undirected(toy_example_graph):
    """Closed triads contain each connecting edge in both directions."""
    assert closed_triads(toy_example_graph, "a") == {("b", "c"), ("c", "b")}
    assert closed_triads(toy_example_graph, "d") == {("e", "f"), ("f", "e"), ("f", "g"), ("g", "f")}
68 |
69 |
def test_closed_triads_directed(toy_example_graph_directed):
    """In the directed variant only node d has a closed triad, via edge (e, f)."""
    assert closed_triads(toy_example_graph_directed, "a") == set()
    assert closed_triads(toy_example_graph_directed, "d") == {("e", "f")}
73 |
74 |
def test_degree_assortativity(toy_example_graph):
    """Degree assortativity of the toy graph is slightly negative (about -0.26)."""
    coefficient = degree_assortativity(toy_example_graph)
    assert _np.isclose(coefficient, -0.26, atol=1e-2)
77 |
--------------------------------------------------------------------------------
/tests/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pathpy/pathpyG/760571a0829f0bc04c5a127d5b059f25c6ab74ca/tests/utils/__init__.py
--------------------------------------------------------------------------------
/tests/utils/test_convert.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | from torch_geometric import EdgeIndex
4 |
5 | from pathpyG.utils import to_numpy
6 |
7 |
def test_to_numpy():
    """to_numpy must convert tensors, EdgeIndex objects and their rows to ndarrays."""
    expected = np.array([[0, 1, 1, 2], [1, 0, 2, 1]])

    # plain 1-d tensor
    vector = torch.tensor([1, 2, 3])
    converted = to_numpy(vector)
    assert isinstance(converted, np.ndarray)
    assert np.array_equal(converted, np.array([1, 2, 3]))

    # plain 2 x m edge tensor
    edge_tensor = torch.tensor([[0, 1, 1, 2], [1, 0, 2, 1]])
    assert isinstance(to_numpy(edge_tensor), np.ndarray)
    assert np.array_equal(to_numpy(edge_tensor), expected)

    # torch_geometric EdgeIndex wrapper
    edge_index = EdgeIndex(edge_tensor)
    assert isinstance(to_numpy(edge_index), np.ndarray)
    assert np.array_equal(to_numpy(edge_index), expected)

    # individual rows obtained by unpacking the EdgeIndex
    first_row, second_row = edge_index
    for row, reference in ((first_row, expected[0]), (second_row, expected[1])):
        assert isinstance(to_numpy(row), np.ndarray)
        assert np.array_equal(to_numpy(row), reference)
26 |
--------------------------------------------------------------------------------
/tests/visualizations/__init__.py:
--------------------------------------------------------------------------------
1 | """Necessary to make Python treat the tests directory as a module.
2 |
This is required since mypy doesn't support duplicate file names otherwise.
It is also required to enable module-specific overrides in pyproject.toml.
5 | """
6 |
--------------------------------------------------------------------------------
/tests/visualizations/test_hist.py:
--------------------------------------------------------------------------------
1 | from pathpyG.core.graph import Graph
2 | from pathpyG.visualisations.hist_plots import hist
3 |
4 |
def test_hist_plot() -> None:
    """Test to plot a histogram."""
    net = Graph.from_edge_list([["a", "b"], ["b", "c"], ["a", "c"]])
    degree_counts = net.degrees()

    # the actual plotting calls are deliberately disabled here
    # print(degree_counts)
    # hist(net)
12 |
--------------------------------------------------------------------------------
/tests/visualizations/test_plot.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import pytest
3 |
4 | from types import ModuleType
5 | from pathpyG.core.graph import Graph
6 | from pathpyG.core.temporal_graph import TemporalGraph
7 | from pathpyG.visualisations.plot import PathPyPlot
8 | from pathpyG.visualisations.plot import _get_plot_backend
9 | from pathpyG.visualisations.network_plots import (
10 | network_plot,
11 | temporal_plot,
12 | static_plot,
13 | )
14 | from pathpyG.visualisations import plot
15 |
16 |
def test_PathPyPlot() -> None:
    """Test that a bare PathPyPlot exposes dict-typed data and config.

    The local variable is named `pp_plot` (not `plot`) so it does not shadow
    the `plot` function imported at module level and used by other tests.
    """
    pp_plot = PathPyPlot()

    assert isinstance(pp_plot.data, dict)
    assert isinstance(pp_plot.config, dict)
23 |
24 |
def test_get_plot_backend() -> None:
    """Test to get a valid plot backend."""

    # a backend which does not exist must raise an ImportError
    with pytest.raises(ImportError):
        _get_plot_backend(default="does not exist")

    # selecting a backend by name and by filename suffix must yield the same module
    for backend_name, filename in (
        ("matplotlib", "test.png"),
        ("d3js", "test.html"),
        ("tikz", "test.tex"),
    ):
        by_name = _get_plot_backend(backend=backend_name)
        assert isinstance(by_name, ModuleType)

        by_suffix = _get_plot_backend(filename=filename)
        assert isinstance(by_suffix, ModuleType)

        assert by_name == by_suffix
61 |
62 |
# Uses a default pytest fixture: see https://docs.pytest.org/en/6.2.x/tmpdir.html
def test_network_plot_png(tmp_path) -> None:
    """Test to plot a static network as png file."""
    net = Graph.from_edge_list([["a", "b"], ["b", "c"], ["a", "c"]])
    net.data["edge_weight"] = torch.tensor([[1], [1], [2]])
    net.data["edge_size"] = torch.tensor([[3], [4], [5]])
    net.data["node_size"] = torch.tensor([[90], [8], [7]])

    target = tmp_path / "test.png"
    network_plot(net, edge_color="green", layout="fr").save(target)
    assert target.exists()
74 |
75 |
def test_network_plot_html(tmp_path) -> None:
    """Test to plot a static network as html file."""
    net = Graph.from_edge_list([["a", "b"], ["b", "c"], ["a", "c"]])
    net.data["node_size"] = torch.tensor([[90], [8], [7]])

    target = tmp_path / "test.html"
    network_plot(net).save(target)
    assert target.exists()
83 |
84 |
def test_plot_function(tmp_path) -> None:
    """Test the generic plot() entry point with an html target."""
    net = Graph.from_edge_list([["a", "b"], ["b", "c"], ["a", "c"]])

    target = tmp_path / "generic.html"
    plot(net).save(target)
    assert target.exists()
91 |
92 |
def test_network_plot_tex(tmp_path) -> None:
    """Test to plot a static network as tex file."""
    net = Graph.from_edge_list([["a", "b"], ["b", "c"], ["a", "c"]])

    figure = network_plot(net, layout="fr")
    # PDF output is probably not supported on the CI runner:
    # figure.save("test.pdf")
    target = tmp_path / "test.tex"
    figure.save(target)
    assert target.exists()
102 |
103 |
def test_temporal_plot(tmp_path) -> None:
    """Test to plot a temporal network."""
    temporal_edges = [
        ("a", "b", 1),
        ("b", "c", 5),
        ("c", "d", 9),
        ("d", "a", 9),
        ("a", "b", 10),
        ("b", "c", 10),
    ]
    net = TemporalGraph.from_edge_list(temporal_edges)
    net.data["edge_size"] = torch.tensor([[3], [4], [5], [1], [2], [3]])

    node_colors = {"a": "blue", "b": "red", "c": "green", "d": "yellow"}
    figure = temporal_plot(
        net,
        node_color=node_colors,
        start=3,
        end=25,
        delta=1000,
        layout="fr",
        d3js_local=False,
    )
    target = tmp_path / "temp.html"
    figure.save(target)
    assert target.exists()
130 |
--------------------------------------------------------------------------------