├── .auto-changelog ├── .auto-changelog-template.hbs ├── .codespellignorelines ├── .codespellrc ├── .copier-answers.yml ├── .flake8 ├── .gitattributes ├── .github └── workflows │ ├── check-packaging.yml │ ├── lint.yml │ ├── publish-sdist.yml │ └── test.yml ├── .gitignore ├── .gitreview ├── .pre-commit-config.yaml ├── .pylintrc ├── .readthedocs.yaml ├── AUTHORS.txt ├── CHANGELOG.md ├── CITATION.cff ├── CONTRIBUTING.md ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── astrovascpy ├── PetscBinaryIO.py ├── __init__.py ├── bloodflow.py ├── exceptions.py ├── io.py ├── ou.py ├── plotting.py ├── report_reader.py ├── report_writer.py ├── scipy_petsc_conversions.py ├── typing.py ├── utils.py ├── version.py └── vtk_io.py ├── commitlint.config.js ├── docs ├── Makefile ├── requirements.txt └── source │ ├── api_ref.rst │ ├── changelog.rst │ ├── conf.py │ ├── index.rst │ └── logo │ └── BBP-AstroVascPy-Github.jpg ├── examples ├── compute_static_flow_pressure.py ├── data │ ├── graphs_folder │ │ ├── edge_dataset.csv │ │ ├── node_dataset.csv │ │ ├── toy_graph.bin │ │ └── toy_graph.h5 │ └── params.yaml ├── job_script.sbatch ├── load_graph_archngv.py ├── load_graph_archngv.sbatch └── simulate_OU_process.py ├── package.json ├── pyproject.toml ├── setup.py ├── setup.sh ├── tests ├── __init__.py ├── data │ ├── dataset.h5 │ ├── edge_dataset.csv │ ├── input_flow │ │ └── sine.csv │ ├── node_dataset.csv │ ├── reporting │ │ ├── compartment_report.h5 │ │ ├── create_reports.py │ │ └── diff_unit_compartment_report.h5 │ ├── toy_graph.bin │ └── toy_graph.h5 ├── test_bloodflow.py ├── test_graphs.py ├── test_io.py ├── test_mpi.py ├── test_ou.py ├── test_report_reader.py ├── test_report_writer.py └── test_utils.py └── tox.ini /.auto-changelog: -------------------------------------------------------------------------------- 1 | { 2 | "output": "CHANGELOG.md", 3 | "template": ".auto-changelog-template.hbs", 4 | "tagPrefix": "bloodflow-v", 5 | "commitLimit": false, 6 | "backfillLimit": false, 7 | 
"ignoreCommitPattern": "Release [0-9]+\\.[0-9]+\\.[0-9]+|Update CHANGELOG.*|.*\\[skip-changelog\\].*|\\[pre-commit.ci\\]", 8 | "commitUrl": "https://bbpgitlab.epfl.ch/molsys/bloodflow/commit/{id}", 9 | "issueUrl": "https://bbpgitlab.epfl.ch/molsys/bloodflow/issues/{id}", 10 | "mergeUrl": "https://bbpgitlab.epfl.ch/molsys/bloodflow/merge_requests/{id}", 11 | "compareUrl": "https://bbpgitlab.epfl.ch/molsys/bloodflow/compare/{from}...{to}", 12 | "startingVersion": "0.2.0" 13 | } 14 | -------------------------------------------------------------------------------- /.auto-changelog-template.hbs: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | {{!-- 4 | Introduction 5 | • This template tries to follow conventional commits format https://www.conventionalcommits.org/en/v1.0.0/ 6 | • The template uses regex to filter commit types into their own headings (this is more than just fixes and features headings) 7 | • It also uses the replaceText function in package.json to remove the commit type text from the message, because the headers are shown instead. 8 | • The text 'Breaking:' or 'Breaking changes:' can be located anywhere in the commit. 9 | • The types feat:, fix:, chore:, docs:, refactor:, test:, style:, perf: must be at the beginning of the commit subject with an : on end. 10 | • They can optionally have a scope set to outline the module or component that is affected eg feat(bldAssess): 11 | • There is a short hash on the end of every commit that is currently commented out so that change log did not grow too long (due to some system's file size limitations). 
You can uncomment if you wish [`{{shorthash}}`]({{href}}) 12 | Example Definitions 13 | • feat: A new feature 14 | • fix: A bug fix 15 | • perf: A code change that improves performance 16 | • refactor: A code change that neither fixes a bug nor adds a feature 17 | • style: Changes that do not affect the meaning of the code (white-space, formatting, spelling mistakes, missing semi-colons, etc) 18 | • test: Adding missing tests or correcting existing tests 19 | • docs: Adding/updating documentation 20 | • chore: Something like updating a library version, or moving files to be in a better location and updating all file refs 21 | --}} 22 | 23 | 24 | {{!-- In package.json need to add this to remove label text from the change log output (because the markdown headers are now used to group them). 25 | NOTES • Individual brackets have been escaped twice to be Json compliant. 26 | • For items that define a scope eg feat(bldAssess): We remove the 1st bracket and then re-add it so we can select the right piece of text 27 | { 28 | "name": "my-awesome-package", 29 | "auto-changelog": { 30 | "replaceText": { 31 | "([bB]reaking:)": "", 32 | "([bB]reaking change:)": "", 33 | "(^[fF]eat:)": "", 34 | "(^[fF]eat\\()": "\\(", 35 | "(^[fF]ix:)": "", 36 | "(^[fF]ix\\()": "\\(", 37 | "(^[cC]hore:)": "", 38 | "(^[cC]hore\\()": "\\(", 39 | "(^[dD]ocs:)": "", 40 | "(^[dD]ocs\\()": "\\(", 41 | "(^[rR]efactor:)": "", 42 | "(^[rR]efactor\\()": "\\(", 43 | "(^[tT]est:)": "", 44 | "(^[tT]est\\()": "\\(", 45 | "(^[sS]tyle:)": "", 46 | "(^[sS]tyle\\()": "\\(", 47 | "(^[pP]erf:)": "", 48 | "(^[pP]erf\\()": "\\(" 49 | } 50 | } 51 | } 52 | --}} 53 | 54 | {{!-- 55 | Regex reminders 56 | ^ = starts with 57 | \( = ( character (otherwise it is interpreted as a regex lookup group) 58 | * = zero or more of the previous character 59 | \s = whitespace 60 | . 
= any character except newline 61 | | = or 62 | [aA] = charcter a or character A 63 | --}} 64 | 65 | 66 | {{#each releases}} 67 | {{#if href}} 68 | ## [{{title}}]({{href}}) 69 | {{else}} 70 | ## {{title}} 71 | {{/if}} 72 | 73 | > {{niceDate}} 74 | 75 | {{#if summary}} 76 | {{summary}} 77 | {{/if}} 78 | 79 | {{! List commits that fix a given issues}} 80 | {{#each fixes}} 81 | - {{#if commit.breaking}}**Breaking change:** {{/if}}{{commit.subject}}{{#each fixes}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}){{/each}} 82 | {{/each}} 83 | 84 | {{! List commits with 'breaking:' or 'Breaking change:' anywhere in the message under a heading}} 85 | {{#commit-list merges heading='### Breaking Changes :warning:' message='[bB]reaking [cC]hange:|[bB]reaking:' exclude='\[skip-changelog\]'}} 86 | - {{message}} @{{author}} 87 | {{/commit-list}} 88 | 89 | {{! List commits organised under a heading, but not those already listed in the breaking section }} 90 | {{#commit-list merges heading='### New Features' message='^[fF]eat:|[fF]eat\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 91 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 92 | {{/commit-list}} 93 | 94 | {{#commit-list merges heading='### Fixes' message='^[fF]ix:|^[fF]ix\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 95 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 96 | {{/commit-list}} 97 | 98 | {{#commit-list merges heading='### Chores And Housekeeping' message='^[cC]hore:|^[cC]hore\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 99 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 100 | {{/commit-list}} 101 | 102 | {{#commit-list merges heading='### Documentation Changes' message='^[dD]ocs:|^[dD]ocs\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 103 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 104 | {{/commit-list}} 105 | 106 | 
{{#commit-list merges heading='### Refactoring and Updates' message='^[rR]efactor:|^[rR]efactor\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 107 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 108 | {{/commit-list}} 109 | 110 | {{#commit-list merges heading='### Changes to Test Assests' message='^[tT]est:|^[tT]est\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 111 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 112 | {{/commit-list}} 113 | 114 | {{#commit-list merges heading='### Tidying of Code eg Whitespace' message='^[sS]tyle:|^[sS]tyle\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 115 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 116 | {{/commit-list}} 117 | 118 | {{#commit-list merges heading='### Performance Improvements' message='^[pP]erf:|^[pP]erf\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 119 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 120 | {{/commit-list}} 121 | 122 | {{#commit-list merges heading='### CI Improvements' message='^[cC][iI]:|^[cC][iI]\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 123 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 124 | {{/commit-list}} 125 | 126 | {{#commit-list merges heading='### Uncategorized Changes' exclude='[bB]reaking [cC]hange:|[bB]reaking:|^[fF]eat:|^[fF]eat\(|^[fF]ix:|^[fF]ix\(|^[cC]hore:|^[cC]hore\(|^[cC][iI]:|^[cC][iI]\(|^[dD]ocs:|^[dD]ocs\(|^[rR]efactor:|^[rR]efactor\(|^[tT]est:|^[tT]est\(|^[sS]tyle:|^[sS]tyle\(|^[pP]erf:|^[pP]erf\(|\[skip-changelog\]'}} 127 | - {{message}} ({{author}}{{#if href}} - [#{{id}}]({{href}}){{/if}}) 128 | {{/commit-list}} 129 | 130 | {{! 
List commits with 'breaking:' or 'Breaking change:' anywhere in the message under a heading}} 131 | {{#commit-list commits heading='### Breaking Changes :warning:' message='[bB]reaking [cC]hange:|[bB]reaking:' exclude='\[skip-changelog\]'}} 132 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 133 | {{/commit-list}} 134 | 135 | {{! List commits organised under a heading, but not those already listed in the breaking section }} 136 | {{#commit-list commits heading='### New Features' message='^[fF]eat:|[fF]eat\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 137 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 138 | {{/commit-list}} 139 | 140 | {{#commit-list commits heading='### Fixes' message='^[fF]ix:|^[fF]ix\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 141 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 142 | {{/commit-list}} 143 | 144 | {{#commit-list commits heading='### Chores And Housekeeping' message='^[cC]hore:|^[cC]hore\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 145 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 146 | {{/commit-list}} 147 | 148 | {{#commit-list commits heading='### Documentation Changes' message='^[dD]ocs:|^[dD]ocs\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 149 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 150 | {{/commit-list}} 151 | 152 | {{#commit-list commits heading='### Refactoring and Updates' message='^[rR]efactor:|^[rR]efactor\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 153 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 154 | {{/commit-list}} 155 | 156 | {{#commit-list commits heading='### Changes to Test Assests' message='^[tT]est:|^[tT]est\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 157 | - {{subject}} 
({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 158 | {{/commit-list}} 159 | 160 | {{#commit-list commits heading='### Tidying of Code eg Whitespace' message='^[sS]tyle:|^[sS]tyle\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 161 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 162 | {{/commit-list}} 163 | 164 | {{#commit-list commits heading='### Performance Improvements' message='^[pP]erf:|^[pP]erf\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 165 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 166 | {{/commit-list}} 167 | 168 | {{#commit-list commits heading='### CI Improvements' message='^[cC][iI]:|^[cC][iI]\(' exclude='[bB]reaking [cC]hange:|[bB]reaking:|\[skip-changelog\]'}} 169 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 170 | {{/commit-list}} 171 | 172 | {{#commit-list commits heading='### Uncategorized Changes' exclude='[bB]reaking [cC]hange:|[bB]reaking:|^[fF]eat:|^[fF]eat\(|^[fF]ix:|^[fF]ix\(|^[cC]hore:|^[cC]hore\(|^[cC][iI]:|^[cC][iI]\(|^[dD]ocs:|^[dD]ocs\(|^[rR]efactor:|^[rR]efactor\(|^[tT]est:|^[tT]est\(|^[sS]tyle:|^[sS]tyle\(|^[pP]erf:|^[pP]erf\(|\[skip-changelog\]'}} 173 | - {{subject}} ({{author}}{{#if href}} - [{{shorthash}}]({{href}}){{/if}}) 174 | {{/commit-list}} 175 | 176 | {{/each}} 177 | -------------------------------------------------------------------------------- /.codespellignorelines: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/.codespellignorelines -------------------------------------------------------------------------------- /.codespellrc: -------------------------------------------------------------------------------- 1 | [codespell] 2 | skip = .git/* 3 | ignore-regex = @groupes.epfl.ch 4 | ignore-words-list = coo 5 | 
-------------------------------------------------------------------------------- /.copier-answers.yml: -------------------------------------------------------------------------------- 1 | # Changes here will be overwritten by Copier 2 | 3 | _commit: 0.1.20 4 | _src_path: git@bbpgitlab.epfl.ch:neuromath/python-template.git 5 | author_email: bbp-ou-molsys@groupes.epfl.ch 6 | author_name: bbp-ou-molsys 7 | copyright_license: BBP-internal-confidential 8 | copyright_year: '2023' 9 | distribution_name: AstroVascPy 10 | download_url: https://bbpgitlab.epfl.ch/Molecular Systems/astrovascpy 11 | init_git: false 12 | maintainer: Stephanie Battini 13 | package_name: AstroVascPy 14 | project_description: AstroVascPy is a Python library for computing the blood pressure and flow through a large vasculature network, incorporating the effect of astrocytic endfeet on the blood vessels radii. 15 | project_name: AstroVascPy 16 | project_url: https://bbpteam.epfl.ch/documentation/projects/astrovascpy 17 | repository_name: AstroVascPy 18 | repository_namespace: Molecular Systems 19 | repository_provider: gitlab 20 | ssh_url: git@bbpgitlab.epfl.ch:Molecular Systems/astrovascpy.git 21 | support_py37: true 22 | team_name: Molecular Systems 23 | tracker_url: https://bbpteam.epfl.ch/project/issues/projects 24 | version: 0.0.1 25 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = 3 | E203 4 | W503 5 | max-line-length = 100 6 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.asc linguist-detectable=false 2 | .auto-changelog-template.hbs linguist-detectable=false 3 | commitlint.config.js linguist-detectable=false 4 | -------------------------------------------------------------------------------- 
/.github/workflows/check-packaging.yml: -------------------------------------------------------------------------------- 1 | name: Check packaging 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - main 7 | jobs: 8 | check-packaging: 9 | name: Build and check the distribution package 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout repository 13 | uses: actions/checkout@v4 14 | - name: Install test dependencies 15 | run: | 16 | python3 -m pip install --upgrade pip setuptools 17 | python3 -m pip install tox-gh-actions 18 | - name: Run tox 19 | run: | 20 | tox -e check-packaging 21 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Static code analysis 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - main 7 | jobs: 8 | lint: 9 | name: Perform static analysis of the code 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout repository 13 | uses: actions/checkout@v4 14 | - name: Install test dependencies 15 | run: | 16 | python3 -m pip install --upgrade pip setuptools 17 | python3 -m pip install tox-gh-actions 18 | - name: Run tox 19 | run: | 20 | tox -e lint 21 | -------------------------------------------------------------------------------- /.github/workflows/publish-sdist.yml: -------------------------------------------------------------------------------- 1 | name: Publish source tarball to PyPi 2 | on: 3 | push: 4 | tags: 5 | - '*' 6 | jobs: 7 | build-n-publish: 8 | name: Build and publish the release on PyPI 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - name: Set up Python 3.12 13 | uses: actions/setup-python@v4 14 | with: 15 | python-version: "3.12" 16 | - name: Install test dependencies 17 | run: | 18 | python -m pip install --upgrade pip setuptools 19 | - name: Build a source tarball 20 | run: | 21 | python setup.py sdist 22 | - name: Publish 
distribution package to PyPI 23 | uses: pypa/gh-action-pypi-publish@release/v1 24 | with: 25 | user: __token__ 26 | password: ${{ secrets.PYPI_PASSWORD }} 27 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test the contribution 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - main 7 | env: 8 | apt_options: -o Acquire::Retries=3 9 | PETSC_DIR: /usr/lib/petscdir/petsc3.15/x86_64-linux-gnu-real 10 | PETSC_VERSION: "3.15.1" 11 | PYTHON_VERSION: "3.11" 12 | jobs: 13 | test: 14 | name: Build and test the contribution 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Set up Python 18 | uses: actions/setup-python@v3 19 | with: 20 | python-version: ${{ env.PYTHON_VERSION }} 21 | 22 | - name: Setup MPI 23 | uses: mpi4py/setup-mpi@v1 24 | with: 25 | mpi: openmpi 26 | 27 | - name: Checkout repository 28 | uses: actions/checkout@v4 29 | 30 | - name: Install system packages for MPI and PETSc 31 | run: | 32 | sudo apt-get ${{ env.apt_options }} update -y 33 | sudo apt-get ${{ env.apt_options }} install libopenmpi-dev libpetsc-real3.15-dev 34 | - name: Install petsc4py and other test dependencies 35 | run: | 36 | python -m pip install --upgrade pip setuptools 37 | python -m pip install tox-gh-actions 38 | 39 | - name: Cache petsc4py build 40 | id: cache-petsc4py 41 | uses: actions/cache@v3 42 | env: 43 | cache-name: cache-petsc4py 44 | with: 45 | path: venv 46 | key: ${{ runner.os }}-build-${{ env.cache-name}}-${{ env.PETSC_VERSION }}-py${{ env.PYTHON_VERSION }} 47 | 48 | # FIXME Once PETSc is updated to a more reasonable version, one should attempt to 49 | # build a wheel instead of installing into a virtual environment. This wheel can then 50 | # be referred to directly in Tox, and we can avoid passing PYTHONPATH around. 
51 | - name: Build petsc4py 52 | if: steps.cache-petsc4py.outputs.cache-hit != 'true' 53 | run: | 54 | python -m venv venv 55 | . ./venv/bin/activate 56 | python -m pip install --upgrade pip setuptools wheel 57 | python -m pip install "cython<3" numpy 58 | git clone --branch "v${{ env.PETSC_VERSION }}" --depth 1 https://gitlab.com/petsc/petsc.git 59 | cd petsc/src/binding/petsc4py 60 | python -m pip install . 61 | 62 | - name: Run tox 63 | run: | 64 | tox -e py${{ env.PYTHON_VERSION }} 65 | env: 66 | PYTHONPATH: ${{ github.workspace }}/venv/lib/python${{ env.PYTHON_VERSION }}/site-packages 67 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # data 2 | .eggs 3 | .idea 4 | *.pyc 5 | *.swp 6 | *py~ 7 | *~ 8 | *.egg-info 9 | examples/data/graphs_folder/dumped_graph.bin 10 | 11 | # Mac related stuff 12 | .DS_Store 13 | 14 | # Packaging builds 15 | build 16 | dist 17 | 18 | # tox 19 | .tox 20 | 21 | # coverage 22 | .coverage 23 | .coverage* 24 | coverage.xml 25 | 26 | # venv 27 | venv 28 | python-venv/ 29 | test_venv 30 | 31 | # docs 32 | docs/build 33 | docs/source/generated 34 | docs/source/examples 35 | 36 | # reports 37 | reports 38 | 39 | # examples 40 | examples/.ipynb_checkpoints 41 | 42 | # pytest 43 | .pytest_cache 44 | 45 | tests/data/reporting/export 46 | # Node 47 | node_modules 48 | package-lock.json 49 | 50 | *.log 51 | *.pkl 52 | output/ 53 | settings.json 54 | -------------------------------------------------------------------------------- /.gitreview: -------------------------------------------------------------------------------- 1 | [gerrit] 2 | host=bbpcode.epfl.ch 3 | port=22 4 | project=molecularsystems/bloodflow 5 | defaultbranch=master 6 | defaultremote=origin 7 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: 
-------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.3.0 4 | hooks: 5 | - id: check-added-large-files 6 | - id: check-case-conflict 7 | - id: check-merge-conflict 8 | - id: check-symlinks 9 | - id: check-yaml 10 | - id: debug-statements 11 | - id: end-of-file-fixer 12 | - id: trailing-whitespace 13 | - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook 14 | rev: v9.0.0 15 | hooks: 16 | - id: commitlint 17 | stages: 18 | - commit-msg 19 | additional_dependencies: ['conventional-changelog-conventionalcommits'] 20 | - repo: https://github.com/codespell-project/codespell 21 | rev: v2.1.0 22 | hooks: 23 | - id: codespell 24 | args: ["-x", ".codespellignorelines"] 25 | - repo: https://github.com/astral-sh/ruff-pre-commit 26 | # Ruff version. 27 | rev: v0.3.7 28 | hooks: 29 | # Run the linter. 30 | - id: ruff 31 | # Run the formatter. 32 | - id: ruff-format 33 | 34 | #- repo: https://github.com/PyCQA/pydocstyle 35 | # rev: 6.1.1 36 | # hooks: 37 | # - id: pydocstyle 38 | # additional_dependencies: ["toml"] 39 | # exclude: 'examples|tests|scripts' 40 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | disable= 3 | fixme, 4 | invalid-name, 5 | len-as-condition, 6 | no-else-return 7 | 8 | [FORMAT] 9 | # Regexp for a line that is allowed to be longer than the limit. 10 | ignore-long-lines=\bhttps?://\S 11 | # Maximum number of characters on a single line. 12 | max-line-length=100 13 | 14 | [DESIGN] 15 | # Maximum number of arguments for function / method 16 | max-args=10 17 | # Argument names that match this expression will be ignored. 
Default to name 18 | # with leading underscore 19 | ignored-argument-names=_.* 20 | # Maximum number of locals for function / method body 21 | max-locals=25 22 | # Maximum number of return / yield for function / method body 23 | max-returns=6 24 | # Maximum number of branch for function / method body 25 | max-branches=12 26 | # Maximum number of statements in function / method body 27 | max-statements=50 28 | # Maximum number of parents for a class (see R0901). 29 | max-parents=7 30 | # Maximum number of attributes for a class (see R0902). 31 | max-attributes=40 32 | # Minimum number of public methods for a class (see R0903). 33 | min-public-methods=0 34 | # Maximum number of public methods for a class (see R0904). 35 | max-public-methods=60 36 | 37 | [SIMILARITIES] 38 | # checks for similarities and duplicated code. This computation may be 39 | # memory / CPU intensive, so you should disable it if you experiments some 40 | # problems. 41 | 42 | # Minimum lines number of a similarity. 43 | min-similarity-lines=25 44 | # Ignore comments when computing similarities. 45 | ignore-comments=yes 46 | # Ignore docstrings when computing similarities. 47 | ignore-docstrings=yes 48 | 49 | [TYPECHECK] 50 | # List of classes names for which member attributes should not be checked 51 | # (useful for classes with attributes dynamically set). 52 | 53 | # List of module names for which member attributes should not be checked 54 | # (useful for modules/projects where namespaces are manipulated during runtime 55 | # and thus existing member attributes cannot be deduced by static analysis. It 56 | # supports qualified module names, as well as Unix pattern matching. 
57 | ignored-modules=numpy,numpy.*,scipy,scipy.*,morphio,tqdm,networkx,matplotlib,vtk,mpi4py,petsc,petsc4py 58 | extension-pkg-whitelist=morphio 59 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | build: 3 | os: ubuntu-22.04 4 | apt_packages: 5 | - libopenmpi-dev 6 | tools: 7 | python: "3.11" 8 | python: 9 | install: 10 | # Markdown conversion requires a particular version of docutils 11 | - requirements: docs/requirements.txt 12 | - method: pip 13 | path: . 14 | extra_requirements: 15 | - docs 16 | - viz 17 | sphinx: 18 | builder: html 19 | configuration: docs/source/conf.py 20 | fail_on_warning: false 21 | -------------------------------------------------------------------------------- /AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Stéphanie Battini 2 | Nicola Cantarutti 3 | Christos Kotsalos 4 | Tristan Carel 5 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ### v0.1.6 4 | * New parameter `entry_noise` to enable or disable the endfeet activity on entry nodes. (#41) 5 | * Add helper script to load archngv graphs and convert them in pickle binary format. (#40) 6 | 7 | ### v0.1.5 8 | * Lazy import of mpi4py module (#27) 9 | 10 | ### v0.1.4 11 | * Bump minimal supported version of Python to 3.10. Continuous integration now uses Python 3.11 (#23) 12 | 13 | ### v0.1.3 14 | * New function `distribute_array` for scattering numpy arrays. (#17) 15 | * PETSc solver: replaced GMRES with LGMRES. Added null space information. (#20) 16 | 17 | ### v0.1.2 18 | * Introduce the class `utils.Graph` to optimize the computation of node degrees. (#12) 19 | * Compute flow and pressure only on the main connected component. 
(#12) 20 | 21 | ### v0.1.1 22 | * vkt is now an optional dependency. Use `pip install astrovascpy[viz]` to enable it. (#14) 23 | * Fix PetscBinaryIO.get_conf() value on error. Returns a valid config when PETSc installation cannot be located. (#14) 24 | 25 | ### v0.1 26 | * Add unit tests for MPI-PETSc functions. (#5) 27 | * Add separate ROU calibration for arteries and capillaries. (#6) 28 | * Remove complex number support from petsc & petsc4py 29 | * Initial release of AstroVascPy Python library. 30 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | title: AstroVascPy 3 | message: '"If you use this software, please cite it as below."' 4 | type: software 5 | authors: 6 | - given-names: >- 7 | Stéphanie Battini, Nicola Cantarutti, Christos 8 | Kotsalos and Tristan Carel 9 | affiliation: Blue Brain Project (EPFL) 10 | repository-code: 'https://github.com/BlueBrain/AstroVascPy' 11 | date-released: 2023-06-26 12 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution Guide 2 | 3 | We would love for you to contribute to the AstroVascPy project and help make it better than it is today. 
4 | As a contributor, here are the guidelines we would like you to follow: 5 | 6 | - [Contribution Guide](#contribution-guide) 7 | - [Got a Question?](#got-a-question) 8 | - [Found a Bug?](#found-a-bug) 9 | - [Missing a Feature?](#missing-a-feature) 10 | - [Submission Guidelines](#submission-guidelines) 11 | - [Issues](#issues) 12 | - [Pull Requests](#pull-requests) 13 | - [After your pull request is merged](#after-your-pull-request-is-merged) 14 | - [Development Environment](#development-environment) 15 | - [Setup](#setup) 16 | - [Build](#build) 17 | - [Test](#test) 18 | - [Coding conventions](#coding-conventions) 19 | 20 | # Got a Question? 21 | 22 | Please do not hesitate to raise an issue on [github project page][github]. 23 | 24 | # Found a Bug? 25 | 26 | If you find a bug in the source code, you can help us by [submitting an issue](#issues) 27 | to our [GitHub Repository][github]. Even better, you can [submit a Pull Request](#pull-requests) with a fix. 28 | 29 | # Missing a Feature? 30 | 31 | You can *request* a new feature by [submitting an issue](#issues) to our GitHub Repository. 32 | If you would like to *implement* a new feature, please submit an issue with a proposal for your 33 | work first, to be sure that we can use it. 34 | 35 | Please consider what kind of change it is: 36 | 37 | * For a **Major Feature**, first open an issue and outline your proposal so that it can be 38 | discussed. This will also allow us to better coordinate our efforts, prevent duplication of work, 39 | and help you to craft the change so that it is successfully accepted into the project. 40 | * **Small Features** can be crafted and directly [submitted as a Pull Request](#pull-requests). 41 | 42 | # Submission Guidelines 43 | 44 | ## Issues 45 | 46 | Before you submit an issue, please search the issue tracker, maybe an issue for your problem 47 | already exists and the discussion might inform you of workarounds readily available. 
48 | 49 | We want to fix all the issues as soon as possible, but before fixing a bug we need to reproduce 50 | and confirm it. In order to reproduce bugs we will need as much information as possible, and 51 | preferably with an example. 52 | 53 | ## Pull Requests 54 | 55 | When you wish to contribute to the code base, please consider the following guidelines: 56 | 57 | * Make a [fork](https://guides.github.com/activities/forking/) of this repository. 58 | * Make your changes in your fork, in a new git branch: 59 | 60 | ```shell 61 | git checkout -b my-fix-branch main 62 | ``` 63 | * Create your patch, **including appropriate Python test cases**. 64 | Please check the coding [conventions](#coding-conventions) for more information. 65 | * Run the full test suite, and ensure that all tests pass. 66 | * Commit your changes using a descriptive commit message. 67 | 68 | ```shell 69 | git commit -a 70 | ``` 71 | * Push your branch to GitHub: 72 | 73 | ```shell 74 | git push origin my-fix-branch 75 | ``` 76 | * In GitHub, send a Pull Request to the `main` branch of the upstream repository of the relevant component. 77 | * If we suggest changes then: 78 | * Make the required updates. 79 | * Re-run the test suites to ensure tests are still passing. 80 | * Rebase your branch and force push to your GitHub repository (this will update your Pull Request): 81 | 82 | ```shell 83 | git rebase main -i 84 | git push -f 85 | ``` 86 | 87 | That’s it! Thank you for your contribution! 
88 | 89 | ### After your pull request is merged 90 | 91 | After your pull request is merged, you can safely delete your branch and pull the changes from 92 | the main (upstream) repository: 93 | 94 | * Delete the remote branch on GitHub either through the GitHub web UI or your local shell as follows: 95 | 96 | ```shell 97 | git push origin --delete my-fix-branch 98 | ``` 99 | * Check out the main branch: 100 | 101 | ```shell 102 | git checkout main -f 103 | ``` 104 | * Delete the local branch: 105 | 106 | ```shell 107 | git branch -D my-fix-branch 108 | ``` 109 | * Update your main with the latest upstream version: 110 | 111 | ```shell 112 | git pull --ff upstream main 113 | ``` 114 | 115 | [github]: https://github.com/BlueBrain/AstroVascPy 116 | 117 | # Development Environment 118 | 119 | Please make sure to install a recent version of Python (>=3.7) and the project requirements, 120 | see the [dependencies](./setup.py#dependencies). 121 | 122 | ## Setup 123 | 124 | It is recommended to use `conda` to develop in a sandbox environment and have mpi4py and petsc4py working properly: 125 | cf. the `setup.sh` file for more details. 126 | 127 | ## Build 128 | 129 | Run the following command to build incrementally the project: `pip install -e .` 130 | 131 | ## Test 132 | 133 | Run the following command to run the Python unit-tests: `pytest tests` 134 | 135 | ## Coding conventions 136 | 137 | The code coverage of the Python unit-tests may not decrease over time. 138 | It means that every change must go with their corresponding Python unit-tests to 139 | validate the library behavior as well as to demonstrate the API usage. 140 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. 
Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusve, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2023 Blue Brain Project/EPFL 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | global-exclude *.py[co] .DS_Store 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > [!WARNING] 2 | > The Blue Brain Project concluded in December 2024, so development has ceased under the BlueBrain GitHub organization. 3 | > Future development will take place at: https://github.com/openbraininstitute/AstroVascPy 4 | 5 | ![AstroVascPy Logo](docs/source/logo/BBP-AstroVascPy-Github.jpg) 6 | 7 | # AstroVascPy 8 | 9 | AstroVascPy is a Python library for computing the blood pressure and flow through the vasculature 10 | (whole cortical column). AstroVascPy incorporates the effect of astrocytic endfeet on the blood vessel radii. 11 | In particular, AstroVascPy replicates the dynamics of the radius of a vessel due to vasodilation. 
AstroVascPy uses the vascpy Point Graph representation to access the vasculature database stored in an HDF5 file (SONATA format).
46 | 47 | ## Usage 48 | 49 | The code can be run using 50 | 51 | python3 compute_static_flow_pressure.py 52 | 53 | ### Load Archngv graph 54 | 55 | An archngv graph can be loaded and converted to a pickle binary format, using the script `load_graph_archngv.py` inside the folder `examples`. 56 | Run the script as 57 | 58 | python3 load_graph_archngv.py --filename_ngv "path_to_ngv_circuit" --output_graph "output_graph_name.bin" 59 | 60 | ### Sonata reports 61 | 62 | Structure of the reports: 63 | This is a particular type of compartment report for the vasculature. 64 | We get a set of 3 reports at each time-step storing the blood flow, 65 | the blood pressure and, the radius at each segment of the vasculature. 66 | Here are the units of these entities: 67 | -flow (µm^3.s^-1) 68 | -pressure (g.µm^-1.s^-2) 69 | -radius (µm) 70 | 71 | ## Authors 72 | 73 | Stéphanie Battini, Nicola Cantarutti, Christos Kotsalos and Tristan Carel 74 | 75 | Link to the article on [Bio-arxiv](https://www.biorxiv.org/content/10.1101/2024.11.14.623572v1) 76 | 77 | ## Funding and Acknowledgements 78 | 79 | The development of this software was supported by funding to the Blue Brain Project, a research center of the 80 | École polytechnique fédérale de Lausanne (EPFL), from the Swiss government’s ETH Board of the Swiss Federal 81 | Institutes of Technology. 82 | 83 | We would like to thank Alessandro Cattabiani, Thomas Delemontex and Eleftherios Zisis 84 | for reviewing the code and the engineering support. 85 | 86 | Copyright (c) 2023-2023 Blue Brain Project/EPFL 87 | -------------------------------------------------------------------------------- /astrovascpy/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # Unless required by applicable law or agreed to in writing, software 7 | # distributed under the License is distributed on an "AS IS" BASIS, 8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 | # See the License for the specific language governing permissions and 10 | # limitations under the License. 11 | -------------------------------------------------------------------------------- /astrovascpy/exceptions.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # Unless required by applicable law or agreed to in writing, software 7 | # distributed under the License is distributed on an "AS IS" BASIS, 8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 | # See the License for the specific language governing permissions and 10 | # limitations under the License. 11 | 12 | 13 | class BloodFlowError(Exception): 14 | """Base astrovascpy exception.""" 15 | -------------------------------------------------------------------------------- /astrovascpy/io.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # Unless required by applicable law or agreed to in writing, software 7 | # distributed under the License is distributed on an "AS IS" BASIS, 8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 | # See the License for the specific language governing permissions and 10 | # limitations under the License. 11 | 12 | import os 13 | import pickle 14 | from pathlib import Path 15 | 16 | import pandas as pd 17 | from vascpy import PointVasculature, SectionVasculature 18 | 19 | from .exceptions import BloodFlowError 20 | from .utils import Graph, rank0 21 | 22 | 23 | def load_graph(filename): 24 | """Load a vasculature from file. 25 | 26 | Args: 27 | filename (str): vasculature dataset. 28 | 29 | Returns: 30 | utils.Graph: graph containing point vasculature skeleton. 31 | 32 | Raises: 33 | BloodFlowError: if the file object identified by filename is not in h5 format. 34 | """ 35 | if Path(filename).suffix == ".h5": 36 | pv = SectionVasculature.load(filename).as_point_graph() 37 | graph = Graph.from_point_vasculature(pv) 38 | graph.edge_properties.index = pd.MultiIndex.from_frame( 39 | graph.edge_properties.loc[:, ["section_id", "segment_id"]] 40 | ) 41 | return graph 42 | raise BloodFlowError("File object type identified by filename is not supported") 43 | 44 | 45 | def load_graph_from_bin(filename): 46 | """ 47 | Loading of a graph from a binary file using pickle. 48 | Args: 49 | filename (str): vasculature dataset path. 50 | Returns: 51 | utils.Graph: graph containing point vasculature skeleton. 
52 | """ 53 | if rank0(): 54 | if os.path.exists(filename): 55 | print("Loading graph from binary file using pickle", flush=True) 56 | filehandler = open(filename, "rb") 57 | pv = pd.read_pickle(filehandler) 58 | graph = Graph.from_point_vasculature(pv) 59 | else: 60 | raise BloodFlowError("Graph file not found") 61 | return graph 62 | else: 63 | return None 64 | 65 | 66 | def load_graph_from_h5(filename): 67 | """ 68 | Loading of a graph from a .h5 using PointVasculature.load_sonata. 69 | Args: 70 | filename (str): vasculature dataset path. 71 | Returns: 72 | utils.Graph: graph containing point vasculature skeleton. 73 | """ 74 | if rank0(): 75 | if os.path.exists(filename): 76 | print("Loading sonata graph using PointVasculature.load_sonata", flush=True) 77 | pv = PointVasculature.load_sonata(filename) 78 | graph = Graph.from_point_vasculature(pv) 79 | else: 80 | raise BloodFlowError("Graph file not found") 81 | return graph 82 | else: 83 | return None 84 | 85 | 86 | def load_graph_from_csv(node_filename, edge_filename): 87 | """ 88 | Loading of node dataset and edge dataset using pandas. 89 | It creates a PointVasculature graph object. 90 | 91 | Args: 92 | node_filename (str): node dataset path. 93 | edge_filename (str): edge dataset path. 94 | Returns: 95 | utils.Graph: graph containing point vasculature skeleton. 
96 | """ 97 | if rank0(): 98 | print("Loading csv dataset using pandas", flush=True) 99 | graph_nodes = pd.read_csv(node_filename) 100 | graph_edges = pd.read_csv(edge_filename) 101 | 102 | column_entries = ["start_node", "end_node", "type", "section_id", "segment_id"] 103 | for col in column_entries: 104 | if col not in graph_edges.columns: 105 | raise BloodFlowError(f"Missing {col} in columns") 106 | 107 | pv = PointVasculature(graph_nodes, graph_edges) 108 | graph = Graph.from_point_vasculature(pv) 109 | return graph 110 | else: 111 | return None 112 | -------------------------------------------------------------------------------- /astrovascpy/ou.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2023 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # Unless required by applicable law or agreed to in writing, software 7 | # distributed under the License is distributed on an "AS IS" BASIS, 8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 | # See the License for the specific language governing permissions and 10 | # limitations under the License. 11 | 12 | import numpy as np 13 | import scipy.optimize as scpo 14 | import scipy.stats as ss 15 | from scipy import sparse 16 | from scipy.sparse.linalg import spsolve 17 | 18 | 19 | def ornstein_uhlenbeck_process( 20 | kappa, sigma, dt, iterations=1, zero_noise_time=1, seed=1234 21 | ): # pragma: no cover 22 | """Monte Carlo simulation of the OU process with a reflecting boundary at zero. 
23 | 24 | Stochastic differential equation: 25 | dX(t) = - kappa X(t) dt + sigma dW 26 | with solution: 27 | X(t+dt) = X(t) * exp(-kappa dt) + alpha * epsilon(t) 28 | where: 29 | alpha = sqrt( sigma^2 / (2kappa) * (1-exp(-2*kappa*dt)) ) 30 | alpha is the standard deviation of epsilon. 31 | epsilon is a standard Gaussian random variable. 32 | The process starts at X(0) = 0 33 | 34 | Args: 35 | kappa (float): mean reversion coefficient. 36 | sigma (float): diffusion coefficient of the noise W 37 | dt (float): time-step. 38 | iterations (int): number of iterations. 39 | zero_noise_time (int): index at which the noise is set to zero 40 | for the rest of the simulation 41 | seed (int): RNG seed 42 | 43 | Returns: 44 | numpy.array: Reflected OU process starting at zero. 45 | """ 46 | np.random.seed(seed=seed) 47 | 48 | X = np.zeros(iterations + 1) 49 | alpha = np.sqrt(sigma**2 / (2 * kappa) * (1 - np.exp(-2 * kappa * dt))) 50 | exp_ = np.exp(-kappa * dt) 51 | epsilon = ss.norm.rvs(loc=0, scale=alpha, size=iterations) 52 | epsilon[zero_noise_time:] = 0 53 | 54 | for t in range(iterations): 55 | X[t + 1] = X[t] * exp_ + epsilon[t] # OU solution 56 | X[t + 1] = X[t + 1] if (X[t + 1] > 0.0) else 0.0 # reflecting boundary 57 | return X 58 | 59 | 60 | def expected_time(kappa, x_max, C, Nspace=10000): # pragma: no cover 61 | """Calculate the expected time to reach x_max starting from x_0=0. 62 | 63 | We assume that x_max = C sigma/ sqrt(2 kappa) 64 | The code solves the ODE: 65 | -kappa x U'(x) + 1/2 sigma^2 U''(x) = -1 66 | with U'(0)=0 and U(x_max) = 0 67 | We use backward discretization for the first order derivative. 68 | Using the BC we get U(-1)=U(0) and U(N)=0. 69 | In matrix form, we solve: DU = -1, with tri-diagonal matrix D. 70 | 71 | Args: 72 | kappa (float): mean reversion coefficient. 73 | x_max (float): maximum value for the radius. 74 | C (float): number of st dev distance from origin 75 | Nspace (int): number of discretization points. 
76 | 77 | Returns: 78 | float: expected time. 79 | """ 80 | x_0 = 0 # starting point of the OU process 81 | sigma = x_max * np.sqrt(2 * kappa) / C 82 | x, dx = np.linspace(x_0, x_max, Nspace, retstep=True) # space discretization 83 | 84 | U = np.zeros(Nspace) # initialization of the solution 85 | constant_term = -np.ones(Nspace - 1) # -1 86 | 87 | sig2_dx = (sigma * sigma) / (dx * dx) 88 | # Construction of the tri-diagonal matrix D 89 | # a, lower diagonal 90 | a = kappa * x[:-1] / dx + 0.5 * sig2_dx 91 | aa = a[1:] 92 | # b, main diagonal 93 | b = -kappa * x[:-1] / dx - sig2_dx 94 | b[0] = -0.5 * sig2_dx # from BC at x0 95 | # c, upper diagonal 96 | c = 0.5 * sig2_dx * np.ones_like(a) 97 | cc = c[:-1] 98 | 99 | D = sparse.diags([aa, b, cc], [-1, 0, 1], shape=(Nspace - 1, Nspace - 1)).tocsc() 100 | 101 | U[:-1] = spsolve(D, constant_term) 102 | return U[0] 103 | 104 | 105 | def compute_OU_params(time, x_max, c): 106 | """Zero finder function to compute the value of kappa and sigma. 107 | 108 | Args: 109 | time (float): simulation time. 110 | x_max (float): maximum value for the radius. 111 | c (float): number of st dev distance from origin 112 | 113 | Returns: 114 | tuple: (kappa, sigma) 115 | """ 116 | 117 | def obj_fun(kappa): 118 | """Objective function. We want to find the zero. 119 | 120 | Args: 121 | kappa (float): mean reversion coefficient. 122 | 123 | Returns: 124 | float: expected time. 
125 | """ 126 | return time - expected_time(kappa, x_max, c) 127 | 128 | x, r = scpo.brentq(obj_fun, a=1e-2, b=50, xtol=1e-8, rtol=1e-4, full_output=True) 129 | if r.converged: 130 | kappa = x 131 | sigma = x_max * np.sqrt(2 * kappa) / c 132 | return kappa, sigma 133 | 134 | raise ValueError 135 | -------------------------------------------------------------------------------- /astrovascpy/plotting.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # Unless required by applicable law or agreed to in writing, software 7 | # distributed under the License is distributed on an "AS IS" BASIS, 8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 | # See the License for the specific language governing permissions and 10 | # limitations under the License. 11 | 12 | import matplotlib.pyplot as plt 13 | import networkx as nx 14 | import numpy as np 15 | 16 | 17 | def plot_pressure( 18 | graph, params, radii, node_sol, vmin=None, vmax=None, node_label=False, cmap="Blues" 19 | ): # pragma: no cover 20 | """Plot the pressure on the graph. 21 | 22 | Args: 23 | graph (vasculatureAPI.PointVasculature): graph containing point vasculature skeleton. 24 | radii (float): average radius per section. 25 | params (float): changing parameters. 26 | 27 | Returns: 28 | float: pressure at each node. 
29 | """ 30 | pos = {} 31 | for node_id in graph: 32 | pos[node_id] = list(graph.nodes[node_id]["position"][:2]) 33 | 34 | try: 35 | endfeet_edges = [ 36 | (node_u, node_v) 37 | for node_u, node_v in graph.edges() 38 | if graph[node_u][node_v]["endfeet_id"] > -1 39 | ] 40 | except BaseException: # pylint: disable=broad-except 41 | endfeet_edges = [] 42 | 43 | plt.figure(figsize=(10, 5)) 44 | axes = [] 45 | axes.append(plt.gca()) 46 | 47 | if "root_id" in params: 48 | nx.draw_networkx_nodes( 49 | graph, 50 | pos=pos, 51 | nodelist=[params["root_id"]], 52 | node_color="r", 53 | node_size=50, 54 | ax=axes[0], 55 | ) 56 | 57 | nx.draw_networkx_edges( 58 | graph, 59 | pos=pos, 60 | edgelist=endfeet_edges, 61 | width=5 * params["edge_scale"], 62 | edge_color="r", 63 | alpha=1, 64 | ) 65 | 66 | nx.draw_networkx_edges( 67 | graph, 68 | pos=pos, 69 | width=params["edge_scale"] * radii / np.max(radii), 70 | edge_color="k", 71 | ) 72 | 73 | nodes = nx.draw_networkx_nodes( 74 | graph, 75 | pos=pos, 76 | node_size=params["node_scale"], 77 | # * (node_sol - np.log10(params['p_min'])), 78 | node_color=node_sol, 79 | ax=axes[0], 80 | cmap=cmap, 81 | vmin=vmin, 82 | vmax=vmax, 83 | ) 84 | 85 | if node_label: 86 | nx.draw_networkx_labels(graph, pos=pos) 87 | 88 | plt.colorbar(nodes, ax=axes[0]) # , label='node pressure (log scale)') 89 | 90 | axes[0].axis("off") 91 | 92 | 93 | def plot_resistance( 94 | graph, 95 | params, 96 | radii, 97 | edge_vmin=None, 98 | edge_vmax=None, 99 | edge_label=False, 100 | edge_cmap=plt.cm.twilight, 101 | ): # pragma: no cover 102 | """Plot the resistance on the graph. 103 | 104 | Args: 105 | graph (vasculatureAPI.PointVasculature): graph containing point vasculature skeleton. 106 | radii (float): average radius per section. 107 | params (float): changing parameters. 108 | node_sol 109 | 110 | Returns: 111 | float: resistance at each edge. 
112 | """ 113 | pos = {} 114 | for node_id in graph: 115 | pos[node_id] = list(graph.nodes[node_id]["position"][:2]) 116 | 117 | try: 118 | endfeet_edges = [ 119 | (node_u, node_v) 120 | for node_u, node_v in graph.edges() 121 | if graph[node_u][node_v]["endfeet_id"] > -1 122 | ] 123 | except BaseException: # pylint: disable=broad-except 124 | endfeet_edges = [] 125 | 126 | plt.figure(figsize=(10, 5)) 127 | axes = [] 128 | axes.append(plt.gca()) 129 | 130 | if "root_id" in params: 131 | nx.draw_networkx_nodes( 132 | graph, 133 | pos=pos, 134 | nodelist=[params["root_id"]], 135 | node_color="r", 136 | node_size=50, 137 | ax=axes[0], 138 | ) 139 | 140 | nx.draw_networkx_edges( 141 | graph, 142 | pos=pos, 143 | edgelist=endfeet_edges, 144 | width=5 * params["edge_scale"], 145 | edge_color="r", 146 | alpha=1, 147 | ) 148 | 149 | nx.draw_networkx_edges( 150 | graph, 151 | pos=pos, 152 | width=params["edge_scale"] * radii / np.max(radii), 153 | # width = 1*np.array(radii), 154 | edge_color=params["resistances"], 155 | ax=axes[0], 156 | edge_cmap=edge_cmap, 157 | # edge_vmin=edge_vmin, 158 | # edge_vmax=edge_vmax, 159 | ) 160 | 161 | nx.draw_networkx_nodes( 162 | graph, 163 | pos=pos, 164 | node_size=params["node_scale"], 165 | node_color="k", 166 | ) 167 | 168 | if edge_label: 169 | nx.draw_networkx_labels(graph, pos=pos) 170 | 171 | plt.colorbar( 172 | plt.cm.ScalarMappable(norm=plt.Normalize(edge_vmin, edge_vmax), cmap=edge_cmap), 173 | ax=axes[0], 174 | ) 175 | 176 | axes[0].axis("off") 177 | 178 | 179 | def plot_on_graph(nx_graph, graph, pos, node_key="pressure", edge_key="flow"): 180 | """Plot data on a graph.""" 181 | input_nodes = np.argwhere(graph.degrees == 1).T[0] 182 | graph.node_properties.loc[input_nodes, node_key] = np.nan 183 | mask = graph.edge_properties[graph.edge_properties.end_node.isin(input_nodes)].index 184 | graph.edge_properties.loc[mask, edge_key] = np.nan 185 | if node_key is not None: 186 | nx.draw_networkx_nodes( 187 | nx_graph, 188 | 
pos=pos, 189 | node_size=2, 190 | node_color=graph.node_properties[node_key].to_numpy(), 191 | cmap=plt.get_cmap("Reds"), 192 | ) 193 | if edge_key is not None: 194 | nx.draw_networkx_nodes(nx_graph, pos=pos, nodelist=input_nodes, node_size=5, node_color="g") 195 | e = nx.draw_networkx_edges( 196 | nx_graph, 197 | pos=pos, 198 | edge_color=graph.edge_properties[edge_key].to_numpy(), 199 | edge_cmap=plt.get_cmap("Blues"), 200 | ) 201 | plt.colorbar(e, shrink=0.5) 202 | edgelist = [e for i, e in enumerate(nx_graph.edges) if i in mask] 203 | nx.draw_networkx_edges(nx_graph, pos=pos, edgelist=edgelist, edge_color="g") 204 | plt.axis("equal") 205 | -------------------------------------------------------------------------------- /astrovascpy/report_reader.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # Unless required by applicable law or agreed to in writing, software 7 | # distributed under the License is distributed on an "AS IS" BASIS, 8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 | # See the License for the specific language governing permissions and 10 | # limitations under the License. 11 | 12 | """ 13 | Module dedicated to the report reading. 14 | This is the standalone version before having proper sonata simulation config etc. The final version 15 | will use snap and will be in archngv most probably. 16 | Also this is a bit over-engineered but makes things easier if we have different types of reports 17 | in the future. Plus, this is a light version of the snap classes which will make things easy to 18 | adapt from these classes to the snap ones. 
import numpy as np
import pandas as pd
from cached_property import cached_property
from libsonata import ElementReportReader, SonataError

from .exceptions import BloodFlowError
from .utils import ensure_list

# pylint: disable=missing-kwoa


def _collect_population_reports(frame_report, cls):
    """Create one `cls` wrapper per population contained in `frame_report`."""
    return {
        population: cls(frame_report, population) for population in frame_report.population_names
    }


class PopulationFrameReport:
    """Access to PopulationFrameReport data.

    This class is generic for the soma-like or compartment-like reports.
    """

    def __init__(self, frame_report, population_name):
        """Initialize a PopulationFrameReport object from a FrameReport.

        Args:
            frame_report (FrameReport): FrameReport containing this frame report population.
            population_name (str): the population name corresponding to this report.

        Returns:
            PopulationFrameReport: A PopulationFrameReport object.
        """
        self.frame_report = frame_report
        self._frame_population = ElementReportReader(frame_report.filepath)[population_name]
        self._population_name = population_name

    @property
    def name(self):
        """Access to the population name."""
        return self._population_name

    @staticmethod
    def _wrap_columns(columns):
        """Allow to change the columns names if needed (identity by default)."""
        return columns

    def get(self, group=None, t_start=None, t_stop=None):
        """Fetch data from the report.

        Args:
            group (int/list/np.array): Get frames filtered by ids.
            t_start (float): Include only frames occurring at or after this time.
            t_stop (float): Include only frames occurring at or before this time.

        Returns:
            pandas.DataFrame: frame as columns indexed by timestamps.

        Raises:
            BloodFlowError: if libsonata cannot satisfy the query.
        """
        group = group if group is None else ensure_list(group)
        try:
            view = self._frame_population.get(node_ids=group, tstart=t_start, tstop=t_stop)
        except (SonataError, TypeError) as e:
            raise BloodFlowError(e) from e

        if len(view.ids) == 0:
            return pd.DataFrame()

        res = pd.DataFrame(
            data=view.data,
            columns=pd.MultiIndex.from_arrays(np.asarray(view.ids).T),
            index=view.times,
        ).sort_index(axis=1)

        # rename from multi index to index cannot be achieved easily through df.rename
        res.columns = self._wrap_columns(res.columns)
        return res

    @cached_property
    def node_ids(self):
        """Return the node ids present in the report.

        Returns:
            np.Array: Numpy array containing the node_ids included in the report
        """
        return np.sort(self._frame_population.get_node_ids())


class FilteredFrameReport:
    """Access to filtered FrameReport data."""

    def __init__(self, frame_report, group=None, t_start=None, t_stop=None):
        """Initialize a FilteredFrameReport.

        A FilteredFrameReport is a lazy and cached object which contains the filtered data
        from all the populations of a report.

        Args:
            frame_report (FrameReport): The FrameReport to filter.
            group (None/int/list/np.array/dict): Get frames filtered by group. See NodePopulation.
            t_start (float): Include only frames occurring at or after this time.
            t_stop (float): Include only frames occurring at or before this time.

        Returns:
            FilteredFrameReport: A FilteredFrameReport object.
        """
        self.frame_report = frame_report
        self.group = group
        self.t_start = t_start
        self.t_stop = t_stop

    @cached_property
    def report(self):
        """Access to the report data.

        Returns:
            pandas.DataFrame: A DataFrame containing the data from the report. Row's indices are the
            different timestamps and the column's MultiIndex are :
            - (population_name, node_id, compartment id) for the CompartmentReport
            - (population_name, node_id) for the SomaReport
        """
        res = pd.DataFrame()
        for population in self.frame_report.population_names:
            frames = self.frame_report[population]
            data = frames.get(group=self.group, t_start=self.t_start, t_stop=self.t_stop)
            if data.empty:
                continue
            # prepend the population name to each column key
            new_index = tuple(tuple([population] + ensure_list(x)) for x in data.columns)
            data.columns = pd.MultiIndex.from_tuples(new_index)
            # need to do this in order to preserve MultiIndex for columns
            res = data if res.empty else data.join(res, how="outer")
        return res.sort_index().sort_index(axis=1)


class FrameReport:
    """Access to FrameReport data."""

    def __init__(self, filepath):
        """Initialize a FrameReport object from a filepath.

        Args:
            filepath (str/Path): path to the file containing the report

        Returns:
            FrameReport: A FrameReport object.
        """
        self.filepath = filepath

    @cached_property
    def _frame_reader(self):
        """Access to the compartment report reader."""
        return ElementReportReader(self.filepath)

    @cached_property
    def time_units(self):
        """Return the time unit for this report.

        Raises:
            BloodFlowError: if the populations do not all share the same time unit.
        """
        units = {self._frame_reader[pop].time_units for pop in self.population_names}
        if len(units) > 1:
            raise BloodFlowError("Multiple time units found in the different populations.")
        return units.pop()

    @cached_property
    def data_units(self):
        """Return the data unit for this report.

        Raises:
            BloodFlowError: if the populations do not all share the same data unit.
        """
        units = {self._frame_reader[pop].data_units for pop in self.population_names}
        if len(units) > 1:
            raise BloodFlowError("Multiple data units found in the different populations.")
        return units.pop()

    @cached_property
    def population_names(self):
        """Return the population names included in this report."""
        return sorted(self._frame_reader.get_population_names())

    @cached_property
    def _population_report(self):
        """Collect the different PopulationFrameReport."""
        return _collect_population_reports(self, PopulationFrameReport)

    def __getitem__(self, population_name):
        """Access the PopulationFrameReports corresponding to the population 'population_name'."""
        return self._population_report[population_name]

    def __iter__(self):
        """Allow iteration over the different PopulationFrameReports."""
        return iter(self._population_report)

    def filter(self, group=None, t_start=None, t_stop=None):
        """Return a FilteredFrameReport.

        A FilteredFrameReport is a lazy and cached object which contains the filtered data
        from all the populations of a report.

        Args:
            group (None/int/list/np.array/dict): Get frames filtered by group. See NodePopulation.
            t_start (float): Include only frames occurring at or after this time.
            t_stop (float): Include only frames occurring at or before this time.

        Returns:
            FilteredFrameReport: A FilteredFrameReport object.
        """
        return FilteredFrameReport(self, group, t_start, t_stop)


class PopulationBloodflowReport(PopulationFrameReport):
    """Access to PopulationBloodflowReport data."""

    @staticmethod
    def _wrap_columns(columns):
        """Transform pandas.MultiIndex into pandas.Index for the pandas.DataFrame columns.

        Notes:
            the libsonata.ElementsReader.get() returns tuple as columns for the data. For the
            soma reports it means: pandas.MultiIndex([(0, 0), (1, 0), ..., (last_node_id, 0)]).
            So we convert this into pandas.Index([0,1,..., last_node_id]).
        """
        return columns.levels[0]


class BloodflowReport(FrameReport):
    """Access to a BloodflowReport data."""

    @cached_property
    def _population_report(self):
        """Collect the different PopulationBloodflowReport."""
        return _collect_population_reports(self, PopulationBloodflowReport)


# Copyright (c) 2023-2024 Blue Brain Project/EPFL
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from collections import namedtuple
from shutil import copyfile

import h5py
import numpy as np


def write_simulation_report(
    node_ids, report_folder, start_time, end_time, time_step, flows, pressures, radii, volumes
):
    """Write simulation report in sonata format.

    Args:
        node_ids (numpy.array): id of each edge of the vasculature.
        report_folder (path): folder that will contain the 4 sonata reports
            (flows, pressures, radii and volumes).
        start_time (float): beginning of simulation.
        end_time (float): end of simulation.
        time_step (float): time step of simulation.
        flows (numpy.array): flow values at each time-step for each edge.
        pressures (numpy.array): pressure values at each time-step for each node.
        radii (numpy.array): radius values at each time-step for each edge.
        volumes (numpy.array): volume values at each time-step for each edge.
    """
    # Bundle each quantity with its unit and its target file path.
    Report = namedtuple("Report", ["data", "name", "unit"])

    flows = Report(data=flows, unit="µm^3.s^-1", name=report_folder / "report_flows.h5")
    pressures = Report(
        data=pressures, unit="g.µm^-1.s^-2", name=report_folder / "report_pressures.h5"
    )
    radii = Report(data=radii, unit="µm", name=report_folder / "report_radii.h5")
    volumes = Report(data=volumes, unit="µm^3", name=report_folder / "report_volumes.h5")
    for report in [flows, pressures, radii, volumes]:
        write_report(report, node_ids, start_time, end_time, time_step)


def write_report(report, node_ids, start_time, end_time, time_step):
    """Write one simulation report file in sonata format.

    Args:
        report (Report): namedtuple carrying the data to write, its unit and
            the output file path (``report.name``).
        node_ids (numpy.array): id of each edge of the vasculature.
        start_time (float): beginning of simulation.
        end_time (float): end of simulation.
        time_step (float): time step of simulation.
    """
    # One element per node: index_pointers is simply [0, 1, ..., n] and every
    # element id is 0.
    index_pointers = np.arange(node_ids.size + 1, dtype=np.uint64)

    element_ids = np.zeros(node_ids.size, dtype=np.uint64)

    string_dtype = h5py.special_dtype(vlen=str)

    with h5py.File(report.name, "w") as fd:
        report_group = fd.create_group("report/vasculature")
        report_data = report_group.create_dataset("data", data=report.data, dtype=np.float32)
        report_data.attrs.create("units", report.unit, dtype=string_dtype)
        gmapping = fd.create_group("/report/vasculature/mapping")
        dnodes = gmapping.create_dataset("node_ids", data=node_ids, dtype=np.uint64)
        dnodes.attrs.create("sorted", data=True, dtype=np.uint8)
        gmapping.create_dataset("index_pointers", data=index_pointers, dtype=np.uint64)
        # NOTE(review): dataset is written as uint32 while element_ids is
        # allocated as uint64 above — confirm the narrowing is intended.
        gmapping.create_dataset("element_ids", data=element_ids, dtype=np.uint32)
        dtimes = gmapping.create_dataset(
            "time", data=(start_time, end_time, time_step), dtype=np.double
        )
        dtimes.attrs.create("units", data="s", dtype=string_dtype)


def write_merged_report(
    input_filename,
    report_folder,
    subgraphs,
    types,
    entry_nodes,
    edges_bifurcations,
    pairs=None,
):  # pragma: no cover
    """Write a combined report in sonata format.

    The input sonata graph file is copied into the report folder and extra
    per-node datasets are added in place (the original "type" dataset is
    replaced).

    Args:
        input_filename (path): folder containing the sonata graph.
        report_folder (path): folder containing the sonata report.
        subgraphs (numpy.array): (nb_edges, ) ids of subgraphs group for big vessels.
        types (numpy.array): (vessels_type,) array of vessels' type id.
        entry_nodes (numpy.array): (nb_entry_nodes,) ids of entry nodes for the inflow.
        edges_bifurcations (numpy.array): (nb_edges, ) ids of edges forming bifurcations.
        pairs (numpy.array): (nb_pairs, ) ids of nodes forming pairs of nodes.
    """
    string_dtype = h5py.special_dtype(vlen=str)

    report_name = report_folder / "report_vasculature.h5"
    copyfile(input_filename, report_name)

    with h5py.File(report_name, "a") as fd:
        report_group = fd["nodes/vasculature/0"]
        # "type" is re-created below with the provided values
        del report_group["type"]
        report_data = report_group.create_dataset("subgraph_id", data=subgraphs, dtype=np.uint64)
        report_data.attrs.create(
            "description",
            data="ids of subgraphs group for big vessels",
            dtype=string_dtype,
        )
        report_data = report_group.create_dataset("type", data=types, dtype=np.uint64)
        report_data.attrs.create("description", data="types", dtype=string_dtype)
        # NOTE(review): entry_nodes is stored under the name "entry_edges" —
        # confirm whether these ids are node ids or edge ids.
        report_data = report_group.create_dataset("entry_edges", data=entry_nodes, dtype=np.uint64)
        report_data.attrs.create(
            "description", data="ids of entry edges for the inflow", dtype=string_dtype
        )
        report_data = report_group.create_dataset(
            "edges_bifurcations", data=edges_bifurcations, dtype=np.uint64
        )
        report_data.attrs.create(
            "description",
            data="ids of exit edges given a bifurcation",
            dtype=string_dtype,
        )
        if pairs is not None:
            report_data = report_group.create_dataset("pairs", data=pairs, dtype=np.uint64)
            report_data.attrs.create("description", data="ids of pairs", dtype=string_dtype)


# Copyright (c) 2023-2024 Blue Brain Project/EPFL
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import random
import string

from numpy import concatenate, dtype
from numpy import zeros as np_zeros
from petsc4py import PETSc
from scipy.sparse import csr_matrix

from . import PetscBinaryIO
from .utils import comm, mpi, rank, rank0, size


def _from_numpy_dtype(np_type):
    """Convert a NumPy datatype to the equivalent MPI datatype."""
    char_d = dtype(np_type).char
    return mpi()._typedict[char_d]


def BinaryIO2PETScMat(L, file_name="tempMat.dat"):
    """
    Args:
        L: Numpy/SciPy array/matrix [on process 0]
        file_name: file name for the temporary container

    Returns:
        PETScMat [distributed across ranks] ->
        it uses the PetscBinaryIO interface (read/write from/to disk)
    """

    # randomize the name of the temp container to avoid files with the same name,
    # when running more than two instances of the program
    fname_ = "".join(random.choices(string.ascii_lowercase, k=10))
    fname_ += f"_{str(id(L))}_" + file_name
    # every rank must use the name chosen by rank 0
    fname_ = comm().bcast(fname_, root=0)

    if rank0():
        PetscBinaryIO.PetscBinaryIO().writeBinaryFile(
            fname_,
            [
                L,
            ],
        )

    # each rank loads its own chunk from the shared binary file
    viewer = PETSc.Viewer().createBinary(fname_, "r")
    A = PETSc.Mat(comm=comm()).load(viewer)

    # remove the temporary container (and its optional .info companion file)
    if rank0():
        os.remove(fname_)
        try:
            os.remove(fname_ + ".info")
        except FileNotFoundError:
            pass

    return A


def BinaryIO2PETScVec(v, file_name="tempVec.dat"):
    """
    Args:
        v: Numpy array/vector [on process 0]
        file_name: file name for the temporary container

    Returns:
        PETSc Vec [distributed across ranks] ->
        it uses the PetscBinaryIO interface (read/write from/to disk)
    """

    # randomized name, agreed upon by all ranks (see BinaryIO2PETScMat)
    fname_ = "".join(random.choices(string.ascii_lowercase, k=10)) + f"_{str(id(v))}_" + file_name
    fname_ = comm().bcast(fname_, root=0)

    if rank0():
        PetscBinaryIO.PetscBinaryIO().writeBinaryFile(
            fname_,
            [
                v.view(PetscBinaryIO.Vec),
            ],
        )

    viewer = PETSc.Viewer().createBinary(fname_, "r")
    x = PETSc.Vec(comm=comm()).load(viewer)

    if rank0():
        os.remove(fname_)
        try:
            os.remove(fname_ + ".info")
        except FileNotFoundError:
            pass

    return x


def BinaryIO2array(x, file_name="tempVec.dat"):
    """
    Args:
        x: a distributed PETSc Vec
        file_name: file name for the temporary container

    Returns:
        numpy array on proc 0 -> it uses the PetscBinaryIO interface (read/write from/to disk)
    """

    fname_ = "".join(random.choices(string.ascii_lowercase, k=10)) + f"_{str(id(x))}_" + file_name
    fname_ = comm().bcast(fname_, root=0)

    # collectively dump the distributed Vec to disk ...
    viewer = PETSc.Viewer().createBinary(fname_, "w")
    viewer(x)

    # ... and read it back as a sequential numpy array on rank 0 only
    v = None
    if rank0():
        (v,) = PetscBinaryIO.PetscBinaryIO().readBinaryFile(fname_)

    if rank0():
        os.remove(fname_)
        try:
            os.remove(fname_ + ".info")
        except FileNotFoundError:
            pass

    return v


def coomatrix2PETScMat(L):
    """
    Converts a sequential scipy sparse matrix (on process 0) to a PETSc
    Mat ('aij') matrix distributed on all processes

    Args:
        L: scipy sparse matrix on proc 0 (COO format)

    Returns:
        PETSc matrix distributed on all procs
    """

    # Get the data from the sequential scipy matrix
    if rank0():
        if L.format == "coo":
            L2 = L
        else:
            L2 = L.tocoo()

        n, m = L2.shape

        # COO-related
        row_ = L2.row
        row_ = row_.astype(PETSc.IntType)
        col_ = L2.col
        col_ = col_.astype(PETSc.IntType)
        data_ = L2.data
        data_ = data_.astype(PETSc.ScalarType)

        # CSR-related
        # https://en.wikipedia.org/wiki/Sparse_matrix
        # ROW_INDEX
        Ai = L.tocsr().indptr
        Ai = Ai.astype(PETSc.IntType)
    else:
        n = None
        m = None
        row_ = None
        col_ = None
        data_ = None
        Ai = None

    # Broadcast sizes
    n = comm().bcast(n, root=0)
    m = comm().bcast(m, root=0)

    A = PETSc.Mat().create(comm=comm())
    A.setSizes([n, m])
    A.setType("aij")
    A.setFromOptions()
    A.setUp()

    # rows corresponding to the current mpi rank (range)
    istart, iend = A.getOwnershipRange()

    # gather all ranges in rank 0 (None for the other ranks)
    istart_loc = comm().gather(istart, root=0)
    iend_loc = comm().gather(iend, root=0)

    if rank0():
        nnzloc_0 = np_zeros(size(), PETSc.IntType)
        for i in range(size()):
            # Ai encodes the total number of nonzeros above row istart_loc[i] and iend_loc[i]
            # how many non-zero elements for rank i
            nnzloc_0[i] = Ai[iend_loc[i]] - Ai[istart_loc[i]]
    else:
        nnzloc_0 = None

    # every rank gets the corresponding number (from vector to number)
    nnzloc = comm().scatter(nnzloc_0, root=0)

    # distribute the matrix across ranks (COO format) - create local containers
    row_loc = np_zeros(nnzloc, PETSc.IntType)
    col_loc = np_zeros(nnzloc, PETSc.IntType)
    data_loc = np_zeros(nnzloc, PETSc.ScalarType)

    # For Scatterv: displacement of each rank's chunk in the send buffers
    displ_ = None
    if rank0():
        displ_ = tuple(concatenate(([0], nnzloc_0[:-1])).cumsum())

    # distribute the matrix across ranks (COO format) - populate local containers
    comm().Scatterv([row_, nnzloc_0, displ_, _from_numpy_dtype(PETSc.IntType)], row_loc, root=0)
    comm().Scatterv([col_, nnzloc_0, displ_, _from_numpy_dtype(PETSc.IntType)], col_loc, root=0)
    comm().Scatterv(
        [data_, nnzloc_0, displ_, _from_numpy_dtype(PETSc.ScalarType)], data_loc, root=0
    )

    for r, c, v in zip(row_loc, col_loc, data_loc):
        A[r, c] = v

    A.assemble()

    return A


def _distribute_array_helper(v, array_type=None):
    """
    Scatter a NumPy array from rank 0 to all ranks using PETSc automatic
    chunk selection routine.

    Args:
        v: NumPy array on rank 0, None (or whatever) on other ranks
        array_type: set the type of the distributed array
                    If None, it keeps the same type as v.

    Returns:
        tuple of 2 elements:
        - numpy.ndarray: distributed array on all processors
        - petsc4py.PETSc.Vec: distributed array on all processors.
          All entries are initialized to zero.
    """

    if rank0():
        n = len(v)
        if array_type is None:
            array_type = v.dtype
        else:
            v = v.astype(array_type)
    else:
        n = None

    # Broadcast size and type
    n = comm().bcast(n, root=0)
    array_type = comm().bcast(array_type, root=0)

    # distribute array using PETSc.Vec approach
    x = PETSc.Vec()
    x.create(comm())
    x.setSizes(n)
    x.setFromOptions()
    istart, iend = x.getOwnershipRange()

    # slice of the global vector that belongs to this mpi rank (range: from -> to)
    nloc = iend - istart
    # gather nloc on rank zero
    nloc_loc = comm().gather(nloc, root=0)
    if not rank0():
        nloc_loc = [0]
    # gather istart on rank zero.
    istart_loc = comm().gather(istart, root=0)
    if not rank0():
        istart_loc = [0]

    # Initialize destination array on each rank
    vloc = np_zeros(nloc, dtype=array_type)

    # scatter the vector v on all ranks
    comm().Scatterv([v, nloc_loc, istart_loc, _from_numpy_dtype(array_type)], vloc, root=0)

    return vloc, x


def distribute_array(v, array_type=None):
    """
    Scatter a NumPy array from rank 0 to all ranks using PETSc automatic
    chunk selection routine.

    Args:
        v: NumPy array on rank 0, None (or whatever) on other ranks
        array_type: set the type of the distributed array
                    If None, it keeps the same type as v.

    Returns:
        numpy.ndarray: distributed array on all processors
    """

    vloc, x = _distribute_array_helper(v, array_type=array_type)
    x.destroy()  # Free the memory of the PETSc vec
    return vloc


def array2PETScVec(v):
    """
    Converts (copies) a sequential array/vector on process 0
    to a distributed PETSc Vec

    Args:
        v: NumPy array on proc 0, None (or whatever) on other proc

    Returns:
        petsc4py.PETSc.Vec: distributed array on all procs.
    """

    vloc, x = _distribute_array_helper(v, array_type=PETSc.ScalarType)
    x.setArray(vloc)
    return x


def PETScVec2array(x, dest_rank=0):
    """
    Converts (copies) a distributed PETSc Vec to a sequential array on specified rank

    Args:
        x: PETSc Vec distributed on all procs
        dest_rank: MPI rank receiving the numpy array

    Returns:
        NumPy array on proc `dest_rank`, None on the other ranks
    """

    vloc = x.getArray()
    n = x.getSize()

    istart, iend = x.getOwnershipRange()

    nloc = iend - istart
    nloc_loc = comm().gather(nloc, root=dest_rank)
    if rank() != dest_rank:
        nloc_loc = [0]

    istart_loc = comm().gather(istart, root=dest_rank)
    if rank() != dest_rank:
        istart_loc = [0]

    if rank() == dest_rank:
        v = np_zeros(n, PETSc.ScalarType)
    else:
        v = None

    # BUG FIX: the gather must target dest_rank (was hard-coded to root=0),
    # otherwise v, nloc_loc and istart_loc live on dest_rank while the data
    # was gathered on rank 0.
    comm().Gatherv(
        vloc, [v, nloc_loc, istart_loc, _from_numpy_dtype(PETSc.ScalarType)], root=dest_rank
    )

    return v


def PETScMat2coo(A, dest_rank=0):
    """
    Converts a distributed PETSc sparse matrice to a scipy coo
    sparse matrix on a specified rank

    Args:
        A: PETSc Mat distributed on all procs
        dest_rank: MPI rank receiving the coo sparse matrix

    Returns:
        scipy coo sparse matrix on the specified rank, None elsewhere
    """

    indptr_loc, indices_loc, data_loc = A.getValuesCSR()
    m, n = A.getSize()

    # gathered values (lists of per-rank arrays on dest_rank)
    iptr_gat = comm().gather(indptr_loc, root=dest_rank)
    ind_gat = comm().gather(indices_loc, root=dest_rank)
    data_gat = comm().gather(data_loc, root=dest_rank)

    if rank() == dest_rank:
        data = concatenate(data_gat)
        indices = concatenate(ind_gat)

        # reconstruct the indptr array: each rank's local indptr starts at 0,
        # so shift every chunk by the running number of nonzeros (offset).
        # csr_matrix expects an integer index array, not the float default.
        indptr = np_zeros(shape=m + 1, dtype=PETSc.IntType)
        ind = 1
        offset = 0
        for array in iptr_gat:
            last_elem_ind = len(array) - 2
            for i, elem in enumerate(array[1:]):
                indptr[ind] = offset + elem
                if i == last_elem_ind:
                    offset = indptr[ind]
                ind += 1

        matrix_csr = csr_matrix((data, indices, indptr), shape=(m, n))
        return matrix_csr.tocoo()
    else:
        return None
At resting state equal to the external pressure 36 | 37 | (Optional) Stochastic simulation parameters: 38 | 39 | Args: 40 | 41 | entry_noise: Boolean value to enable or disable the endfeet activity on entry nodes. 42 | 43 | threshold_r: radius (µm) threshold. A radius smaller than the threshold is considered a capillary. A radius bigger than the threshold is considered an artery. 44 | 45 | c_cap: constant used in the ROU parameter calibration for capillaries 46 | c_art: constant used in the ROU parameter calibration for arteries 47 | 48 | max_r_capill: max radius change factor for capillaries. 49 | t_2_max_capill: time (in seconds) to reach r_max_capill from 0. 50 | max_r_artery: max radius change factor for arteries. 51 | t_2_max_artery: time (in seconds) to reach r_max_artery from 0. 52 | 53 | (Optional) PETSc Linear solver parameters: 54 | 55 | Args: 56 | solver: iterative linear solver used by PETSc 57 | max_it: maximum number of solver iterations 58 | r_tol: relative tolerance 59 | """ 60 | 61 | max_nb_inputs: int 62 | depth_ratio: float 63 | vasc_axis: VasculatureAxis 64 | blood_viscosity: float 65 | base_pressure: float 66 | 67 | entry_noise: NotRequired[bool] 68 | c_cap: NotRequired[float] 69 | c_art: NotRequired[float] 70 | threshold_r: NotRequired[float] 71 | max_r_capill: NotRequired[float] 72 | t_2_max_capill: NotRequired[float] 73 | max_r_artery: NotRequired[float] 74 | t_2_max_artery: NotRequired[float] 75 | 76 | solver: NotRequired[str] 77 | max_it: NotRequired[int] 78 | r_tol: NotRequired[float] 79 | -------------------------------------------------------------------------------- /astrovascpy/version.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023-2024 Blue Brain Project/EPFL 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Package version string (single source of truth for the project).
VERSION = "0.1.6"
# Lowercase alias kept for code importing `version` directly.
version = VERSION
35 | """ 36 | vpoints = vtk.vtkPoints() 37 | vpoints.SetData(_ns.numpy_to_vtk(points.copy(), deep=1)) 38 | return vpoints 39 | 40 | 41 | def vtk_lines(edges): # pragma: no cover 42 | """Convert a list of edges into vtk lines. 43 | 44 | Args: 45 | edges (np.array): edges of the graph. 46 | 47 | Returns: 48 | vtkCellArray: vtk lines. 49 | """ 50 | vlines = vtk.vtkCellArray() 51 | 52 | n_edges = edges.shape[0] 53 | 54 | arr = np.empty((n_edges, 3), order="C", dtype=np.int) 55 | 56 | arr[:, 0] = 2 * np.ones(n_edges, dtype=np.int) 57 | arr[:, 1:] = edges 58 | 59 | # cell array structure: size of cell followed by edges 60 | # arr = np.column_stack((2 * np.ones(edges.shape[0], dtype=np.int), edges)).copy() 61 | 62 | # crucial to deep copy the data!!! 63 | vlines.SetCells(edges.shape[0], _ns.numpy_to_vtkIdTypeArray(arr, deep=1)) 64 | return vlines 65 | 66 | 67 | def vtk_attribute_array(name, arr): # pragma: no cover 68 | """Create a cell array with specified name and assigns the numpy array arr. 69 | 70 | Args: 71 | name (str): name of the cell array. 72 | arr (np.array): assigned to the cell array. 73 | 74 | Returns: 75 | vtkCellArray: cell array with specified name and assigns the numpy array arr. 76 | """ 77 | val_arr = vtk.util.numpy_support.numpy_to_vtk(arr) 78 | val_arr.SetName(name) 79 | 80 | return val_arr 81 | 82 | 83 | def create_polydata_from_data(points, edges, attribute_dict={}): # pragma: no cover 84 | """Create a PolyData vtk object from a set of points. 85 | 86 | Points are connected with edges and optionally have a set of attributes. 87 | 88 | Args: 89 | points (np.array): section's points. 90 | edges (np.array): edges of the graph. 91 | attribute_dict (dict): to store attributes. 92 | 93 | Returns: 94 | vtkPolyData: PolyData vtk object. 
95 | """ 96 | polydata = vtk.vtkPolyData() 97 | 98 | polydata.SetPoints(vtk_points(points)) 99 | polydata.SetLines(vtk_lines(edges)) 100 | 101 | cell_data = polydata.GetCellData() 102 | 103 | for key, arr in attribute_dict.items(): 104 | cell_data.AddArray(vtk_attribute_array(key, arr)) 105 | 106 | return polydata 107 | 108 | 109 | def vtk_loader(filename): # pragma: no cover 110 | """Extract from a vtk file the points, edges, radii and types. 111 | 112 | Args: 113 | filename (str): name of the file. 114 | 115 | Returns: 116 | np.array: points, edges and radii. 117 | """ 118 | from vtk.util.numpy_support import vtk_to_numpy 119 | 120 | def get_points(polydata): # pragma: no cover 121 | vtk_points = polydata.GetPoints() 122 | return vtk_to_numpy(vtk_points.GetData()) 123 | 124 | def get_structure(polydata): # pragma: no cover 125 | vtk_lines = polydata.GetLines() 126 | 127 | nmp_lines = vtk_to_numpy(vtk_lines.GetData()) 128 | 129 | n_rows = int(len(nmp_lines) / 3) 130 | 131 | return nmp_lines.reshape(n_rows, 3)[:, (1, 2)].astype(np.intp) 132 | 133 | def get_radii(polydata): # pragma: no cover 134 | cell_data = polydata.GetCellData() 135 | 136 | N = cell_data.GetNumberOfArrays() 137 | 138 | names = [cell_data.GetArrayName(i) for i in range(N)] 139 | 140 | vtk_floats = cell_data.GetArray(names.index("radius")) 141 | 142 | return vtk_to_numpy(vtk_floats) 143 | 144 | def get_types(polydata): # pragma: no cover 145 | cell_data = polydata.GetCellData() 146 | 147 | N = cell_data.GetNumberOfArrays() 148 | 149 | names = [cell_data.GetArrayName(i) for i in range(N)] 150 | 151 | vtk_floats = cell_data.GetArray(names.index("types")) 152 | 153 | return vtk_to_numpy(vtk_floats) 154 | 155 | # create a polydata reader 156 | reader = vtk.vtkPolyDataReader() 157 | 158 | # add the filename that will be read 159 | reader.SetFileName(filename) 160 | 161 | # update the output of the reader 162 | reader.Update() 163 | 164 | polydata = reader.GetOutput() 165 | 166 | points = 
get_points(polydata) 167 | edges = get_structure(polydata) 168 | radii = get_radii(polydata) 169 | 170 | # if no types are provided, it will return an array of zeros. 171 | try: 172 | types = get_types(polydata) 173 | 174 | except Exception: 175 | # warnings.warn("Types were not found. Zeros are used instead." + str(exc)) 176 | types = np.zeros(edges.shape[0], dtype=np.int) 177 | 178 | return points, edges, radii, types 179 | 180 | 181 | def vtk_writer( 182 | filename, points, edges, radii, types, mode="ascii", extra_properties=None 183 | ): # pragma: no cover 184 | """Create a vtk legacy file and populate it with a polydata object. 185 | 186 | Polydata object is generated using the points, edges, radii and types. 187 | 188 | Args: 189 | filename (str): name of the file. 190 | points (np.array): shape: (n_nodes, 3). 191 | edges (np.array): shape: (n_edges, 2). 192 | radii (np.array): shape: (n_edges, 2). 193 | types (np.array): shape: (n_edges, 2). 194 | mode (str): ascii or binary 195 | extra_properties (iterable of callables): add extra property computation functions 196 | that are not included. 
197 | """ 198 | # from .vtk_io import vtk_points, vtk_lines, vtk_attribute_array 199 | points = np.ascontiguousarray(points) 200 | edges = np.ascontiguousarray(edges) 201 | radii = np.ascontiguousarray(radii) 202 | types = np.ascontiguousarray(types) 203 | 204 | attr_dict = {"radius": radii, "type": types} 205 | 206 | if extra_properties is not None: 207 | assert isinstance(extra_properties, dict) 208 | attr_dict.update(extra_properties) 209 | 210 | polydata = create_polydata_from_data(points, edges, attribute_dict=attr_dict) 211 | 212 | writer = vtk.vtkPolyDataWriter() 213 | 214 | writer.SetFileName(filename + ".vtk") 215 | 216 | if mode == "binary": 217 | writer.SetFileTypeToBinary() 218 | elif mode == "ascii": 219 | writer.SetFileTypeToASCII() 220 | 221 | writer.SetInputData(polydata) 222 | writer.Write() 223 | -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parserPreset: 'conventional-changelog-conventionalcommits', 3 | rules: { 4 | 'body-leading-blank': [1, 'always'], 5 | 'body-max-line-length': [2, 'always', 100], 6 | 'footer-leading-blank': [1, 'always'], 7 | 'footer-max-line-length': [2, 'always', 100], 8 | 'header-max-length': [2, 'always', 100], 9 | 'header-case': [2, 'always', 'sentence-case'], 10 | 'scope-case': [2, 'always', 'lower-case'], 11 | 'subject-case': [2, 'always', 'sentence-case'], 12 | 'subject-empty': [1, 'never'], 13 | 'subject-full-stop': [2, 'never', '.'], 14 | 'type-case': [2, 'always', 'start-case'], 15 | 'type-empty': [1, 'never'], 16 | 'type-enum': [ 17 | 2, 18 | 'always', 19 | [ 20 | 'Build', 21 | 'Chore', 22 | 'CI', 23 | 'Docs', 24 | 'Feat', 25 | 'Fix', 26 | 'Perf', 27 | 'Refactor', 28 | 'Revert', 29 | 'Style', 30 | 'Test', 31 | ], 32 | ], 33 | }, 34 | ignores: [ 35 | (message) => message.includes('Draft: ') 36 | ], 37 | prompt: { 38 | questions: { 39 | type: { 40 | 
description: "Select the type of change that you're committing", 41 | enum: { 42 | Feat: { 43 | description: 'A new feature', 44 | title: 'Features', 45 | emoji: '✨', 46 | }, 47 | Fix: { 48 | description: 'A bug fix', 49 | title: 'Bug Fixes', 50 | emoji: '🐛', 51 | }, 52 | Docs: { 53 | description: 'Documentation only changes', 54 | title: 'Documentation', 55 | emoji: '📚', 56 | }, 57 | Style: { 58 | description: 59 | 'Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)', 60 | title: 'Styles', 61 | emoji: '💎', 62 | }, 63 | Refactor: { 64 | description: 65 | 'A code change that neither fixes a bug nor adds a feature', 66 | title: 'Code Refactoring', 67 | emoji: '📦', 68 | }, 69 | Perf: { 70 | description: 'A code change that improves performance', 71 | title: 'Performance Improvements', 72 | emoji: '🚀', 73 | }, 74 | Test: { 75 | description: 'Adding missing tests or correcting existing tests', 76 | title: 'Tests', 77 | emoji: '🚨', 78 | }, 79 | Build: { 80 | description: 81 | 'Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)', 82 | title: 'Builds', 83 | emoji: '🛠', 84 | }, 85 | CI: { 86 | description: 87 | 'Changes to our CI configuration files and scripts (example scopes: Travis, Circle, BrowserStack, SauceLabs)', 88 | title: 'Continuous Integrations', 89 | emoji: '⚙️', 90 | }, 91 | Chore: { 92 | description: "Other changes that don't modify src or test files", 93 | title: 'Chores', 94 | emoji: '♻️', 95 | }, 96 | Revert: { 97 | description: 'Reverts a previous commit', 98 | title: 'Reverts', 99 | emoji: '🗑', 100 | }, 101 | }, 102 | }, 103 | scope: { 104 | description: 105 | 'What is the scope of this change (e.g. 
component or file name)', 106 | }, 107 | subject: { 108 | description: 109 | 'Write a short, imperative tense description of the change', 110 | }, 111 | body: { 112 | description: 'Provide a longer description of the change', 113 | }, 114 | isBreaking: { 115 | description: 'Are there any breaking changes?', 116 | }, 117 | breakingBody: { 118 | description: 119 | 'A BREAKING CHANGE commit requires a body. Please enter a longer description of the commit itself', 120 | }, 121 | breaking: { 122 | description: 'Describe the breaking changes', 123 | }, 124 | isIssueAffected: { 125 | description: 'Does this change affect any open issues?', 126 | }, 127 | issuesBody: { 128 | description: 129 | 'If issues are closed, the commit requires a body. Please enter a longer description of the commit itself', 130 | }, 131 | issues: { 132 | description: 'Add issue references (e.g. "fix #123", "re #123".)', 133 | }, 134 | }, 135 | }, 136 | }; 137 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile clean 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | # Generate the documentation 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | 23 | clean: 24 | @rm -rf $(BUILDDIR) $(SOURCEDIR)/examples $(SOURCEDIR)/generated 25 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | docutils==0.19 2 | -------------------------------------------------------------------------------- /docs/source/api_ref.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | This page presents the complete API documentation. 5 | 6 | .. autosummary:: 7 | :toctree: generated 8 | 9 | astrovascpy.bloodflow 10 | astrovascpy.exceptions 11 | astrovascpy.io 12 | astrovascpy.ou 13 | astrovascpy.PetscBinaryIO 14 | astrovascpy.plotting 15 | astrovascpy.report_reader 16 | astrovascpy.report_writer 17 | astrovascpy.scipy_petsc_conversions 18 | astrovascpy.typing 19 | astrovascpy.utils 20 | astrovascpy.vtk_io 21 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | .. mdinclude:: ../../CHANGELOG.md 2 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | """Configuration file for the Sphinx documentation builder.""" 2 | 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. 
If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | 13 | from importlib.metadata import version as get_version 14 | 15 | # -- Project information ----------------------------------------------------- 16 | 17 | project = "AstroVascPy" 18 | 19 | # The short X.Y version 20 | version = get_version(project) 21 | 22 | # The full version, including alpha/beta/rc tags 23 | release = version 24 | 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | "sphinx.ext.autodoc", 33 | "sphinx.ext.autosummary", 34 | "sphinx.ext.intersphinx", 35 | "sphinx.ext.napoleon", 36 | "sphinx.ext.todo", 37 | "sphinx_click", 38 | "sphinx_mdinclude", 39 | ] 40 | 41 | todo_include_todos = True 42 | 43 | # Add any paths that contain templates here, relative to this directory. 44 | # templates_path = ['_templates'] 45 | 46 | # List of patterns, relative to source directory, that match files and 47 | # directories to ignore when looking for source files. 48 | # This pattern also affects html_static_path and html_extra_path. 49 | exclude_patterns = [] 50 | 51 | 52 | # -- Options for HTML output ------------------------------------------------- 53 | 54 | # The theme to use for HTML and HTML Help pages. See the documentation for 55 | # a list of builtin themes. 56 | # 57 | html_theme = "sphinx-bluebrain-theme" 58 | 59 | # Add any paths that contain custom static files (such as style sheets) here, 60 | # relative to this directory. They are copied after the builtin static files, 61 | # so a file named "default.css" will overwrite the builtin "default.css". 
62 | # html_static_path = ['_static'] 63 | 64 | html_theme_options = { 65 | "metadata_distribution": "AstroVascPy", 66 | } 67 | 68 | html_title = project 69 | 70 | # If true, links to the reST sources are added to the pages. 71 | html_show_sourcelink = False 72 | 73 | # autosummary settings 74 | autosummary_generate = True 75 | 76 | # autodoc settings 77 | autodoc_typehints = "signature" 78 | autodoc_default_options = { 79 | "members": True, 80 | "show-inheritance": True, 81 | } 82 | autodoc_mock_imports = ["petsc4py"] 83 | 84 | intersphinx_mapping = { 85 | # Uncomment these lines if you need them 86 | # "numpy": ("https://numpy.org/doc/stable/", None), 87 | # "pandas": ("https://pandas.pydata.org/docs", None), 88 | "python": ("https://docs.python.org/3", None), 89 | } 90 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. image:: /logo/BBP-AstroVascPy-Github.jpg 2 | 3 | .. mdinclude:: ../../README.md 4 | :start-line: 2 5 | 6 | Contents: 7 | ------------ 8 | 9 | .. toctree:: 10 | :maxdepth: 1 11 | 12 | Home 13 | api_ref 14 | changelog 15 | -------------------------------------------------------------------------------- /docs/source/logo/BBP-AstroVascPy-Github.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/docs/source/logo/BBP-AstroVascPy-Github.jpg -------------------------------------------------------------------------------- /examples/compute_static_flow_pressure.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # coding: utf-8 3 | """ 4 | Copyright (c) 2023-2023 Blue Brain Project/EPFL 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 
7 | You may obtain a copy of the License at 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | """ 15 | 16 | import sys 17 | from functools import partial 18 | from pathlib import Path, PurePath 19 | 20 | import matplotlib.colors as c 21 | import matplotlib.pyplot as plt 22 | import numpy as np 23 | import petsc4py 24 | import yaml 25 | from mpi4py import MPI 26 | from petsc4py import PETSc 27 | 28 | from astrovascpy import bloodflow 29 | from astrovascpy.io import load_graph_from_bin 30 | 31 | # from astrovascpy.io import load_graph_from_csv 32 | # from astrovascpy.io import load_graph_from_h5 33 | from astrovascpy.report_writer import write_simulation_report 34 | from astrovascpy.utils import create_entry_largest_nodes, mpi_mem, mpi_timer 35 | 36 | petsc4py.init(sys.argv) 37 | 38 | MPI_COMM = MPI.COMM_WORLD 39 | MPI_RANK = MPI_COMM.Get_rank() 40 | MPI_SIZE = MPI_COMM.Get_size() 41 | PETSc.Sys.Print(f"Number of MPI tasks = {MPI_SIZE}") 42 | 43 | print = partial(print, flush=True) 44 | 45 | RAT = True 46 | plot_yz = True 47 | plot_xz = True 48 | test_vasodilation = False 49 | save_sonata = False 50 | save_vtk = False 51 | 52 | params = yaml.full_load(open(str(PurePath("data/params.yaml")))) 53 | 54 | output_path = Path(params["output_folder"]) 55 | output_join_path = Path(PurePath(params["output_folder"], "figures")) 56 | 57 | if MPI_RANK == 0 and not output_path.exists(): 58 | output_path.mkdir() 59 | 60 | PETSc.Sys.Print("loading circuit") 61 | 62 | node_dataset = "./data/graphs_folder/node_dataset.csv" 63 | edge_dataset = "./data/graphs_folder/edge_dataset.csv" 64 | graph_sonata = "./data/graphs_folder/toy_graph.h5" 65 | graph_bin = 
"./data/graphs_folder/toy_graph.bin" 66 | 67 | with mpi_timer.region("loading circuit"), mpi_mem.region("loading circuit"): 68 | graph = load_graph_from_bin(graph_bin) 69 | # Uncomment the following if you want to import with different methods 70 | # 71 | # graph = load_graph_from_csv(node_filename=node_dataset, edge_filename=edge_dataset) 72 | # graph = load_graph_from_h5(filename=graph_sonata) 73 | 74 | PETSc.Sys.Print("compute entry nodes") 75 | 76 | with mpi_timer.region("compute entry nodes"), mpi_mem.region("compute entry nodes"): 77 | entry_nodes = create_entry_largest_nodes(graph, params) 78 | 79 | PETSc.Sys.Print("entry nodes: ", entry_nodes) 80 | 81 | PETSc.Sys.Print("compute input flow") 82 | 83 | with mpi_timer.region("compute boundary flows"), mpi_mem.region("compute boundary flows"): 84 | if graph is not None: 85 | entry_speed = 35000 # speed um/s 86 | radii_at_entry_nodes = graph.diameters[entry_nodes] / 2 87 | input_flows = entry_speed * np.pi * radii_at_entry_nodes**2 88 | else: 89 | input_flows = None 90 | boundary_flow = bloodflow.boundary_flows_A_based(graph, entry_nodes, input_flows) 91 | 92 | PETSc.Sys.Print("end of input flow \n") 93 | 94 | PETSc.Sys.Print("compute static flow") 95 | 96 | with mpi_timer.region("compute static flow"), mpi_mem.region("compute static flow"): 97 | bloodflow.update_static_flow_pressure(graph, boundary_flow, params) 98 | 99 | PETSc.Sys.Print("end of static flow pressure") 100 | 101 | largest_nodes = entry_nodes 102 | 103 | if RAT and graph is not None: 104 | SMALL_SIZE = 26 105 | MEDIUM_SIZE = 32 106 | BIGGER_SIZE = 48 107 | plt.rc("font", size=SMALL_SIZE) # controls default text sizes 108 | plt.rc("axes", titlesize=BIGGER_SIZE) # fontsize of the axes title 109 | plt.rc("axes", labelsize=MEDIUM_SIZE) # fontsize of the x and y labels 110 | plt.rc("xtick", labelsize=SMALL_SIZE) # fontsize of the tick labels 111 | plt.rc("ytick", labelsize=SMALL_SIZE) # fontsize of the tick labels 112 | plt.rc("legend", 
fontsize=SMALL_SIZE) # legend fontsize 113 | plt.rc("figure", titlesize=BIGGER_SIZE) # fontsize of the figure title 114 | plt.rcParams["xtick.major.pad"] = 9.0 115 | plt.rcParams["axes.linewidth"] = 2.0 116 | plt.rcParams["xtick.major.size"] = 7.0 * 3.0 117 | plt.rcParams["xtick.minor.size"] = 4.0 * 3.0 118 | plt.rcParams["ytick.major.size"] = 7.0 * 3.0 119 | plt.rcParams["ytick.minor.size"] = 4.0 * 3.0 120 | plt.rcParams["xtick.major.width"] = 2.4 121 | plt.rcParams["xtick.minor.width"] = 1.8 122 | plt.rcParams["ytick.major.width"] = 2.4 123 | plt.rcParams["ytick.minor.width"] = 1.8 124 | 125 | if plot_yz: 126 | figure = plt.figure(figsize=(15, 25)) 127 | positions = graph.points 128 | locate_positions = positions[graph.degrees == 1, 1:] 129 | plt.scatter( 130 | locate_positions[:, 1], 131 | locate_positions[:, 0], 132 | s=10, 133 | color="darkgray", 134 | ) 135 | 136 | print("start plotting y and z axes") 137 | colors = [[0, 0, 0, 1] for i in range(len(largest_nodes))] 138 | print("set colors") 139 | cols = [ 140 | c.to_rgba("palevioletred"), 141 | c.to_rgba("mediumseagreen"), 142 | c.to_rgba("mediumturquoise"), 143 | c.to_rgba("teal"), 144 | c.to_rgba("mediumpurple"), 145 | c.to_rgba("darkorange"), 146 | c.to_rgba("mediumvioletred"), 147 | c.to_rgba("lightgreen"), 148 | c.to_rgba("paleturquoise"), 149 | c.to_rgba("darkslategrey"), 150 | c.to_rgba("rebeccapurple"), 151 | c.to_rgba("orangered"), 152 | ] 153 | sizes = [150 for i in range(len(largest_nodes))] 154 | print("set markers") 155 | markers = ["o", "s", "^", "p", "H", "D", "o", "s", "^", "p", "H", "D"] 156 | plt.scatter( 157 | positions[largest_nodes][:, 2], 158 | positions[largest_nodes][:, 1], 159 | s=sizes, 160 | color=colors, 161 | marker=markers[2], 162 | ) 163 | plt.hlines( 164 | np.max(positions[:, 1]) - (np.max(positions[:, 1]) - np.min(positions[:, 1])) * 1 / 10, 165 | np.min(positions[:, 2]), 166 | np.max(positions[:, 2]), 167 | linewidths=6, 168 | linestyle="dashed", 169 | 
colors="cornflowerblue", 170 | ) 171 | plt.xlabel("z-position (µm)") 172 | plt.ylabel("y-position (µm)") 173 | plt.tight_layout() 174 | plt.savefig(Path(params["output_folder"]) / "input_node_yz.png", dpi=300) 175 | print("end plotting y and z axes") 176 | 177 | if plot_xz: 178 | print("start plotting x and z axes") 179 | figure = plt.figure(figsize=(15, 15)) 180 | positions = graph.points 181 | filter_positions = ( 182 | positions[:, 1] 183 | > np.max(positions[:, 1]) - (np.max(positions[:, 1]) - np.min(positions[:, 1])) * 1 / 10 184 | ) 185 | locate_positions = positions[filter_positions][:, [0, -1]] 186 | locate_degrees = graph.degrees[filter_positions] 187 | 188 | plt.scatter( 189 | locate_positions[locate_degrees == 1, 1], 190 | locate_positions[locate_degrees == 1, 0], 191 | s=10, 192 | color="darkgray", 193 | ) 194 | 195 | colors = [[0, 0, 0, 1] for i in range(len(largest_nodes))] 196 | cols = [ 197 | c.to_rgba("palevioletred"), 198 | c.to_rgba("mediumseagreen"), 199 | c.to_rgba("mediumturquoise"), 200 | c.to_rgba("teal"), 201 | c.to_rgba("mediumpurple"), 202 | c.to_rgba("darkorange"), 203 | c.to_rgba("mediumvioletred"), 204 | c.to_rgba("lightgreen"), 205 | c.to_rgba("paleturquoise"), 206 | c.to_rgba("darkslategrey"), 207 | c.to_rgba("rebeccapurple"), 208 | c.to_rgba("orangered"), 209 | ] 210 | sizes = [150 for i in range(len(largest_nodes))] 211 | markers = ["o", "s", "^", "p", "H", "D", "o", "s", "^", "p", "H", "D"] 212 | plt.scatter( 213 | positions[largest_nodes][:, 2], 214 | positions[largest_nodes][:, 0], 215 | s=sizes, 216 | color=colors, 217 | marker=markers[2], 218 | ) 219 | plt.xlabel("z-position (µm)") 220 | plt.ylabel("x-position (µm)") 221 | plt.tight_layout() 222 | plt.savefig(Path(params["output_folder"]) / "input_node_xz.png", dpi=300) 223 | print("end xz plotting") 224 | 225 | if test_vasodilation: 226 | mask = ( 227 | (500 < graph.node_properties.x) 228 | & (graph.node_properties.x < 550) 229 | & (1500 < graph.node_properties.y) 230 | & 
(graph.node_properties.y < 1550) 231 | & (500 < graph.node_properties.z) 232 | & (graph.node_properties.z < 550) 233 | ) 234 | edge_index = graph.edge_properties[ 235 | graph.edge_properties.start_node.isin(graph.node_properties[mask].index) 236 | & graph.edge_properties.end_node.isin(graph.node_properties[mask].index) 237 | ].index 238 | graph.edge_properties.loc[edge_index, "radius"] *= 1.5 239 | 240 | if graph is not None: 241 | if save_sonata: 242 | sonata_path = Path(params["output_folder"]) / "sonata_files" 243 | if not sonata_path.exists(): 244 | Path.mkdir(sonata_path) 245 | print("start sonata reporting") 246 | filename = sonata_path / "simulate_ou_process" 247 | 248 | if not filename.exists(): 249 | Path.mkdir(filename) 250 | write_simulation_report( 251 | np.arange(graph.n_edges), 252 | filename, 253 | start_time=0.0, 254 | end_time=1.0, 255 | time_step=1.0, 256 | flows=graph.edge_properties["flow"], 257 | pressures=graph.node_properties["pressure"], 258 | radii=graph.edge_properties["radius"].to_numpy(), 259 | volumes=graph.edge_properties["volume"].to_numpy(), 260 | ) 261 | print("end of sonata reporting") 262 | 263 | if save_vtk: 264 | from astrovascpy.vtk_io import vtk_writer 265 | 266 | vtk_path = Path(params["output_folder"]) / "vtk_files" 267 | if not vtk_path.exists(): 268 | Path.mkdir(vtk_path) 269 | 270 | flow = graph.edge_properties["flow"] 271 | print("flow:", flow) 272 | pressure = graph.node_properties["pressure"] 273 | 274 | pressure_edge = [] 275 | for u, v in graph.edges: 276 | pressure_edge.append(0.5 * (pressure[u] + pressure[v])) 277 | 278 | properties = {"flow": flow, "pressure": pressure_edge} 279 | 280 | filename = str(vtk_path / "flow_pressure") 281 | points = graph.node_properties[["x", "y", "z"]].to_numpy() 282 | edges = graph.edges 283 | radii = graph.edge_properties.radius 284 | types = np.zeros(radii.size) 285 | 286 | print("number of edges:", len(edges)) 287 | print("number of connected nodes:", len(np.unique(edges))) 288 | 
289 | vtk_writer(filename, points, edges, radii, types, extra_properties=properties) 290 | 291 | mpi_timer.print() 292 | mpi_mem.print() 293 | -------------------------------------------------------------------------------- /examples/data/graphs_folder/node_dataset.csv: -------------------------------------------------------------------------------- 1 | x,y,z,diameter 2 | 30.0,20.0,20.0,7.4542317 3 | 25.0,25.0,25.0,5.932791 4 | 30.0,30.0,30.0,7.829187 5 | 35.0,35.0,35.0,5.3152843 6 | 20.0,20.0,20.0,5.1605887 7 | 15.0,15.0,15.0,7.9477215 8 | 30.0,30.0,20.0,6.843039 9 | 20.0,30.0,20.0,5.6644707 10 | 20.0,20.0,30.0,5.1327558 11 | 30.0,20.0,30.0,6.2937737 12 | 20.0,30.0,30.0,7.0178814 13 | 10.0,10.0,10.0,6.2519994 14 | 5.0,5.0,5.0,6.758832 15 | 20.0,10.0,10.0,6.149199 16 | 25.0,5.0,5.0,6.0459957 17 | 20.0,20.0,10.0,6.179367 18 | 25.0,25.0,5.0,7.5464993 19 | 10.0,20.0,10.0,6.7691355 20 | 5.0,25.0,5.0,7.414635 21 | 10.0,10.0,20.0,7.644702 22 | 5.0,5.0,25.0,7.9950647 23 | 20.0,10.0,20.0,7.7871985 24 | 25.0,5.0,25.0,7.541923 25 | 10.0,20.0,20.0,5.544867 26 | 5.0,25.0,25.0,6.2433696 27 | 0.0,20.0,20.0,7.92168 28 | 10.0,30.0,20.0,7.1019 29 | 0.0,30.0,20.0,7.5254474 30 | 0.0,20.0,30.0,6.700008 31 | 10.0,20.0,30.0,6.430404 32 | 10.0,30.0,30.0,6.8656473 33 | 0.0,30.0,30.0,6.5862246 34 | 20.0,0.0,20.0,5.622538 35 | 30.0,0.0,20.0,7.365405 36 | 30.0,10.0,20.0,5.8200464 37 | 20.0,0.0,30.0,7.6613946 38 | 30.0,0.0,30.0,5.496637 39 | 30.0,10.0,30.0,6.99788 40 | 20.0,10.0,30.0,5.2526336 41 | 0.0,0.0,20.0,6.1528196 42 | 10.0,0.0,20.0,7.110221 43 | 0.0,10.0,20.0,6.059225 44 | 0.0,0.0,30.0,5.4632764 45 | 10.0,0.0,30.0,5.9380693 46 | 10.0,10.0,30.0,7.6529727 47 | 0.0,10.0,30.0,7.875597 48 | 0.0,20.0,0.0,7.672293 49 | 10.0,20.0,0.0,7.4183726 50 | 10.0,30.0,0.0,5.321903 51 | 0.0,30.0,0.0,5.02718 52 | 0.0,20.0,10.0,5.5751724 53 | 10.0,30.0,10.0,5.811432 54 | 0.0,30.0,10.0,6.848549 55 | 20.0,20.0,0.0,7.4772143 56 | 30.0,20.0,0.0,7.4552665 57 | 30.0,30.0,0.0,5.2413454 58 | 
20.0,30.0,0.0,6.8036833 59 | 30.0,20.0,10.0,7.5037594 60 | 30.0,30.0,10.0,5.7139177 61 | 20.0,30.0,10.0,7.2857795 62 | 20.0,0.0,0.0,5.329049 63 | 30.0,0.0,0.0,5.9650927 64 | 30.0,10.0,0.0,6.279782 65 | 20.0,10.0,0.0,5.073644 66 | 20.0,0.0,10.0,6.1649995 67 | 30.0,0.0,10.0,5.282367 68 | 30.0,10.0,10.0,6.480736 69 | 0.0,0.0,0.0,5.651837 70 | 10.0,0.0,0.0,7.228695 71 | 10.0,10.0,0.0,5.588002 72 | 0.0,10.0,0.0,5.858989 73 | 0.0,0.0,10.0,5.5002246 74 | 10.0,0.0,10.0,5.5180902 75 | 0.0,10.0,10.0,6.44466 76 | 40.0,30.0,30.0,7.9719553 77 | 45.0,25.0,25.0,6.4953046 78 | 40.0,40.0,30.0,5.596677 79 | 45.0,45.0,25.0,7.105357 80 | 30.0,40.0,30.0,6.9705153 81 | 25.0,45.0,25.0,5.4153104 82 | 30.0,30.0,40.0,5.319486 83 | 25.0,25.0,45.0,5.5819726 84 | 40.0,30.0,40.0,6.952742 85 | 45.0,25.0,45.0,6.443127 86 | 40.0,40.0,40.0,7.48194 87 | 45.0,45.0,45.0,5.8947372 88 | 30.0,40.0,40.0,7.0534954 89 | 25.0,45.0,45.0,7.587678 90 | 20.0,50.0,50.0,5.2785535 91 | 15.0,55.0,55.0,7.399596 92 | 20.0,40.0,40.0,7.6538663 93 | 30.0,50.0,40.0,5.6193542 94 | 20.0,50.0,40.0,7.392579 95 | 20.0,40.0,50.0,7.424148 96 | 30.0,40.0,50.0,7.7810616 97 | 30.0,50.0,50.0,5.346684 98 | 10.0,50.0,50.0,7.0860395 99 | 5.0,45.0,45.0,6.3579607 100 | 20.0,60.0,50.0,5.499626 101 | 25.0,65.0,45.0,7.1411533 102 | 10.0,60.0,50.0,7.5535955 103 | 5.0,65.0,45.0,7.746223 104 | 10.0,50.0,60.0,7.313232 105 | 5.0,45.0,65.0,5.439751 106 | 20.0,50.0,60.0,5.8443613 107 | 25.0,45.0,65.0,7.757513 108 | 20.0,60.0,60.0,6.131807 109 | 25.0,65.0,65.0,6.2348795 110 | 10.0,60.0,60.0,7.7780795 111 | 5.0,65.0,65.0,5.915801 112 | 0.0,60.0,60.0,6.2433143 113 | 10.0,70.0,60.0,6.066139 114 | 0.0,70.0,60.0,5.1315875 115 | 0.0,60.0,70.0,5.552613 116 | 10.0,60.0,70.0,5.711569 117 | 10.0,70.0,70.0,5.5505133 118 | 0.0,70.0,70.0,7.264352 119 | 30.0,60.0,60.0,6.280074 120 | 30.0,70.0,60.0,6.831469 121 | 20.0,70.0,60.0,6.6368866 122 | 20.0,60.0,70.0,7.9241695 123 | 30.0,60.0,70.0,7.041111 124 | 30.0,70.0,70.0,7.2198386 125 | 20.0,70.0,70.0,7.900868 126 | 
20.0,40.0,60.0,5.8777127 127 | 30.0,40.0,60.0,5.737972 128 | 30.0,50.0,60.0,6.749414 129 | 20.0,40.0,70.0,5.774108 130 | 30.0,40.0,70.0,6.420157 131 | 30.0,50.0,70.0,7.5025287 132 | 20.0,50.0,70.0,5.6912007 133 | 0.0,40.0,60.0,5.885922 134 | 10.0,40.0,60.0,7.1159205 135 | 0.0,50.0,60.0,6.097029 136 | 0.0,40.0,70.0,6.186232 137 | 10.0,40.0,70.0,5.691784 138 | 10.0,50.0,70.0,6.0320306 139 | 0.0,50.0,70.0,7.84489 140 | 0.0,60.0,40.0,6.4653273 141 | 10.0,60.0,40.0,5.402801 142 | 10.0,70.0,40.0,7.551884 143 | 0.0,70.0,40.0,6.724971 144 | 0.0,60.0,50.0,7.2198124 145 | 10.0,70.0,50.0,7.113994 146 | 0.0,70.0,50.0,7.9046354 147 | 20.0,60.0,40.0,5.4944167 148 | 30.0,60.0,40.0,5.0815935 149 | 30.0,70.0,40.0,5.9525113 150 | 20.0,70.0,40.0,6.786755 151 | 30.0,60.0,50.0,6.4598184 152 | 30.0,70.0,50.0,7.077664 153 | 20.0,70.0,50.0,7.4590693 154 | 0.0,40.0,40.0,6.176065 155 | 10.0,40.0,40.0,5.4319296 156 | 10.0,50.0,40.0,7.414469 157 | 0.0,50.0,40.0,7.1401114 158 | 0.0,40.0,50.0,6.2260323 159 | 10.0,40.0,50.0,6.555297 160 | 0.0,50.0,50.0,6.9955487 161 | 50.0,50.0,50.0,7.6105466 162 | 55.0,55.0,55.0,7.7194824 163 | 50.0,40.0,40.0,6.0617123 164 | 50.0,50.0,40.0,5.0111294 165 | 40.0,50.0,40.0,7.535233 166 | 40.0,40.0,50.0,5.464522 167 | 50.0,40.0,50.0,5.612433 168 | 40.0,50.0,50.0,5.7657933 169 | 60.0,50.0,50.0,6.686198 170 | 65.0,45.0,45.0,5.394728 171 | 60.0,60.0,50.0,6.3686676 172 | 65.0,65.0,45.0,7.2210417 173 | 50.0,60.0,50.0,6.788553 174 | 45.0,65.0,45.0,7.4630456 175 | 50.0,50.0,60.0,6.2864294 176 | 45.0,45.0,65.0,6.1191635 177 | 60.0,50.0,60.0,6.6655817 178 | 65.0,45.0,65.0,5.590556 179 | 60.0,60.0,60.0,6.250802 180 | 65.0,65.0,65.0,5.2962794 181 | 50.0,60.0,60.0,6.2014093 182 | 45.0,65.0,65.0,7.245818 183 | 40.0,60.0,60.0,5.3448563 184 | 50.0,70.0,60.0,6.180889 185 | 40.0,70.0,60.0,6.126648 186 | 40.0,60.0,70.0,6.704487 187 | 50.0,60.0,70.0,7.003931 188 | 50.0,70.0,70.0,7.5224905 189 | 40.0,70.0,70.0,6.491694 190 | 70.0,60.0,60.0,7.800368 191 | 70.0,70.0,60.0,5.7585616 192 | 
60.0,70.0,60.0,7.2734723 193 | 60.0,60.0,70.0,5.0002213 194 | 70.0,60.0,70.0,5.76272 195 | 70.0,70.0,70.0,7.247302 196 | 60.0,70.0,70.0,6.597008 197 | 60.0,40.0,60.0,7.831483 198 | 70.0,40.0,60.0,7.06035 199 | 70.0,50.0,60.0,5.862726 200 | 60.0,40.0,70.0,7.306997 201 | 70.0,40.0,70.0,5.2494946 202 | 70.0,50.0,70.0,7.924323 203 | 60.0,50.0,70.0,5.1478558 204 | 40.0,40.0,60.0,7.936571 205 | 50.0,40.0,60.0,6.188554 206 | 40.0,50.0,60.0,6.805831 207 | 40.0,40.0,70.0,5.190107 208 | 50.0,40.0,70.0,6.2295723 209 | 50.0,50.0,70.0,7.1675005 210 | 40.0,50.0,70.0,5.7162166 211 | 40.0,60.0,40.0,7.9506664 212 | 50.0,60.0,40.0,5.0047836 213 | 50.0,70.0,40.0,5.4363503 214 | 40.0,70.0,40.0,7.337333 215 | 40.0,60.0,50.0,7.4153824 216 | 50.0,70.0,50.0,7.307741 217 | 40.0,70.0,50.0,6.6109967 218 | 60.0,60.0,40.0,7.5582085 219 | 70.0,60.0,40.0,6.050606 220 | 70.0,70.0,40.0,5.803966 221 | 60.0,70.0,40.0,5.1856675 222 | 70.0,60.0,50.0,7.4639106 223 | 70.0,70.0,50.0,6.1389995 224 | 60.0,70.0,50.0,6.7146506 225 | 60.0,40.0,40.0,7.17873 226 | 70.0,40.0,40.0,6.424615 227 | 70.0,50.0,40.0,7.7121525 228 | 60.0,50.0,40.0,5.1056595 229 | 60.0,40.0,50.0,5.5419817 230 | 70.0,40.0,50.0,6.0155435 231 | 70.0,50.0,50.0,6.7324886 232 | 50.0,20.0,50.0,7.0150695 233 | 55.0,15.0,55.0,7.2144547 234 | 40.0,20.0,40.0,6.5462995 235 | 50.0,20.0,40.0,6.24786 236 | 50.0,30.0,40.0,5.6937647 237 | 40.0,20.0,50.0,5.923622 238 | 50.0,30.0,50.0,7.8362927 239 | 40.0,30.0,50.0,5.8825426 240 | 50.0,10.0,50.0,5.4592905 241 | 45.0,5.0,45.0,7.160958 242 | 60.0,10.0,50.0,5.5925574 243 | 65.0,5.0,45.0,7.9370327 244 | 60.0,20.0,50.0,6.584945 245 | 65.0,25.0,45.0,7.6964755 246 | 50.0,10.0,60.0,6.410964 247 | 45.0,5.0,65.0,6.7601514 248 | 60.0,10.0,60.0,7.879087 249 | 65.0,5.0,65.0,6.764473 250 | 60.0,20.0,60.0,5.720877 251 | 65.0,25.0,65.0,5.1028013 252 | 50.0,20.0,60.0,7.2894206 253 | 45.0,25.0,65.0,7.9955797 254 | 40.0,20.0,60.0,6.2804585 255 | 50.0,30.0,60.0,5.804328 256 | 40.0,30.0,60.0,5.3947906 257 | 
40.0,20.0,70.0,5.1176314 258 | 50.0,20.0,70.0,5.0756955 259 | 50.0,30.0,70.0,5.814651 260 | 40.0,30.0,70.0,6.3855605 261 | 70.0,20.0,60.0,5.7515616 262 | 70.0,30.0,60.0,6.8151293 263 | 60.0,30.0,60.0,5.9087143 264 | 60.0,20.0,70.0,6.731852 265 | 70.0,20.0,70.0,5.509034 266 | 70.0,30.0,70.0,5.4784074 267 | 60.0,30.0,70.0,6.251089 268 | 60.0,0.0,60.0,7.1635942 269 | 70.0,0.0,60.0,6.4403234 270 | 70.0,10.0,60.0,6.931592 271 | 60.0,0.0,70.0,6.5053196 272 | 70.0,0.0,70.0,7.4345555 273 | 70.0,10.0,70.0,6.4282517 274 | 60.0,10.0,70.0,6.569468 275 | 40.0,0.0,60.0,7.7012305 276 | 50.0,0.0,60.0,5.51958 277 | 40.0,10.0,60.0,7.625599 278 | 40.0,0.0,70.0,5.0829597 279 | 50.0,0.0,70.0,6.9810157 280 | 50.0,10.0,70.0,6.2433167 281 | 40.0,10.0,70.0,7.3738446 282 | 60.0,20.0,40.0,5.893911 283 | 70.0,20.0,40.0,7.1277013 284 | 70.0,30.0,40.0,7.195791 285 | 60.0,30.0,40.0,6.026679 286 | 70.0,20.0,50.0,6.1267657 287 | 70.0,30.0,50.0,6.0773196 288 | 60.0,30.0,50.0,6.8498554 289 | 60.0,0.0,40.0,6.6612983 290 | 70.0,0.0,40.0,5.3416824 291 | 70.0,10.0,40.0,7.166074 292 | 60.0,10.0,40.0,7.094349 293 | 60.0,0.0,50.0,5.528999 294 | 70.0,0.0,50.0,7.8252263 295 | 70.0,10.0,50.0,7.1631303 296 | 40.0,0.0,40.0,7.89041 297 | 50.0,0.0,40.0,5.9148927 298 | 50.0,10.0,40.0,7.824318 299 | 40.0,10.0,40.0,5.226832 300 | 40.0,0.0,50.0,6.382409 301 | 50.0,0.0,50.0,5.3888574 302 | 40.0,10.0,50.0,5.0143623 303 | 20.0,20.0,50.0,7.881567 304 | 15.0,15.0,55.0,7.1526785 305 | 20.0,20.0,40.0,7.185422 306 | 30.0,20.0,40.0,7.702026 307 | 20.0,30.0,40.0,7.3096147 308 | 30.0,20.0,50.0,7.5490694 309 | 30.0,30.0,50.0,5.0988364 310 | 20.0,30.0,50.0,5.9305863 311 | 10.0,10.0,50.0,6.355421 312 | 5.0,5.0,45.0,5.1226244 313 | 20.0,10.0,50.0,7.39949 314 | 25.0,5.0,45.0,5.773465 315 | 10.0,20.0,50.0,7.3968596 316 | 5.0,25.0,45.0,7.220735 317 | 10.0,10.0,60.0,5.2339783 318 | 5.0,5.0,65.0,6.8849416 319 | 20.0,10.0,60.0,7.414807 320 | 25.0,5.0,65.0,7.309367 321 | 20.0,20.0,60.0,5.199789 322 | 25.0,25.0,65.0,7.3067584 323 | 
10.0,20.0,60.0,5.707911 324 | 5.0,25.0,65.0,7.5697026 325 | 0.0,20.0,60.0,7.892629 326 | 10.0,30.0,60.0,7.3426557 327 | 0.0,30.0,60.0,5.498974 328 | 0.0,20.0,70.0,6.6580596 329 | 10.0,20.0,70.0,6.2413044 330 | 10.0,30.0,70.0,5.454458 331 | 0.0,30.0,70.0,5.486219 332 | 30.0,20.0,60.0,5.3791027 333 | 30.0,30.0,60.0,7.6304617 334 | 20.0,30.0,60.0,7.2823777 335 | 20.0,20.0,70.0,7.994597 336 | 30.0,20.0,70.0,5.893169 337 | 30.0,30.0,70.0,5.681053 338 | 20.0,30.0,70.0,5.375485 339 | 20.0,0.0,60.0,5.0278535 340 | 30.0,0.0,60.0,6.596277 341 | 30.0,10.0,60.0,7.828338 342 | 20.0,0.0,70.0,6.9328957 343 | 30.0,0.0,70.0,7.1428995 344 | 30.0,10.0,70.0,6.4815965 345 | 20.0,10.0,70.0,6.745667 346 | 0.0,0.0,60.0,6.8442883 347 | 10.0,0.0,60.0,7.8332124 348 | 0.0,10.0,60.0,5.4807796 349 | 0.0,0.0,70.0,7.188834 350 | 10.0,0.0,70.0,6.8272815 351 | 10.0,10.0,70.0,5.5553493 352 | 0.0,10.0,70.0,5.0186105 353 | 0.0,20.0,40.0,7.656568 354 | 10.0,20.0,40.0,5.444116 355 | 10.0,30.0,40.0,7.47197 356 | 0.0,30.0,40.0,5.0449286 357 | 0.0,20.0,50.0,6.372166 358 | 10.0,30.0,50.0,6.9331913 359 | 0.0,30.0,50.0,5.1811385 360 | 20.0,0.0,40.0,5.2773447 361 | 30.0,0.0,40.0,7.5287633 362 | 30.0,10.0,40.0,7.9496083 363 | 20.0,10.0,40.0,6.345802 364 | 20.0,0.0,50.0,5.127469 365 | 30.0,0.0,50.0,5.352638 366 | 30.0,10.0,50.0,6.1449614 367 | 0.0,0.0,40.0,5.389288 368 | 10.0,0.0,40.0,5.176426 369 | 10.0,10.0,40.0,6.7272587 370 | 0.0,10.0,40.0,5.5583906 371 | 0.0,0.0,50.0,5.027744 372 | 10.0,0.0,50.0,7.7832594 373 | 0.0,10.0,50.0,6.611421 374 | 20.0,50.0,20.0,5.5559387 375 | 15.0,55.0,15.0,6.472899 376 | 20.0,40.0,20.0,5.3493896 377 | 30.0,40.0,20.0,5.4895453 378 | 30.0,50.0,20.0,7.0886574 379 | 20.0,40.0,30.0,5.328709 380 | 30.0,50.0,30.0,6.6975355 381 | 20.0,50.0,30.0,6.2607007 382 | 10.0,50.0,10.0,6.3862925 383 | 5.0,45.0,5.0,6.0739517 384 | 20.0,50.0,10.0,6.9150887 385 | 25.0,45.0,5.0,6.305426 386 | 20.0,60.0,10.0,5.3053107 387 | 25.0,65.0,5.0,6.77278 388 | 10.0,60.0,10.0,7.0190306 389 | 
5.0,65.0,5.0,7.1671743 390 | 10.0,50.0,20.0,7.4054475 391 | 5.0,45.0,25.0,5.9528956 392 | 20.0,60.0,20.0,6.2453756 393 | 25.0,65.0,25.0,5.986861 394 | 10.0,60.0,20.0,6.559955 395 | 5.0,65.0,25.0,5.059075 396 | 0.0,60.0,20.0,5.859459 397 | 10.0,70.0,20.0,5.855525 398 | 0.0,70.0,20.0,6.067783 399 | 0.0,60.0,30.0,5.944198 400 | 10.0,60.0,30.0,6.73583 401 | 10.0,70.0,30.0,7.0508046 402 | 0.0,70.0,30.0,5.806248 403 | 30.0,60.0,20.0,7.113931 404 | 30.0,70.0,20.0,7.768743 405 | 20.0,70.0,20.0,6.8511057 406 | 20.0,60.0,30.0,7.6635027 407 | 30.0,60.0,30.0,7.1037707 408 | 30.0,70.0,30.0,5.205009 409 | 20.0,70.0,30.0,6.502485 410 | 0.0,40.0,20.0,5.078964 411 | 10.0,40.0,20.0,7.1566367 412 | 0.0,50.0,20.0,7.4770427 413 | 0.0,40.0,30.0,7.2405014 414 | 10.0,40.0,30.0,6.5370474 415 | 10.0,50.0,30.0,6.374063 416 | 0.0,50.0,30.0,6.648256 417 | 0.0,60.0,0.0,6.821542 418 | 10.0,60.0,0.0,7.8724656 419 | 10.0,70.0,0.0,7.126649 420 | 0.0,70.0,0.0,5.3382545 421 | 0.0,60.0,10.0,6.67523 422 | 10.0,70.0,10.0,7.1545596 423 | 0.0,70.0,10.0,7.405872 424 | 20.0,60.0,0.0,7.811369 425 | 30.0,60.0,0.0,5.354061 426 | 30.0,70.0,0.0,5.4227295 427 | 20.0,70.0,0.0,7.5879984 428 | 30.0,60.0,10.0,5.7628646 429 | 30.0,70.0,10.0,6.997854 430 | 20.0,70.0,10.0,7.450177 431 | 20.0,40.0,0.0,5.4375234 432 | 30.0,40.0,0.0,6.8005533 433 | 30.0,50.0,0.0,5.675005 434 | 20.0,50.0,0.0,7.511979 435 | 20.0,40.0,10.0,5.980827 436 | 30.0,40.0,10.0,5.3145027 437 | 30.0,50.0,10.0,5.2505918 438 | 0.0,40.0,0.0,7.6102996 439 | 10.0,40.0,0.0,6.824515 440 | 10.0,50.0,0.0,6.047262 441 | 0.0,50.0,0.0,5.5825825 442 | 0.0,40.0,10.0,6.239404 443 | 10.0,40.0,10.0,6.568473 444 | 0.0,50.0,10.0,5.1333303 445 | 50.0,50.0,20.0,7.6270766 446 | 55.0,55.0,15.0,5.221479 447 | 40.0,40.0,20.0,7.408926 448 | 50.0,40.0,20.0,7.3023973 449 | 40.0,50.0,20.0,6.550499 450 | 50.0,40.0,30.0,7.9487796 451 | 50.0,50.0,30.0,5.4321756 452 | 40.0,50.0,30.0,7.698955 453 | 50.0,50.0,10.0,6.9885693 454 | 45.0,45.0,5.0,7.065945 455 | 60.0,50.0,10.0,7.5360937 456 
| 65.0,45.0,5.0,5.1273637 457 | 60.0,60.0,10.0,7.3341165 458 | 65.0,65.0,5.0,6.543441 459 | 50.0,60.0,10.0,5.922596 460 | 45.0,65.0,5.0,6.6406045 461 | 60.0,50.0,20.0,5.128289 462 | 65.0,45.0,25.0,6.0203023 463 | 60.0,60.0,20.0,5.001102 464 | 65.0,65.0,25.0,5.2057905 465 | 50.0,60.0,20.0,5.821198 466 | 45.0,65.0,25.0,5.6867228 467 | 40.0,60.0,20.0,6.3637695 468 | 50.0,70.0,20.0,5.628808 469 | 40.0,70.0,20.0,6.042379 470 | 40.0,60.0,30.0,6.362496 471 | 50.0,60.0,30.0,7.5956345 472 | 50.0,70.0,30.0,7.8651924 473 | 40.0,70.0,30.0,6.556777 474 | 70.0,60.0,20.0,6.2041645 475 | 70.0,70.0,20.0,6.8467917 476 | 60.0,70.0,20.0,7.839671 477 | 60.0,60.0,30.0,5.3994446 478 | 70.0,60.0,30.0,7.7536297 479 | 70.0,70.0,30.0,5.243161 480 | 60.0,70.0,30.0,6.442224 481 | 60.0,40.0,20.0,6.4298425 482 | 70.0,40.0,20.0,5.3362155 483 | 70.0,50.0,20.0,5.964266 484 | 60.0,40.0,30.0,5.854339 485 | 70.0,40.0,30.0,6.333876 486 | 70.0,50.0,30.0,7.790379 487 | 60.0,50.0,30.0,5.543803 488 | 40.0,60.0,0.0,5.5258265 489 | 50.0,60.0,0.0,5.6058993 490 | 50.0,70.0,0.0,7.08117 491 | 40.0,70.0,0.0,7.337462 492 | 40.0,60.0,10.0,6.4716473 493 | 50.0,70.0,10.0,6.8290596 494 | 40.0,70.0,10.0,5.638047 495 | 60.0,60.0,0.0,5.9463954 496 | 70.0,60.0,0.0,6.798744 497 | 70.0,70.0,0.0,6.2905426 498 | 60.0,70.0,0.0,7.727278 499 | 70.0,60.0,10.0,5.562083 500 | 70.0,70.0,10.0,7.0931854 501 | 60.0,70.0,10.0,7.911126 502 | 60.0,40.0,0.0,5.3038526 503 | 70.0,40.0,0.0,6.450502 504 | 70.0,50.0,0.0,5.9410853 505 | 60.0,50.0,0.0,6.5372252 506 | 60.0,40.0,10.0,5.9051046 507 | 70.0,40.0,10.0,7.585469 508 | 70.0,50.0,10.0,7.532981 509 | 40.0,40.0,0.0,6.021814 510 | 50.0,40.0,0.0,5.456141 511 | 50.0,50.0,0.0,7.3521757 512 | 40.0,50.0,0.0,7.2318134 513 | 40.0,40.0,10.0,7.90114 514 | 50.0,40.0,10.0,7.624527 515 | 40.0,50.0,10.0,6.666988 516 | 50.0,20.0,20.0,7.967265 517 | 55.0,15.0,15.0,7.6263537 518 | 40.0,20.0,20.0,7.4854417 519 | 50.0,30.0,20.0,7.558067 520 | 40.0,30.0,20.0,5.0983276 521 | 40.0,20.0,30.0,5.732471 522 | 
50.0,20.0,30.0,6.017284 523 | 50.0,30.0,30.0,5.5661964 524 | 50.0,10.0,10.0,5.336673 525 | 45.0,5.0,5.0,5.382497 526 | 60.0,10.0,10.0,5.5800037 527 | 65.0,5.0,5.0,7.5219226 528 | 60.0,20.0,10.0,6.0398235 529 | 65.0,25.0,5.0,5.1792736 530 | 50.0,20.0,10.0,6.519595 531 | 45.0,25.0,5.0,6.0508137 532 | 50.0,10.0,20.0,6.888384 533 | 45.0,5.0,25.0,7.759214 534 | 60.0,10.0,20.0,7.196427 535 | 65.0,5.0,25.0,7.8822994 536 | 60.0,20.0,20.0,7.670335 537 | 65.0,25.0,25.0,6.921694 538 | 70.0,20.0,20.0,7.4727993 539 | 70.0,30.0,20.0,6.8232255 540 | 60.0,30.0,20.0,6.4632936 541 | 60.0,20.0,30.0,5.0399485 542 | 70.0,20.0,30.0,6.8187857 543 | 70.0,30.0,30.0,7.967264 544 | 60.0,30.0,30.0,7.4543033 545 | 60.0,0.0,20.0,5.0850887 546 | 70.0,0.0,20.0,6.677745 547 | 70.0,10.0,20.0,7.6228485 548 | 60.0,0.0,30.0,7.114197 549 | 70.0,0.0,30.0,6.868905 550 | 70.0,10.0,30.0,7.867885 551 | 60.0,10.0,30.0,7.874838 552 | 40.0,0.0,20.0,6.64085 553 | 50.0,0.0,20.0,5.8808503 554 | 40.0,10.0,20.0,7.904613 555 | 40.0,0.0,30.0,5.678589 556 | 50.0,0.0,30.0,5.0472145 557 | 50.0,10.0,30.0,5.9775643 558 | 40.0,10.0,30.0,6.5075283 559 | 40.0,20.0,0.0,7.4329453 560 | 50.0,20.0,0.0,6.883267 561 | 50.0,30.0,0.0,7.7149467 562 | 40.0,30.0,0.0,7.2461667 563 | 40.0,20.0,10.0,6.683363 564 | 50.0,30.0,10.0,7.5096416 565 | 40.0,30.0,10.0,5.834151 566 | 60.0,20.0,0.0,6.105797 567 | 70.0,20.0,0.0,5.6578383 568 | 70.0,30.0,0.0,7.3671136 569 | 60.0,30.0,0.0,5.43272 570 | 70.0,20.0,10.0,7.52005 571 | 70.0,30.0,10.0,6.984733 572 | 60.0,30.0,10.0,5.1770697 573 | 60.0,0.0,0.0,7.5334115 574 | 70.0,0.0,0.0,7.3148084 575 | 70.0,10.0,0.0,6.6317644 576 | 60.0,10.0,0.0,7.9379735 577 | 60.0,0.0,10.0,5.2178 578 | 70.0,0.0,10.0,7.300008 579 | 70.0,10.0,10.0,5.7991114 580 | 40.0,0.0,0.0,6.408153 581 | 50.0,0.0,0.0,7.278351 582 | 50.0,10.0,0.0,5.5346026 583 | 40.0,10.0,0.0,5.513516 584 | 40.0,0.0,10.0,6.295528 585 | 50.0,0.0,10.0,5.9622436 586 | 40.0,10.0,10.0,5.2223735 587 | 
-------------------------------------------------------------------------------- /examples/data/graphs_folder/toy_graph.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/examples/data/graphs_folder/toy_graph.bin -------------------------------------------------------------------------------- /examples/data/graphs_folder/toy_graph.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/examples/data/graphs_folder/toy_graph.h5 -------------------------------------------------------------------------------- /examples/data/params.yaml: -------------------------------------------------------------------------------- 1 | dataset: 'toy_graph' 2 | 3 | data_folder: 'data/graphs_folder/' 4 | output_folder: 'output' 5 | 6 | blood_viscosity: 1.2e-6 # plasma viscosity in g.µm^-1.s^-1 7 | 8 | vasc_axis: 1 # vasculature axis corresponding to x, y, or z. Should be set to 0, 1, or 2. 9 | depth_ratio: 0.05 # Depth along the vasc_axis. This is the portion of the vasculature where there are inputs. 10 | max_nb_inputs: 3 # maximum number of inputs to inject flow/pressure into vasculature. Should be >= 1. 11 | 12 | base_pressure: 1.33e-3 # reference pressure in g * um^{-1} * s^{-2}. At resting state equal to the external pressure 13 | 14 | # Enable/Disable endfeet activity on entry nodes. 15 | entry_noise: true 16 | 17 | ### OU calibration parameters 18 | 19 | threshold_r: 3 # Radius (in micro-meters) threshold. 20 | # A radius smaller than the threshold is considered a capillary. 21 | # A radius bigger than the threshold is considered an artery. 
22 | 23 | c_cap: 2.8 # constant used in the ROU parameter calibration for capillaries 24 | c_art: 2.8 # constant used in the ROU parameter calibration for arteries 25 | 26 | # Capillaries 27 | max_r_capill: 1.38 # max radius change factor 28 | t_2_max_capill: 2.7 # time (in seconds) to reach r_max from 0 29 | 30 | # Arteries 31 | max_r_artery: 1.23 # max radius change factor 32 | t_2_max_artery: 3.3 # time (in seconds) to reach r_max from 0 33 | 34 | # PETSc Linear solver 35 | solver: 'lgmres' 36 | max_it: 1000 37 | r_tol: 1.0e-12 38 | -------------------------------------------------------------------------------- /examples/job_script.sbatch: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # for the full mouse brain vasculature, we need at least 12 nodes 4 | #SBATCH --job-name="1_BF" 5 | #SBATCH --nodes=8 6 | #SBATCH --ntasks-per-node=32 7 | 8 | #SBATCH --account=proj16 9 | #SBATCH --partition=prod 10 | #SBATCH --constraint=cpu 11 | #SBATCH --time=00:25:00 12 | 13 | #SBATCH --cpus-per-task=2 14 | #SBATCH --exclusive 15 | #SBATCH --mem=0 16 | #SBATCH --output="%x-%j.log" 17 | 18 | JOB_SCRIPT=$(scontrol show job ${SLURM_JOB_ID} | awk -F= '/Command=/{print $2}') 19 | JOB_SCRIPT_DIR=$(dirname ${JOB_SCRIPT}) 20 | 21 | SETUP_SCRIPT="${JOB_SCRIPT_DIR}/../setup.sh" 22 | if [[ ! -f ${SETUP_SCRIPT} ]]; then 23 | >&2 echo "[ERROR] The 'setup.sh' script could not be found!" 
24 | exit -1 25 | fi 26 | 27 | source ${SETUP_SCRIPT} 28 | 29 | echo 30 | echo "### Simulation Start" 31 | echo 32 | # time srun dplace python "${JOB_SCRIPT_DIR}/compute_static_flow_pressure.py" 33 | time srun dplace python "${JOB_SCRIPT_DIR}/simulate_OU_process.py" 34 | -------------------------------------------------------------------------------- /examples/load_graph_archngv.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import multiprocessing 3 | import pickle 4 | from functools import partial 5 | from pathlib import Path 6 | 7 | import numpy as np 8 | import pandas as pd 9 | import psutil 10 | from archngv import NGVCircuit 11 | from joblib import Parallel, delayed, parallel_config 12 | from tqdm import tqdm 13 | 14 | from astrovascpy import bloodflow 15 | from astrovascpy.exceptions import BloodFlowError 16 | from astrovascpy.utils import Graph 17 | 18 | 19 | def load_graph_archngv_parallel( 20 | filename, n_workers, n_astro=None, parallelization_backend="multiprocessing" 21 | ): 22 | """Load a vasculature from an NGV circuit. 23 | 24 | Args: 25 | filename (str): vasculature dataset. 26 | n_workers (int): number of processes to set endfeet on edges. 27 | n_astro (int): for testing, if not None, it will reduce the number of astrocytes used 28 | parallelization_backend (str): Either multiprocessing or joblib 29 | 30 | Returns: 31 | vasculatureAPI.PointVasculature: graph containing point vasculature skeleton. 32 | 33 | Raises: 34 | BloodFlowError: if the file object identified by filename is not in h5 format. 
35 | """ 36 | if not Path(filename).exists(): 37 | raise BloodFlowError("File provided does not exist") 38 | circuit = NGVCircuit(filename) 39 | pv = circuit.vasculature.point_graph 40 | graph = Graph.from_point_vasculature(pv) 41 | graph.edge_properties.index = pd.MultiIndex.from_frame( 42 | graph.edge_properties.loc[:, ["section_id", "segment_id"]] 43 | ) 44 | gv_conn = circuit.gliovascular_connectome 45 | worker = partial(bloodflow.get_closest_edges, graph=graph) 46 | 47 | args = ( 48 | ( 49 | gv_conn.vasculature_sections_segments(endfoot_id).vasculature_section_id.values[0], 50 | gv_conn.vasculature_sections_segments(endfoot_id).vasculature_segment_id.values[0], 51 | gv_conn.get(endfoot_id, ["endfoot_compartment_length"]).values[0], 52 | ) 53 | for astro_id in np.arange(n_astro or circuit.astrocytes.size) 54 | for endfoot_id in gv_conn.astrocyte_endfeet(astro_id) 55 | ) 56 | endfoot_ids = [ 57 | endfoot_id 58 | for astro_id in np.arange(n_astro or circuit.astrocytes.size) 59 | for endfoot_id in gv_conn.astrocyte_endfeet(astro_id) 60 | ] 61 | 62 | if parallelization_backend == "multiprocessing": 63 | with multiprocessing.Pool(n_workers) as pool: 64 | for result_ids, result_endfeet in zip( 65 | tqdm( 66 | pool.imap(worker, args, chunksize=max(1, int(len(endfoot_ids) / n_workers))), 67 | total=len(endfoot_ids), 68 | ), 69 | endfoot_ids, 70 | ): 71 | # Only the main process executes this part, i.e. 
as soon as it receives the parallelly generated data 72 | graph.edge_properties.loc[pd.MultiIndex.from_arrays(result_ids.T), "endfeet_id"] = ( 73 | result_endfeet 74 | ) 75 | 76 | elif parallelization_backend == "joblib": 77 | with parallel_config( 78 | backend="loky", prefer="processes", n_jobs=n_workers, inner_max_num_threads=1 79 | ): 80 | parallel = Parallel(return_as="generator", batch_size="auto") 81 | parallelized_region = parallel( 82 | delayed(worker)(arg) for arg in tqdm(args, total=len(endfoot_ids)) 83 | ) 84 | 85 | for result_ids, result_endfeet in zip(parallelized_region, endfoot_ids): 86 | # Only the main process executes this part, i.e. as soon as it receives the parallelly generated data 87 | graph.edge_properties.loc[pd.MultiIndex.from_arrays(result_ids.T), "endfeet_id"] = ( 88 | result_endfeet 89 | ) 90 | 91 | else: 92 | raise BloodFlowError( 93 | f"parallelization_backend={parallelization_backend} invalid option. Use 'joblib' or 'multiprocessing'." 94 | ) 95 | 96 | return graph 97 | 98 | 99 | def main(): 100 | global print 101 | print = partial(print, flush=True) 102 | 103 | parser = argparse.ArgumentParser(description="File paths for NGVCircuits and output graph.") 104 | parser.add_argument( 105 | "--filename_ngv", type=str, required=True, help="Path to the NGV circuits file" 106 | ) 107 | parser.add_argument( 108 | "--output_graph", type=str, required=True, help="Path to the output graph file" 109 | ) 110 | args = parser.parse_args() 111 | 112 | filename_ngv = args.filename_ngv 113 | output_graph = args.output_graph 114 | 115 | n_cores = psutil.cpu_count(logical=False) 116 | print(f"number of physical CPU cores = {n_cores}") 117 | 118 | print(f"NGV Circuits file: {filename_ngv}") 119 | print("loading circuit : start") 120 | graph = load_graph_archngv_parallel( 121 | filename_ngv, n_workers=n_cores 122 | ) # n_astro=50 for debugging (smaller processing needs) 123 | print("loading circuit : finish") 124 | 125 | print("pickle graph : start") 126 | 
filehandler = open(output_graph, "wb") 127 | pickle.dump(graph, filehandler) 128 | print("pickle graph : finish") 129 | print(f"Graph file: {output_graph}") 130 | 131 | 132 | if __name__ == "__main__": 133 | main() 134 | -------------------------------------------------------------------------------- /examples/load_graph_archngv.sbatch: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #SBATCH --job-name="archngv" 4 | #SBATCH --nodes=1 5 | 6 | #SBATCH --account=proj16 7 | #SBATCH --partition=prod 8 | #SBATCH --constraint=cpu 9 | #SBATCH --time=00:30:00 10 | 11 | #SBATCH --cpus-per-task=2 12 | #SBATCH --exclusive 13 | #SBATCH --mem=0 14 | #SBATCH --output="%x-%j.log" 15 | 16 | JOB_SCRIPT=$(scontrol show job ${SLURM_JOB_ID} | awk -F= '/Command=/{print $2}') 17 | JOB_SCRIPT_DIR=$(dirname ${JOB_SCRIPT}) 18 | 19 | SETUP_SCRIPT="${JOB_SCRIPT_DIR}/../setup.sh" 20 | if [[ ! -f ${SETUP_SCRIPT} ]]; then 21 | >&2 echo "[ERROR] The 'setup.sh' script could not be found!" 
22 | exit 2 23 | fi 24 | 25 | source ${SETUP_SCRIPT} 26 | 27 | FILENAME_NGV="/gpfs/bbp.cscs.ch/project/proj137/NGVCircuits/rat_O1" 28 | 29 | GRAPH_PATH="./data/graphs_folder/dumped_graph.bin" 30 | 31 | echo 32 | echo "### Loading graph" 33 | echo 34 | # It is imperative to use srun and dplace, otherwise the Python processes 35 | # do not work properly (possible deadlocks and/or performance degradation) 36 | time srun -n 1 --mpi=none dplace python ${JOB_SCRIPT_DIR}/load_graph_archngv.py --filename_ngv ${FILENAME_NGV} --output_graph ${GRAPH_PATH} 37 | -------------------------------------------------------------------------------- /examples/simulate_OU_process.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # coding: utf-8 3 | 4 | import sys 5 | from functools import partial 6 | from pathlib import Path 7 | 8 | import numpy as np 9 | import petsc4py 10 | import yaml 11 | from mpi4py import MPI 12 | from petsc4py import PETSc 13 | 14 | from astrovascpy.bloodflow import generate_endfeet, simulate_ou_process 15 | 16 | # from astrovascpy.io import load_graph_from_bin 17 | # from astrovascpy.io import load_graph_from_h5 18 | from astrovascpy.io import load_graph_from_csv 19 | from astrovascpy.report_writer import write_simulation_report 20 | from astrovascpy.utils import create_entry_largest_nodes, create_input_speed, mpi_mem, mpi_timer 21 | 22 | petsc4py.init(sys.argv) 23 | 24 | MPI_COMM = MPI.COMM_WORLD 25 | MPI_RANK = MPI_COMM.Get_rank() 26 | MPI_SIZE = MPI_COMM.Get_size() 27 | PETSc.Sys.Print(f"Number of MPI tasks = {MPI_SIZE}") 28 | 29 | 30 | print = partial(print, flush=True) 31 | 32 | save_vtk = False 33 | save_sonata = True 34 | 35 | curr_dir = Path(__file__).resolve().parent 36 | params = yaml.full_load(open(str(curr_dir / "data/params.yaml"))) 37 | 38 | output_path = Path(curr_dir / params["output_folder"]) 39 | 40 | if MPI_RANK == 0 and not output_path.exists(): 41 | output_path.mkdir() 42 | 43 
| ############################################################################################ 44 | PETSc.Sys.Print("loading circuit") 45 | 46 | node_dataset = curr_dir / "data/graphs_folder/node_dataset.csv" 47 | edge_dataset = curr_dir / "data/graphs_folder/edge_dataset.csv" 48 | graph_sonata = curr_dir / "data/graphs_folder/toy_graph.h5" 49 | graph_bin = curr_dir / "data/graphs_folder/toy_graph.bin" 50 | 51 | with mpi_timer.region("loading circuit"), mpi_mem.region("loading circuit"): 52 | graph = load_graph_from_csv(node_filename=node_dataset, edge_filename=edge_dataset) 53 | # Uncomment the following if you want to import with different methods 54 | # 55 | # graph = load_graph_from_h5(filename=graph_sonata) 56 | # graph = load_graph_from_bin(graph_bin) 57 | 58 | GEN_ENDFEET = True 59 | COVERAGE = 0.7 # percentage of endfeet coverage 60 | if GEN_ENDFEET: 61 | generate_endfeet(graph, endfeet_coverage=COVERAGE, seed=42) 62 | 63 | PETSc.Sys.Print("compute entry nodes") 64 | 65 | entry_nodes = create_entry_largest_nodes(graph, params) 66 | PETSc.Sys.Print("entry nodes: ", entry_nodes) 67 | 68 | 69 | PETSc.Sys.Print("simulate astrovascpy") 70 | 71 | simulation_time = 5 # seconds 72 | time_step = 0.01 73 | 74 | # Flag to enable the relaxation phase. 75 | # Set 'RELAXATION = True' if you want to stop the radii perturbation (set noise to zero) 76 | # at the time 'relaxation_start'. 77 | # Set relaxation_start = 0 if you don't want noise at all. 
78 | RELAXATION = False # True 79 | if RELAXATION: 80 | relaxation_start = 3.0 # relaxation starting time 81 | else: 82 | relaxation_start = simulation_time # No relaxation 83 | 84 | # Flag to enable sine wave input flow 85 | SINE_INFLOW = True 86 | if SINE_INFLOW: 87 | A = 6119 # Amplitude of the sine wave 88 | else: 89 | A = 0 90 | 91 | # Blood speed on the entry nodes 92 | if graph is not None: 93 | entry_speed = create_input_speed( 94 | T=simulation_time, step=time_step, A=A, f=8, C=35000, read_from_file=None 95 | ) 96 | else: 97 | entry_speed = None 98 | 99 | with mpi_timer.region("simulate astrovascpy"), mpi_mem.region("simulate astrovascpy"): 100 | flows, pressures, radiii = simulate_ou_process( 101 | graph, entry_nodes, simulation_time, relaxation_start, time_step, entry_speed, params 102 | ) 103 | 104 | if graph is not None: 105 | points = graph.node_properties[["x", "y", "z"]].to_numpy() 106 | pressures = np.mean(pressures[:, graph.edges], axis=2) 107 | # The pressure operation above is equivalent to: 108 | # pressure_edge = [] 109 | # for u, v in graph.edges: 110 | # pressure_edge.append(0.5 * (pressure[u] + pressure[v])) 111 | 112 | ############################################################################################ 113 | 114 | if graph is not None: 115 | if save_sonata: 116 | sonata_path = Path(curr_dir / params["output_folder"]) / "sonata_files" 117 | if not sonata_path.exists(): 118 | Path.mkdir(sonata_path) 119 | print("start sonata reporting", flush=True) 120 | filename = ( 121 | sonata_path 122 | / "simulate_ou_process_2_8_sigma_10_seconds_january_9_3_entry_nodes_relaxation" 123 | ) 124 | if not filename.exists(): 125 | Path.mkdir(filename) 126 | write_simulation_report( 127 | np.arange(graph.n_edges), 128 | filename, 129 | 0, 130 | simulation_time, 131 | time_step, 132 | flows, 133 | pressures, 134 | radiii, 135 | np.power(radiii, 2) * np.pi * graph.edge_properties["length"].to_numpy(), 136 | ) 137 | print("end of sonata reporting", 
flush=True) 138 | 139 | if save_vtk: 140 | from astrovascpy.vtk_io import vtk_writer 141 | 142 | vtk_path = Path(params["output_folder"]) / "vtk_files" 143 | if not vtk_path.exists(): 144 | Path.mkdir(vtk_path) 145 | 146 | print("start vtk saving", flush=True) 147 | filename = vtk_path / "simulate_ou_process" 148 | if not filename.exists(): 149 | Path.mkdir(filename) 150 | 151 | for i, (flow, pressure, radii) in enumerate(zip(flows, pressures, radiii)): 152 | vtk_writer( 153 | str(filename / ("step_" + str(i))), 154 | points, 155 | graph.edges, 156 | radii, 157 | np.zeros(graph.n_edges), 158 | extra_properties={"flow": flow, "pressure": pressure, "radii": radii}, 159 | ) 160 | print("end of vtk saving", flush=True) 161 | 162 | ############################################################################################ 163 | 164 | mpi_timer.print() 165 | mpi_mem.print() 166 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bloodflow", 3 | "auto-changelog": { 4 | "replaceText": { 5 | "([bB]reaking: ?)": "", 6 | "([bB]reaking change: ?)": "", 7 | "(^[bB]uild: ?)": "", 8 | "(^[bB]uild\\((.*)\\): ?)": "\\($2\\)", 9 | "(^[cC]hore: ?)": "", 10 | "(^[cC]hore\\((.*)\\): ?)": "\\($2\\)", 11 | "(^[cC][iI]: ?)": "", 12 | "(^[cC][iI]\\((.*)\\): ?)": "\\($2\\)", 13 | "(^[dD]ocs: ?)": "", 14 | "(^[dD]ocs\\((.*)\\): ?)": "\\($2\\)", 15 | "(^[fF]eat: ?)": "", 16 | "(^[fF]eat\\((.*)\\): ?)": "\\($2\\)", 17 | "(^[fF]ix: ?)": "", 18 | "(^[fF]ix\\((.*)\\): ?)": "\\($2\\)", 19 | "(^[pP]erf: ?)": "", 20 | "(^[pP]erf\\((.*)\\): ?)": "\\($2\\)", 21 | "(^[rR]efactor: ?)": "", 22 | "(^[rR]efactor\\((.*)\\): ?)": "\\($2\\)", 23 | "(^[rR]evert: ?)": "", 24 | "(^[rR]evert\\((.*)\\): ?)": "\\($2\\)", 25 | "(^[sS]tyle: ?)": "", 26 | "(^[sS]tyle\\((.*)\\): ?)": "\\($2\\)", 27 | "(^[tT]est: ?)": "", 28 | "(^[tT]est\\((.*)\\): ?)": "\\($2\\)" 29 | } 30 | 
} 31 | } 32 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # SETUPTOOLS 2 | [build-system] 3 | requires = [ 4 | "setuptools>=42", 5 | "wheel", 6 | "setuptools_scm[toml]>=3.4", 7 | ] 8 | 9 | # BLACK 10 | [tool.black] 11 | line-length = 100 12 | target-version = [ 13 | "py310", 14 | "py311" 15 | ] 16 | 17 | # PYDOCSTYLE 18 | [tool.pydocstyle] 19 | # ignore the following: 20 | # - D107: Missing docstring in __init__ 21 | add-ignore = [ 22 | "D107", 23 | ] 24 | convention = "google" 25 | 26 | # ISORT 27 | [tool.isort] 28 | profile = "black" 29 | line_length = 100 30 | force_single_line = true 31 | 32 | # PYTEST 33 | [tool.pytest.ini_options] 34 | testpaths = [ 35 | "tests", 36 | ] 37 | 38 | [tool.ruff] 39 | # Exclude a variety of commonly ignored directories. 40 | exclude = [ 41 | ".bzr", 42 | ".direnv", 43 | ".eggs", 44 | ".git", 45 | ".git-rewrite", 46 | ".hg", 47 | ".ipynb_checkpoints", 48 | ".mypy_cache", 49 | ".nox", 50 | ".pants.d", 51 | ".pyenv", 52 | ".pytest_cache", 53 | ".pytype", 54 | ".ruff_cache", 55 | ".svn", 56 | ".tox", 57 | ".venv", 58 | ".vscode", 59 | "__pypackages__", 60 | "_build", 61 | "buck-out", 62 | "build", 63 | "dist", 64 | "node_modules", 65 | "site-packages", 66 | "venv", 67 | ] 68 | 69 | # Same as Black. 70 | line-length = 100 71 | indent-width = 4 72 | 73 | [tool.ruff.lint] 74 | select = ["E4", "E7", "E9", "F", "I"] 75 | ignore = ["F401", "F403", "F405", "E741", "E721"] 76 | 77 | # Allow fix for all enabled rules (when `--fix`) is provided. 78 | fixable = ["ALL"] 79 | unfixable = [] 80 | 81 | # Allow unused variables when underscore-prefixed. 82 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" 83 | 84 | [tool.ruff.format] 85 | # Like Black, use double quotes for strings. 86 | quote-style = "double" 87 | 88 | # Like Black, indent with spaces, rather than tabs. 
89 | indent-style = "space" 90 | 91 | # Like Black, respect magic trailing commas. 92 | skip-magic-trailing-comma = false 93 | 94 | # Like Black, automatically detect the appropriate line ending. 95 | line-ending = "auto" 96 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """Setup for the astrovascpy package.""" 2 | 3 | import importlib.util 4 | from pathlib import Path 5 | 6 | from setuptools import find_namespace_packages, setup 7 | 8 | spec = importlib.util.spec_from_file_location( 9 | "astrovascpy.version", 10 | "astrovascpy/version.py", 11 | ) 12 | module = importlib.util.module_from_spec(spec) 13 | spec.loader.exec_module(module) 14 | VERSION = module.VERSION 15 | 16 | reqs = [ 17 | "cached-property", 18 | "click", 19 | "coverage", 20 | "cython", 21 | "h5py", 22 | "libsonata", 23 | "matplotlib", 24 | "morphio", 25 | "mpi4py", 26 | "networkx", 27 | "numpy", 28 | "pandas", 29 | "psutil", 30 | "pyyaml", 31 | "scipy", 32 | "seaborn", 33 | "tables", 34 | "tqdm", 35 | "trimesh", 36 | "vascpy", 37 | ] 38 | 39 | doc_reqs = [ 40 | "sphinx-mdinclude", 41 | "sphinx", 42 | "sphinx-bluebrain-theme", 43 | "sphinx-click", 44 | ] 45 | 46 | test_reqs = [ 47 | "pytest", 48 | "pytest-mpi", 49 | ] 50 | 51 | setup( 52 | name="AstroVascPy", 53 | description="Simulating blood flow in vasculature", 54 | author="Blue Brain Project, EPFL", 55 | long_description=Path("README.md").read_text(encoding="utf-8"), 56 | long_description_content_type="text/markdown", 57 | url="https://github.com/BlueBrain/AstroVascPy", 58 | project_urls={ 59 | "Tracker": "https://github.com/BlueBrain/AstroVascpy/issues", 60 | "Source": "https://github.com/BlueBrain/AstroVascPy", 61 | }, 62 | license="Apache-2", 63 | packages=find_namespace_packages(include=["astrovascpy*"]), 64 | python_requires=">=3.11", 65 | version=VERSION, 66 | install_requires=reqs, 67 | extras_require={ 68 | 
"docs": doc_reqs, 69 | "test": test_reqs, 70 | "viz": ["vtk"], 71 | }, 72 | include_package_data=True, 73 | classifiers=[ 74 | "Development Status :: 5 - Production/Stable", 75 | "Intended Audience :: Education", 76 | "Intended Audience :: Science/Research", 77 | "Programming Language :: Python :: 3", 78 | "Topic :: Scientific/Engineering :: Bio-Informatics", 79 | ], 80 | ) 81 | -------------------------------------------------------------------------------- /setup.sh: -------------------------------------------------------------------------------- 1 | echo 2 | echo "### setup/set env started" 3 | echo 4 | 5 | SETUP_DIR=$(dirname ${BASH_SOURCE[0]}) 6 | 7 | if command -v module &> /dev/null 8 | then 9 | module purge 10 | module load unstable git python gcc hpe-mpi py-mpi4py petsc py-petsc4py 11 | 12 | else 13 | if command -v conda &> /dev/null 14 | then 15 | if conda env list | grep bfs_env >/dev/null 2>/dev/null; 16 | then 17 | echo "Conda setup: Done" 18 | conda activate bfs_env 19 | 20 | echo 21 | echo "If you want to purge the current env, follow the steps below: " 22 | echo "1. conda deactivate" 23 | echo "2. conda remove -y --name bfs_env --all" 24 | echo 25 | else 26 | conda create -y --name bfs_env python=3.11.6 27 | conda activate bfs_env 28 | conda install -y pip 29 | 30 | conda install -y -c conda-forge mpi mpi4py petsc petsc4py 31 | "$CONDA_PREFIX/bin/pip" install tox joblib archngv 32 | # If complex number support is needed 33 | #conda install -y -c conda-forge mpi mpi4py "petsc=*=*complex*" "petsc4py=*=*complex*" 34 | fi 35 | # Environment variables 36 | CONDA_PACKAGES=$($CONDA_PREFIX/bin/python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])') 37 | export PYTHONPATH=$CONDA_PACKAGES:$PYTHONPATH 38 | else 39 | echo 40 | echo "Please install Conda, and then proceed to the installation of AstroVascPy." 41 | echo "!! EXITING !!" 
42 | echo 43 | exit 44 | fi 45 | fi 46 | 47 | if command -v module &> /dev/null 48 | then 49 | echo 50 | echo "### python-venv [Python Virtual Environment]" 51 | echo 52 | # Export proxy configuration before trying to 'pip install' from a compute node 53 | export HTTP_PROXY="http://bbpproxy.epfl.ch:80/" 54 | export HTTPS_PROXY="http://bbpproxy.epfl.ch:80/" 55 | export http_proxy="http://bbpproxy.epfl.ch:80/" 56 | export https_proxy="http://bbpproxy.epfl.ch:80/" 57 | 58 | if [ -d "python-venv" ] 59 | then 60 | echo "python-venv already set" 61 | source python-venv/bin/activate 62 | else 63 | python3 -m venv --prompt astrovascpy ${SETUP_DIR}/python-venv 64 | source ${SETUP_DIR}/python-venv/bin/activate 65 | python3 -m pip install --upgrade pip 66 | fi 67 | pip3 install -e ${SETUP_DIR} 68 | pip3 install tox joblib archngv 69 | else 70 | conda_bin=`conda info | grep "active env location" | grep -o "/.*"`/bin 71 | $conda_bin/pip install -e ${SETUP_DIR} 72 | fi 73 | 74 | # Backend solver/library for the linear systems 75 | # petsc or scipy 76 | export BACKEND_SOLVER_BFS='scipy' 77 | 78 | # Run the SciPy solver and compare the result with the PETSc one [which is the default]! 
79 | # 0 : False / 1 : True 80 | export DEBUG_BFS=0 81 | 82 | # Show PETSc progress or not 83 | # 0 : False / 1 : True 84 | export VERBOSE_BFS=0 85 | 86 | echo 87 | echo "### setup finished" 88 | echo 89 | 90 | echo 91 | echo "--> Now you could go to the examples folder and run your first example as: " 92 | echo "--> conda_bin/mpirun (or srun) -n number_of_mpi_tasks python compute_static_flow_pressure.py" 93 | echo 94 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /tests/data/dataset.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/tests/data/dataset.h5 -------------------------------------------------------------------------------- /tests/data/input_flow/sine.csv: -------------------------------------------------------------------------------- 1 | 0.000000000000000000e+00 2 | 6.279051952931337388e-02 3 | 1.253332335643042583e-01 4 | 1.873813145857246287e-01 5 | 2.486898871648547948e-01 6 | 3.090169943749473958e-01 7 | 3.681245526846779748e-01 8 | 4.257792915650727150e-01 9 | 4.817536741017153235e-01 10 | 5.358267949789966567e-01 11 | 5.877852522924731371e-01 12 | 6.374239897486897455e-01 13 | 6.845471059286887261e-01 14 | 7.289686274214115524e-01 15 | 7.705132427757892533e-01 16 | 8.090169943749474513e-01 17 | 8.443279255020150753e-01 18 | 8.763066800438636950e-01 19 | 9.048270524660195768e-01 20 | 9.297764858882514583e-01 21 | 9.510565162951535312e-01 22 | 9.685831611286310761e-01 23 | 9.822872507286887211e-01 24 | 9.921147013144778759e-01 25 | 9.980267284282715590e-01 26 | 1.000000000000000000e+00 27 | 9.980267284282715590e-01 28 | 9.921147013144777649e-01 29 | 9.822872507286886101e-01 30 | 
9.685831611286310761e-01 31 | 9.510565162951535312e-01 32 | 9.297764858882513472e-01 33 | 9.048270524660194658e-01 34 | 8.763066800438634729e-01 35 | 8.443279255020149643e-01 36 | 8.090169943749474513e-01 37 | 7.705132427757892533e-01 38 | 7.289686274214114414e-01 39 | 6.845471059286885041e-01 40 | 6.374239897486895234e-01 41 | 5.877852522924732481e-01 42 | 5.358267949789966567e-01 43 | 4.817536741017152124e-01 44 | 4.257792915650724930e-01 45 | 3.681245526846777527e-01 46 | 3.090169943749471182e-01 47 | 2.486898871648548226e-01 48 | 1.873813145857245732e-01 49 | 1.253332335643040918e-01 50 | 6.279051952931313796e-02 51 | -3.216245299353273201e-16 52 | -6.279051952931334613e-02 53 | -1.253332335643042861e-01 54 | -1.873813145857247675e-01 55 | -2.486898871648550169e-01 56 | -3.090169943749477288e-01 57 | -3.681245526846783078e-01 58 | -4.257792915650726595e-01 59 | -4.817536741017153790e-01 60 | -5.358267949789967677e-01 61 | -5.877852522924733591e-01 62 | -6.374239897486899675e-01 63 | -6.845471059286887261e-01 64 | -7.289686274214116635e-01 65 | -7.705132427757893643e-01 66 | -8.090169943749473402e-01 67 | -8.443279255020152974e-01 68 | -8.763066800438635839e-01 69 | -9.048270524660197989e-01 70 | -9.297764858882514583e-01 71 | -9.510565162951535312e-01 72 | -9.685831611286311871e-01 73 | -9.822872507286887211e-01 74 | -9.921147013144778759e-01 75 | -9.980267284282715590e-01 76 | -1.000000000000000000e+00 77 | -9.980267284282715590e-01 78 | -9.921147013144778759e-01 79 | -9.822872507286886101e-01 80 | -9.685831611286310761e-01 81 | -9.510565162951536422e-01 82 | -9.297764858882512362e-01 83 | -9.048270524660195768e-01 84 | -8.763066800438633619e-01 85 | -8.443279255020149643e-01 86 | -8.090169943749470072e-01 87 | -7.705132427757890312e-01 88 | -7.289686274214115524e-01 89 | -6.845471059286882820e-01 90 | -6.374239897486896345e-01 91 | -5.877852522924725820e-01 92 | -5.358267949789963236e-01 93 | -4.817536741017153235e-01 94 | -4.257792915650722154e-01 95 | 
-3.681245526846778637e-01 96 | -3.090169943749467851e-01 97 | -2.486898871648544895e-01 98 | -1.873813145857246842e-01 99 | -1.253332335643037865e-01 100 | -6.279051952931326286e-02 101 | -2.449293598294706414e-16 102 | -------------------------------------------------------------------------------- /tests/data/node_dataset.csv: -------------------------------------------------------------------------------- 1 | x,y,z,diameter 2 | 30.0,20.0,20.0,7.4542317 3 | 25.0,25.0,25.0,5.932791 4 | 30.0,30.0,30.0,7.829187 5 | 35.0,35.0,35.0,5.3152843 6 | 20.0,20.0,20.0,5.1605887 7 | 15.0,15.0,15.0,7.9477215 8 | 30.0,30.0,20.0,6.843039 9 | 20.0,30.0,20.0,5.6644707 10 | 20.0,20.0,30.0,5.1327558 11 | 30.0,20.0,30.0,6.2937737 12 | 20.0,30.0,30.0,7.0178814 13 | 10.0,10.0,10.0,6.2519994 14 | 5.0,5.0,5.0,6.758832 15 | 20.0,10.0,10.0,6.149199 16 | 25.0,5.0,5.0,6.0459957 17 | 20.0,20.0,10.0,6.179367 18 | 25.0,25.0,5.0,7.5464993 19 | 10.0,20.0,10.0,6.7691355 20 | 5.0,25.0,5.0,7.414635 21 | 10.0,10.0,20.0,7.644702 22 | 5.0,5.0,25.0,7.9950647 23 | 20.0,10.0,20.0,7.7871985 24 | 25.0,5.0,25.0,7.541923 25 | 10.0,20.0,20.0,5.544867 26 | 5.0,25.0,25.0,6.2433696 27 | 0.0,20.0,20.0,7.92168 28 | 10.0,30.0,20.0,7.1019 29 | 0.0,30.0,20.0,7.5254474 30 | 0.0,20.0,30.0,6.700008 31 | 10.0,20.0,30.0,6.430404 32 | 10.0,30.0,30.0,6.8656473 33 | 0.0,30.0,30.0,6.5862246 34 | 20.0,0.0,20.0,5.622538 35 | 30.0,0.0,20.0,7.365405 36 | 30.0,10.0,20.0,5.8200464 37 | 20.0,0.0,30.0,7.6613946 38 | 30.0,0.0,30.0,5.496637 39 | 30.0,10.0,30.0,6.99788 40 | 20.0,10.0,30.0,5.2526336 41 | 0.0,0.0,20.0,6.1528196 42 | 10.0,0.0,20.0,7.110221 43 | 0.0,10.0,20.0,6.059225 44 | 0.0,0.0,30.0,5.4632764 45 | 10.0,0.0,30.0,5.9380693 46 | 10.0,10.0,30.0,7.6529727 47 | 0.0,10.0,30.0,7.875597 48 | 0.0,20.0,0.0,7.672293 49 | 10.0,20.0,0.0,7.4183726 50 | 10.0,30.0,0.0,5.321903 51 | 0.0,30.0,0.0,5.02718 52 | 0.0,20.0,10.0,5.5751724 53 | 10.0,30.0,10.0,5.811432 54 | 0.0,30.0,10.0,6.848549 55 | 20.0,20.0,0.0,7.4772143 56 | 
30.0,20.0,0.0,7.4552665 57 | 30.0,30.0,0.0,5.2413454 58 | 20.0,30.0,0.0,6.8036833 59 | 30.0,20.0,10.0,7.5037594 60 | 30.0,30.0,10.0,5.7139177 61 | 20.0,30.0,10.0,7.2857795 62 | 20.0,0.0,0.0,5.329049 63 | 30.0,0.0,0.0,5.9650927 64 | 30.0,10.0,0.0,6.279782 65 | 20.0,10.0,0.0,5.073644 66 | 20.0,0.0,10.0,6.1649995 67 | 30.0,0.0,10.0,5.282367 68 | 30.0,10.0,10.0,6.480736 69 | 0.0,0.0,0.0,5.651837 70 | 10.0,0.0,0.0,7.228695 71 | 10.0,10.0,0.0,5.588002 72 | 0.0,10.0,0.0,5.858989 73 | 0.0,0.0,10.0,5.5002246 74 | 10.0,0.0,10.0,5.5180902 75 | 0.0,10.0,10.0,6.44466 76 | 40.0,30.0,30.0,7.9719553 77 | 45.0,25.0,25.0,6.4953046 78 | 40.0,40.0,30.0,5.596677 79 | 45.0,45.0,25.0,7.105357 80 | 30.0,40.0,30.0,6.9705153 81 | 25.0,45.0,25.0,5.4153104 82 | 30.0,30.0,40.0,5.319486 83 | 25.0,25.0,45.0,5.5819726 84 | 40.0,30.0,40.0,6.952742 85 | 45.0,25.0,45.0,6.443127 86 | 40.0,40.0,40.0,7.48194 87 | 45.0,45.0,45.0,5.8947372 88 | 30.0,40.0,40.0,7.0534954 89 | 25.0,45.0,45.0,7.587678 90 | 20.0,50.0,50.0,5.2785535 91 | 15.0,55.0,55.0,7.399596 92 | 20.0,40.0,40.0,7.6538663 93 | 30.0,50.0,40.0,5.6193542 94 | 20.0,50.0,40.0,7.392579 95 | 20.0,40.0,50.0,7.424148 96 | 30.0,40.0,50.0,7.7810616 97 | 30.0,50.0,50.0,5.346684 98 | 10.0,50.0,50.0,7.0860395 99 | 5.0,45.0,45.0,6.3579607 100 | 20.0,60.0,50.0,5.499626 101 | 25.0,65.0,45.0,7.1411533 102 | 10.0,60.0,50.0,7.5535955 103 | 5.0,65.0,45.0,7.746223 104 | 10.0,50.0,60.0,7.313232 105 | 5.0,45.0,65.0,5.439751 106 | 20.0,50.0,60.0,5.8443613 107 | 25.0,45.0,65.0,7.757513 108 | 20.0,60.0,60.0,6.131807 109 | 25.0,65.0,65.0,6.2348795 110 | 10.0,60.0,60.0,7.7780795 111 | 5.0,65.0,65.0,5.915801 112 | 0.0,60.0,60.0,6.2433143 113 | 10.0,70.0,60.0,6.066139 114 | 0.0,70.0,60.0,5.1315875 115 | 0.0,60.0,70.0,5.552613 116 | 10.0,60.0,70.0,5.711569 117 | 10.0,70.0,70.0,5.5505133 118 | 0.0,70.0,70.0,7.264352 119 | 30.0,60.0,60.0,6.280074 120 | 30.0,70.0,60.0,6.831469 121 | 20.0,70.0,60.0,6.6368866 122 | 20.0,60.0,70.0,7.9241695 123 | 30.0,60.0,70.0,7.041111 124 | 
30.0,70.0,70.0,7.2198386 125 | 20.0,70.0,70.0,7.900868 126 | 20.0,40.0,60.0,5.8777127 127 | 30.0,40.0,60.0,5.737972 128 | 30.0,50.0,60.0,6.749414 129 | 20.0,40.0,70.0,5.774108 130 | 30.0,40.0,70.0,6.420157 131 | 30.0,50.0,70.0,7.5025287 132 | 20.0,50.0,70.0,5.6912007 133 | 0.0,40.0,60.0,5.885922 134 | 10.0,40.0,60.0,7.1159205 135 | 0.0,50.0,60.0,6.097029 136 | 0.0,40.0,70.0,6.186232 137 | 10.0,40.0,70.0,5.691784 138 | 10.0,50.0,70.0,6.0320306 139 | 0.0,50.0,70.0,7.84489 140 | 0.0,60.0,40.0,6.4653273 141 | 10.0,60.0,40.0,5.402801 142 | 10.0,70.0,40.0,7.551884 143 | 0.0,70.0,40.0,6.724971 144 | 0.0,60.0,50.0,7.2198124 145 | 10.0,70.0,50.0,7.113994 146 | 0.0,70.0,50.0,7.9046354 147 | 20.0,60.0,40.0,5.4944167 148 | 30.0,60.0,40.0,5.0815935 149 | 30.0,70.0,40.0,5.9525113 150 | 20.0,70.0,40.0,6.786755 151 | 30.0,60.0,50.0,6.4598184 152 | 30.0,70.0,50.0,7.077664 153 | 20.0,70.0,50.0,7.4590693 154 | 0.0,40.0,40.0,6.176065 155 | 10.0,40.0,40.0,5.4319296 156 | 10.0,50.0,40.0,7.414469 157 | 0.0,50.0,40.0,7.1401114 158 | 0.0,40.0,50.0,6.2260323 159 | 10.0,40.0,50.0,6.555297 160 | 0.0,50.0,50.0,6.9955487 161 | 50.0,50.0,50.0,7.6105466 162 | 55.0,55.0,55.0,7.7194824 163 | 50.0,40.0,40.0,6.0617123 164 | 50.0,50.0,40.0,5.0111294 165 | 40.0,50.0,40.0,7.535233 166 | 40.0,40.0,50.0,5.464522 167 | 50.0,40.0,50.0,5.612433 168 | 40.0,50.0,50.0,5.7657933 169 | 60.0,50.0,50.0,6.686198 170 | 65.0,45.0,45.0,5.394728 171 | 60.0,60.0,50.0,6.3686676 172 | 65.0,65.0,45.0,7.2210417 173 | 50.0,60.0,50.0,6.788553 174 | 45.0,65.0,45.0,7.4630456 175 | 50.0,50.0,60.0,6.2864294 176 | 45.0,45.0,65.0,6.1191635 177 | 60.0,50.0,60.0,6.6655817 178 | 65.0,45.0,65.0,5.590556 179 | 60.0,60.0,60.0,6.250802 180 | 65.0,65.0,65.0,5.2962794 181 | 50.0,60.0,60.0,6.2014093 182 | 45.0,65.0,65.0,7.245818 183 | 40.0,60.0,60.0,5.3448563 184 | 50.0,70.0,60.0,6.180889 185 | 40.0,70.0,60.0,6.126648 186 | 40.0,60.0,70.0,6.704487 187 | 50.0,60.0,70.0,7.003931 188 | 50.0,70.0,70.0,7.5224905 189 | 40.0,70.0,70.0,6.491694 190 | 
70.0,60.0,60.0,7.800368 191 | 70.0,70.0,60.0,5.7585616 192 | 60.0,70.0,60.0,7.2734723 193 | 60.0,60.0,70.0,5.0002213 194 | 70.0,60.0,70.0,5.76272 195 | 70.0,70.0,70.0,7.247302 196 | 60.0,70.0,70.0,6.597008 197 | 60.0,40.0,60.0,7.831483 198 | 70.0,40.0,60.0,7.06035 199 | 70.0,50.0,60.0,5.862726 200 | 60.0,40.0,70.0,7.306997 201 | 70.0,40.0,70.0,5.2494946 202 | 70.0,50.0,70.0,7.924323 203 | 60.0,50.0,70.0,5.1478558 204 | 40.0,40.0,60.0,7.936571 205 | 50.0,40.0,60.0,6.188554 206 | 40.0,50.0,60.0,6.805831 207 | 40.0,40.0,70.0,5.190107 208 | 50.0,40.0,70.0,6.2295723 209 | 50.0,50.0,70.0,7.1675005 210 | 40.0,50.0,70.0,5.7162166 211 | 40.0,60.0,40.0,7.9506664 212 | 50.0,60.0,40.0,5.0047836 213 | 50.0,70.0,40.0,5.4363503 214 | 40.0,70.0,40.0,7.337333 215 | 40.0,60.0,50.0,7.4153824 216 | 50.0,70.0,50.0,7.307741 217 | 40.0,70.0,50.0,6.6109967 218 | 60.0,60.0,40.0,7.5582085 219 | 70.0,60.0,40.0,6.050606 220 | 70.0,70.0,40.0,5.803966 221 | 60.0,70.0,40.0,5.1856675 222 | 70.0,60.0,50.0,7.4639106 223 | 70.0,70.0,50.0,6.1389995 224 | 60.0,70.0,50.0,6.7146506 225 | 60.0,40.0,40.0,7.17873 226 | 70.0,40.0,40.0,6.424615 227 | 70.0,50.0,40.0,7.7121525 228 | 60.0,50.0,40.0,5.1056595 229 | 60.0,40.0,50.0,5.5419817 230 | 70.0,40.0,50.0,6.0155435 231 | 70.0,50.0,50.0,6.7324886 232 | 50.0,20.0,50.0,7.0150695 233 | 55.0,15.0,55.0,7.2144547 234 | 40.0,20.0,40.0,6.5462995 235 | 50.0,20.0,40.0,6.24786 236 | 50.0,30.0,40.0,5.6937647 237 | 40.0,20.0,50.0,5.923622 238 | 50.0,30.0,50.0,7.8362927 239 | 40.0,30.0,50.0,5.8825426 240 | 50.0,10.0,50.0,5.4592905 241 | 45.0,5.0,45.0,7.160958 242 | 60.0,10.0,50.0,5.5925574 243 | 65.0,5.0,45.0,7.9370327 244 | 60.0,20.0,50.0,6.584945 245 | 65.0,25.0,45.0,7.6964755 246 | 50.0,10.0,60.0,6.410964 247 | 45.0,5.0,65.0,6.7601514 248 | 60.0,10.0,60.0,7.879087 249 | 65.0,5.0,65.0,6.764473 250 | 60.0,20.0,60.0,5.720877 251 | 65.0,25.0,65.0,5.1028013 252 | 50.0,20.0,60.0,7.2894206 253 | 45.0,25.0,65.0,7.9955797 254 | 40.0,20.0,60.0,6.2804585 255 | 
50.0,30.0,60.0,5.804328 256 | 40.0,30.0,60.0,5.3947906 257 | 40.0,20.0,70.0,5.1176314 258 | 50.0,20.0,70.0,5.0756955 259 | 50.0,30.0,70.0,5.814651 260 | 40.0,30.0,70.0,6.3855605 261 | 70.0,20.0,60.0,5.7515616 262 | 70.0,30.0,60.0,6.8151293 263 | 60.0,30.0,60.0,5.9087143 264 | 60.0,20.0,70.0,6.731852 265 | 70.0,20.0,70.0,5.509034 266 | 70.0,30.0,70.0,5.4784074 267 | 60.0,30.0,70.0,6.251089 268 | 60.0,0.0,60.0,7.1635942 269 | 70.0,0.0,60.0,6.4403234 270 | 70.0,10.0,60.0,6.931592 271 | 60.0,0.0,70.0,6.5053196 272 | 70.0,0.0,70.0,7.4345555 273 | 70.0,10.0,70.0,6.4282517 274 | 60.0,10.0,70.0,6.569468 275 | 40.0,0.0,60.0,7.7012305 276 | 50.0,0.0,60.0,5.51958 277 | 40.0,10.0,60.0,7.625599 278 | 40.0,0.0,70.0,5.0829597 279 | 50.0,0.0,70.0,6.9810157 280 | 50.0,10.0,70.0,6.2433167 281 | 40.0,10.0,70.0,7.3738446 282 | 60.0,20.0,40.0,5.893911 283 | 70.0,20.0,40.0,7.1277013 284 | 70.0,30.0,40.0,7.195791 285 | 60.0,30.0,40.0,6.026679 286 | 70.0,20.0,50.0,6.1267657 287 | 70.0,30.0,50.0,6.0773196 288 | 60.0,30.0,50.0,6.8498554 289 | 60.0,0.0,40.0,6.6612983 290 | 70.0,0.0,40.0,5.3416824 291 | 70.0,10.0,40.0,7.166074 292 | 60.0,10.0,40.0,7.094349 293 | 60.0,0.0,50.0,5.528999 294 | 70.0,0.0,50.0,7.8252263 295 | 70.0,10.0,50.0,7.1631303 296 | 40.0,0.0,40.0,7.89041 297 | 50.0,0.0,40.0,5.9148927 298 | 50.0,10.0,40.0,7.824318 299 | 40.0,10.0,40.0,5.226832 300 | 40.0,0.0,50.0,6.382409 301 | 50.0,0.0,50.0,5.3888574 302 | 40.0,10.0,50.0,5.0143623 303 | 20.0,20.0,50.0,7.881567 304 | 15.0,15.0,55.0,7.1526785 305 | 20.0,20.0,40.0,7.185422 306 | 30.0,20.0,40.0,7.702026 307 | 20.0,30.0,40.0,7.3096147 308 | 30.0,20.0,50.0,7.5490694 309 | 30.0,30.0,50.0,5.0988364 310 | 20.0,30.0,50.0,5.9305863 311 | 10.0,10.0,50.0,6.355421 312 | 5.0,5.0,45.0,5.1226244 313 | 20.0,10.0,50.0,7.39949 314 | 25.0,5.0,45.0,5.773465 315 | 10.0,20.0,50.0,7.3968596 316 | 5.0,25.0,45.0,7.220735 317 | 10.0,10.0,60.0,5.2339783 318 | 5.0,5.0,65.0,6.8849416 319 | 20.0,10.0,60.0,7.414807 320 | 25.0,5.0,65.0,7.309367 321 | 
20.0,20.0,60.0,5.199789 322 | 25.0,25.0,65.0,7.3067584 323 | 10.0,20.0,60.0,5.707911 324 | 5.0,25.0,65.0,7.5697026 325 | 0.0,20.0,60.0,7.892629 326 | 10.0,30.0,60.0,7.3426557 327 | 0.0,30.0,60.0,5.498974 328 | 0.0,20.0,70.0,6.6580596 329 | 10.0,20.0,70.0,6.2413044 330 | 10.0,30.0,70.0,5.454458 331 | 0.0,30.0,70.0,5.486219 332 | 30.0,20.0,60.0,5.3791027 333 | 30.0,30.0,60.0,7.6304617 334 | 20.0,30.0,60.0,7.2823777 335 | 20.0,20.0,70.0,7.994597 336 | 30.0,20.0,70.0,5.893169 337 | 30.0,30.0,70.0,5.681053 338 | 20.0,30.0,70.0,5.375485 339 | 20.0,0.0,60.0,5.0278535 340 | 30.0,0.0,60.0,6.596277 341 | 30.0,10.0,60.0,7.828338 342 | 20.0,0.0,70.0,6.9328957 343 | 30.0,0.0,70.0,7.1428995 344 | 30.0,10.0,70.0,6.4815965 345 | 20.0,10.0,70.0,6.745667 346 | 0.0,0.0,60.0,6.8442883 347 | 10.0,0.0,60.0,7.8332124 348 | 0.0,10.0,60.0,5.4807796 349 | 0.0,0.0,70.0,7.188834 350 | 10.0,0.0,70.0,6.8272815 351 | 10.0,10.0,70.0,5.5553493 352 | 0.0,10.0,70.0,5.0186105 353 | 0.0,20.0,40.0,7.656568 354 | 10.0,20.0,40.0,5.444116 355 | 10.0,30.0,40.0,7.47197 356 | 0.0,30.0,40.0,5.0449286 357 | 0.0,20.0,50.0,6.372166 358 | 10.0,30.0,50.0,6.9331913 359 | 0.0,30.0,50.0,5.1811385 360 | 20.0,0.0,40.0,5.2773447 361 | 30.0,0.0,40.0,7.5287633 362 | 30.0,10.0,40.0,7.9496083 363 | 20.0,10.0,40.0,6.345802 364 | 20.0,0.0,50.0,5.127469 365 | 30.0,0.0,50.0,5.352638 366 | 30.0,10.0,50.0,6.1449614 367 | 0.0,0.0,40.0,5.389288 368 | 10.0,0.0,40.0,5.176426 369 | 10.0,10.0,40.0,6.7272587 370 | 0.0,10.0,40.0,5.5583906 371 | 0.0,0.0,50.0,5.027744 372 | 10.0,0.0,50.0,7.7832594 373 | 0.0,10.0,50.0,6.611421 374 | 20.0,50.0,20.0,5.5559387 375 | 15.0,55.0,15.0,6.472899 376 | 20.0,40.0,20.0,5.3493896 377 | 30.0,40.0,20.0,5.4895453 378 | 30.0,50.0,20.0,7.0886574 379 | 20.0,40.0,30.0,5.328709 380 | 30.0,50.0,30.0,6.6975355 381 | 20.0,50.0,30.0,6.2607007 382 | 10.0,50.0,10.0,6.3862925 383 | 5.0,45.0,5.0,6.0739517 384 | 20.0,50.0,10.0,6.9150887 385 | 25.0,45.0,5.0,6.305426 386 | 20.0,60.0,10.0,5.3053107 387 | 
25.0,65.0,5.0,6.77278 388 | 10.0,60.0,10.0,7.0190306 389 | 5.0,65.0,5.0,7.1671743 390 | 10.0,50.0,20.0,7.4054475 391 | 5.0,45.0,25.0,5.9528956 392 | 20.0,60.0,20.0,6.2453756 393 | 25.0,65.0,25.0,5.986861 394 | 10.0,60.0,20.0,6.559955 395 | 5.0,65.0,25.0,5.059075 396 | 0.0,60.0,20.0,5.859459 397 | 10.0,70.0,20.0,5.855525 398 | 0.0,70.0,20.0,6.067783 399 | 0.0,60.0,30.0,5.944198 400 | 10.0,60.0,30.0,6.73583 401 | 10.0,70.0,30.0,7.0508046 402 | 0.0,70.0,30.0,5.806248 403 | 30.0,60.0,20.0,7.113931 404 | 30.0,70.0,20.0,7.768743 405 | 20.0,70.0,20.0,6.8511057 406 | 20.0,60.0,30.0,7.6635027 407 | 30.0,60.0,30.0,7.1037707 408 | 30.0,70.0,30.0,5.205009 409 | 20.0,70.0,30.0,6.502485 410 | 0.0,40.0,20.0,5.078964 411 | 10.0,40.0,20.0,7.1566367 412 | 0.0,50.0,20.0,7.4770427 413 | 0.0,40.0,30.0,7.2405014 414 | 10.0,40.0,30.0,6.5370474 415 | 10.0,50.0,30.0,6.374063 416 | 0.0,50.0,30.0,6.648256 417 | 0.0,60.0,0.0,6.821542 418 | 10.0,60.0,0.0,7.8724656 419 | 10.0,70.0,0.0,7.126649 420 | 0.0,70.0,0.0,5.3382545 421 | 0.0,60.0,10.0,6.67523 422 | 10.0,70.0,10.0,7.1545596 423 | 0.0,70.0,10.0,7.405872 424 | 20.0,60.0,0.0,7.811369 425 | 30.0,60.0,0.0,5.354061 426 | 30.0,70.0,0.0,5.4227295 427 | 20.0,70.0,0.0,7.5879984 428 | 30.0,60.0,10.0,5.7628646 429 | 30.0,70.0,10.0,6.997854 430 | 20.0,70.0,10.0,7.450177 431 | 20.0,40.0,0.0,5.4375234 432 | 30.0,40.0,0.0,6.8005533 433 | 30.0,50.0,0.0,5.675005 434 | 20.0,50.0,0.0,7.511979 435 | 20.0,40.0,10.0,5.980827 436 | 30.0,40.0,10.0,5.3145027 437 | 30.0,50.0,10.0,5.2505918 438 | 0.0,40.0,0.0,7.6102996 439 | 10.0,40.0,0.0,6.824515 440 | 10.0,50.0,0.0,6.047262 441 | 0.0,50.0,0.0,5.5825825 442 | 0.0,40.0,10.0,6.239404 443 | 10.0,40.0,10.0,6.568473 444 | 0.0,50.0,10.0,5.1333303 445 | 50.0,50.0,20.0,7.6270766 446 | 55.0,55.0,15.0,5.221479 447 | 40.0,40.0,20.0,7.408926 448 | 50.0,40.0,20.0,7.3023973 449 | 40.0,50.0,20.0,6.550499 450 | 50.0,40.0,30.0,7.9487796 451 | 50.0,50.0,30.0,5.4321756 452 | 40.0,50.0,30.0,7.698955 453 | 50.0,50.0,10.0,6.9885693 454 
| 45.0,45.0,5.0,7.065945 455 | 60.0,50.0,10.0,7.5360937 456 | 65.0,45.0,5.0,5.1273637 457 | 60.0,60.0,10.0,7.3341165 458 | 65.0,65.0,5.0,6.543441 459 | 50.0,60.0,10.0,5.922596 460 | 45.0,65.0,5.0,6.6406045 461 | 60.0,50.0,20.0,5.128289 462 | 65.0,45.0,25.0,6.0203023 463 | 60.0,60.0,20.0,5.001102 464 | 65.0,65.0,25.0,5.2057905 465 | 50.0,60.0,20.0,5.821198 466 | 45.0,65.0,25.0,5.6867228 467 | 40.0,60.0,20.0,6.3637695 468 | 50.0,70.0,20.0,5.628808 469 | 40.0,70.0,20.0,6.042379 470 | 40.0,60.0,30.0,6.362496 471 | 50.0,60.0,30.0,7.5956345 472 | 50.0,70.0,30.0,7.8651924 473 | 40.0,70.0,30.0,6.556777 474 | 70.0,60.0,20.0,6.2041645 475 | 70.0,70.0,20.0,6.8467917 476 | 60.0,70.0,20.0,7.839671 477 | 60.0,60.0,30.0,5.3994446 478 | 70.0,60.0,30.0,7.7536297 479 | 70.0,70.0,30.0,5.243161 480 | 60.0,70.0,30.0,6.442224 481 | 60.0,40.0,20.0,6.4298425 482 | 70.0,40.0,20.0,5.3362155 483 | 70.0,50.0,20.0,5.964266 484 | 60.0,40.0,30.0,5.854339 485 | 70.0,40.0,30.0,6.333876 486 | 70.0,50.0,30.0,7.790379 487 | 60.0,50.0,30.0,5.543803 488 | 40.0,60.0,0.0,5.5258265 489 | 50.0,60.0,0.0,5.6058993 490 | 50.0,70.0,0.0,7.08117 491 | 40.0,70.0,0.0,7.337462 492 | 40.0,60.0,10.0,6.4716473 493 | 50.0,70.0,10.0,6.8290596 494 | 40.0,70.0,10.0,5.638047 495 | 60.0,60.0,0.0,5.9463954 496 | 70.0,60.0,0.0,6.798744 497 | 70.0,70.0,0.0,6.2905426 498 | 60.0,70.0,0.0,7.727278 499 | 70.0,60.0,10.0,5.562083 500 | 70.0,70.0,10.0,7.0931854 501 | 60.0,70.0,10.0,7.911126 502 | 60.0,40.0,0.0,5.3038526 503 | 70.0,40.0,0.0,6.450502 504 | 70.0,50.0,0.0,5.9410853 505 | 60.0,50.0,0.0,6.5372252 506 | 60.0,40.0,10.0,5.9051046 507 | 70.0,40.0,10.0,7.585469 508 | 70.0,50.0,10.0,7.532981 509 | 40.0,40.0,0.0,6.021814 510 | 50.0,40.0,0.0,5.456141 511 | 50.0,50.0,0.0,7.3521757 512 | 40.0,50.0,0.0,7.2318134 513 | 40.0,40.0,10.0,7.90114 514 | 50.0,40.0,10.0,7.624527 515 | 40.0,50.0,10.0,6.666988 516 | 50.0,20.0,20.0,7.967265 517 | 55.0,15.0,15.0,7.6263537 518 | 40.0,20.0,20.0,7.4854417 519 | 50.0,30.0,20.0,7.558067 520 | 
40.0,30.0,20.0,5.0983276 521 | 40.0,20.0,30.0,5.732471 522 | 50.0,20.0,30.0,6.017284 523 | 50.0,30.0,30.0,5.5661964 524 | 50.0,10.0,10.0,5.336673 525 | 45.0,5.0,5.0,5.382497 526 | 60.0,10.0,10.0,5.5800037 527 | 65.0,5.0,5.0,7.5219226 528 | 60.0,20.0,10.0,6.0398235 529 | 65.0,25.0,5.0,5.1792736 530 | 50.0,20.0,10.0,6.519595 531 | 45.0,25.0,5.0,6.0508137 532 | 50.0,10.0,20.0,6.888384 533 | 45.0,5.0,25.0,7.759214 534 | 60.0,10.0,20.0,7.196427 535 | 65.0,5.0,25.0,7.8822994 536 | 60.0,20.0,20.0,7.670335 537 | 65.0,25.0,25.0,6.921694 538 | 70.0,20.0,20.0,7.4727993 539 | 70.0,30.0,20.0,6.8232255 540 | 60.0,30.0,20.0,6.4632936 541 | 60.0,20.0,30.0,5.0399485 542 | 70.0,20.0,30.0,6.8187857 543 | 70.0,30.0,30.0,7.967264 544 | 60.0,30.0,30.0,7.4543033 545 | 60.0,0.0,20.0,5.0850887 546 | 70.0,0.0,20.0,6.677745 547 | 70.0,10.0,20.0,7.6228485 548 | 60.0,0.0,30.0,7.114197 549 | 70.0,0.0,30.0,6.868905 550 | 70.0,10.0,30.0,7.867885 551 | 60.0,10.0,30.0,7.874838 552 | 40.0,0.0,20.0,6.64085 553 | 50.0,0.0,20.0,5.8808503 554 | 40.0,10.0,20.0,7.904613 555 | 40.0,0.0,30.0,5.678589 556 | 50.0,0.0,30.0,5.0472145 557 | 50.0,10.0,30.0,5.9775643 558 | 40.0,10.0,30.0,6.5075283 559 | 40.0,20.0,0.0,7.4329453 560 | 50.0,20.0,0.0,6.883267 561 | 50.0,30.0,0.0,7.7149467 562 | 40.0,30.0,0.0,7.2461667 563 | 40.0,20.0,10.0,6.683363 564 | 50.0,30.0,10.0,7.5096416 565 | 40.0,30.0,10.0,5.834151 566 | 60.0,20.0,0.0,6.105797 567 | 70.0,20.0,0.0,5.6578383 568 | 70.0,30.0,0.0,7.3671136 569 | 60.0,30.0,0.0,5.43272 570 | 70.0,20.0,10.0,7.52005 571 | 70.0,30.0,10.0,6.984733 572 | 60.0,30.0,10.0,5.1770697 573 | 60.0,0.0,0.0,7.5334115 574 | 70.0,0.0,0.0,7.3148084 575 | 70.0,10.0,0.0,6.6317644 576 | 60.0,10.0,0.0,7.9379735 577 | 60.0,0.0,10.0,5.2178 578 | 70.0,0.0,10.0,7.300008 579 | 70.0,10.0,10.0,5.7991114 580 | 40.0,0.0,0.0,6.408153 581 | 50.0,0.0,0.0,7.278351 582 | 50.0,10.0,0.0,5.5346026 583 | 40.0,10.0,0.0,5.513516 584 | 40.0,0.0,10.0,6.295528 585 | 50.0,0.0,10.0,5.9622436 586 | 40.0,10.0,10.0,5.2223735 587 | 
-------------------------------------------------------------------------------- /tests/data/reporting/compartment_report.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/tests/data/reporting/compartment_report.h5 -------------------------------------------------------------------------------- /tests/data/reporting/create_reports.py: -------------------------------------------------------------------------------- 1 | """Taken from the libsonata lib.""" 2 | 3 | import h5py 4 | import numpy as np 5 | 6 | 7 | def write_element_report(filepath, units): 8 | population_names = ["default", "default2"] 9 | node_ids = np.arange(0, 3) 10 | index_pointers = np.arange(0, 4) 11 | element_ids = np.zeros(3) 12 | times = (0.0, 1.0, 0.1) 13 | data = [node_ids + j * 0.1 for j in range(10)] 14 | 15 | string_dtype = h5py.special_dtype(vlen=str) 16 | with h5py.File(filepath, "w") as h5f: 17 | h5f.create_group("report") 18 | gpop_element = h5f.create_group("/report/" + population_names[0]) 19 | ddata = gpop_element.create_dataset("data", data=data, dtype=np.float32) 20 | ddata.attrs.create("units", data=units[population_names[0]]["data"], dtype=string_dtype) 21 | gmapping = h5f.create_group("/report/" + population_names[0] + "/mapping") 22 | 23 | dnodes = gmapping.create_dataset("node_ids", data=node_ids, dtype=np.uint64) 24 | dnodes.attrs.create("sorted", data=True, dtype=np.uint8) 25 | gmapping.create_dataset("index_pointers", data=index_pointers, dtype=np.uint64) 26 | gmapping.create_dataset("element_ids", data=element_ids, dtype=np.uint32) 27 | dtimes = gmapping.create_dataset("time", data=times, dtype=np.double) 28 | dtimes.attrs.create("units", data=units[population_names[0]]["time"], dtype=string_dtype) 29 | 30 | gpop_element2 = h5f.create_group("/report/" + population_names[1]) 31 | ddata = gpop_element2.create_dataset("data", data=data, 
dtype=np.float32) 32 | ddata.attrs.create("units", data=units[population_names[1]]["data"], dtype=string_dtype) 33 | gmapping = h5f.create_group("/report/" + population_names[1] + "/mapping") 34 | 35 | dnodes = gmapping.create_dataset("node_ids", data=node_ids, dtype=np.uint64) 36 | dnodes.attrs.create("sorted", data=True, dtype=np.uint8) 37 | gmapping.create_dataset("index_pointers", data=index_pointers, dtype=np.uint64) 38 | gmapping.create_dataset("element_ids", data=element_ids, dtype=np.uint32) 39 | dtimes = gmapping.create_dataset("time", data=times, dtype=np.double) 40 | dtimes.attrs.create("units", data=units[population_names[1]]["time"], dtype=string_dtype) 41 | 42 | 43 | if __name__ == "__main__": 44 | units = { 45 | "default": {"time": "ms", "data": "mV"}, 46 | "default2": {"time": "ms", "data": "mV"}, 47 | } 48 | write_element_report("compartment_report.h5", units) 49 | units_diff = { 50 | "default": {"time": "ms", "data": "mV"}, 51 | "default2": {"time": "s", "data": "mR"}, 52 | } 53 | write_element_report("diff_unit_compartment_report.h5", units_diff) 54 | -------------------------------------------------------------------------------- /tests/data/reporting/diff_unit_compartment_report.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/tests/data/reporting/diff_unit_compartment_report.h5 -------------------------------------------------------------------------------- /tests/data/toy_graph.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/tests/data/toy_graph.bin -------------------------------------------------------------------------------- /tests/data/toy_graph.h5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/BlueBrain/AstroVascPy/322eaa8ebb32133521428b46700f54fb3f8b9af3/tests/data/toy_graph.h5 -------------------------------------------------------------------------------- /tests/test_graphs.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import numpy.testing as npt 4 | import pandas as pd 5 | import pytest 6 | 7 | from astrovascpy import bloodflow as tested 8 | from astrovascpy.utils import Graph 9 | 10 | L = logging.getLogger(__name__) 11 | 12 | 13 | @pytest.fixture 14 | def params(): 15 | return { 16 | "blood_viscosity": 0.1, 17 | "base_pressure": 1.33e-3, 18 | "max_nb_inputs": 3, 19 | "depth_ratio": 0.05, 20 | "vasc_axis": 1, 21 | "threshold_r": 3, 22 | "max_r_capill": 1.38, 23 | "t_2_max_capill": 2.7, 24 | "max_r_artery": 1.23, 25 | "t_2_max_artery": 3.3, 26 | } 27 | 28 | 29 | def test_simple_graph(params): 30 | point_properties = pd.DataFrame( 31 | { 32 | "x": [0, 0, 0, 0, 0], 33 | "y": [0, 1, 2, 3, 4], 34 | "z": [0, 0, 0, 0, 0], 35 | "diameter": [10, 9, 9, 8, 7], 36 | } 37 | ) 38 | edge_properties = pd.DataFrame( 39 | { 40 | "start_node": [0, 1, 2, 3], 41 | "end_node": [1, 2, 3, 4], 42 | "type": [0, 0, 0, 0], 43 | }, 44 | index=pd.MultiIndex.from_tuples( 45 | ([0, 0], [0, 1], [0, 2], [0, 3]), 46 | names=["section_id", "segment_id"], 47 | ), 48 | ) 49 | graph = Graph(point_properties, edge_properties) 50 | entry_nodes = [0] 51 | input_flow = [1.0] 52 | boundary_flow = tested.boundary_flows_A_based(graph, entry_nodes, input_flow) 53 | tested.update_static_flow_pressure(graph, boundary_flow, params) 54 | normal_flow = graph.edge_properties["flow"].copy() 55 | normal_pressure = graph.node_properties["pressure"].copy() 56 | 57 | npt.assert_allclose( 58 | normal_flow.to_list(), 59 | [1.0, 1.0, 1.0, 1.0], 60 | ) 61 | npt.assert_allclose( 62 | normal_pressure.to_list(), 63 | [ 64 | 0.005867, 65 | 0.005104, 66 | 0.004185, 67 | 0.003064, 68 | 0.00133, 69 | ], 70 | rtol=1e-6, 
71 | atol=1e-6, 72 | ) 73 | 74 | graph.edge_properties.loc[(0, 2), "radius"] *= 1.2 75 | tested.update_static_flow_pressure(graph, boundary_flow, params) 76 | vasodilated_flow = graph.edge_properties["flow"] 77 | vasodilated_pressure = graph.node_properties["pressure"] 78 | 79 | npt.assert_allclose( 80 | vasodilated_flow.to_list(), 81 | [1.0, 1.0, 1.0, 1.0], 82 | ) 83 | npt.assert_allclose( 84 | vasodilated_pressure.to_list(), 85 | [ 86 | 0.005342, 87 | 0.00458, 88 | 0.00366, 89 | 0.003064, 90 | 0.00133, 91 | ], 92 | rtol=1e-6, 93 | atol=1e-6, 94 | ) 95 | 96 | assert (vasodilated_flow >= normal_flow - 1e-10).all() # added 1e-10 for numerical errors 97 | assert (vasodilated_pressure - 1e-10 <= normal_pressure).all() 98 | 99 | 100 | def test_bifurcation(params): 101 | point_properties = pd.DataFrame( 102 | { 103 | "x": [0, 0, 0, 0], 104 | "y": [0, 1, 2, 2], 105 | "z": [1, 1, 0, 2], 106 | "diameter": [12, 6, 10, 8], 107 | } 108 | ) 109 | 110 | edge_properties = pd.DataFrame( 111 | { 112 | "start_node": [0, 1, 1], 113 | "end_node": [1, 2, 3], 114 | "type": [0, 0, 0], 115 | }, 116 | index=pd.MultiIndex.from_tuples( 117 | ([0, 0], [0, 1], [0, 2]), 118 | names=["section_id", "segment_id"], 119 | ), 120 | ) 121 | graph = Graph(point_properties, edge_properties) 122 | entry_nodes = [0] 123 | input_flow = [1.0] 124 | boundary_flow = tested.boundary_flows_A_based(graph, entry_nodes, input_flow) 125 | tested.update_static_flow_pressure(graph, boundary_flow, params) 126 | normal_flow = graph.edge_properties["flow"].copy() 127 | normal_pressure = graph.node_properties["pressure"].copy() 128 | npt.assert_allclose(normal_flow.to_list(), [1.0, 0.609756, 0.390244], rtol=1e-6, atol=1e-6) 129 | npt.assert_allclose( 130 | normal_pressure.to_list(), 131 | [0.003469, 0.00255, 0.001356, 0.00133], 132 | rtol=1e-6, 133 | atol=1e-6, 134 | ) 135 | 136 | graph.edge_properties["radius"] *= 1.2 137 | tested.update_static_flow_pressure(graph, boundary_flow, params) 138 | vasodilated_flow = 
def test_loop(params):
    """This example is from F. Schmidt PhD thesis, figure 9.1A.

    An 8-node vasculature with a diamond-shaped loop (two parallel branches
    between nodes 1 and 6) is solved for static flow/pressure, then one edge
    of the upper branch is dilated and the redistribution of flow is checked.
    """
    # Nodes 2,3 form the upper branch, nodes 4,5 the lower branch of the loop.
    point_properties = pd.DataFrame(
        {
            "x": [0, 1, 2, 3, 2, 3, 4, 5],
            "y": [0, 0, 1, 1, -1, -1, 0, 0],
            "z": [0, 0, 0, 0, 0, 0, 0, 0],
            "diameter": [1, 1, 1, 1, 1, 1, 1, 1],
        }
    )

    # Edges are keyed by (section_id, segment_id): section 1 is the upper
    # branch, section 2 the lower branch, sections 0 and 3 the inlet/outlet.
    edge_properties = pd.DataFrame(
        {
            "start_node": [0, 1, 2, 3, 1, 4, 5, 6],
            "end_node": [1, 2, 3, 6, 4, 5, 6, 7],
            "type": [0, 0, 0, 0, 0, 0, 0, 0],
        },
        index=pd.MultiIndex.from_tuples(
            ([0, 0], [1, 0], [1, 1], [1, 2], [2, 0], [2, 1], [2, 2], [3, 0]),
            names=["section_id", "segment_id"],
        ),
    )

    graph = Graph(point_properties, edge_properties)
    entry_nodes = [0]
    input_flow = [1.0]
    boundary_flow = tested.boundary_flows_A_based(graph, entry_nodes, input_flow)
    tested.update_static_flow_pressure(graph, boundary_flow, params)
    # Copy so the baseline survives the second solve below.
    normal_flow = graph.edge_properties["flow"].copy()
    normal_pressure = graph.node_properties["pressure"].copy()
    # Symmetric loop: the unit inlet flow splits evenly between both branches.
    npt.assert_allclose(
        normal_flow.to_list(),
        [
            1.0,
            0.5,
            0.5,
            0.5,
            0.5,
            0.5,
            0.5,
            1.0,
        ],
        rtol=1e-6,
        atol=1e-6,
    )
    npt.assert_allclose(
        normal_pressure.to_list(),
        [
            4.327605e02,
            3.221995e02,
            2.440211e02,
            1.887407e02,
            2.440211e02,
            1.887407e02,
            1.105623e02,
            1.330000e-03,
        ],
        rtol=1e-6,
        atol=1e-6,
    )

    # Dilate one segment of the upper branch by 20% and re-solve.
    graph.edge_properties.loc[(1, 1), "radius"] *= 1.2
    tested.update_static_flow_pressure(graph, boundary_flow, params)
    vasodilated_flow = graph.edge_properties["flow"]
    vasodilated_pressure = graph.node_properties["pressure"]
    npt.assert_allclose(
        vasodilated_flow.to_list(),
        [
            1.0,
            0.545135,
            0.545135,
            0.545135,
            0.454865,
            0.454865,
            0.454865,
            1.0,
        ],
        rtol=1e-6,
        atol=1e-6,
    )
    npt.assert_allclose(
        vasodilated_pressure.to_list(),
        [
            4.136561e02,
            3.030952e02,
            2.178597e02,
            1.957978e02,
            2.319739e02,
            1.816836e02,
            1.105623e02,
            1.330000e-03,
        ],
        rtol=1e-6,
        atol=1e-6,
    )

    # flow increases in active (dilated) branch; its node pressure decreases
    assert vasodilated_flow[(1, 1)] > normal_flow[(1, 1)]
    assert vasodilated_pressure[2] < normal_pressure[2]

    # flow decreases in the passive branch; note that its node pressure still
    # decreases (2.3197e02 < 2.4402e02 above): dilation lowers the overall
    # resistance, so pressures drop throughout the network at fixed inlet flow
    assert vasodilated_flow[(2, 1)] < normal_flow[(2, 1)]
    assert vasodilated_pressure[4] < normal_pressure[4]
--with-scalar-type=real" 18 | 19 | 20 | def test_load_graph(): 21 | graph = load_graph(TEST_DATA_DIR / "dataset.h5") 22 | assert graph.edge_properties.shape == (116, 10) 23 | assert list(graph.edge_properties) == [ 24 | "start_node", 25 | "end_node", 26 | "type", 27 | "section_id", 28 | "segment_id", 29 | "length", 30 | "radius", 31 | "radius_origin", 32 | "endfeet_id", 33 | "volume", 34 | ] 35 | npt.assert_array_equal(graph.edge_properties["endfeet_id"], np.full((116,), -1)) 36 | assert graph.node_properties.shape == (114, 4) 37 | assert list(graph.node_properties) == ["x", "y", "z", "diameter"] 38 | assert graph.n_nodes == 114 39 | assert graph.n_edges == 116 40 | 41 | 42 | def test_load_graph_from_h5(): 43 | graph = load_graph_from_h5(TEST_DATA_DIR / "toy_graph.h5") 44 | assert graph.edge_properties.shape == (584, 10) 45 | assert list(graph.edge_properties) == [ 46 | "start_node", 47 | "end_node", 48 | "type", 49 | "section_id", 50 | "segment_id", 51 | "length", 52 | "radius", 53 | "radius_origin", 54 | "endfeet_id", 55 | "volume", 56 | ] 57 | npt.assert_array_equal(graph.edge_properties["endfeet_id"], np.full((584,), -1)) 58 | assert graph.node_properties.shape == (585, 4) 59 | assert list(graph.node_properties) == ["x", "y", "z", "diameter"] 60 | assert graph.n_nodes == 585 61 | assert graph.n_edges == 584 62 | 63 | 64 | def test_load_graph_from_csv(): 65 | node_dataset = TEST_DATA_DIR / "node_dataset.csv" 66 | edge_dataset = TEST_DATA_DIR / "edge_dataset.csv" 67 | graph = load_graph_from_csv(node_filename=node_dataset, edge_filename=edge_dataset) 68 | assert graph.edge_properties.shape == (584, 10) 69 | assert list(graph.edge_properties) == [ 70 | "start_node", 71 | "end_node", 72 | "type", 73 | "section_id", 74 | "segment_id", 75 | "length", 76 | "radius", 77 | "radius_origin", 78 | "endfeet_id", 79 | "volume", 80 | ] 81 | npt.assert_array_equal(np.count_nonzero(graph.edge_properties["endfeet_id"] != -1), 281) 82 | assert graph.node_properties.shape == (585, 
4) 83 | assert list(graph.node_properties) == ["x", "y", "z", "diameter"] 84 | assert graph.n_nodes == 585 85 | assert graph.n_edges == 584 86 | 87 | 88 | def test_load_graph_from_bin(): 89 | graph = load_graph_from_bin(TEST_DATA_DIR / "toy_graph.bin") 90 | assert graph.edge_properties.shape == (584, 10) 91 | assert list(graph.edge_properties) == [ 92 | "start_node", 93 | "end_node", 94 | "type", 95 | "section_id", 96 | "segment_id", 97 | "length", 98 | "radius", 99 | "radius_origin", 100 | "endfeet_id", 101 | "volume", 102 | ] 103 | npt.assert_array_equal(np.count_nonzero(graph.edge_properties["endfeet_id"] != -1), 281) 104 | assert graph.node_properties.shape == (585, 4) 105 | assert list(graph.node_properties) == ["x", "y", "z", "diameter"] 106 | assert graph.n_nodes == 585 107 | assert graph.n_edges == 584 108 | -------------------------------------------------------------------------------- /tests/test_mpi.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import numpy.testing as npt 3 | import petsc4py 4 | import pytest 5 | from mpi4py import MPI 6 | from scipy.sparse import coo_matrix 7 | 8 | from astrovascpy.scipy_petsc_conversions import ( 9 | PETScMat2coo, 10 | PETScVec2array, 11 | array2PETScVec, 12 | coomatrix2PETScMat, 13 | distribute_array, 14 | ) 15 | 16 | # pip install pytest-mpi 17 | # mpirun -n 4 pytest --with-mpi tests/test_mpi.py 18 | 19 | # The tests are skipped if not using the option --with-mpi 20 | 21 | COMM = MPI.COMM_WORLD 22 | RANK = COMM.Get_rank() 23 | 24 | 25 | @pytest.mark.mpi(min_size=2) 26 | def test_distribute_array(): 27 | """Test that a numpy array is distributed correctly among 4 ranks""" 28 | 29 | assert COMM.Get_size() == 4 # this test only works with 4 ranks 30 | 31 | if RANK == 0: 32 | vec = np.array([-6, -5, -4, -3], dtype=np.int32) 33 | if RANK == 1: 34 | vec = np.array([-2, -1, 0], dtype=np.int32) 35 | if RANK == 2: 36 | vec = np.array([1, 2, 3], dtype=np.int32) 
37 | if RANK == 3: 38 | vec = np.array([4, 5, 6], dtype=np.int32) 39 | 40 | if RANK == 0: 41 | v = np.arange(-6, 7, dtype=np.int32) 42 | else: 43 | v = None 44 | 45 | vloc = distribute_array(v, array_type=None) 46 | 47 | is_same_type = vec.dtype == vloc.dtype # same type 48 | is_same_vec = np.array_equal(vec, vloc) # same elements 49 | 50 | is_same_type_0 = COMM.reduce(is_same_type, op=MPI.LAND, root=0) 51 | is_same_vec_0 = COMM.reduce(is_same_vec, op=MPI.LAND, root=0) 52 | 53 | if RANK == 0: 54 | assert is_same_type_0 is True 55 | assert is_same_vec_0 is True 56 | 57 | 58 | @pytest.mark.mpi(min_size=2) 59 | def test_numpy_array2petsc(): 60 | """Test that the conversion from numpy array to petsc arrays works as expected""" 61 | 62 | # case 1 63 | vec1 = np.array([]) if RANK == 0 else None 64 | temp1 = array2PETScVec(vec1) 65 | vec1_new = PETScVec2array(temp1) 66 | npt.assert_array_equal(vec1_new, vec1) 67 | 68 | # case 2 69 | vec2 = np.array([1.0]) if RANK == 0 else None 70 | temp2 = array2PETScVec(vec2) 71 | vec2_new = PETScVec2array(temp2) 72 | npt.assert_array_equal(vec2_new, vec2) 73 | 74 | # case 3 75 | vec3 = np.arange(22, dtype=np.int64) if RANK == 0 else None 76 | temp3 = array2PETScVec(vec3) 77 | vec3_new = PETScVec2array(temp3) 78 | # array2PETScVec converts automatically to float 79 | if RANK == 0: 80 | assert vec3.dtype != vec3_new.dtype 81 | assert vec3.dtype == np.int64 82 | assert vec3_new.dtype == np.float64 83 | npt.assert_array_equal(vec3_new, vec3) 84 | 85 | 86 | @pytest.mark.mpi(min_size=2) 87 | def test_scipy2petsc_conversion(): 88 | """Test that the conversion from scipy sparse coo to petsc arrays works as expected""" 89 | 90 | def create_sparse_coo(m, n, seed=33): 91 | """Create a coo matrix of size (m,n) with m elements in random positions""" 92 | np.random.seed(seed) 93 | X = 10 # just a random value 94 | # coo representation (row, col, data) 95 | row = np.arange(start=0, stop=m) 96 | col = np.random.randint(low=0, high=n, size=m) 97 | data 
= np.arange(start=X, stop=X + m) 98 | return coo_matrix((data, (row, col)), shape=(m, n)) 99 | 100 | # case 1 101 | if RANK == 0: 102 | A = create_sparse_coo(m=5, n=8, seed=31) 103 | else: 104 | A = None 105 | A_petsc = coomatrix2PETScMat(A) 106 | A_coo = PETScMat2coo(A_petsc) 107 | 108 | if RANK == 0: 109 | npt.assert_array_equal(A.row, A_coo.row) 110 | npt.assert_array_equal(A.col, A_coo.col) 111 | npt.assert_array_equal(A.data, A_coo.data) 112 | 113 | # case 2 114 | if RANK == 0: 115 | B = create_sparse_coo(m=1000, n=80000, seed=32) 116 | else: 117 | B = None 118 | B_petsc = coomatrix2PETScMat(B) 119 | B_coo = PETScMat2coo(B_petsc) 120 | 121 | if RANK == 0: 122 | npt.assert_array_equal(B.row, B_coo.row) 123 | npt.assert_array_equal(B.col, B_coo.col) 124 | npt.assert_array_equal(B.data, B_coo.data) 125 | 126 | # case 3: empty matrix 127 | if RANK == 0: 128 | m = 10000 129 | n = 10000 130 | C = coo_matrix((m, n)) 131 | else: 132 | C = None 133 | C_petsc = coomatrix2PETScMat(C) 134 | C_coo = PETScMat2coo(C_petsc) 135 | 136 | if RANK == 0: 137 | npt.assert_array_equal(C.row, C_coo.row) 138 | npt.assert_array_equal(C.col, C_coo.col) 139 | npt.assert_array_equal(C.data, C_coo.data) 140 | 141 | 142 | @pytest.mark.mpi(min_size=2) 143 | def test_coomatrix2PETScMat(): 144 | """The input of coomatrix2PETScMat must be a scipy sparse matrix 145 | and the output must be a PETSC object""" 146 | 147 | D = None 148 | if RANK == 0: 149 | with pytest.raises(AttributeError): 150 | coomatrix2PETScMat(D) 151 | 152 | F = np.eye(N=10) 153 | if RANK == 0: 154 | with pytest.raises(AttributeError): 155 | coomatrix2PETScMat(F) 156 | 157 | if RANK == 0: 158 | m, n = 1000, 2000 159 | G = coo_matrix((m, n)) 160 | else: 161 | G = None 162 | G_petsc = coomatrix2PETScMat(G) 163 | assert isinstance(G_petsc, petsc4py.PETSc.Mat) 164 | -------------------------------------------------------------------------------- /tests/test_ou.py: 
-------------------------------------------------------------------------------- 1 | import numpy as np 2 | import numpy.testing as npt 3 | 4 | import astrovascpy.ou as OU 5 | 6 | 7 | def test_OU_calibration(): 8 | """Verify that the calibration is able to calculate the correct kappa and sigma""" 9 | 10 | def verify(kappa_test, sigma_test): 11 | r_max_test = 2.8 * sigma_test / np.sqrt(2 * kappa_test) 12 | target_time = OU.expected_time(kappa_test, r_max_test, C=2.8) 13 | kappa, sigma = OU.compute_OU_params(time=target_time, x_max=r_max_test, c=2.8) 14 | npt.assert_allclose((kappa, sigma), (kappa_test, sigma_test), rtol=1e-4, atol=1e-8) 15 | 16 | kappa_set = [0.1, 0.3, 1, 5, 10, 20] 17 | sigma_set = [0.2, 1.5, 4, 12] 18 | 19 | for k in kappa_set: 20 | for s in sigma_set: 21 | verify(k, s) 22 | -------------------------------------------------------------------------------- /tests/test_report_reader.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import numpy as np 4 | import numpy.testing as npt 5 | import pandas as pd 6 | import pandas.testing as pdt 7 | import pytest 8 | 9 | import astrovascpy.report_reader as test_module 10 | from astrovascpy.exceptions import BloodFlowError 11 | 12 | TEST_DIR = Path(__file__).resolve().parent 13 | TEST_DATA_DIR = TEST_DIR / "data/reporting" 14 | 15 | 16 | class TestBloodflowReport: 17 | def setup_method(self): 18 | self.test_obj = test_module.BloodflowReport(TEST_DATA_DIR / "compartment_report.h5") 19 | self.test_wrong = test_module.BloodflowReport( 20 | TEST_DATA_DIR / "diff_unit_compartment_report.h5" 21 | ) 22 | 23 | def test_time_units(self): 24 | assert self.test_obj.time_units == "ms" 25 | with pytest.raises(BloodFlowError): 26 | self.test_wrong.time_units 27 | 28 | def test_data_units(self): 29 | assert self.test_obj.data_units == "mV" 30 | with pytest.raises(BloodFlowError): 31 | self.test_wrong.data_units 32 | 33 | def test_population_names(self): 
34 | assert sorted(self.test_obj.population_names) == ["default", "default2"] 35 | 36 | def test_get_population(self): 37 | assert isinstance(self.test_obj["default"], test_module.PopulationBloodflowReport) 38 | 39 | def test_iter(self): 40 | assert sorted(list(self.test_obj)) == ["default", "default2"] 41 | for report in self.test_obj: 42 | isinstance(report, test_module.PopulationBloodflowReport) 43 | 44 | def test_filter(self): 45 | filtered = self.test_obj.filter(group=None, t_start=0.3, t_stop=0.6) 46 | assert filtered.frame_report == self.test_obj 47 | assert filtered.t_start == 0.3 48 | assert filtered.t_stop == 0.6 49 | assert filtered.group is None 50 | assert isinstance(filtered, test_module.FilteredFrameReport) 51 | npt.assert_allclose(filtered.report.index, np.array([0.3, 0.4, 0.5, 0.6])) 52 | assert filtered.report.columns.tolist() == [ 53 | ("default", 0), 54 | ("default", 1), 55 | ("default", 2), 56 | ("default2", 0), 57 | ("default2", 1), 58 | ("default2", 2), 59 | ] 60 | 61 | filtered = self.test_obj.filter(group=[1], t_start=0.3, t_stop=0.6) 62 | npt.assert_allclose(filtered.report.index, np.array([0.3, 0.4, 0.5, 0.6])) 63 | assert filtered.report.columns.tolist() == [("default", 1), ("default2", 1)] 64 | 65 | filtered = self.test_obj.filter(group=[0, 1, 2], t_start=0.3, t_stop=0.6) 66 | assert filtered.report.columns.tolist() == [ 67 | ("default", 0), 68 | ("default", 1), 69 | ("default", 2), 70 | ("default2", 0), 71 | ("default2", 1), 72 | ("default2", 2), 73 | ] 74 | 75 | filtered = self.test_obj.filter(group=[], t_start=0.3, t_stop=0.6) 76 | assert filtered.report.empty 77 | 78 | 79 | class TestPopulationBloodflowReport: 80 | def setup_method(self): 81 | self.test_obj = test_module.BloodflowReport(TEST_DATA_DIR / "compartment_report.h5")[ 82 | "default" 83 | ] 84 | timestamps = np.linspace(0, 0.9, 10) 85 | data = {0: timestamps, 1: timestamps + 1, 2: timestamps + 2} 86 | self.df = pd.DataFrame(data=data, index=timestamps, columns=[0, 1, 
    def test_get(self):
        """get() selects nodes/time windows of the report; self.df is the full truth table."""
        # No arguments: the whole report.
        pdt.assert_frame_equal(self.test_obj.get(), self.df, check_column_type=False)
        # Any empty group (list, ndarray, tuple) yields an empty frame.
        pdt.assert_frame_equal(self.test_obj.get([]), pd.DataFrame())
        pdt.assert_frame_equal(self.test_obj.get(np.array([])), pd.DataFrame())
        pdt.assert_frame_equal(self.test_obj.get(()), pd.DataFrame())

        # A scalar id still returns a (single-column) DataFrame.
        pdt.assert_frame_equal(self.test_obj.get(2), self.df.loc[:, [2]], check_column_type=False)

        # Column order is normalized: [2, 0] and [0, 2] give the same frame.
        pdt.assert_frame_equal(
            self.test_obj.get([2, 0]), self.df.loc[:, [0, 2]], check_column_type=False
        )

        pdt.assert_frame_equal(
            self.test_obj.get([0, 2]), self.df.loc[:, [0, 2]], check_column_type=False
        )

        pdt.assert_frame_equal(
            self.test_obj.get(np.asarray([0, 2])), self.df.loc[:, [0, 2]], check_column_type=False
        )

        # t_stop is inclusive: 0.5 keeps the first 6 rows (0.0 .. 0.5).
        pdt.assert_frame_equal(
            self.test_obj.get([2], t_stop=0.5),
            self.df.iloc[:6].loc[:, [2]],
            check_column_type=False,
        )

        # A t_stop between samples truncates to the last timestamp <= t_stop.
        pdt.assert_frame_equal(
            self.test_obj.get([2], t_stop=0.55),
            self.df.iloc[:6].loc[:, [2]],
            check_column_type=False,
        )

        # t_start is inclusive as well: 0.5 keeps rows 0.5 .. 0.9.
        pdt.assert_frame_equal(
            self.test_obj.get([2], t_start=0.5),
            self.df.iloc[5:].loc[:, [2]],
            check_column_type=False,
        )

        pdt.assert_frame_equal(
            self.test_obj.get([2], t_start=0.5, t_stop=0.8),
            self.df.iloc[5:9].loc[:, [2]],
            check_column_type=False,
        )

        pdt.assert_frame_equal(
            self.test_obj.get([2, 1], t_start=0.5, t_stop=0.8),
            self.df.iloc[5:9].loc[:, [1, 2]],
            check_column_type=False,
        )

        pdt.assert_frame_equal(
            self.test_obj.get([2, 1], t_start=0.2, t_stop=0.8),
            self.df.iloc[2:9].loc[:, [1, 2]],
            check_column_type=False,
        )

        # Unknown node ids and out-of-range time bounds raise BloodFlowError.
        with pytest.raises(BloodFlowError):
            self.test_obj.get(-1, t_start=0.2)

        with pytest.raises(BloodFlowError):
            self.test_obj.get(0, t_start=-1)

        with pytest.raises(BloodFlowError):
            self.test_obj.get([0, 2], t_start=15)
"report_pressures.h5") 42 | npt.assert_almost_equal(report["vasculature"].get(), pressures) 43 | report = BloodflowReport(TEST_DATA_DIR / "report_volumes.h5") 44 | npt.assert_almost_equal(report["vasculature"].get(), volumes) 45 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from pathlib import Path 3 | 4 | import numpy as np 5 | import numpy.testing as npt 6 | import pandas as pd 7 | import pytest 8 | from vascpy import PointVasculature 9 | 10 | import astrovascpy.utils as test_module 11 | from astrovascpy.exceptions import BloodFlowError 12 | 13 | L = logging.getLogger(__name__) 14 | 15 | TEST_DIR = Path(__file__).resolve().parent 16 | 17 | 18 | @pytest.fixture 19 | def point_properties(): 20 | return pd.DataFrame({"x": [0, 0, 0], "y": [0, 1, 2], "z": [2, 1, 0], "diameter": [1, 3, 2]}) 21 | 22 | 23 | @pytest.fixture 24 | def edge_properties(): 25 | return pd.DataFrame( 26 | {"start_node": [0, 1], "end_node": [1, 2], "type": [0, 0]}, 27 | index=pd.MultiIndex.from_arrays(([0, 0], [0, 1]), names=["section_id", "segment_id"]), 28 | ) 29 | 30 | 31 | def test_find_neighbors(point_properties, edge_properties): 32 | graph = PointVasculature(point_properties, edge_properties) 33 | section_id = 0 34 | segment_id = 0 35 | npt.assert_array_equal( 36 | test_module.find_neighbors(graph, section_id, segment_id).to_numpy(), 37 | np.array([False, True]), 38 | ) 39 | 40 | 41 | def test_find_degrees_of_neighbors(point_properties, edge_properties): 42 | graph = PointVasculature(point_properties, edge_properties) 43 | node_id = 0 44 | neighbours, connected_id, corresponding_degrees = test_module.find_degrees_of_neighbors( 45 | graph, node_id 46 | ) 47 | npt.assert_array_equal( 48 | neighbours, 49 | np.array([True, False]), 50 | ) 51 | npt.assert_array_equal( 52 | np.array(connected_id), 53 | np.array({0, 1}), 54 | ) 55 | 
npt.assert_array_equal( 56 | corresponding_degrees, 57 | np.array([1, 2]), 58 | ) 59 | 60 | 61 | def test_create_entry_largest_nodes(point_properties, edge_properties, caplog): 62 | graph = test_module.Graph(point_properties, edge_properties) 63 | 64 | with pytest.raises(BloodFlowError): 65 | test_module.create_entry_largest_nodes(graph, params={"max_nb_inputs": -1.0}) 66 | with pytest.raises(BloodFlowError): 67 | test_module.create_entry_largest_nodes(graph, params={"depth_ratio": -1.0}) 68 | with pytest.raises(BloodFlowError): 69 | test_module.create_entry_largest_nodes(graph, params={"vasc_axis": -1.0}) 70 | 71 | with caplog.at_level(logging.WARNING): 72 | assert ( 73 | test_module.create_entry_largest_nodes( 74 | graph, 75 | params={ 76 | "max_nb_inputs": 1, 77 | "depth_ratio": 10, 78 | "vasc_axis": 1, 79 | "blood_viscosity": 0.1, 80 | "base_pressure": 1.33e-3, 81 | }, 82 | ) 83 | == np.array([2]) 84 | ).all() 85 | assert "'depth_ratio' parameter must be <= 1. Considering depth_ratio = 1." in caplog.text 86 | 87 | with caplog.at_level(logging.WARNING): 88 | assert ( 89 | test_module.create_entry_largest_nodes( 90 | graph, 91 | params={ 92 | "max_nb_inputs": 1, 93 | "depth_ratio": -1, 94 | "vasc_axis": 1, 95 | "blood_viscosity": 0.1, 96 | "base_pressure": 1.33e-3, 97 | }, 98 | ) 99 | == np.array([2]) 100 | ).all() 101 | assert "'depth_ratio' parameter must be >= 0. Considering depth_ratio = 0." in caplog.text 102 | 103 | 104 | def test_get_largest_nodes(point_properties, edge_properties, caplog): 105 | graph = PointVasculature(point_properties, edge_properties) 106 | 107 | assert (test_module.get_largest_nodes(graph, n_nodes=1) == np.array([2])).all() 108 | with pytest.raises(BloodFlowError): 109 | test_module.get_largest_nodes(graph, n_nodes=-1) 110 | with caplog.at_level(logging.WARNING): 111 | assert (test_module.get_largest_nodes(graph, depth_ratio=-1) == np.array([2])).all() 112 | assert "The depth_ratio must be >= 0. Taking depth_ratio = 0." 
def test_get_large_nodes(point_properties, edge_properties, caplog):
    """Selection, clamping, and validation behavior of get_large_nodes.

    Node 2 (diameter 2 in the fixture) is the only node with radius >= 2.
    """
    graph = PointVasculature(point_properties, edge_properties)

    assert (test_module.get_large_nodes(graph, min_radius=2) == np.array([2])).all()
    # A negative min_radius is invalid. The call alone belongs inside
    # pytest.raises; the previous trailing `assert (... == ...).all()` was dead
    # code (never reached when the expected error is raised), matching the bare
    # vasc_axis checks below.
    with pytest.raises(BloodFlowError):
        test_module.get_large_nodes(graph, min_radius=-1)
    # Out-of-range depth_ratio values are clamped with a warning, not rejected.
    with caplog.at_level(logging.WARNING):
        assert (test_module.get_large_nodes(graph, depth_ratio=10) == np.array([2])).all()
        assert "The depth_ratio must be <= 1. Taking depth_ratio = 1." in caplog.text
    with caplog.at_level(logging.WARNING):
        assert (test_module.get_large_nodes(graph, depth_ratio=-1) == np.array([2])).all()
        assert "The depth_ratio must be >= 0. Taking depth_ratio = 0." in caplog.text
    # vasc_axis must be a valid axis index (0, 1, or 2).
    with pytest.raises(BloodFlowError):
        test_module.get_large_nodes(graph, vasc_axis=-1)
    with pytest.raises(BloodFlowError):
        test_module.get_large_nodes(graph, vasc_axis=3)
@pytest.mark.parametrize("window", [None, 15, 30])
def test_sine_estimation(window):
    """fit_sine_model recovers amplitude, frequency, and offset of a clean sine."""
    amplitude, frequency, offset = 40, 15, 100
    t = np.linspace(0, 1, 1000)
    signal = amplitude * np.sin(2 * np.pi * frequency * t) + offset

    amp_est, freq_est, off_est = test_module.fit_sine_model(signal=signal, window=window)

    assert np.abs(amplitude - amp_est) < 1e-1
    # The trailing edge of the sine is occasionally detected as an extra peak,
    # so the estimated frequency is allowed to be off by one.
    assert np.abs(frequency - freq_est) <= 1
    assert np.abs(offset - off_est) < 1e-1