├── .editorconfig
├── .github
│   ├── ISSUE_TEMPLATE.md
│   └── workflows
│       ├── codeql-analysis.yml
│       ├── main.yml
│       ├── publish-pypi.yaml
│       └── semgrep.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── .travis.yml
├── CONTRIBUTING.rst
├── Dockerfile
├── HISTORY.rst
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.rst
├── docker-compose.yml
├── docs
│   ├── Makefile
│   ├── conf.py
│   ├── contributing.rst
│   ├── history.rst
│   ├── index.rst
│   ├── installation.rst
│   ├── make.bat
│   ├── readme.rst
│   └── usage.rst
├── orthanc_ext
│   ├── __init__.py
│   ├── event_dispatcher.py
│   ├── executor_utilities.py
│   ├── http_utilities.py
│   ├── logging_configurator.py
│   ├── orthanc.py
│   ├── orthanc_utilities.py
│   ├── pyorthanc_utilities.py
│   ├── python_utilities.py
│   └── scripts
│       ├── __init__.py
│       ├── anonymization.py
│       ├── auto_forward.py
│       ├── auto_retries.py
│       ├── event_publisher.py
│       ├── kafka_event_publisher.py
│       ├── nats_event_publisher.py
│       └── rabbitmq_event_publisher.py
├── requirements_dev.in
├── requirements_dev.txt
├── server_cert.cnf
├── setup.cfg
├── setup.py
├── tests
│   ├── __init__.py
│   ├── entry_point.py
│   ├── test_anonymization.py
│   ├── test_auto_forward.py
│   ├── test_auto_retries.py
│   ├── test_event_dispatcher.py
│   ├── test_event_publisher.py
│   ├── test_executor_utilities.py
│   ├── test_http_utilities.py
│   ├── test_logging_configurator.py
│   ├── test_notifications_kafka.py
│   ├── test_notifications_nats.py
│   ├── test_notifications_rabbitmq.py
│   ├── test_orthanc_api_consistency.py
│   ├── test_orthanc_api_handler.py
│   └── test_pyorthanc_utilities.py
└── tox.ini
/.editorconfig:
--------------------------------------------------------------------------------
1 | # http://editorconfig.org
2 |
3 | root = true
4 |
5 | [*]
6 | indent_style = space
7 | indent_size = 4
8 | trim_trailing_whitespace = true
9 | insert_final_newline = true
10 | charset = utf-8
11 | end_of_line = lf
12 |
13 | [*.bat]
14 | indent_style = tab
15 | end_of_line = crlf
16 |
17 | [LICENSE]
18 | insert_final_newline = false
19 |
20 | [Makefile]
21 | indent_style = tab
22 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | * Orthanc Server Extensions version:
2 | * Python version:
3 | * Operating System:
4 |
5 | ### Description
6 |
7 | Describe what you were trying to get done.
8 | Tell us what happened, what went wrong, and what you expected to happen.
9 |
10 | ### What I Did
11 |
12 | ```
13 | Paste the command(s) you ran and the output.
14 | If there was a crash, please include the traceback here.
15 | ```
16 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ "main" ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ "main" ]
20 | schedule:
21 | - cron: '31 17 * * 0'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 | permissions:
28 | actions: read
29 | contents: read
30 | security-events: write
31 |
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | language: [ 'python' ]
36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
37 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
38 |
39 | steps:
40 | - name: Checkout repository
41 | uses: actions/checkout@v3
42 |
43 | # Initializes the CodeQL tools for scanning.
44 | - name: Initialize CodeQL
45 | uses: github/codeql-action/init@v2
46 | with:
47 | languages: ${{ matrix.language }}
48 | # If you wish to specify custom queries, you can do so here or in a config file.
49 | # By default, queries listed here will override any specified in a config file.
50 | # Prefix the list here with "+" to use these queries and those in the config file.
51 |
52 |         # For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
53 | # queries: security-extended,security-and-quality
54 |
55 |
56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
57 | # If this step fails, then you should remove it and run the build manually (see below)
58 | - name: Autobuild
59 | uses: github/codeql-action/autobuild@v2
60 |
61 | # ℹ️ Command-line programs to run using the OS shell.
62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
63 |
64 |     # If the Autobuild fails above, remove it and uncomment the following three lines,
65 |     # then modify them (or add more) to build your code. If your project needs custom build steps, please refer to the EXAMPLE below for guidance.
66 |
67 | # - run: |
68 | # echo "Run, Build Application using script"
69 | # ./location_of_script_within_repo/buildscript.sh
70 |
71 | - name: Perform CodeQL Analysis
72 | uses: github/codeql-action/analyze@v2
73 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Build and tests
2 | on:
3 | push:
4 | branches: [ main ]
5 | pull_request:
6 | jobs:
7 | build:
8 | strategy:
9 | matrix:
10 | config:
11 | # [Python version, tox env]
12 | - ["3.11", "py311"]
13 | - ["3.10", "py310"]
14 | - ["3.9", "py39"]
15 | runs-on: ubuntu-latest
16 | name: ${{ matrix.config[1] }}
17 | steps:
18 | - name: Checkout
19 | uses: actions/checkout@v3
20 | - name: Setup
21 | uses: actions/setup-python@v4
22 | with:
23 | python-version: ${{ matrix.config[0] }}
24 | - name: Install dependencies
25 | run: |
26 | python -m pip install --upgrade pip
27 | pip install tox
28 | - name: Test runner
29 | run: tox -e ${{ matrix.config[1] }}
30 |
--------------------------------------------------------------------------------
/.github/workflows/publish-pypi.yaml:
--------------------------------------------------------------------------------
1 | name: Publish to pypi
2 | on:
3 | push:
4 | branches: [ main ]
5 | tags: [ -v* ]
6 | jobs:
7 | publish-to-pypi:
8 | runs-on: ubuntu-20.04
9 | steps:
10 | - uses: actions/checkout@main
11 | - name: Set up Python 3.9
12 | uses: actions/setup-python@v3
13 | with:
14 | python-version: 3.9
15 | - name: build-distribution
16 | run: |
17 | python setup.py sdist
18 | - name: publish
19 | uses: pypa/gh-action-pypi-publish@release/v1
20 | with:
21 | user: __token__
22 | password: ${{ secrets.PYPI_API_TOKEN_ORTHANC_SERVER_EXTENSIONS }}
23 |
--------------------------------------------------------------------------------
/.github/workflows/semgrep.yml:
--------------------------------------------------------------------------------
1 | # Name of this GitHub Actions workflow.
2 | name: Semgrep
3 |
4 | on:
5 | # Scan changed files in PRs (diff-aware scanning):
6 | pull_request: {}
7 | # Scan on-demand through GitHub Actions interface:
8 | workflow_dispatch: {}
9 | # Scan mainline branches and report all findings:
10 | push:
11 | branches: ["integrate-semgrep", "main"]
12 | # Schedule the CI job (this method uses cron syntax):
13 | schedule:
14 | - cron: '20 17 * * *' # Sets Semgrep to scan every day at 17:20 UTC.
15 | # It is recommended to change the schedule to a random time.
16 |
17 | jobs:
18 | semgrep:
19 | # User definable name of this GitHub Actions job.
20 | name: semgrep/ci
21 | # If you are self-hosting, change the following `runs-on` value:
22 | runs-on: ubuntu-latest
23 |
24 | container:
25 | # A Docker image with Semgrep installed. Do not change this.
26 | image: returntocorp/semgrep
27 |
28 | # Skip any PR created by dependabot to avoid permission issues:
29 | if: (github.actor != 'dependabot[bot]')
30 |
31 | steps:
32 | # Fetch project source with GitHub Actions Checkout.
33 | - uses: actions/checkout@v3
34 |       # Run a Semgrep scan ("semgrep scan --config auto") from the command line of the Docker image.
35 | - run: semgrep scan --config auto
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | bin
3 | pyvenv.cfg
4 |
5 | # ide files
6 | .idea/
7 |
8 | # Byte-compiled / optimized / DLL files
9 | __pycache__/
10 | *.py[cod]
11 | *$py.class
12 |
13 | # C extensions
14 | *.so
15 |
16 | # Distribution / packaging
17 | .Python
18 | env/
19 | build/
20 | develop-eggs/
21 | dist/
22 | downloads/
23 | eggs/
24 | .eggs/
25 | lib/
26 | lib64/
27 | parts/
28 | sdist/
29 | var/
30 | wheels/
31 | *.egg-info/
32 | .installed.cfg
33 | *.egg
34 |
35 | # PyInstaller
36 | # Usually these files are written by a python script from a template
37 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
38 | *.manifest
39 | *.spec
40 |
41 | # Installer logs
42 | pip-log.txt
43 | pip-delete-this-directory.txt
44 |
45 | # Unit test / coverage reports
46 | htmlcov/
47 | .tox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | .hypothesis/
55 | .pytest_cache/
56 |
57 | # Translations
58 | *.mo
59 | *.pot
60 |
61 | # Django stuff:
62 | *.log
63 | local_settings.py
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # pyenv
82 | .python-version
83 |
84 | # celery beat schedule file
85 | celerybeat-schedule
86 |
87 | # SageMath parsed files
88 | *.sage.py
89 |
90 | # dotenv
91 | .env
92 |
93 | # virtualenv
94 | .venv
95 | venv/
96 | ENV/
97 |
98 | # Spyder project settings
99 | .spyderproject
100 | .spyproject
101 |
102 | # Rope project settings
103 | .ropeproject
104 |
105 | # mkdocs documentation
106 | /site
107 |
108 | # mypy
109 | .mypy_cache/
110 |
111 | # IDE settings
112 | .vscode/
113 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v3.4.0
4 | hooks:
5 | - id: trailing-whitespace
6 | # - id: end-of-file-fixer
7 | - id: check-yaml
8 | - id: check-added-large-files
9 | - id: double-quote-string-fixer
10 | - id: mixed-line-ending
11 | args: [ --fix=lf ]
12 | - id: check-ast
13 | - id: debug-statements
14 | - id: check-merge-conflict
15 | - repo: https://github.com/pre-commit/mirrors-yapf
16 | rev: v0.31.0
17 | hooks:
18 | - id: yapf
19 | - repo: https://gitlab.com/pycqa/flake8
20 | rev: 3.9.0
21 | hooks:
22 | - id: flake8
23 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.11"
12 | # You can also specify other tool versions:
13 | # nodejs: "20"
14 | # rust: "1.70"
15 | # golang: "1.20"
16 |
17 | # Build documentation in the "docs/" directory with Sphinx
18 | sphinx:
19 | configuration: docs/conf.py
20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
21 | # builder: "dirhtml"
22 | # Fail on all warnings to avoid broken references
23 | # fail_on_warning: true
24 |
25 | # Optionally build your docs in additional formats such as PDF and ePub
26 | # formats:
27 | # - pdf
28 | # - epub
29 |
30 | # Optional but recommended, declare the Python requirements required
31 | # to build your documentation
32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
33 | # python:
34 | # install:
35 | # - requirements: docs/requirements.txt
36 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - 3.9
4 | - 3.8
5 | - 3.7
6 | install: pip install -U tox-travis
7 | script: tox
8 | deploy:
9 | provider: pypi
10 | distributions: sdist bdist_wheel
11 | username: __token__
12 | password:
13 | secure: BWETOR65pK+FcRexuUd+I6QtBpxRoqGCKj4Lkj4bChzg482pJ/hu7bJs2VVXqifd3b0+oqcASR8y1p13E2+RufSH9Ehf1/x0pW+UGh+4sgoqs0OiybJ3wyrraZ5WC1XE744pccwF0euIUb64a4oU+DPoNiWGdmNGzcUf59SpIwAhtuoz9pAcIwchWuB0X4hgxiM72Dc3YrT1mnPYFdG4irWlVm93obwmV+5xMPN2scRmXHbf8dJjVz+3ng9OgV2x0h3k5hMaJTKdkA1HBXH0bwaD4+Ti1z2IwezdXBhnA7wN3nv9u3vx9MBpzkuC+yinqSQOHJGdamzOfB4aq/bwZWHjPqkVRu0TlyovBzgqdfT/82lII7Q2VjqrdrBLHxxAyAFUqrrNkV69GGcdTTRk2AVYK1ktOxf1FiSn8uHbOEkw+w3OBdFkYO6bZC1gmZkVOVAgJuDywdjAqYrjloz3wwXlkNrCjBPq9oiOVH4pag5fWCAs4HlbW46cjxG+1r2WxH/1Jgi+jfs2YD/fk6SX2bSUJev5vJg2aTKymfiP+KjMWoOGqjc9x+3DZgPvvCgMgbCA99cLRSW+5vmTHFZTnD4M0wLmFk+8h52RIOZ53gdNauJVPYFGPPgSo9Jy/HXfrGePdSRwcJXBUYitOxJOdsLrHCpPnhtRfUpdtyqvtd0=
14 | on:
15 | tags: true
16 | repo: walkIT-nl/orthanc-server-extensions
17 | python: 3.8
18 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ============
4 | Contributing
5 | ============
6 |
7 | Contributions are welcome, and they are greatly appreciated! Every little bit
8 | helps, and credit will always be given.
9 |
10 | You can contribute in many ways:
11 |
12 | Types of Contributions
13 | ----------------------
14 |
15 | Report Bugs
16 | ~~~~~~~~~~~
17 |
18 | Report bugs at https://github.com/walkit-nl/orthanc-server-extensions/issues.
19 |
20 | If you are reporting a bug, please include:
21 |
22 | * Your operating system name and version.
23 | * Any details about your local setup that might be helpful in troubleshooting.
24 | * Detailed steps to reproduce the bug.
25 |
26 | Fix Bugs
27 | ~~~~~~~~
28 |
29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
30 | wanted" is open to whoever wants to implement it.
31 |
32 | Implement Features
33 | ~~~~~~~~~~~~~~~~~~
34 |
35 | Look through the GitHub issues for features. Anything tagged with "enhancement"
36 | and "help wanted" is open to whoever wants to implement it.
37 |
38 | Write Documentation
39 | ~~~~~~~~~~~~~~~~~~~
40 |
41 | Orthanc Server Extensions could always use more documentation, whether as part of the
42 | official Orthanc Server Extensions docs, in docstrings, or even on the web in blog posts,
43 | articles, and such.
44 |
45 | Submit Feedback
46 | ~~~~~~~~~~~~~~~
47 |
48 | The best way to send feedback is to file an issue at https://github.com/walkit-nl/orthanc-server-extensions/issues.
49 |
50 | If you are proposing a feature:
51 |
52 | * Explain in detail how it would work.
53 | * Keep the scope as narrow as possible, to make it easier to implement.
54 | * Remember that this is a volunteer-driven project, and that contributions
55 | are welcome :)
56 |
57 | Get Started!
58 | ------------
59 |
60 | Ready to contribute? Here's how to set up `orthanc-server-extensions` for local development.
61 |
62 | 1. Fork the `orthanc-server-extensions` repo on GitHub.
63 | 2. Clone your fork locally::
64 |
65 | $ git clone git@github.com:your_name_here/orthanc-server-extensions.git
66 |
67 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
68 |
69 | $ mkvirtualenv orthanc-server-extensions
70 | $ cd orthanc-server-extensions/
71 | $ python setup.py develop
72 |
73 | 3b. Install commit hooks
74 |
75 | To achieve code consistency, `orthanc-server-extensions` uses a number of commit hooks managed by `pre-commit <https://pre-commit.com/>`_. Install them by running::
76 |
77 | $ pre-commit install
78 |
79 | 4. Create a branch for local development::
80 |
81 | $ git checkout -b name-of-your-bugfix-or-feature
82 |
83 | Now you can make your changes locally.
84 |
85 | 5. When you're done making changes, check that your changes pass flake8 and the
86 | tests, including testing other Python versions with tox::
87 |
88 | $ flake8 orthanc_ext tests
89 |     $ python setup.py test   # or simply: pytest
90 | $ tox
91 |
92 | To get flake8 and tox, just pip install them into your virtualenv.
93 |
94 | 6. Commit your changes and push your branch to GitHub::
95 |
96 | $ git add .
97 | $ git commit -m "Your detailed description of your changes."
98 | $ git push origin name-of-your-bugfix-or-feature
99 |
100 | 7. Submit a pull request through the GitHub website.
101 |
102 | Pull Request Guidelines
103 | -----------------------
104 |
105 | Before you submit a pull request, check that it meets these guidelines:
106 |
107 | 1. The pull request should include tests.
108 | 2. If the pull request adds functionality, the docs should be updated. Put
109 | your new functionality into a function with a docstring, and add the
110 | feature to the list in README.rst.
111 | 3. The pull request should work for Python 3.9, 3.10 and 3.11, the versions
112 |    exercised by the GitHub Actions build matrix. Check the build results for
113 |    your pull request and make sure that the tests pass for all supported Python versions.
114 |
115 | Tips
116 | ----
117 |
118 | To run a subset of tests::
119 |
120 |     $ pytest tests/test_event_dispatcher.py
121 |
122 |
123 | Deploying
124 | ---------
125 |
126 | A reminder for the maintainers on how to deploy.
127 | Make sure all your changes are committed (including an entry in HISTORY.rst).
128 | Then run::
129 |
130 | $ bump2version patch # possible: major / minor / patch
131 | $ git push
132 | $ git push --tags
133 |
134 | CI will then deploy to PyPI if tests pass.
135 |
--------------------------------------------------------------------------------
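The guidelines above ask that every pull request ships with tests, and the Tips section shows how to run a subset of them with pytest. As a hedged, self-contained sketch of the expected shape of such a test (the function under test is a toy stand-in, not part of `orthanc_ext`):

```python
# Hedged illustration of a pytest-style test, as requested in the pull request
# guidelines above. The function under test is a toy stand-in defined inline;
# the repository's real tests live under tests/ and import from orthanc_ext.


def apply_tag(event: dict) -> dict:
    """Toy handler standing in for new functionality added in a pull request."""
    return {**event, 'tagged': True}


def test_apply_tag_marks_event():
    assert apply_tag({'resource_id': 'abc'}) == {'resource_id': 'abc', 'tagged': True}
```

Saved as its own file, it can be run in isolation with ``pytest path/to/test_file.py`` or selected with ``pytest -k apply_tag``, mirroring the "run a subset of tests" tip above.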
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM osimis/orthanc:23.9.2-full
2 |
3 | RUN apt-get update && ACCEPT_EULA=Y apt-get dist-upgrade -y && apt-get install -y openssl
4 |
5 | COPY server_cert.cnf .
6 | RUN openssl req -nodes -new -x509 -days 3650 -keyout /etc/ssl/private/server.key -out /etc/ssl/certs/server.pem -config server_cert.cnf
7 | RUN mkdir -p /ssl && cat /etc/ssl/private/server.key /etc/ssl/certs/server.pem > /ssl/keyAndCert.pem
8 |
9 | COPY orthanc_ext /python/orthanc_ext
10 | WORKDIR /python
11 | COPY setup.py README.rst HISTORY.rst ./
12 | RUN pip3 install httpx .[nats-event-publisher,pyorthanc] # does not get picked up in setup.py
13 | RUN python3 setup.py install
14 | COPY tests/entry_point.py /python/entry_point.py
15 |
--------------------------------------------------------------------------------
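The Dockerfile above bakes the `orthanc_ext` package and `tests/entry_point.py` (not shown in this dump) into an osimis/orthanc image. For orientation only, a minimal Orthanc Python plugin entry point generally looks like the hedged sketch below; it relies on the `orthanc` module injected by the Orthanc Python plugin and its `RegisterOnChangeCallback` hook, and the handler body is illustrative rather than the repository's actual `entry_point.py`.

```python
# Hedged sketch of a minimal Orthanc Python plugin entry point; NOT the
# repository's tests/entry_point.py. The `orthanc` module is injected by the
# Orthanc Python plugin at runtime, so this file only runs inside Orthanc.
import orthanc


def on_change(change_type, level, resource_id):
    # Log every change event Orthanc emits (NEW_INSTANCE, STABLE_STUDY, ...).
    orthanc.LogInfo(
        f'change event: type={change_type} level={level} resource={resource_id}')


orthanc.RegisterOnChangeCallback(on_change)
```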
/HISTORY.rst:
--------------------------------------------------------------------------------
1 | =======
2 | History
3 | =======
4 |
5 | 3.5.0 (2023-10-12)
6 | ------------------
7 | * Support pyOrthanc as Orthanc API client
8 |
9 | 3.4.0 (2023-06-21)
10 | ------------------
11 | * Improved asyncio performance
12 |
13 | 3.3.0 (2023-01-30)
14 | ------------------
15 | * Publish Orthanc change events to Kafka, RabbitMQ and NATS
16 | * Run asyncio functions (coroutines) for concurrent processing of a change event
17 | * Chain functions into a pipeline (composition)
18 |
19 | 3.2.8 (2021-09-18)
20 | ------------------
21 | * get_metadata_of_first_instance_of_series() now propagates http errors if /instances call fails.
22 |
23 | 3.2.7 (2021-09-17)
24 | ------------------
25 | * Small resilience fix for httpx (more conservative timeouts)
26 | * get_metadata_of_first_instance_of_series() will now return None for unknown keys
27 |
28 | 3.2.6 (2021-09-16)
29 | ------------------
30 |
31 | * Replace requests/responses library by httpx/respx
32 | * Add support for anonymization and re-identification using study merge
33 | * Many smaller and bigger refactorings
34 | * Dependency updates
35 |
36 | 3.1.1 (2021-02-11)
37 | ------------------
38 |
39 | * Add DICOM auto forwarding sample with retries
40 |
41 | 3.1.0 (2021-02-07)
42 | ------------------
43 |
44 | * Improved logging, aligned log format and levels with Orthanc.
45 |
46 | 0.1.0 (2021-01-09)
47 | ------------------
48 |
49 | * First release on PyPI.
50 |
--------------------------------------------------------------------------------
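The 3.3.0 and 3.4.0 entries above describe chaining handler functions into a pipeline (composition) and running asyncio coroutines concurrently for a single change event. The sketch below illustrates both ideas in plain Python; ``compose`` and ``dispatch_event`` are assumed names used for illustration, not necessarily the functions exposed by `orthanc_ext`.

```python
# Hedged illustration of the two ideas named in the 3.3.0/3.4.0 changelog
# entries: function composition for pipelines, and asyncio for running several
# handlers concurrently. Names here are assumptions, not the orthanc_ext API.
import asyncio
import functools


def compose(*funcs):
    """Chain functions left to right: compose(f, g)(x) == g(f(x))."""
    return functools.reduce(lambda f, g: lambda x: g(f(x)), funcs)


async def dispatch_event(event, async_handlers):
    """Run all coroutine handlers for one change event concurrently."""
    return await asyncio.gather(*(handler(event) for handler in async_handlers))


if __name__ == '__main__':
    pipeline = compose(lambda e: {**e, 'validated': True}, lambda e: {**e, 'forwarded': True})
    print(pipeline({'resource_id': 'abc'}))

    async def log_event(event):
        return f'logged {event["resource_id"]}'

    async def publish_event(event):
        return f'published {event["resource_id"]}'

    print(asyncio.run(dispatch_event({'resource_id': 'abc'}, [log_event, publish_event])))
```

Running the sketch prints the event after the composed pipeline has been applied, followed by the list of results returned by both coroutines.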
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 |  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 |     <one line to give the program's name and a brief idea of what it does.>
633 |     Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published by
637 | the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 |     along with this program.  If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include CONTRIBUTING.rst
2 | include HISTORY.rst
3 | include LICENSE
4 | include README.rst
5 |
6 | recursive-include tests *
7 | recursive-exclude * __pycache__
8 | recursive-exclude * *.py[co]
9 |
10 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
11 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: clean clean-test clean-pyc clean-build docs help
2 | .DEFAULT_GOAL := help
3 |
4 | define BROWSER_PYSCRIPT
5 | import os, webbrowser, sys
6 |
7 | from urllib.request import pathname2url
8 |
9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
10 | endef
11 | export BROWSER_PYSCRIPT
12 |
13 | define PRINT_HELP_PYSCRIPT
14 | import re, sys
15 |
16 | for line in sys.stdin:
17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
18 | if match:
19 | target, help = match.groups()
20 | print("%-20s %s" % (target, help))
21 | endef
22 | export PRINT_HELP_PYSCRIPT
23 |
24 | BROWSER := python -c "$$BROWSER_PYSCRIPT"
25 |
26 | help:
27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
28 |
29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
30 |
31 | clean-build: ## remove build artifacts
32 | rm -fr build/
33 | rm -fr dist/
34 | rm -fr .eggs/
35 | find . -name '*.egg-info' -exec rm -fr {} +
36 | find . -name '*.egg' -exec rm -f {} +
37 |
38 | clean-pyc: ## remove Python file artifacts
39 | find . -name '*.pyc' -exec rm -f {} +
40 | find . -name '*.pyo' -exec rm -f {} +
41 | find . -name '*~' -exec rm -f {} +
42 | find . -name '__pycache__' -exec rm -fr {} +
43 |
44 | clean-test: ## remove test and coverage artifacts
45 | rm -fr .tox/
46 | rm -f .coverage
47 | rm -fr htmlcov/
48 | rm -fr .pytest_cache
49 |
50 | lint: ## check style with flake8
51 | 	flake8 orthanc_ext tests
52 |
53 | test: ## run tests quickly with the default Python
54 | pytest
55 |
56 | test-all: ## run tests on every Python version with tox
57 | tox
58 |
59 | coverage: ## check code coverage quickly with the default Python
60 | 	coverage run --source orthanc_ext -m pytest
61 | coverage report -m
62 | coverage html
63 | $(BROWSER) htmlcov/index.html
64 |
65 | docs: ## generate Sphinx HTML documentation, including API docs
66 | 	rm -f docs/orthanc_ext*.rst
67 | rm -f docs/modules.rst
68 | 	sphinx-apidoc -o docs/ orthanc_ext
69 | $(MAKE) -C docs clean
70 | $(MAKE) -C docs html
71 | $(BROWSER) docs/_build/html/index.html
72 |
73 | servedocs: docs ## compile the docs watching for changes
74 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .
75 |
76 | release: dist ## package and upload a release
77 | twine upload dist/*
78 |
79 | dist: clean ## builds source and wheel package
80 | python setup.py sdist
81 | python setup.py bdist_wheel
82 | ls -l dist
83 |
84 | install: clean ## install the package to the active Python's site-packages
85 | python setup.py install
86 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | =========================
2 | Orthanc Server Extensions
3 | =========================
4 |
5 |
6 | .. image:: https://img.shields.io/pypi/v/orthanc-server-extensions.svg
7 | :target: https://pypi.python.org/pypi/orthanc-server-extensions
8 |
9 | .. image:: https://travis-ci.com/walkIT-nl/orthanc-server-extensions.svg?branch=main
10 | :target: https://travis-ci.com/walkIT-nl/orthanc-server-extensions
11 |
12 | .. image:: https://readthedocs.org/projects/orthanc-server-extensions/badge/?version=latest
13 | :target: https://orthanc-server-extensions.readthedocs.io/en/latest/?badge=latest
14 | :alt: Documentation Status
15 |
16 | .. image:: https://github.com/walkIT-nl/orthanc-server-extensions/actions/workflows/main.yml/badge.svg
17 | :target: https://github.com/walkIT-nl/orthanc-server-extensions/actions/workflows/main.yml
18 | :alt: Build and test status
19 |
20 | A simple event processing framework, built as an Orthanc Python plugin, to extend Orthanc’s feature set. It focuses on
21 | integration and orchestration, such as study routing, event notifications and audit logging.
22 |
23 |
24 | * Free software: GNU Affero General Public License v3
25 | * Documentation: https://orthanc-server-extensions.readthedocs.io.
26 |
27 |
28 | Features
29 | --------
30 | * easily plug in event handling scripts for all of Orthanc's `change events`_
31 | * chain functions into a pipeline (composition)
32 | * run asyncio functions (coroutines) for concurrent processing of change events
33 | * run (integration) tests for your Orthanc python scripts
34 | * publish events to Kafka, RabbitMQ and NATS
35 |
36 | Modules
37 | -------
38 | * auto_retries: retry failed jobs
39 | * auto_forward: forward DICOM to external systems based on Python match functions (see the sketch below)
40 | * anonymization: anonymize DICOM Series using the Orthanc API
41 |
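A minimal sketch of wiring the ``auto_forward`` module into the event dispatcher is shown below.
The match function, the modality name ``pacs`` and the ``STABLE_STUDY`` trigger are assumptions for
illustration only; adapt them to your own routing rules::

    import orthanc  # provided by the Orthanc Python plugin

    from orthanc_ext import event_dispatcher
    from orthanc_ext.scripts.auto_forward import DicomReceivedMatcher, forward_dicom

    # assumption: forward every stable study to a DICOM modality named 'pacs'
    forward_all_to_pacs = forward_dicom([
        DicomReceivedMatcher(
            matches=lambda resource_id, client: True,
            modality_selector=lambda resource_id, client: 'pacs'),
    ])

    event_dispatcher.register_event_handlers(
        {orthanc.ChangeType.STABLE_STUDY: forward_all_to_pacs},
        orthanc_module=orthanc,
        sync_client=event_dispatcher.create_session(orthanc))
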
42 | Why this library was written
43 | ----------------------------
44 |
45 | Improve developer happiness: even with Docker, the roundtrip to build, run and test a function is just a little bit too long.
46 | With this library, you can start from unit tests, move on to integration tests, and then deploy the result in the Docker image.
47 |
48 | Enable testability: the Orthanc API is provided as a module that is not easy to mock cleanly.
49 | Orthanc Server Extensions provides a few simple abstractions that keep functions clean and independently testable.
50 |
51 | Improve performance: async functions will be executed concurrently, which is advantageous if the processing is I/O bound.
52 |
53 | Httpx was chosen as the base library to access the Orthanc API, rather than orthanc.RestApi*, because it is well known
54 | and developer friendly, and because external API access avoids deadlocks in the Python plugin (before this was solved in 3.1).
55 |
56 |
57 | Getting Started
58 | ---------------
59 |
60 | ``entry_point.py`` provides the initial boilerplate to get started. Run it by issuing
61 | ``docker-compose up --build``; you should be greeted with an 'orthanc started event handled!' message, which is also published to NATS.
62 |
63 | Developing
64 | ----------
65 |
66 | Write your event handling scripts and register them in ``event_dispatcher.register_event_handlers()``. Examples,
67 | including the use of async functions and function composition (pipeline), can be found in ``tests/test_event_dispatcher.py``.
68 |
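For instance, a composed (pipeline) handler next to an async handler could look like the sketch
below; the handler names and the ``/studies`` and ``/statistics`` calls are illustrative, not part
of the library::

    import logging

    import orthanc  # provided by the Orthanc Python plugin

    from orthanc_ext import event_dispatcher
    from orthanc_ext.http_utilities import HttpxClientType
    from orthanc_ext.python_utilities import pipeline

    def fetch_study(event, client):
        # first pipeline step: turn the change event into the study JSON
        return client.get(f'/studies/{event.resource_id}').json()

    def log_study(study, _client):
        # second pipeline step: receives the result of the previous step
        logging.info(study.get('MainDicomTags'))

    async def log_statistics(event, async_client):
        # async handlers get the async client injected and run concurrently
        response = await async_client.get('/statistics')
        logging.info(response.json())

    event_dispatcher.register_event_handlers(
        {orthanc.ChangeType.STABLE_STUDY: [pipeline(fetch_study, log_study), log_statistics]},
        orthanc_module=orthanc,
        sync_client=event_dispatcher.create_session(orthanc),
        async_client=event_dispatcher.create_session(orthanc, client_type=HttpxClientType.ASYNC))
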
69 |
70 | Credits
71 | -------
72 |
73 | This project would obviously not exist without Orthanc, its documentation and its community.
74 |
75 | This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
76 |
77 | .. _change events: https://book.orthanc-server.com/plugins/python.html#listening-to-changes
78 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter
79 | .. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
80 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 | services:
3 | nats:
4 | image: nats
5 | command:
6 | - '-js'
7 | orthanc:
8 | image: local/orthanc
9 | build: .
10 | environment:
11 | VERBOSE_ENABLED: "true"
12 | ORTHANC__SSL_ENABLED: "true"
13 | ORTHANC__SSL_CERTIFICATE: "/ssl/keyAndCert.pem"
14 |
15 | ORTHANC__OVERWRITE_INSTANCES: "true"
16 |
17 | ORTHANC__PYTHON_VERBOSE: "false"
18 | ORTHANC__HTTP_PORT: "8042"
19 | ORTHANC__PYTHON_SCRIPT: "/python/entry_point.py"
20 |
21 | ORTHANC__REGISTERED_USERS: |
22 | {"demo": "demo"}
23 |
24 | NATS_URL: nats://nats
25 | depends_on:
26 | - nats
27 | ports:
28 | - "127.0.0.1:4242:4242"
29 | - "127.0.0.1:8042:8042"
30 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = python -msphinx
7 | SPHINXPROJ = orthanc-server-extensions
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # orthanc_ext documentation build configuration file, created by
4 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another
16 | # directory, add these directories to sys.path here. If the directory is
17 | # relative to the documentation root, use os.path.abspath to make it
18 | # absolute, like shown here.
19 | #
20 | import os
21 | import sys
22 |
23 | sys.path.insert(0, os.path.abspath('..'))
24 |
25 | import orthanc_ext # noqa: E402
26 |
27 | # -- General configuration ---------------------------------------------
28 |
29 | # If your documentation needs a minimal Sphinx version, state it here.
30 | #
31 | # needs_sphinx = '1.0'
32 |
33 | # Add any Sphinx extension module names here, as strings. They can be
34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
35 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
36 |
37 | # Add any paths that contain templates here, relative to this directory.
38 | templates_path = ['_templates']
39 |
40 | # The suffix(es) of source filenames.
41 | # You can specify multiple suffix as a list of string:
42 | #
43 | # source_suffix = ['.rst', '.md']
44 | source_suffix = '.rst'
45 |
46 | # The master toctree document.
47 | master_doc = 'index'
48 |
49 | # General information about the project.
50 | project = 'Orthanc Server Python Extensions'
51 | copyright = '2023, WalkIT'
52 | author = 'WalkIT'
53 |
54 | # The version info for the project you're documenting, acts as replacement
55 | # for |version| and |release|, also used in various other places throughout
56 | # the built documents.
57 | #
58 | # The short X.Y version.
59 | version = orthanc_ext.__version__
60 | # The full version, including alpha/beta/rc tags.
61 | release = orthanc_ext.__version__
62 |
63 | # The language for content autogenerated by Sphinx. Refer to documentation
64 | # for a list of supported languages.
65 | #
66 | # This is also used if you do content translation via gettext catalogs.
67 | # Usually you set "language" from the command line for these cases.
68 | language = None
69 |
70 | # List of patterns, relative to source directory, that match files and
71 | # directories to ignore when looking for source files.
72 | # This patterns also effect to html_static_path and html_extra_path
73 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
74 |
75 | # The name of the Pygments (syntax highlighting) style to use.
76 | pygments_style = 'sphinx'
77 |
78 | # If true, `todo` and `todoList` produce output, else they produce nothing.
79 | todo_include_todos = False
80 |
81 | # -- Options for HTML output -------------------------------------------
82 |
83 | # The theme to use for HTML and HTML Help pages. See the documentation for
84 | # a list of builtin themes.
85 | #
86 | html_theme = 'alabaster'
87 |
88 | # Theme options are theme-specific and customize the look and feel of a
89 | # theme further. For a list of options available for each theme, see the
90 | # documentation.
91 | #
92 | # html_theme_options = {}
93 |
94 | # Add any paths that contain custom static files (such as style sheets) here,
95 | # relative to this directory. They are copied after the builtin static files,
96 | # so a file named "default.css" will overwrite the builtin "default.css".
97 | html_static_path = ['_static']
98 |
99 | # -- Options for HTMLHelp output ---------------------------------------
100 |
101 | # Output file base name for HTML help builder.
102 | htmlhelp_basename = 'orthanc_server_extensionsdoc'
103 |
104 | # -- Options for LaTeX output ------------------------------------------
105 |
106 | latex_elements = {
107 | # The paper size ('letterpaper' or 'a4paper').
108 | #
109 | # 'papersize': 'letterpaper',
110 | # The font size ('10pt', '11pt' or '12pt').
111 | #
112 | # 'pointsize': '10pt',
113 | # Additional stuff for the LaTeX preamble.
114 | #
115 | # 'preamble': '',
116 | # Latex figure (float) alignment
117 | #
118 | # 'figure_align': 'htbp',
119 | }
120 |
121 | # Grouping the document tree into LaTeX files. List of tuples
122 | # (source start file, target name, title, author, documentclass
123 | # [howto, manual, or own class]).
124 | latex_documents = [
125 | (master_doc, 'orthanc_ext.tex', 'Orthanc Server Extensions Documentation', 'WalkIT', 'manual'),
126 | ]
127 |
128 | # -- Options for manual page output ------------------------------------
129 |
130 | # One entry per manual page. List of tuples
131 | # (source start file, name, description, authors, manual section).
132 | man_pages = [(master_doc, 'orthanc_ext', 'Orthanc Server Extensions Documentation', [author], 1)]
133 |
134 | # -- Options for Texinfo output ----------------------------------------
135 |
136 | # Grouping the document tree into Texinfo files. List of tuples
137 | # (source start file, target name, title, author,
138 | # dir menu entry, description, category)
139 | texinfo_documents = [(
140 | master_doc, 'orthanc_ext', 'Orthanc Server Extensions Documentation', author, 'orthanc_ext',
141 |     'Orthanc Python plugin based framework to extend Orthanc with testable event handling scripts.', 'Miscellaneous',
142 | ), ]
143 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../HISTORY.rst
2 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to Orthanc Server Extensions' documentation!
2 | =====================================================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 | :caption: Contents:
7 |
8 | readme
9 | installation
10 | usage
11 | modules
12 | contributing
13 | history
14 |
15 | Indices and tables
16 | ==================
17 | * :ref:`genindex`
18 | * :ref:`modindex`
19 | * :ref:`search`
20 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ============
4 | Installation
5 | ============
6 |
7 |
8 | Stable release
9 | --------------
10 |
11 | To install Orthanc Server Extensions, run this command in your terminal:
12 |
13 | .. code-block:: console
14 |
15 | $ pip install orthanc-server-extensions
16 |
17 | This is the preferred method to install Orthanc Server Extensions, as it will always install the most recent stable release.
18 |
19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide
20 | you through the process.
21 |
22 | .. _pip: https://pip.pypa.io
23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
24 |
25 |
26 | From sources
27 | ------------
28 |
29 | The sources for Orthanc Server Extensions can be downloaded from the `Github repo`_.
30 |
31 | You can either clone the public repository:
32 |
33 | .. code-block:: console
34 |
35 |     $ git clone https://github.com/walkit-nl/orthanc-server-extensions
36 |
37 | Or download the `tarball`_:
38 |
39 | .. code-block:: console
40 |
41 |     $ curl -OJL https://github.com/walkit-nl/orthanc-server-extensions/tarball/main
42 |
43 | Once you have a copy of the source, you can install it with:
44 |
45 | .. code-block:: console
46 |
47 | $ python setup.py install
48 |
49 |
50 | .. _Github repo: https://github.com/walkit-nl/orthanc-server-extensions
51 | .. _tarball: https://github.com/walkit-nl/orthanc-server-extensions/tarball/main
52 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=python -msphinx
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=orthanc-server-extensions
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed,
20 | echo.then set the SPHINXBUILD environment variable to point to the full
21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the
22 | echo.Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/docs/readme.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | =====
2 | Usage
3 | =====
4 |
5 | To use Orthanc Server Extensions in an integration project, the following snippet can serve as the entry point script,
6 | specified in ``orthanc.json`` or via the ``ORTHANC__PYTHON_SCRIPT`` environment variable of the Osimis Docker image::
7 |
8 | import logging
9 | import orthanc # provided by the Orthanc plugin
10 | from orthanc_ext import event_dispatcher
11 |
12 |     # normally this would be defined in a separate module to avoid the import dependency on orthanc above
13 |     def log_event(evt, session):
14 |         logging.warning(evt.resource_id)
15 |
16 | event_dispatcher.register_event_handlers({orthanc.ChangeType.STABLE_STUDY: log_event}, orthanc_module=orthanc,
17 | sync_client=event_dispatcher.create_session(orthanc))
18 |
19 | To unit test the log_event handler with pytest, use::
20 |
21 | from orthanc_ext.orthanc import OrthancApiHandler
22 |     orthanc = OrthancApiHandler()
23 |     event_dispatcher.register_event_handlers({orthanc.ChangeType.STABLE_STUDY: log_event}, orthanc_module=orthanc, sync_client=None)
24 |     def test_shall_log_on_change(caplog):
25 | orthanc.on_change(orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, "resource-uuid")
26 |
27 | assert 'resource-uuid' in caplog.text
28 |
29 | One can use the excellent respx_ library to stub the API responses.
30 |
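A minimal sketch of such a stub is shown below; the fake base URL, the handler and the stubbed
version number are made up for the example::

    import httpx
    import respx

    def get_system_status(_, client):
        return client.get('/system').json()

    @respx.mock
    def test_get_system_status_returns_stubbed_version():
        # stub the Orthanc REST API call instead of talking to a real server
        respx.get('http://fake-orthanc/system').respond(json={'Version': '1.12.1'})
        client = httpx.Client(base_url='http://fake-orthanc')

        assert get_system_status(None, client).get('Version') == '1.12.1'
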
31 | To integration test a handler, use::
32 |
33 | from orthanc_ext.orthanc import OrthancApiHandler
34 | from requests_toolbelt import sessions
35 |
36 | orthanc = OrthancApiHandler()
37 | session = sessions.BaseUrlSession('http://your-orthanc-server:8042')
38 | session.auth = ('orthanc', 'orthanc')
39 |
40 | def get_system_status(_, session):
41 | # this would be the session created above
42 |         # BaseUrlSession allows usage equivalent to the orthanc.RestApi* helpers
43 | return session.get('/system').json()
44 |
45 | event_dispatcher.register_event_handlers({orthanc.ChangeType.ORTHANC_STARTED: get_system_status}, orthanc_module=orthanc,
46 |         sync_client=session)
47 |
48 | def test_get_system_status_shall_return_version():
49 | system_info, = orthanc.on_change(orthanc.ChangeType.ORTHANC_STARTED, orthanc.ResourceType.NONE, '')
50 |
51 | assert system_info.get('Version') is not None
52 |
53 | The event_dispatcher ensures that your API calls work the same way when called from the Orthanc Python plugin.
54 | For more examples, see the tests/ directory in the Git repository.
55 |
56 | .. _respx: https://lundberg.github.io/respx/
57 |
--------------------------------------------------------------------------------
/orthanc_ext/__init__.py:
--------------------------------------------------------------------------------
1 | """Top-level package for Orthanc Server Extensions."""
2 |
3 | __author__ = """WalkIT"""
4 | __email__ = 'code@walkit.nl'
5 | __version__ = '3.5.1'
6 |
--------------------------------------------------------------------------------
/orthanc_ext/event_dispatcher.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import json
3 | import logging
4 | from dataclasses import dataclass
5 |
6 | from orthanc_ext.executor_utilities import SequentialHybridExecutor
7 | from orthanc_ext.http_utilities import get_rest_api_base_url, \
8 | get_certificate, OrthancClientTypeFactory, HttpxClientType
9 | from orthanc_ext.logging_configurator import python_logging
10 | from orthanc_ext.python_utilities import ensure_iterable, create_reverse_type_dict
11 |
12 |
13 | def register_event_handlers(
14 | event_handlers,
15 | orthanc_module,
16 | sync_client,
17 | async_client=None,
18 | logging_configuration=python_logging,
19 | handler_executor=SequentialHybridExecutor):
20 | logging_configuration(orthanc_module)
21 |
22 | @dataclass
23 | class ChangeEvent:
24 | change_type: int
25 | resource_type: int
26 | resource_id: str
27 |
28 | def __str__(self):
29 | return (
30 | f'ChangeEvent('
31 | f'change_type={event_types.get(self.change_type)}, '
32 | f'resource_type={resource_types.get(self.resource_type)}, '
33 | f"resource_id='{self.resource_id}')")
34 |
35 | def create_type_index(orthanc_type):
36 | return create_reverse_type_dict(orthanc_type)
37 |
38 | event_types = create_type_index(orthanc_module.ChangeType)
39 | resource_types = create_type_index(orthanc_module.ResourceType)
40 |
41 | event_handlers = {k: ensure_iterable(v) for k, v in event_handlers.items()}
42 |
43 | executor = handler_executor(sync_client, async_client)
44 |
45 | def unhandled_event_logger(event, _):
46 | logging.debug(f'no handler registered for {event_types[event.change_type]}')
47 |
48 | def OnChange(change_type, resource_type, resource_id):
49 | event = ChangeEvent(change_type, resource_type, resource_id)
50 | handlers = event_handlers.get(change_type, [unhandled_event_logger])
51 |
52 | sync_handlers = get_sync_handlers(handlers)
53 | async_handlers = get_async_handlers(handlers)
54 |
55 | return executor.invoke_all(event, sync_handlers, async_handlers)
56 |
57 | orthanc_module.RegisterOnChangeCallback(OnChange)
58 |
59 | return executor
60 |
61 |
62 | def get_async_handlers(handlers):
63 | return [handler for handler in handlers if inspect.iscoroutinefunction(handler)]
64 |
65 |
66 | def get_sync_handlers(handlers):
67 | return [handler for handler in handlers if not inspect.iscoroutinefunction(handler)]
68 |
69 |
70 | def create_session(orthanc, client_type: OrthancClientTypeFactory = HttpxClientType.SYNC):
71 | config = json.loads(orthanc.GetConfiguration())
72 | return client_type.create_internal_client(
73 | get_rest_api_base_url(config), orthanc.GenerateRestApiAuthorizationToken(),
74 | get_certificate(config))
75 |
--------------------------------------------------------------------------------
/orthanc_ext/executor_utilities.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | from threading import Thread
4 |
5 |
6 | class AsyncOnlyExecutor:
7 | """
8 | Delegates sync handlers to an executor, mimicking async execution.
9 | Executes async handlers in an event loop that runs in a separate thread.
10 |
11 | For optimal performance, use of only async handlers is preferable.
12 |
13 | This executor needs to be started and stopped.
14 | """
15 |
16 | def __init__(self, sync_client, async_client):
17 | self.sync_client = sync_client
18 | self.async_client = async_client
19 | self.loop = asyncio.new_event_loop()
20 |
21 | self.tasks = set() # make sure tasks are not garbage collected
22 |
23 | def run_loop(loop):
24 | asyncio.set_event_loop(self.loop)
25 | try:
26 | loop.run_forever()
27 | finally:
28 | loop.run_until_complete(loop.shutdown_asyncgens())
29 | loop.close()
30 |
31 | self.thread = Thread(target=run_loop, args=(self.loop, ), daemon=True)
32 |
33 | def invoke_all(self, event, sync_handlers, async_handlers):
34 | tasks = [
35 | asyncio.run_coroutine_threadsafe(
36 | on_change_async(inject_with_event_http_client(
37 | [handler], event, self.async_client)), self.loop) for handler in async_handlers
38 | ]
39 |
40 | tasks.append(
41 | self.loop.create_task(
42 | asyncio.to_thread(
43 | inject_with_event_http_client, sync_handlers, event, self.sync_client),
44 | name=f'sync_handlers{sync_handlers}'))
45 |
46 | self.tasks.update(tasks)
47 | for task in tasks:
48 | task.add_done_callback(self.tasks.discard)
49 |
50 | return tasks
51 |
52 | def start(self):
53 | self.thread.start()
54 |
55 | def stop(self):
56 | if self.tasks:
57 | logging.warning(
58 | 'about to stop event loop with %i task(s) pending: %s', len(self.tasks), self.tasks)
59 | pending = asyncio.all_tasks(self.loop)
60 | for task in pending:
61 | task.cancel()
62 |
63 | asyncio.run_coroutine_threadsafe(stop_event_loop_in_thread(self.loop), self.loop)
64 | self.thread.join()
65 |
66 |
67 | async def stop_event_loop_in_thread(loop):
68 | logging.info('stopping event loop')
69 | loop.stop()
70 |
71 |
72 | def inject_with_event_http_client(handlers, event, client):
73 | return [handler(event, client) for handler in handlers]
74 |
75 |
76 | class SequentialHybridExecutor:
77 | """Blocking event executor that handles both sync and async handlers,
78 | returning the gathered results in a list.
79 | It waits for all async handlers to have completed per received event.
80 | """
81 |
82 | def __init__(self, sync_client, async_client):
83 | self.sync_client = sync_client
84 | self.async_client = async_client
85 |
86 | def invoke_all(self, event, sync_handlers, async_handlers):
87 | return inject_with_event_http_client(sync_handlers, event, self.sync_client) + asyncio.run(
88 | on_change_async(
89 | inject_with_event_http_client(async_handlers, event, self.async_client)))
90 |
91 |
92 | async def on_change_async(async_handlers):
93 | return_values = await asyncio.gather(*async_handlers, return_exceptions=True)
94 |
95 | for index, return_value in enumerate(return_values):
96 | if isinstance(return_value, BaseException):
97 | logging.exception(
98 | 'execution of coroutine \'%s\' failed with exception %s',
99 | async_handlers[index].__name__,
100 | repr(return_value),
101 | exc_info=(return_value.__class__, return_value, return_value.__traceback__))
102 |
103 | return return_values
104 |
--------------------------------------------------------------------------------
/orthanc_ext/http_utilities.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from enum import Enum
3 | from typing import Union
4 |
5 | import httpx
6 |
7 |
8 | def get_rest_api_base_url(config):
9 | port = config.get('HttpPort', 8042)
10 | scheme = 'https' if config.get('SslEnabled', False) else 'http'
11 | return f'{scheme}://localhost:{port}/'
12 |
13 |
14 | def get_certificate(config):
15 | return False if not config.get('SslEnabled', False) else config.get('SslCertificate', False)
16 |
17 |
18 | @dataclass
19 | class OrthancClientTypeFactory:
20 | http_client: type
21 |
22 | def create_internal_client(self, *args, **kwargs):
23 | return create_internal_client(*args, **kwargs, client_type=self)
24 |
25 |
26 | class HttpxClientType(OrthancClientTypeFactory, Enum):
27 | SYNC = httpx.Client
28 | ASYNC = httpx.AsyncClient
29 |
30 |
31 | # deprecated, for backward compatibility
32 | ClientType = HttpxClientType
33 |
34 |
35 | def create_internal_client(
36 | base_url,
37 | token='',
38 | cert: Union[str, bool] = False,
39 | client_type: ClientType = HttpxClientType.SYNC):
40 | return client_type.http_client(
41 | base_url=base_url,
42 | timeout=httpx.Timeout(300, connect=30),
43 | verify=cert,
44 | headers={'Authorization': token})
45 |
--------------------------------------------------------------------------------
/orthanc_ext/logging_configurator.py:
--------------------------------------------------------------------------------
1 | import enum
2 | import logging
3 | import sys
4 |
5 |
6 | def python_logging(_, default_level=logging.INFO):
7 | """Configures python logging. Useful when Orthanc is using stderr and
8 | stdout handlers: offers more log levels and a better date format.
9 | """
10 | fmt = '%(levelname)s %(asctime)s %(filename)s:%(lineno)s] %(message)s'
11 | logging.basicConfig(format=fmt)
12 | logger = logging.getLogger()
13 | logger.setLevel(default_level)
14 | return fmt
15 |
16 |
17 | def configure_orthanc_logging():
18 |
19 | def orthanc_logging(orthanc_module, default_level=logging.INFO):
20 | """Configures orthanc logging. Useful when orthanc is configured to write to a log file"""
21 | logger = logging.getLogger()
22 | logger.setLevel(default_level)
23 | logger.addHandler(logging.StreamHandler(sys.stderr))
24 | logger.addHandler(OrthancLogHandler(orthanc_module))
25 |
26 | return orthanc_logging
27 |
28 |
29 | class OrthancLogHandler(logging.Handler):
30 |
31 | def __init__(self, orthanc_module):
32 | logging.Handler.__init__(self)
33 | self.orthanc_module = orthanc_module
34 | self.log_func_mapping = {
35 | logging.INFO: orthanc_module.LogInfo,
36 | logging.WARNING: orthanc_module.LogWarning,
37 | logging.ERROR: orthanc_module.LogError,
38 | logging.CRITICAL: orthanc_module.LogError,
39 | }
40 |
41 | def emit(self, record: logging.LogRecord) -> None:
42 | self.log_func_mapping.get(record.levelno, self.orthanc_module.LogInfo)(
43 | logging.Formatter(fmt='[%(filename)s:%(lineno)s] %(message)s').format(record))
44 |
45 |
46 | class OrthancLevel(enum.Enum):
47 | DEFAULT = ('default', 'WARNING')
48 | VERBOSE = ('verbose', 'INFO')
49 | TRACE = ('trace', 'DEBUG')
50 |
51 | def __init__(self, orthanc_level, python_level):
52 | self.orthanc_level = orthanc_level
53 | self.python_level = python_level
54 |
55 |
56 | def configure_log_level(client, level: OrthancLevel):
57 |     client.put('/tools/log-level-plugins', content=level.orthanc_level)
58 |
--------------------------------------------------------------------------------
/orthanc_ext/orthanc.py:
--------------------------------------------------------------------------------
1 | """
2 | This module implements the Orthanc Python plugin API to run requests against an
3 | external Orthanc instance.
4 |
5 | This allows you to quickly evolve your Python scripts and makes them easy to
6 | integration test as well.
7 | """
8 | import json
9 | import uuid
10 |
11 |
12 | class OrthancApiHandler(object):
13 |
14 | class ResourceType:
15 | PATIENT = 0
16 | STUDY = 1
17 | SERIES = 2
18 | INSTANCE = 3
19 | NONE = 4
20 |
21 | # Redefine to make this type available for unit tests
22 | # outside the Orthanc Python plugin.
23 | # https://hg.orthanc-server.com/orthanc-python/file/tip/\
24 | # Sources/Autogenerated/sdk_OrthancPluginChangeType.impl.h
25 | class ChangeType:
26 | COMPLETED_SERIES = 0
27 | DELETED = 1
28 | NEW_CHILD_INSTANCE = 2
29 | NEW_INSTANCE = 3
30 | NEW_PATIENT = 4
31 | NEW_SERIES = 5
32 | NEW_STUDY = 6
33 | STABLE_PATIENT = 7
34 | STABLE_SERIES = 8
35 | STABLE_STUDY = 9
36 | ORTHANC_STARTED = 10
37 | ORTHANC_STOPPED = 11
38 | UPDATED_ATTACHMENT = 12
39 | UPDATED_METADATA = 13
40 | UPDATED_PEERS = 14
41 | UPDATED_MODALITIES = 15
42 | JOB_SUBMITTED = 16
43 | JOB_SUCCESS = 17
44 | JOB_FAILURE = 18
45 |
46 | # not defined by orthanc
47 | UNKNOWN = 999
48 |
49 |     def __init__(self, config=None):
50 |         self.config = config if config is not None else {}
51 |
52 | @staticmethod
53 | def GenerateRestApiAuthorizationToken():
54 | return str(uuid.uuid4())
55 |
56 | def RegisterOnChangeCallback(self, change_callback):
57 | self.change_callback = change_callback
58 |
59 | def on_change(self, change_type, resource_type, resource_id):
60 | return self.change_callback(change_type, resource_type, resource_id)
61 |
62 | def LogInfo(self, message):
63 | print(f'INFO: {message}')
64 |
65 | def LogWarning(self, message):
66 | print(f'WARNING: {message}')
67 |
68 | def LogError(self, message):
69 | print(f'ERROR: {message}')
70 |
71 | def GetConfiguration(self):
72 | return json.dumps(self.config)
73 |
--------------------------------------------------------------------------------
/orthanc_ext/orthanc_utilities.py:
--------------------------------------------------------------------------------
1 | """
2 | Convenience methods to work with the Orthanc REST API
3 | """
4 | from http.client import NOT_FOUND
5 |
6 |
7 | def anonymize(client, series_id):
8 | request = {'Force': False, 'KeepPrivateTags': True, 'Permissive': True, }
9 | resp = client.post(f'{series_id}/anonymize', json=request)
10 | resp.raise_for_status()
11 | return resp.json()
12 |
13 |
14 | def get_parent_study_url(client, series_id):
15 | resp = client.get(f'/series/{series_id}')
16 | resp.raise_for_status()
17 | series = resp.json()
18 | return f"/studies/{series['ParentStudy']}"
19 |
20 |
21 | def get_metadata_of_first_instance_of_series(client, series_id, metadata_key):
22 | resp = client.get(f'/series/{series_id}/instances')
23 | resp.raise_for_status()
24 | instances = resp.json()
25 | assert len(instances) > 0, f'expected at least one instance in series {series_id}'
26 | resp = client.get(f'/instances/{instances[0]["ID"]}/metadata/{metadata_key}')
27 | if resp.status_code in [NOT_FOUND]:
28 | return None
29 | resp.raise_for_status()
30 | return resp.text
31 |
--------------------------------------------------------------------------------
/orthanc_ext/pyorthanc_utilities.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | from typing import Union
3 |
4 | import httpx
5 | from pyorthanc import Orthanc, AsyncOrthanc
6 |
7 | from orthanc_ext.http_utilities import OrthancClientTypeFactory
8 |
9 |
10 | class PyOrthancClientType(OrthancClientTypeFactory, Enum):
11 | SYNC = Orthanc
12 | ASYNC = AsyncOrthanc
13 |
14 | def create_internal_client(self, *args, **kwargs):
15 | return create_internal_client(*args, **kwargs, client_type=self)
16 |
17 |
18 | def create_internal_client(
19 | base_url,
20 | token='',
21 | cert: Union[str, bool] = False,
22 | client_type: PyOrthancClientType = PyOrthancClientType.SYNC):
23 |
24 | # note: only difference with the httpx.Client constructor is the `base_url` positional argument.
25 | return client_type.http_client(
26 | base_url,
27 | base_url=base_url,
28 | timeout=httpx.Timeout(300, connect=30),
29 | verify=cert,
30 | headers={'Authorization': token})
31 |
--------------------------------------------------------------------------------
/orthanc_ext/python_utilities.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable
2 |
3 |
4 | def pipeline(*functions):
5 |
6 | class Pipeline:
7 |
8 | def __call__(self, evt, *args):
9 | arg = evt
10 | for step in functions:
11 | arg = step(arg, *args)
12 | return arg
13 |
14 | def __repr__(self):
15 | return f'pipeline({functions})'
16 |
17 | return Pipeline()
18 |
19 |
20 | def ensure_iterable(v):
21 | return v if isinstance(v, Iterable) else [v]
22 |
23 |
24 | def hashable(k):
25 | try:
26 | return hash(k)
27 | except TypeError:
28 | return False
29 |
30 |
31 | def create_reverse_type_dict(py_type):
32 | return {v: k for k, v in py_type.__dict__.items() if hashable(v)}
33 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/walkIT-nl/orthanc-server-extensions/9417ff122c52f06f82ededf1eb378aeb7d42ca94/orthanc_ext/scripts/__init__.py
--------------------------------------------------------------------------------
/orthanc_ext/scripts/anonymization.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 | import logging
3 |
4 | from orthanc_ext.orthanc_utilities import get_parent_study_url
5 |
6 |
7 | @dataclasses.dataclass
8 | class AnonymizationRequest:
9 | Force: bool = False
10 | KeepPrivateTags: bool = False
11 | Permissive: bool = False
12 | Keep: list = dataclasses.field(
13 | default_factory=lambda: ['StudyDescription', 'SeriesDescription'])
14 |
15 |
16 | @dataclasses.dataclass
17 | class ModificationResponse:
18 | ID: str
19 | Path: str
20 | PatientID: str
21 | Type: str
22 |
23 |
24 | @dataclasses.dataclass
25 | class ModificationRequest:
26 | Force: bool = False
27 | KeepPrivateTags: bool = False
28 | Permissive: bool = True
29 |
30 | Keep: list = dataclasses.field(
31 | default_factory=lambda: ['StudyDescription', 'SeriesDescription'])
32 |
33 |
34 | def anonymize_series(client, series_id):
35 | resp = client.post(
36 | f'/series/{series_id}/anonymize', json=dataclasses.asdict(AnonymizationRequest()))
37 | resp.raise_for_status()
38 |
39 | response = ModificationResponse(**resp.json())
40 | logging.info(f'Anonymized "/series/{series_id}" to "{response.Path}"')
41 |
42 | return response
43 |
44 |
45 | def reidentify_series(client, anonymized_series_id, original_series_id):
46 | parent_study_url = get_parent_study_url(client, original_series_id)
47 | resp = client.post(f'{parent_study_url}/merge', json={'Resources': [anonymized_series_id]})
48 | resp.raise_for_status()
49 | return resp.json()
50 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/auto_forward.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 | import logging
3 |
4 | import httpx
5 |
6 | from typing import Callable, Iterable
7 |
8 |
9 | @dataclasses.dataclass
10 | class DicomReceivedMatcher:
11 | matches: Callable[[str, httpx.Client], bool]
12 | modality_selector: Callable[[str, httpx.Client], str]
13 |
14 |
15 | def forward_dicom(matchers: Iterable[DicomReceivedMatcher]):
16 |
17 | def forward_series(event, client):
18 | resource_id = event.resource_id
19 | for matcher in matchers:
20 | if not matcher.matches(resource_id, client):
21 | logging.info(
22 | f'matcher "{matcher}" did not match; resource '
23 | f'"{resource_id}" not forwarded')
24 | continue
25 | modality = matcher.modality_selector(resource_id, client)
26 | resp = client.post(f'/modalities/{modality}/store', json=[resource_id])
27 | resp.raise_for_status()
28 | logging.info(
29 | f'DICOM export to modality "{modality}" started for '
30 | f'resource "{resource_id}"')
31 |
32 | return forward_series
33 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/auto_retries.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 | import threading
4 |
5 | ONE_MINUTE = 60
6 | ONE_HOUR = ONE_MINUTE * 60
7 | ONE_DAY = 24 * ONE_HOUR
8 |
9 | RETRYABLE_JOBTYPES = {'DicomModalityStore'}
10 |
11 |
12 | def parse_time(job_time):
13 | return datetime.datetime.strptime(job_time, '%Y%m%dT%H%M%S.%f')
14 |
15 |
16 | def calculate_delay(job, first_retry=ONE_MINUTE):
17 | elapsed = parse_time(job['CompletionTime']) - parse_time(job['CreationTime'])
18 |     return min(max(first_retry, elapsed.total_seconds() * 2), ONE_DAY)
19 |
20 |
21 | def resubmit_job(client, job_id):
22 | resp = client.post(f'/jobs/{job_id}/resubmit')
23 | resp.raise_for_status()
24 | logging.info(f'resubmitted job "{job_id}"')
25 |
26 |
27 | def python_timer_runner(job_id, delay, client):
28 | timer = threading.Timer(interval=delay, function=resubmit_job, args=[client, job_id])
29 | timer.start()
30 |
31 |
32 | def handle_failed_forwarding_job(
33 | first_retry=ONE_MINUTE, job_types=RETRYABLE_JOBTYPES, job_runner=python_timer_runner):
34 |
35 | def handle_failed_forwarding_job(event, client):
36 | job_id = event.resource_id
37 | response = client.get(f'/jobs/{job_id}')
38 | response.raise_for_status()
39 | job = response.json()
40 | job_type = job['Type']
41 | if job_type not in job_types:
42 | logging.debug(f'not retrying "{job_type}" job "{job_id}"')
43 | return
44 | delay = calculate_delay(job, first_retry)
45 | logging.debug(f'resubmitting job "{job_id}" after {delay} seconds')
46 | return job_runner(job_id, delay, client)
47 |
48 | return handle_failed_forwarding_job
49 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/event_publisher.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 |
3 | from cloudevents.conversion import to_structured, from_http
4 | from cloudevents.http import CloudEvent
5 |
6 |
7 | def create_valid_orthanc_cloud_event(evt):
8 | return CloudEvent.create({
9 | 'type': 'orthanc-server-extensions.change-event',
10 | 'source': 'https://orthanc-server-identifer'
11 | },
12 | data=dataclasses.asdict(evt))
13 |
14 |
15 | def convert_change_event_to_message(evt) -> tuple:
16 | return to_structured(create_valid_orthanc_cloud_event(evt))
17 |
18 |
19 | def convert_message_to_change_event(headers: dict, data: bytes):
20 | return from_http(CloudEvent, headers, data=data)
21 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/kafka_event_publisher.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | from aiokafka import AIOKafkaProducer
4 | from aiokafka.admin import AIOKafkaAdminClient, NewTopic
5 |
6 | from orthanc_ext.scripts.event_publisher import convert_change_event_to_message
7 |
8 |
9 | @dataclass
10 | class KafkaConfig:
11 | bootstrap_server: str
12 | topic: str = 'orthanc-events'
13 |
14 |
15 | async def publish_to_kafka(kafka_config: KafkaConfig, evt, _):
16 | producer = AIOKafkaProducer(
17 | security_protocol='PLAINTEXT', bootstrap_servers=kafka_config.bootstrap_server)
18 | await producer.start()
19 | try:
20 | _, event = convert_change_event_to_message(evt)
21 | await producer.send_and_wait(kafka_config.topic, event)
22 |
23 | finally:
24 | await producer.stop()
25 |
26 |
27 | async def create_stream(kafka_config: KafkaConfig, *_):
28 | async with AIOKafkaAdminClient(bootstrap_servers=kafka_config.bootstrap_server,
29 | request_timeout_ms=10000) as admin_client:
30 | await admin_client.start()
31 |
32 | await admin_client.create_topics(
33 | new_topics=[NewTopic(name=kafka_config.topic, num_partitions=1, replication_factor=1)],
34 | validate_only=False)
35 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/nats_event_publisher.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | import nats
4 |
5 | from orthanc_ext.scripts.event_publisher import convert_change_event_to_message
6 |
7 |
8 | @dataclass
9 | class NatsConfig:
10 | url: str
11 | stream_name: str = 'orthanc-events'
12 | subject = 'onchange'
13 |
14 |
15 | async def create_stream(nats_config: NatsConfig, *_):
16 | nc = await nats.connect(nats_config.url)
17 | try:
18 | js = nc.jetstream()
19 | await js.add_stream(name=nats_config.stream_name, subjects=[nats_config.subject])
20 | finally:
21 | await nc.close()
22 |
23 |
24 | async def publish_to_nats(nats_config: NatsConfig, evt, *_):
25 | nc = await nats.connect(nats_config.url)
26 | try:
27 | js = nc.jetstream()
28 | _, message = convert_change_event_to_message(evt)
29 | return await js.publish(nats_config.subject, message, stream=nats_config.stream_name)
30 | finally:
31 | await nc.close()
32 |
--------------------------------------------------------------------------------
/orthanc_ext/scripts/rabbitmq_event_publisher.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | import aio_pika
4 |
5 | from orthanc_ext.scripts.event_publisher import convert_change_event_to_message
6 |
7 |
8 | @dataclass
9 | class RabbitmqConfig:
10 | url: str
11 | queue_name: str = 'orthanc-events'
12 |
13 |
14 | async def create_queue(rabbitmq_config: RabbitmqConfig, *_):
15 | connection = await aio_pika.connect_robust(rabbitmq_config.url)
16 | try:
17 | queue_name = rabbitmq_config.queue_name
18 | channel = await connection.channel()
19 | await channel.declare_queue(queue_name, auto_delete=True)
20 | finally:
21 | await connection.close()
22 |
23 |
24 | async def publish_to_rabbitmq(rabbitmq_config: RabbitmqConfig, evt, *_):
25 | connection = await aio_pika.connect_robust(rabbitmq_config.url)
26 | try:
27 | queue_name = rabbitmq_config.queue_name
28 | channel = await connection.channel()
29 | _, message = convert_change_event_to_message(evt)
30 | await channel.default_exchange.publish(
31 | aio_pika.Message(body=message), routing_key=queue_name,
32 | )
33 | finally:
34 | await connection.close()
35 |
--------------------------------------------------------------------------------
/requirements_dev.in:
--------------------------------------------------------------------------------
1 | pip
2 | bump2version
3 | wheel
4 | watchdog
5 | flake8
6 | tox
7 | coverage
8 | Sphinx
9 | twine
10 | mypy
11 |
12 | pytest
13 | dockercontext
14 | pytest-asyncio
15 | pytest-html
16 | pytest-cov
17 |
18 | httpx
19 | pre-commit
20 | respx
21 | yapf
22 |
--------------------------------------------------------------------------------
/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.9
3 | # by the following command:
4 | #
5 | # pip-compile requirements_dev.in
6 | #
7 | alabaster==0.7.15
8 | # via sphinx
9 | anyio==4.2.0
10 | # via httpx
11 | babel==2.14.0
12 | # via sphinx
13 | bump2version==1.0.1
14 | # via -r requirements_dev.in
15 | cachetools==5.3.2
16 | # via tox
17 | certifi==2023.11.17
18 | # via
19 | # httpcore
20 | # httpx
21 | # requests
22 | cfgv==3.4.0
23 | # via pre-commit
24 | chardet==5.2.0
25 | # via tox
26 | charset-normalizer==3.3.2
27 | # via requests
28 | colorama==0.4.6
29 | # via tox
30 | coverage[toml]==7.4.0
31 | # via
32 | # -r requirements_dev.in
33 | # pytest-cov
34 | distlib==0.3.8
35 | # via virtualenv
36 | docker==7.0.0
37 | # via dockercontext
38 | dockercontext==0.1
39 | # via -r requirements_dev.in
40 | docutils==0.20.1
41 | # via
42 | # readme-renderer
43 | # sphinx
44 | exceptiongroup==1.2.0
45 | # via
46 | # anyio
47 | # pytest
48 | filelock==3.13.1
49 | # via
50 | # tox
51 | # virtualenv
52 | flake8==7.0.0
53 | # via -r requirements_dev.in
54 | h11==0.14.0
55 | # via httpcore
56 | httpcore==1.0.2
57 | # via httpx
58 | httpx==0.26.0
59 | # via
60 | # -r requirements_dev.in
61 | # respx
62 | identify==2.5.33
63 | # via pre-commit
64 | idna==3.6
65 | # via
66 | # anyio
67 | # httpx
68 | # requests
69 | imagesize==1.4.1
70 | # via sphinx
71 | importlib-metadata==7.0.1
72 | # via
73 | # keyring
74 | # sphinx
75 | # twine
76 | # yapf
77 | iniconfig==2.0.0
78 | # via pytest
79 | jaraco-classes==3.3.0
80 | # via keyring
81 | jinja2==3.1.2
82 | # via
83 | # pytest-html
84 | # sphinx
85 | keyring==24.3.0
86 | # via twine
87 | markdown-it-py==3.0.0
88 | # via rich
89 | markupsafe==2.1.3
90 | # via jinja2
91 | mccabe==0.7.0
92 | # via flake8
93 | mdurl==0.1.2
94 | # via markdown-it-py
95 | more-itertools==10.2.0
96 | # via jaraco-classes
97 | mypy==1.8.0
98 | # via -r requirements_dev.in
99 | mypy-extensions==1.0.0
100 | # via mypy
101 | nh3==0.2.15
102 | # via readme-renderer
103 | nodeenv==1.8.0
104 | # via pre-commit
105 | packaging==23.2
106 | # via
107 | # docker
108 | # pyproject-api
109 | # pytest
110 | # sphinx
111 | # tox
112 | pkginfo==1.9.6
113 | # via twine
114 | platformdirs==4.1.0
115 | # via
116 | # tox
117 | # virtualenv
118 | # yapf
119 | pluggy==1.3.0
120 | # via
121 | # pytest
122 | # tox
123 | pre-commit==3.6.0
124 | # via -r requirements_dev.in
125 | pycodestyle==2.11.1
126 | # via flake8
127 | pyflakes==3.2.0
128 | # via flake8
129 | pygments==2.17.2
130 | # via
131 | # readme-renderer
132 | # rich
133 | # sphinx
134 | pyproject-api==1.6.1
135 | # via tox
136 | pytest==7.4.4
137 | # via
138 | # -r requirements_dev.in
139 | # pytest-asyncio
140 | # pytest-cov
141 | # pytest-html
142 | # pytest-metadata
143 | pytest-asyncio==0.23.3
144 | # via -r requirements_dev.in
145 | pytest-cov==4.1.0
146 | # via -r requirements_dev.in
147 | pytest-html==4.1.1
148 | # via -r requirements_dev.in
149 | pytest-metadata==3.0.0
150 | # via pytest-html
151 | pyyaml==6.0.1
152 | # via pre-commit
153 | readme-renderer==42.0
154 | # via twine
155 | requests==2.31.0
156 | # via
157 | # docker
158 | # requests-toolbelt
159 | # sphinx
160 | # twine
161 | requests-toolbelt==1.0.0
162 | # via twine
163 | respx==0.20.2
164 | # via -r requirements_dev.in
165 | rfc3986==2.0.0
166 | # via twine
167 | rich==13.7.0
168 | # via twine
169 | sniffio==1.3.0
170 | # via
171 | # anyio
172 | # httpx
173 | snowballstemmer==2.2.0
174 | # via sphinx
175 | sphinx==7.2.6
176 | # via
177 | # -r requirements_dev.in
178 | # sphinxcontrib-applehelp
179 | # sphinxcontrib-devhelp
180 | # sphinxcontrib-htmlhelp
181 | # sphinxcontrib-qthelp
182 | # sphinxcontrib-serializinghtml
183 | sphinxcontrib-applehelp==1.0.7
184 | # via sphinx
185 | sphinxcontrib-devhelp==1.0.5
186 | # via sphinx
187 | sphinxcontrib-htmlhelp==2.0.4
188 | # via sphinx
189 | sphinxcontrib-jsmath==1.0.1
190 | # via sphinx
191 | sphinxcontrib-qthelp==1.0.6
192 | # via sphinx
193 | sphinxcontrib-serializinghtml==1.1.9
194 | # via sphinx
195 | tomli==2.0.1
196 | # via
197 | # coverage
198 | # mypy
199 | # pyproject-api
200 | # pytest
201 | # tox
202 | # yapf
203 | tox==4.11.4
204 | # via -r requirements_dev.in
205 | twine==4.0.2
206 | # via -r requirements_dev.in
207 | typing-extensions==4.9.0
208 | # via
209 | # anyio
210 | # mypy
211 | urllib3==2.1.0
212 | # via
213 | # docker
214 | # requests
215 | # twine
216 | virtualenv==20.25.0
217 | # via
218 | # pre-commit
219 | # tox
220 | watchdog==3.0.0
221 | # via -r requirements_dev.in
222 | wheel==0.42.0
223 | # via -r requirements_dev.in
224 | yapf==0.40.2
225 | # via -r requirements_dev.in
226 | zipp==3.17.0
227 | # via importlib-metadata
228 |
229 | # The following packages are considered to be unsafe in a requirements file:
230 | # pip
231 | # setuptools
232 |
--------------------------------------------------------------------------------
/server_cert.cnf:
--------------------------------------------------------------------------------
1 | [ req ]
2 | default_bits = 2048
3 | distinguished_name = req_distinguished_name
4 | x509_extensions = v3_req
5 | prompt = no
6 | [ req_distinguished_name ]
7 | countryName = NL
8 | stateOrProvinceName = Utrecht
9 | localityName = Utrecht
10 | organizationName = WalkIT
11 | commonName = localhost
12 | [v3_req]
13 | subjectKeyIdentifier = hash
14 | subjectAltName = @alt_names
15 | [alt_names]
16 | DNS.1 = orthanc
17 | DNS.2 = localhost
18 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 3.5.1
3 | commit = True
4 | tag = True
5 |
6 | [bumpversion:file:setup.py]
7 | search = version='{current_version}'
8 | replace = version='{new_version}'
9 |
10 | [bumpversion:file:orthanc_ext/__init__.py]
11 | search = __version__ = '{current_version}'
12 | replace = __version__ = '{new_version}'
13 |
14 | [bdist_wheel]
15 | universal = 1
16 |
17 | [flake8]
18 | exclude = docs
19 | max-line-length = 100
20 |
21 | [aliases]
22 | test = pytest
23 |
24 | [tool:pytest]
25 |
26 | [yapf]
27 | align_closing_bracket_with_visual_indent = true
28 | allow_multiline_lambdas = true
29 | allow_split_before_default_or_named_assigns = true
30 | based_on_style = pep8
31 | blank_line_before_nested_class_or_def = true
32 | coalesce_brackets = true
33 | column_limit = 100
34 | disable_ending_comma_heuristic = true
35 | each_dict_entry_on_separate_line = true
36 | force_multiline_dict = false
37 | indent_dictionary_value = true
38 | join_multiple_lines = true
39 | space_between_ending_comma_and_closing_bracket = true
40 | spaces_around_dict_delimiters = false
41 | split_all_comma_separated_values = false
42 | split_all_top_level_comma_separated_values = false
43 | split_arguments_when_comma_terminated = false
44 | split_before_closing_bracket = true
45 | split_before_dict_set_generator = true
46 | split_before_dot = false
47 | split_before_expression_after_opening_paren = true
48 | split_before_first_argument = true
49 | split_before_named_assigns = true
50 | split_complex_comprehension = true
51 | split_penalty_after_opening_bracket = -20
52 | split_penalty_for_added_line_split = 100
53 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """The setup script."""
3 |
4 | from setuptools import setup, find_packages
5 |
6 | with open('README.rst') as readme_file:
7 | readme = readme_file.read()
8 |
9 | with open('HISTORY.rst') as history_file:
10 | history = history_file.read()
11 |
12 | requirements = ['httpx', ]
13 |
14 | setup_requirements = []
15 |
16 | test_requirements = ['pytest>=3', 'respx', ]
17 |
18 | classifiers = [
19 | 'Development Status :: 4 - Beta',
20 | 'Intended Audience :: Developers',
21 | 'License :: OSI Approved :: GNU Affero General Public License v3',
22 | 'Natural Language :: English',
23 | 'Programming Language :: Python :: 3',
24 | 'Programming Language :: Python :: 3.8',
25 | 'Programming Language :: Python :: 3.9',
26 | 'Programming Language :: Python :: 3.10',
27 | 'Programming Language :: Python :: 3.11',
28 | ] # yapf: disable
29 |
30 | setup(
31 | author='WalkIT',
32 | author_email='code@walkit.nl',
33 | python_requires='>=3.8',
34 | classifiers=classifiers,
35 | description=(
36 |         "An Orthanc Python plugin-based framework to extend Orthanc's "
37 |         'feature set with testable Python scripts'),
38 | install_requires=requirements,
39 | license='GNU Affero General Public License v3',
40 | long_description=readme + '\n\n' + history,
41 | include_package_data=True,
42 | keywords='orthanc testing',
43 | name='orthanc-server-extensions',
44 | packages=find_packages(include=['orthanc_ext', 'orthanc_ext.*']),
45 | setup_requires=setup_requirements,
46 | extras_require={
47 | 'nats-event-publisher': ['cloudevents', 'nats-py'],
48 | 'kafka-event-publisher': ['cloudevents', 'aiokafka'],
49 | 'rabbitmq-event-publisher': ['cloudevents', 'aio-pika'],
50 | 'pyorthanc': ['pyorthanc>1.0']
51 | },
52 | test_suite='tests',
53 | tests_require=test_requirements,
54 | url='https://github.com/walkIT-nl/orthanc-server-extensions',
55 | version='3.5.1',
56 | zip_safe=False,
57 | )
58 |
--------------------------------------------------------------------------------
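The extras_require mapping above makes each broker integration optional: cloudevents together with nats-py, aiokafka or aio-pika is only available when the matching extra is installed. A hedged sketch of guarding an entry point against a missing extra (the warning and fallback are illustrative choices, not behaviour the package prescribes):

import logging

try:
    from orthanc_ext.scripts.nats_event_publisher import NatsConfig, publish_to_nats
except ImportError:
    # The 'nats-event-publisher' extra (cloudevents, nats-py) is not installed.
    NatsConfig = publish_to_nats = None
    logging.warning(
        'NATS publishing disabled; install orthanc-server-extensions[nats-event-publisher]')
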
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Unit test package for orthanc_ext."""
2 |
--------------------------------------------------------------------------------
/tests/entry_point.py:
--------------------------------------------------------------------------------
1 | """Test entry point script for Orthanc Python Plugin.
2 | """
3 | import logging
4 | from functools import partial
5 |
6 | import orthanc # NOQA provided by the plugin runtime.
7 |
8 | from orthanc_ext import event_dispatcher
9 | from orthanc_ext.pyorthanc_utilities import PyOrthancClientType
10 | from orthanc_ext.python_utilities import pipeline
11 | from orthanc_ext.scripts.nats_event_publisher import create_stream, publish_to_nats, NatsConfig
12 |
13 |
14 | def log_event(param, event, _):
15 | logging.info(f'orthanc "{event}" event handled with param "{param}"')
16 |
17 |
18 | def start_maintenance_cycle(event, _):
19 | logging.info(f'do something special on "{event}"')
20 |
21 |
22 | def retrieve_system_info(_, client):
23 | return client.get_system()
24 |
25 |
26 | def show_system_info(info, _client):
27 | version = info.get('Version')
28 |     logging.warning(f'orthanc version retrieved: "{version}"')
29 |
30 |
31 | nats_config = NatsConfig('nats://nats')
32 |
33 | event_dispatcher.register_event_handlers({
34 | orthanc.ChangeType.ORTHANC_STARTED: [
35 | partial(log_event, 'started'),
36 | partial(create_stream, nats_config), start_maintenance_cycle,
37 | pipeline(retrieve_system_info, show_system_info)
38 | ],
39 | orthanc.ChangeType.STABLE_STUDY: [partial(publish_to_nats, nats_config)],
40 | orthanc.ChangeType.ORTHANC_STOPPED: partial(log_event, 'stopped')
41 | }, orthanc, event_dispatcher.create_session(orthanc, client_type=PyOrthancClientType.SYNC),
42 | event_dispatcher.create_session(
43 | orthanc, client_type=PyOrthancClientType.ASYNC))
44 |
--------------------------------------------------------------------------------
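The entry point above composes retrieve_system_info and show_system_info with pipeline(), so the dict returned by the first handler becomes the first argument of the second while the client is passed through unchanged; the pipeline test in tests/test_event_dispatcher.py below exercises the same pattern. A minimal sketch of that composition, assuming only the behaviour the tests demonstrate rather than the actual implementation in python_utilities.py:

def pipeline_sketch(*steps):
    """Chain handlers: each step receives the previous step's return value plus the client."""

    def run(event, client):
        result = event
        for step in steps:
            result = step(result, client)
        return result

    return run


# pipeline_sketch(retrieve_system_info, show_system_info)(event, client) first fetches
# /system, then logs the 'Version' field of the returned dict.
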
/tests/test_anonymization.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import respx
4 |
5 | from orthanc_ext.http_utilities import create_internal_client
6 | from orthanc_ext.orthanc import OrthancApiHandler
7 | from orthanc_ext.scripts.anonymization import anonymize_series, reidentify_series
8 |
9 | orthanc = OrthancApiHandler()
10 | client = create_internal_client('https://localhost:8042')
11 |
12 |
13 | @respx.mock
14 | def test_anonymization_shall_leverage_orthanc_builtin_functionality(caplog):
15 | caplog.set_level(logging.INFO)
16 | store = respx.post('/series/1.2.3/anonymize').respond(
17 | 200, json={
18 | 'ID': 1,
19 | 'Path': '/series/1.2.4',
20 | 'PatientID': '123',
21 | 'Type': 'boe'
22 | })
23 | anonymize_series(client, '1.2.3')
24 | assert store.called
25 | assert caplog.messages == [
26 | 'HTTP Request: POST https://localhost:8042/series/1.2.3/anonymize "HTTP/1.1 200 OK"',
27 | 'Anonymized "/series/1.2.3" to "/series/1.2.4"'
28 | ]
29 |
30 |
31 | @respx.mock
32 | def test_reidentify_series_shall_leverage_orthanc_merge_to_replace_patient_study_module_tags(
33 | caplog):
34 | caplog.set_level(logging.INFO)
35 |
36 | get_series = respx.get('/series/7052c73b-da85938f-1f05fa57-2aae3a3f-76f4e628').respond(
37 | 200, json={'ParentStudy': '0293e107-c1439ada-0bd307cd-f98de2c2-9245e619'})
38 | merge_series = respx.post(
39 | '/studies/0293e107-c1439ada-0bd307cd-f98de2c2-9245e619/merge').respond(
40 | 200,
41 | json={
42 | 'Description': 'REST API',
43 | 'FailedInstancesCount': 0,
44 | 'InstancesCount': 12,
45 | 'TargetStudy': '0293e107-c1439ada-0bd307cd-f98de2c2-9245e619'
46 | })
47 |
48 | reidentify_series(client, 'uuid-ano', '7052c73b-da85938f-1f05fa57-2aae3a3f-76f4e628')
49 |
50 | assert get_series.called
51 | assert merge_series.called
52 |
--------------------------------------------------------------------------------
/tests/test_auto_forward.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import respx
4 |
5 | from orthanc_ext.event_dispatcher import register_event_handlers
6 | from orthanc_ext.http_utilities import create_internal_client
7 | from orthanc_ext.logging_configurator import python_logging
8 | from orthanc_ext.orthanc import OrthancApiHandler
9 | from orthanc_ext.orthanc_utilities import (get_metadata_of_first_instance_of_series)
10 | from orthanc_ext.scripts.auto_forward import (forward_dicom, DicomReceivedMatcher)
11 |
12 | orthanc = OrthancApiHandler()
13 | client = create_internal_client('https://localhost:8042')
14 |
15 |
16 | def register_and_trigger_handler(matchers):
17 | register_event_handlers({orthanc.ChangeType.STABLE_STUDY: forward_dicom(matchers)},
18 | orthanc,
19 | client,
20 | logging_configuration=python_logging)
21 | orthanc.on_change(orthanc.ChangeType.STABLE_STUDY, '', 'study-uuid')
22 |
23 |
24 | def is_not_dicom_origin(resource_id, client):
25 | return get_metadata_of_first_instance_of_series(
26 | client, resource_id, 'Origin') != 'DicomProtocol'
27 |
28 |
29 | @respx.mock
30 | def test_autoforward_on_match_shall_start_modality_store(caplog):
31 | store = respx.post('/modalities/pacs/store').respond(200, text='study-uuid')
32 | register_and_trigger_handler([DicomReceivedMatcher(lambda uid, _: True, lambda uid, _: 'pacs')])
33 | assert store.called
34 | assert caplog.messages == [
35 | 'HTTP Request: POST https://localhost:8042/modalities/pacs/store "HTTP/1.1 200 OK"',
36 | 'DICOM export to modality "pacs" started for resource "study-uuid"'
37 | ]
38 |
39 |
40 | @respx.mock
41 | def test_autoforward_on_multiple_matches_shall_start_modality_store(caplog):
42 | instances = respx.get('/series/study-uuid/instances').respond(
43 | 200, json=[{
44 | 'ID': 'b99cd218-ae67f0d7-70324b6b-2b095801-f858dedf'
45 | }])
46 | origin = respx.get(
47 | '/instances/b99cd218-ae67f0d7-70324b6b-2b095801-f858dedf'
48 | '/metadata/Origin').respond(
49 | 200, text='Plugins')
50 | pacs1 = respx.post('/modalities/pacs1/store').respond(200, text='study-uuid')
51 | pacs2 = respx.post('/modalities/pacs2/store').respond(200, text='study-uuid')
52 | caplog.set_level(logging.INFO)
53 | matcher1 = DicomReceivedMatcher(is_not_dicom_origin, lambda uid, _: 'pacs1')
54 | matcher2 = DicomReceivedMatcher(lambda uid, _: True, lambda uid, _: 'pacs2')
55 | register_and_trigger_handler([matcher1, matcher2])
56 | assert instances.called
57 | assert origin.called
58 | assert pacs1.called
59 | assert pacs2.called
60 |
61 | url = 'https://localhost:8042'
62 | assert caplog.messages == [
63 | f'HTTP Request: GET {url}/series/study-uuid/instances "HTTP/1.1 200 OK"',
64 | 'HTTP Request: GET '
65 | f'{url}/instances/b99cd218-ae67f0d7-70324b6b-2b095801-f858dedf/metadata/Origin '
66 | '"HTTP/1.1 200 OK"', f'HTTP Request: POST {url}/modalities/pacs1/store "HTTP/1.1 200 OK"',
67 | 'DICOM export to modality "pacs1" started for resource "study-uuid"',
68 | f'HTTP Request: POST {url}/modalities/pacs2/store "HTTP/1.1 '
69 | '200 OK"', 'DICOM export to modality "pacs2" started for resource "study-uuid"'
70 | ]
71 |
72 |
73 | def test_autoforward_on_no_match_shall_log_and_continue(caplog):
74 | register_and_trigger_handler([
75 | DicomReceivedMatcher(lambda uid, _: False, lambda uid, _: 'pacs')
76 | ])
77 | (message, ) = caplog.messages
78 | assert 'did not match; resource "study-uuid" not forwarded' in message
79 |
--------------------------------------------------------------------------------
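As the tests above show, forward_dicom takes a list of DicomReceivedMatcher objects whose two callables both receive (resource_id, client): the first decides whether the resource should be forwarded, the second names the destination modality, and every matching matcher results in a /modalities/{name}/store call. A hedged configuration sketch along those lines (the 'archive' modality and the Origin check are illustrative, not defaults):

from orthanc_ext.orthanc_utilities import get_metadata_of_first_instance_of_series
from orthanc_ext.scripts.auto_forward import DicomReceivedMatcher, forward_dicom


def received_over_dicom(resource_id, client):
    # Forward only data that arrived over the DICOM protocol, not REST uploads or plugin output.
    return get_metadata_of_first_instance_of_series(
        client, resource_id, 'Origin') == 'DicomProtocol'


matchers = [DicomReceivedMatcher(received_over_dicom, lambda resource_id, client: 'archive')]
stable_study_handler = forward_dicom(matchers)
# Registered like the tests do it: {orthanc.ChangeType.STABLE_STUDY: stable_study_handler}
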
/tests/test_auto_retries.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import respx
4 |
5 | from orthanc_ext import event_dispatcher
6 | from orthanc_ext.http_utilities import create_internal_client
7 | from orthanc_ext.logging_configurator import python_logging
8 | from orthanc_ext.orthanc import OrthancApiHandler
9 | from orthanc_ext.scripts.auto_retries import (
10 | handle_failed_forwarding_job, calculate_delay, ONE_MINUTE, ONE_DAY, resubmit_job)
11 |
12 | orthanc = OrthancApiHandler()
13 | client = create_internal_client('https://localhost:8042')
14 |
15 |
16 | def test_calculate_delay():
17 | job = {
18 | 'CompletionTime': '20210210T084933.795611',
19 | 'Content': {
20 | 'Description': 'REST API',
21 | 'FailedInstancesCount': 0,
22 | 'InstancesCount': 1,
23 | 'LocalAet': 'ORTHANC',
24 | 'ParentResources': ['3121d449-9b15610c-df9b8396-bee611db-3901f794'],
25 | 'RemoteAet': 'PYNETDICOM'
26 | },
27 | 'CreationTime': '20210210T084350.430751',
28 | 'EffectiveRuntime': 0.036999999999999998,
29 | 'ErrorCode': 9,
30 | 'ErrorDescription': 'Error in the network protocol',
31 | 'ID': '0a9b0d5f-a2a8-46c1-8b2a-6a1e081427fb',
32 | 'Priority': 0,
33 | 'Progress': 0,
34 | 'State': 'Failure',
35 | 'Timestamp': '20210210T090925.594915',
36 | 'Type': 'DicomModalityStore'
37 | }
38 | assert calculate_delay(job) == 686
39 |
40 |
41 | def test_calculate_delay_shall_not_retry_too_aggressively():
42 | # interval first try: 1 second
43 | job = {'CreationTime': '20210210T084350.430751', 'CompletionTime': '20210210T084351.430751'}
44 | assert calculate_delay(job) == ONE_MINUTE
45 |
46 |
47 | def test_calculate_delay_shall_use_back_off():
48 | # time between previous tries: 3 minutes
49 | job = {'CreationTime': '20210210T084350.430751', 'CompletionTime': '20210210T084650.430751'}
50 | assert calculate_delay(job) == 6 * ONE_MINUTE
51 |
52 | job = {'CreationTime': '20210210T084350.430751', 'CompletionTime': '20210210T085250.430751'}
53 | assert calculate_delay(job) == 18 * ONE_MINUTE
54 |
55 |
56 | def test_calculate_delay_shall_retry_every_day():
57 | job = {'CreationTime': '20210210T084350.430751', 'CompletionTime': '20210210T224350.430751'}
58 | assert calculate_delay(job) == ONE_DAY
59 |
60 |
61 | @respx.mock
62 | def test_should_not_resubmit_other_job_types(caplog):
63 | job = respx.get('/jobs/job-uuid').respond(
64 | 200,
65 | json={
66 | 'CreationTime': '20210210T084350.430751',
67 | 'CompletionTime': '20210210T224350.430751',
68 | 'Type': 'CreateDicomZip'
69 | })
70 | event_dispatcher.register_event_handlers(
71 | {orthanc.ChangeType.JOB_FAILURE: handle_failed_forwarding_job(0.1)},
72 | orthanc,
73 | client,
74 | logging_configuration=python_logging)
75 | caplog.set_level(logging.DEBUG)
76 | orthanc.on_change(orthanc.ChangeType.JOB_FAILURE, '', 'job-uuid')
77 | assert job.called
78 | assert caplog.messages[-2] == 'not retrying "CreateDicomZip" job "job-uuid"'
79 |
80 |
81 | @respx.mock
82 | def test_on_failure_should_resubmit_job(caplog):
83 | job = respx.get('/jobs/job-uuid').respond(
84 | 200,
85 | json={
86 | 'CreationTime': '20210210T084350.430751',
87 | 'CompletionTime': '20210210T084351.430751',
88 | 'Type': 'DicomModalityStore'
89 | })
90 | resubmit = respx.post('/jobs/job-uuid/resubmit').respond(200)
91 |
92 | event_dispatcher.register_event_handlers({
93 | orthanc.ChangeType.JOB_FAILURE:
94 | handle_failed_forwarding_job(
95 | 0.1, job_runner=lambda job_id, delay, httpx_client: resubmit_job(client, job_id))
96 | },
97 | orthanc,
98 | client,
99 | logging_configuration=python_logging)
100 | caplog.set_level(logging.DEBUG)
101 | orthanc.on_change(orthanc.ChangeType.JOB_FAILURE, '', 'job-uuid')
102 | assert job.called
103 | assert resubmit.called
104 | assert caplog.messages[-4] == 'resubmitting job "job-uuid" after 2 seconds'
105 | assert caplog.messages[-2] == 'resubmitted job "job-uuid"'
106 |
--------------------------------------------------------------------------------
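The delay assertions above pin down the retry policy: the time between CreationTime and CompletionTime is doubled, floored at ONE_MINUTE and capped at ONE_DAY (1 s -> 1 min, 3 min -> 6 min, 9 min -> 18 min, 14 h -> 1 day, and 343 s -> 686 s, which also implies truncation to whole seconds). A sketch of that rule as inferred from the expected values, not necessarily the exact code in auto_retries.py:

from datetime import datetime

ONE_MINUTE = 60
ONE_DAY = 24 * 60 * 60
TIMESTAMP_FORMAT = '%Y%m%dT%H%M%S.%f'


def calculate_delay_sketch(job):
    """Double the previous run time, but never retry sooner than a minute or later than a day."""
    runtime = (
        datetime.strptime(job['CompletionTime'], TIMESTAMP_FORMAT)
        - datetime.strptime(job['CreationTime'], TIMESTAMP_FORMAT)).total_seconds()
    return int(min(max(2 * runtime, ONE_MINUTE), ONE_DAY))
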
/tests/test_event_dispatcher.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import dataclasses
3 | import logging
4 | import time
5 | from functools import partial
6 |
7 | import httpx
8 | import pytest
9 | import respx
10 |
11 | from orthanc_ext import event_dispatcher
12 | from orthanc_ext.http_utilities import create_internal_client, ClientType
13 | from orthanc_ext.logging_configurator import python_logging
14 | from orthanc_ext.orthanc import OrthancApiHandler
15 | from orthanc_ext.python_utilities import pipeline
16 |
17 | orthanc = OrthancApiHandler()
18 |
19 |
20 | @pytest.fixture
21 | def async_client():
22 | return create_internal_client('https://localhost:8042', '', client_type=ClientType.ASYNC)
23 |
24 |
25 | @dataclasses.dataclass
26 | class ChangeEvent:
27 | change: orthanc.ChangeType = orthanc.ChangeType.UNKNOWN
28 | resource_type: int = orthanc.ResourceType.NONE
29 | resource_id: str = None
30 | orthanc = None
31 |
32 |
33 | def capture(event):
34 |
35 | def capture_impl(incoming_event, local_orthanc):
36 | event.change = incoming_event.change_type
37 | event.resource_type = incoming_event.resource_type
38 | event.resource_id = incoming_event.resource_id
39 | event.orthanc = local_orthanc
40 |
41 | return event
42 |
43 | return capture_impl
44 |
45 |
46 | async def async_func(return_value, evt, session):
47 | assert evt is not None
48 | assert session is not None
49 | return return_value
50 |
51 |
52 | async def async_fail(*_):
53 | raise Exception('failed')
54 |
55 |
56 | async def async_get(_, session):
57 | return await session.get('http://localhost:0/')
58 |
59 |
60 | async def async_sleep(*_):
61 | await asyncio.sleep(0.5)
62 | return 42
63 |
64 |
65 | def test_registered_callback_should_be_triggered_on_change_event():
66 | event = ChangeEvent()
67 | event_dispatcher.register_event_handlers(
68 | {orthanc.ChangeType.STABLE_STUDY: capture(event)}, orthanc, httpx)
69 | sync_result = orthanc.on_change(
70 | orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
71 | assert sync_result[0].resource_id == 'resource-uuid'
72 | assert sync_result[0].resource_type == orthanc.ResourceType.STUDY
73 | assert sync_result[0].orthanc is not None
74 |
75 |
76 | @dataclasses.dataclass
77 | class StableStudyEvent:
78 | resource_id: str
79 | StudyInstanceUid: str = None
80 |
81 |
82 | def embellish(evt, client) -> StableStudyEvent:
83 | study = client.get(f'http://localhost/studies/{evt.resource_id}').json()
84 | return StableStudyEvent(evt.resource_id, study.get('StudyInstanceUid'))
85 |
86 |
87 | def publish(evt: StableStudyEvent, client):
88 | client.post('http://localhost/publish', json=dataclasses.asdict(evt))
89 | return evt
90 |
91 |
92 | @respx.mock
93 | def test_pipeline_should_run_all_functions_to_completion_passing_results_to_the_next_function():
94 | respx.get('http://localhost/studies/resource-uuid').respond(
95 | 200, json={'StudyInstanceUid': '1.2.3'})
96 | respx.post('http://localhost/publish').respond(200)
97 |
98 | event_dispatcher.register_event_handlers(
99 | {orthanc.ChangeType.STABLE_STUDY: pipeline(embellish, publish)}, orthanc, httpx)
100 | publication_response = orthanc.on_change(
101 | orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
102 |
103 | assert publication_response == [
104 | StableStudyEvent(resource_id='resource-uuid', StudyInstanceUid='1.2.3')
105 | ]
106 |
107 |
108 | def test_registered_async_callback_should_be_run_to_completion_on_change_event(async_client):
109 | event_dispatcher.register_event_handlers(
110 | {orthanc.ChangeType.STABLE_STUDY: partial(async_func, 42)}, orthanc, httpx, async_client)
111 | async_result = orthanc.on_change(
112 | orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
113 | assert async_result == [42]
114 |
115 |
116 | def test_multiple_registered_async_callbacks_should_be_run_to_completion_on_change_event(
117 | async_client):
118 | event_dispatcher.register_event_handlers(
119 | {orthanc.ChangeType.STABLE_STUDY: [partial(async_func, 42),
120 | partial(async_func, 41)]}, orthanc, httpx, async_client)
121 | async_result = orthanc.on_change(
122 | orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
123 | assert async_result == [42, 41]
124 |
125 |
126 | def test_all_async_callbacks_should_be_run_to_completion_on_change_event_if_one_or_more_fail(
127 | caplog, async_client):
128 | event_dispatcher.register_event_handlers({
129 | orthanc.ChangeType.STABLE_STUDY:
130 | [async_fail, async_get,
131 | partial(async_func, 42),
132 | partial(async_func, 41)]
133 | }, orthanc, httpx, async_client)
134 | async_result = orthanc.on_change(
135 | orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
136 |
137 | exception = async_result[0]
138 | http_exception = async_result[1]
139 |
140 | assert async_result == [exception, http_exception, 42, 41]
141 | assert exception.args == ('failed', )
142 | assert type(exception) == Exception
143 |
144 | assert 'async_fail' in caplog.messages[0]
145 | assert "Exception('failed')" in caplog.messages[0]
146 |
147 | assert "ConnectError('All connection attempts failed')" in caplog.messages[1]
148 |
149 |
150 | def test_async_callbacks_should_be_run_concurrently_on_change_event(async_client):
151 | awaitables = []
152 | start = time.perf_counter()
153 |     for _ in range(10):
154 | awaitables.append(async_sleep)
155 | event_dispatcher.register_event_handlers(
156 | {orthanc.ChangeType.STABLE_STUDY: awaitables}, orthanc, httpx, async_client)
157 | async_result = orthanc.on_change(
158 | orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
159 | assert async_result == [42, 42, 42, 42, 42, 42, 42, 42, 42, 42]
160 | assert time.perf_counter() - start < 1
161 |
162 |
163 | def test_all_registered_callbacks_should_be_triggered_on_change_event():
164 | event1 = ChangeEvent()
165 | event2 = ChangeEvent()
166 | event_dispatcher.register_event_handlers(
167 | {orthanc.ChangeType.STABLE_STUDY: [capture(event1), capture(event2)]}, orthanc, httpx)
168 | orthanc.on_change(orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'resource-uuid')
169 | assert event1.resource_id is not None
170 | assert event2.resource_id is not None
171 |
172 |
173 | def test_no_registered_callbacks_should_be_reported_in_on_change_event(caplog):
174 | args = {}
175 | event_dispatcher.register_event_handlers(
176 | args, orthanc, httpx, logging_configuration=python_logging)
177 | caplog.set_level(logging.DEBUG)
178 | orthanc.on_change(orthanc.ChangeType.ORTHANC_STARTED, '', '')
179 | assert 'no handler registered for ORTHANC_STARTED' in caplog.text
180 |
181 |
182 | @respx.mock
183 | def test_shall_return_values_from_executed_handlers():
184 | system = respx.get('/system').respond(200, json={'Version': '1.9.0'})
185 |
186 | def get_system_info(_, client):
187 | return client.get('http://localhost:8042/system').json()
188 |
189 | event_dispatcher.register_event_handlers(
190 | {orthanc.ChangeType.ORTHANC_STARTED: get_system_info}, orthanc, httpx)
191 | (system_info, ) = orthanc.on_change(
192 | orthanc.ChangeType.ORTHANC_STARTED, orthanc.ResourceType.NONE, '')
193 | assert system.called
194 | assert system_info.get('Version') == '1.9.0'
195 |
196 |
197 | def test_event_shall_have_human_readable_representation(caplog):
198 | caplog.set_level(logging.INFO)
199 |
200 | def log_event(evt, _):
201 | logging.info(evt)
202 |
203 | event_dispatcher.register_event_handlers(
204 | {orthanc.ChangeType.STABLE_STUDY: log_event}, orthanc, httpx)
205 | orthanc.on_change(orthanc.ChangeType.STABLE_STUDY, orthanc.ResourceType.STUDY, 'uuid')
206 | assert 'change_type=STABLE_STUDY' in caplog.text
207 | assert 'resource_type=STUDY' in caplog.text
208 |
209 |
210 | def test_create_session_shall_pass_ssl_cert_if_ssl_is_enabled_and_report_issues():
211 | configured_orthanc = OrthancApiHandler(
212 | config={
213 | 'SslEnabled': True,
214 | 'SslCertificate': 'path-to-non-existing-file'
215 | })
216 | with pytest.raises(IOError, match='.*TLS CA.*invalid path: path-to-non-existing-file'):
217 | event_dispatcher.create_session(configured_orthanc)
218 |
219 |
220 | def test_create_session_shall_not_raise_an_exception_for_a_non_existing_ssl_cert_if_ssl_is_disabled(
221 | ):
222 | configured_orthanc = OrthancApiHandler(
223 | config={
224 | 'SslEnabled': False,
225 | 'SslCertificate': 'path-to-non-existing-file'
226 | })
227 |
228 | client = event_dispatcher.create_session(configured_orthanc)
229 |
230 | assert_client_is_successfully_constructed(client)
231 |
232 |
233 | def assert_client_is_successfully_constructed(client):
234 | assert client is not None
235 |
--------------------------------------------------------------------------------
/tests/test_event_publisher.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 | from dataclasses import dataclass
3 |
4 | from orthanc_ext.scripts.event_publisher import convert_change_event_to_message, \
5 | convert_message_to_change_event
6 |
7 |
8 | @dataclass
9 | class ChangeEvent:
10 | StudyInstanceID: str
11 | SeriesInstanceID: str
12 |
13 |
14 | def test_cloud_event_conversion_roundtrip():
15 | event = ChangeEvent('1.2.3', '4.5.6')
16 | headers, data = convert_change_event_to_message(event)
17 |
18 | assert headers == {'content-type': 'application/cloudevents+json'}
19 | assert type(data) == bytes
20 | assert dataclasses.asdict(event) == convert_message_to_change_event(headers, data).data
21 |
--------------------------------------------------------------------------------
/tests/test_executor_utilities.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import time
3 | from functools import partial
4 |
5 | from orthanc_ext.executor_utilities import SequentialHybridExecutor, AsyncOnlyExecutor
6 | from tests.test_event_dispatcher import ChangeEvent
7 |
8 |
9 | def test_SequentialHybridExecutor_should_invoke_both_sync_async_handlers_and_return_the_result():
10 | s_client = object()
11 |     a_client = object()
12 | dispatcher = SequentialHybridExecutor(s_client, a_client)
13 | change_event = ChangeEvent()
14 | start = time.perf_counter()
15 | assert dispatcher.invoke_all(
16 | change_event, [lambda event, client: (42, event, client)],
17 | [partial(long_async_func, 43)]) == [(42, change_event, s_client),
18 | (43, change_event, a_client)]
19 | end = time.perf_counter()
20 | assert end - start > 0.5, 'invoke_all should wait for async function completion'
21 |
22 |
23 | async def long_async_func(ret_val, event, client):
24 | await asyncio.sleep(0.5)
25 | return ret_val, event, client
26 |
27 |
28 | def long_sync_func(ret_val, event, client):
29 | time.sleep(0.5)
30 | return ret_val, event, client
31 |
32 |
33 | def test_AsyncOnlyExecutor_shall_handle_events_as_they_come_in():
34 | a_client = object()
35 | s_client = object()
36 | dispatcher = AsyncOnlyExecutor(s_client, a_client)
37 | dispatcher.start()
38 | change_event1 = ChangeEvent()
39 | change_event2 = ChangeEvent()
40 | change_event2.resource_id = 'resource-uuid2'
41 |
42 | start = time.perf_counter()
43 |
44 | task1, _ = dispatcher.invoke_all(change_event1, [], [partial(long_async_func, 42)])
45 | sync_task1, = dispatcher.invoke_all(change_event2, [partial(long_sync_func, 40)], [])
46 | end = time.perf_counter()
47 |
48 | task2, _ = dispatcher.invoke_all(change_event2, [], [partial(long_async_func, 43)])
49 |
50 | assert task1.result() == [(42, change_event1, a_client)]
51 | assert task2.result() == [(43, change_event2, a_client)]
52 | while not sync_task1.done():
53 | time.sleep(0.1)
54 | assert sync_task1.result() == [(40, change_event2, s_client)]
55 |
56 | assert end - start < 0.01, 'invoke_all should never block'
57 |
58 |
59 | def test_AsyncOnlyExecutor_shall_report_exceptions_with_traceback(caplog):
60 | a_client = object()
61 | dispatcher = AsyncOnlyExecutor(None, a_client)
62 | dispatcher.start()
63 | change_event1 = ChangeEvent()
64 | change_event2 = ChangeEvent()
65 | change_event2.resource_id = 'resource-uuid2'
66 |
67 | async def failing_func(ex, *_):
68 | raise ex
69 |
70 | ex = Exception('failed')
71 | task1, sync_task_empty = dispatcher.invoke_all(change_event1, [], [partial(failing_func, ex)])
72 | assert task1.result() == [ex]
73 | assert sync_task_empty.result() == []
74 |
75 | assert "execution of coroutine 'failing_func' failed with exception" in caplog.records[
76 | 0].message
77 | assert "Exception('failed')" in caplog.records[0].message
78 |
79 |
80 | def test_AsyncOnlyExecutor_shall_report_pending_tasks_on_stop(caplog):
81 | a_client = object()
82 | dispatcher = AsyncOnlyExecutor(None, a_client)
83 | dispatcher.start()
84 | sync_task1, = dispatcher.invoke_all(ChangeEvent(), [partial(long_sync_func, 40)], [])
85 | dispatcher.stop()
86 | assert sync_task1.cancelled()
87 | assert 'about to stop event loop with 1 task(s) pending: ' \
88 |            "{<Task" in caplog.records[0].message
89 | 
--------------------------------------------------------------------------------
/tests/test_orthanc_api_consistency.py:
--------------------------------------------------------------------------------
1 | import re
2 | 
3 | import httpx
4 | 
5 | from orthanc_ext.orthanc import OrthancApiHandler
6 | 
7 | orthanc = OrthancApiHandler()
8 | 
9 | 
10 | def test_change_type_list_should_be_complete():
11 |     events = get_type_enum_values('OrthancPluginChangeType')
12 |     assert len(events) > 16
13 |     for event in events:
14 |         assert orthanc.ChangeType.__dict__.get(
15 |             event) is not None, f'{event} should be added on {orthanc.ChangeType}'
16 | 
17 |
18 | def test_resource_type_list_should_be_complete():
19 | events = get_type_enum_values('OrthancPluginResourceType')
20 | assert len(events) > 4
21 | for event in events:
22 | assert (
23 | orthanc.ResourceType.__dict__.get(event)
24 |             is not None), f'{event} should be added on {orthanc.ResourceType}'
25 |
26 |
27 | def get_type_enum_values(type_under_test):
28 | resp = httpx.get(
29 | f'https://orthanc.uclouvain.be/hg/orthanc-python/raw-file/'
30 | f'tip/Sources/Autogenerated/sdk_{type_under_test}.impl.h')
31 | resp.raise_for_status()
32 | return re.findall(f'sdk_{type_under_test}_Type.tp_dict, "([A-Z_]+)"', resp.text)
33 |
--------------------------------------------------------------------------------
/tests/test_orthanc_api_handler.py:
--------------------------------------------------------------------------------
1 | from orthanc_ext.orthanc import OrthancApiHandler
2 |
3 | orthanc = OrthancApiHandler()
4 |
5 |
6 | def test_GenerateRestApiAuthorizationToken_should_yield_a_token():
7 | assert orthanc.GenerateRestApiAuthorizationToken() is not None
8 |
--------------------------------------------------------------------------------
/tests/test_pyorthanc_utilities.py:
--------------------------------------------------------------------------------
1 | from pyorthanc import Orthanc, AsyncOrthanc
2 |
3 | from orthanc_ext.event_dispatcher import create_session
4 | from orthanc_ext.orthanc import OrthancApiHandler
5 | from orthanc_ext.pyorthanc_utilities import PyOrthancClientType
6 |
7 |
8 | def test_shall_create_sync_client():
9 | client = PyOrthancClientType.SYNC.create_internal_client(base_url='http://localhost:8042')
10 | assert client is not None
11 | assert type(client) == Orthanc
12 |
13 |
14 | def test_shall_create_async_client():
15 | client = PyOrthancClientType.ASYNC.create_internal_client(base_url='http://localhost:8042')
16 | assert client is not None
17 | assert type(client) == AsyncOrthanc
18 |
19 |
20 | def test_shall_support_create_session_for_backward_compatibility():
21 | assert create_session(OrthancApiHandler(), PyOrthancClientType.SYNC) is not None
22 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | py39,
4 | py310,
5 | py311,
6 | flake8
7 |
8 | [travis]
9 | python =
10 | 3.11: py311
11 | 3.10: py310
12 | 3.9: py39
13 |
14 | [testenv]
15 | setenv =
16 | PYTHONPATH = {toxinidir}
17 | deps =
18 | .[nats-event-publisher, kafka-event-publisher, rabbitmq-event-publisher, pyorthanc]
19 | -r{toxinidir}/requirements_dev.txt
20 | pytest-cov
21 | ; If you want to make tox run the tests with the same versions, create a
22 | ; requirements.txt with the pinned versions and uncomment the following line:
23 | ; -r{toxinidir}/requirements.txt
24 | commands =
25 | pip install -U pip
26 | pytest --basetemp={envtmpdir} --cov --cov-report term --cov-report html --cov=orthanc_ext --cov-append tests
27 |
28 | [testenv:flake8]
29 | basepython = python
30 | deps = flake8
31 | commands = flake8 orthanc_ext tests
32 |
--------------------------------------------------------------------------------