├── .github └── workflows │ ├── lint.yml │ ├── prep-release.yml │ ├── publish-release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── LICENSE ├── README.md ├── docs ├── api_reference.md ├── guide.md ├── index.md ├── install.md └── jupyter.svg ├── mkdocs.yml ├── pyproject.toml ├── src └── fps │ ├── __init__.py │ ├── _config.py │ ├── _context.py │ ├── _importer.py │ ├── _module.py │ ├── _signal.py │ ├── cli │ └── _cli.py │ ├── py.typed │ └── web │ ├── fastapi.py │ └── server.py └── tests ├── conftest.py ├── test_app.py ├── test_cli.py ├── test_config.py ├── test_context.py ├── test_exceptions.py ├── test_module.py ├── test_signal.py ├── test_start_stop.py ├── test_tasks.py ├── test_value.py └── test_web.py /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: lint 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | lint: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | - uses: actions/setup-python@v5 11 | - uses: pre-commit/action@v3.0.1 12 | with: 13 | extra_args: --all-files --show-diff-on-failure 14 | -------------------------------------------------------------------------------- /.github/workflows/prep-release.yml: -------------------------------------------------------------------------------- 1 | name: "Step 1: Prep Release" 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version_spec: 6 | description: "New Version Specifier" 7 | default: "next" 8 | required: false 9 | branch: 10 | description: "The branch to target" 11 | required: false 12 | post_version_spec: 13 | description: "Post Version Specifier" 14 | required: false 15 | # silent: 16 | # description: "Set a placeholder in the changelog and don't publish the release." 
17 | # required: false 18 | # type: boolean 19 | since: 20 | description: "Use PRs with activity since this date or git reference" 21 | required: false 22 | since_last_stable: 23 | description: "Use PRs with activity since the last stable git tag" 24 | required: false 25 | type: boolean 26 | jobs: 27 | prep_release: 28 | runs-on: ubuntu-latest 29 | permissions: 30 | contents: write 31 | steps: 32 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 33 | 34 | - name: Prep Release 35 | id: prep-release 36 | uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2 37 | with: 38 | token: ${{ secrets.GITHUB_TOKEN }} 39 | version_spec: ${{ github.event.inputs.version_spec }} 40 | # silent: ${{ github.event.inputs.silent }} 41 | post_version_spec: ${{ github.event.inputs.post_version_spec }} 42 | branch: ${{ github.event.inputs.branch }} 43 | since: ${{ github.event.inputs.since }} 44 | since_last_stable: ${{ github.event.inputs.since_last_stable }} 45 | 46 | - name: "** Next Step **" 47 | run: | 48 | echo "Optional): Review Draft Release: ${{ steps.prep-release.outputs.release_url }}" 49 | -------------------------------------------------------------------------------- /.github/workflows/publish-release.yml: -------------------------------------------------------------------------------- 1 | name: "Step 2: Publish Release" 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | branch: 6 | description: "The target branch" 7 | required: false 8 | release_url: 9 | description: "The URL of the draft GitHub release" 10 | required: false 11 | steps_to_skip: 12 | description: "Comma separated list of steps to skip" 13 | required: false 14 | 15 | jobs: 16 | publish_release: 17 | runs-on: ubuntu-latest 18 | environment: release 19 | permissions: 20 | id-token: write 21 | steps: 22 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 23 | 24 | - uses: actions/create-github-app-token@v1 25 | id: app-token 26 | with: 27 | app-id: ${{ vars.APP_ID }} 28 | private-key: ${{ secrets.APP_PRIVATE_KEY }} 29 | 30 | - name: Populate Release 31 | id: populate-release 32 | uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2 33 | with: 34 | token: ${{ steps.app-token.outputs.token }} 35 | branch: ${{ github.event.inputs.branch }} 36 | release_url: ${{ github.event.inputs.release_url }} 37 | steps_to_skip: ${{ github.event.inputs.steps_to_skip }} 38 | 39 | - name: Finalize Release 40 | id: finalize-release 41 | env: 42 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 43 | uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2 44 | with: 45 | token: ${{ steps.app-token.outputs.token }} 46 | release_url: ${{ steps.populate-release.outputs.release_url }} 47 | 48 | - name: "** Next Step **" 49 | if: ${{ success() }} 50 | run: | 51 | echo "Verify the final release" 52 | echo ${{ steps.finalize-release.outputs.release_url }} 53 | 54 | - name: "** Failure Message **" 55 | if: ${{ failure() }} 56 | run: | 57 | echo "Failed to Publish the Draft Release Url:" 58 | echo ${{ steps.populate-release.outputs.release_url }} 59 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | test: 11 | name: ${{ matrix.os }} python-${{ matrix.python-version }} 12 | runs-on: ${{ matrix.os }} 13 | 14 | strategy: 15 | matrix: 16 | os: 
[ubuntu-latest, windows-latest, macos-latest] 17 | python-version: [ '3.9', '3.10', '3.11', '3.12', '3.13' ] 18 | 19 | steps: 20 | - name: Checkout repository 21 | uses: actions/checkout@v4 22 | - name: Setup Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | - name: Ensure pip >= v25.1 27 | run: python -m pip install "pip >= 25.1" 28 | - name: Install dependencies 29 | run: pip install --group test ".[click,fastapi,anycorn]" 30 | - name: Check types 31 | run: mypy src 32 | - name: Run tests 33 | if: ${{ !((matrix.python-version == '3.13') && (matrix.os == 'ubuntu-latest')) }} 34 | run: pytest --color=yes -v tests 35 | - name: Run code coverage 36 | if: ${{ (matrix.python-version == '3.13') && (matrix.os == 'ubuntu-latest') }} 37 | run: | 38 | coverage run -m pytest tests 39 | coverage report --fail-under=100 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | .coverage 3 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | rev: v0.11.2 4 | hooks: 5 | - id: ruff 6 | args: [--fix, --show-fixes] 7 | - id: ruff-format 8 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Version history 2 | 3 | 4 | 5 | ## 0.4.0 6 | 7 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.3.0...8d8b4c7d9c189bd0f60c7094323369bd478f58e1)) 8 | 9 | ### Merged PRs 10 | 11 | - Split optional dependencies [#132](https://github.com/jupyter-server/fps/pull/132) ([@davidbrochart](https://github.com/davidbrochart)) 12 | - Make CLI optional [#131](https://github.com/jupyter-server/fps/pull/131) ([@davidbrochart](https://github.com/davidbrochart)) 13 | - Extract out server from `FastAPIModule` to `ServerModule` [#130](https://github.com/jupyter-server/fps/pull/130) ([@davidbrochart](https://github.com/davidbrochart)) 14 | - Add `Module` API documentation [#129](https://github.com/jupyter-server/fps/pull/129) ([@davidbrochart](https://github.com/davidbrochart)) 15 | - Use dependency groups for `test` and `docs` [#128](https://github.com/jupyter-server/fps/pull/128) ([@davidbrochart](https://github.com/davidbrochart)) 16 | 17 | ### Contributors to this release 18 | 19 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-04-21&to=2025-05-22&type=c)) 20 | 21 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-04-21..2025-05-22&type=Issues) 22 | 23 | 24 | 25 | ## 0.3.0 26 | 27 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.2.2...b587c686df102b7e85a4eedaefe6e5b467cbe5f0)) 28 | 29 | ### Merged PRs 30 | 31 | - [#126](https://github.com/jupyter-server/fps/pull/126) ([@davidbrochart](https://github.com/davidbrochart)), [#127](https://github.com/jupyter-server/fps/pull/127) ([@davidbrochart](https://github.com/davidbrochart)). 32 | - Add API documentation. 33 | - Change `exclusive` argument of `SharedValue()`, `Context.put()` and `Module.put()` to `max_boworrers`. 
34 | - Add `manage` argument of `SharedValue()` and `Context.put()` to use its context manager for setup/teardown. 35 | - Add `add_teardown_callback()` method to `Context` and `Module` to register a teardown callback. 36 | - Add `shared_value` argument of `Context.put()` to share a value in multiple contexts. 37 | - Add `timeout` argument of `SharedValue.get()` and `Context.get()`. 38 | - Add `teardown_callback` argument of `SharedValue()` and `Module.put()`. 39 | - Remove `SharedValue.set_teardown_callback()`. 40 | 41 | ### Contributors to this release 42 | 43 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-04-01&to=2025-04-21&type=c)) 44 | 45 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-04-01..2025-04-21&type=Issues) 46 | 47 | ## 0.2.2 48 | 49 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.2.1...78d1d789a9330a75eadb5a75d9e059d2d01a4538)) 50 | 51 | ### Merged PRs 52 | 53 | - Add Signal documentation [#125](https://github.com/jupyter-server/fps/pull/125) ([@davidbrochart](https://github.com/davidbrochart)) 54 | - Add Signal iterator [#124](https://github.com/jupyter-server/fps/pull/124) ([@davidbrochart](https://github.com/davidbrochart)) 55 | - Add Signal [#123](https://github.com/jupyter-server/fps/pull/123) ([@davidbrochart](https://github.com/davidbrochart)) 56 | 57 | ### Contributors to this release 58 | 59 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-03-31&to=2025-04-01&type=c)) 60 | 61 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-03-31..2025-04-01&type=Issues) 62 | 63 | ## 0.2.1 64 | 65 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.2.0...77c09d4d8248dba75841a2d5317bb69f3872d31b)) 66 | 67 | ### Merged PRs 68 | 69 | - Export SharedValue [#122](https://github.com/jupyter-server/fps/pull/122) ([@davidbrochart](https://github.com/davidbrochart)) 70 | 71 | ### Contributors to this release 72 | 73 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-03-26&to=2025-03-31&type=c)) 74 | 75 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-03-26..2025-03-31&type=Issues) 76 | 77 | ## 0.2.0 78 | 79 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.6...f16bd46ed2815b4d618c10b76908581d073ab2b4)) 80 | 81 | ### Merged PRs 82 | 83 | - Revert "Update to latest Jupyter Release Actions" [#121](https://github.com/jupyter-server/fps/pull/121) ([@Zsailer](https://github.com/Zsailer)) 84 | - Update to latest Jupyter Release Actions [#120](https://github.com/jupyter-server/fps/pull/120) ([@Zsailer](https://github.com/Zsailer)) 85 | - Add context documentation [#119](https://github.com/jupyter-server/fps/pull/119) ([@davidbrochart](https://github.com/davidbrochart)) 86 | - Convert repo to use releaser from repo [#118](https://github.com/jupyter-server/fps/pull/118) ([@davidbrochart](https://github.com/davidbrochart)) 87 | - Add optional `teardown_callback` parameter to `Context.put()` [#117](https://github.com/jupyter-server/fps/pull/117) ([@davidbrochart](https://github.com/davidbrochart)) 88 | - Add Context [#116](https://github.com/jupyter-server/fps/pull/116) ([@davidbrochart](https://github.com/davidbrochart)) 89 | - Add concurrency test for 
tasks [#115](https://github.com/jupyter-server/fps/pull/115) ([@davidbrochart](https://github.com/davidbrochart)) 90 | - Add .gitignore [#114](https://github.com/jupyter-server/fps/pull/114) ([@davidbrochart](https://github.com/davidbrochart)) 91 | 92 | ### Contributors to this release 93 | 94 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-03-19&to=2025-03-26&type=c)) 95 | 96 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-03-19..2025-03-26&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2025-03-19..2025-03-26&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3AZsailer+updated%3A2025-03-19..2025-03-26&type=Issues) 97 | 98 | ## 0.1.6 99 | 100 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.5...1fa4f5bef6cd23682e882f0bf3c2d5654be12108)) 101 | 102 | ### Merged PRs 103 | 104 | - missing default values in cli.main [#113](https://github.com/jupyter-server/fps/pull/113) ([@minrk](https://github.com/minrk)) 105 | 106 | ### Contributors to this release 107 | 108 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-03-06&to=2025-03-19&type=c)) 109 | 110 | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Aminrk+updated%3A2025-03-06..2025-03-19&type=Issues) 111 | 112 | ## 0.1.5 113 | 114 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.4...7d84a4fbd7495676227d2e97e7418c348bec4662)) 115 | 116 | ### Merged PRs 117 | 118 | - Require anycorn >=0.18.1 [#111](https://github.com/jupyter-server/fps/pull/111) ([@davidbrochart](https://github.com/davidbrochart)) 119 | - Fix KeyboardInterrupt handling on Trio [#110](https://github.com/jupyter-server/fps/pull/110) ([@davidbrochart](https://github.com/davidbrochart)) 120 | - Support running on Trio [#109](https://github.com/jupyter-server/fps/pull/109) ([@davidbrochart](https://github.com/davidbrochart)) 121 | - Add --help-all CLI option [#108](https://github.com/jupyter-server/fps/pull/108) ([@davidbrochart](https://github.com/davidbrochart)) 122 | - Add --show-config CLI option [#107](https://github.com/jupyter-server/fps/pull/107) ([@davidbrochart](https://github.com/davidbrochart)) 123 | - Bump anyioutils v0.7.0 [#106](https://github.com/jupyter-server/fps/pull/106) ([@davidbrochart](https://github.com/davidbrochart)) 124 | 125 | ### Contributors to this release 126 | 127 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-02-26&to=2025-03-06&type=c)) 128 | 129 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-02-26..2025-03-06&type=Issues) 130 | 131 | ## 0.1.4 132 | 133 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.3...68a4b47acb16fd8570d8c086a84c737399cef086)) 134 | 135 | ### Merged PRs 136 | 137 | - Stop application when background tasks fail [#105](https://github.com/jupyter-server/fps/pull/105) ([@davidbrochart](https://github.com/davidbrochart)) 138 | 139 | ### Contributors to this release 140 | 141 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-02-26&to=2025-02-26&type=c)) 142 | 143 | 
[@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-02-26..2025-02-26&type=Issues) 144 | 145 | ## 0.1.3 146 | 147 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.2...f86c6487fd3512cc6fb8883f3a7049aed857157e)) 148 | 149 | ### Merged PRs 150 | 151 | - Wait for server to be started [#104](https://github.com/jupyter-server/fps/pull/104) ([@davidbrochart](https://github.com/davidbrochart)) 152 | 153 | ### Contributors to this release 154 | 155 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-02-16&to=2025-02-26&type=c)) 156 | 157 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-02-16..2025-02-26&type=Issues) 158 | 159 | ## 0.1.2 160 | 161 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.1...0823e706665a44475a6d1065a7f7ff24d216204f)) 162 | 163 | ### Merged PRs 164 | 165 | - Fix CLI [#102](https://github.com/jupyter-server/fps/pull/102) ([@davidbrochart](https://github.com/davidbrochart)) 166 | 167 | ### Contributors to this release 168 | 169 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-02-07&to=2025-02-16&type=c)) 170 | 171 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-02-07..2025-02-16&type=Issues) 172 | 173 | ## 0.1.1 174 | 175 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.1.0...31fcee05d8ac894e971aa1b083c0831eb6679b97)) 176 | 177 | ### Merged PRs 178 | 179 | - Add documentation [#100](https://github.com/jupyter-server/fps/pull/100) ([@davidbrochart](https://github.com/davidbrochart)) 180 | 181 | ### Contributors to this release 182 | 183 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2025-02-05&to=2025-02-07&type=c)) 184 | 185 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2025-02-05..2025-02-07&type=Issues) 186 | 187 | ## 0.1.0 188 | 189 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.21...14bb99d3c8129cc9adc32ee8ff9b1c6e4fb0d512)) 190 | 191 | ### Merged PRs 192 | 193 | - Remove "value" from API [#99](https://github.com/jupyter-server/fps/pull/99) ([@davidbrochart](https://github.com/davidbrochart)) 194 | - Merge FastAIO [#98](https://github.com/jupyter-server/fps/pull/98) ([@davidbrochart](https://github.com/davidbrochart)) 195 | 196 | ### Contributors to this release 197 | 198 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-11-23&to=2025-02-05&type=c)) 199 | 200 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-11-23..2025-02-05&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2022-11-23..2025-02-05&type=Issues) 201 | 202 | ## 0.0.21 203 | 204 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.20...c7e394d1ee94837c4118d24a0c86f9499ae90e3d)) 205 | 206 | ### Merged PRs 207 | 208 | - Allow setting log level [#86](https://github.com/jupyter-server/fps/pull/86) ([@davidbrochart](https://github.com/davidbrochart)) 209 | 210 | ### Contributors to this release 211 | 212 | ([GitHub contributors page for this 
release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-09-19&to=2022-11-23&type=c)) 213 | 214 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-09-19..2022-11-23&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2022-09-19..2022-11-23&type=Issues) 215 | 216 | ## 0.0.20 217 | 218 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.19...503fbae2b5d54306c27b8fc9a195e4af1ec96c4e)) 219 | 220 | ### Merged PRs 221 | 222 | - Wait for server started before opening browser [#83](https://github.com/jupyter-server/fps/pull/83) ([@davidbrochart](https://github.com/davidbrochart)) 223 | - Switch to hatch [#82](https://github.com/jupyter-server/fps/pull/82) ([@davidbrochart](https://github.com/davidbrochart)) 224 | 225 | ### Contributors to this release 226 | 227 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-08-31&to=2022-09-19&type=c)) 228 | 229 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-08-31..2022-09-19&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2022-08-31..2022-09-19&type=Issues) 230 | 231 | ## 0.0.19 232 | 233 | No merged PRs 234 | 235 | ## 0.0.18 236 | 237 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.17...e1f6ea6f9a2f02e0e00bbea1dfbd0c6f52e4c23a)) 238 | 239 | ### Merged PRs 240 | 241 | - Use Rich in typer [#78](https://github.com/jupyter-server/fps/pull/78) ([@davidbrochart](https://github.com/davidbrochart)) 242 | 243 | ### Contributors to this release 244 | 245 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-08-30&to=2022-08-31&type=c)) 246 | 247 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-08-30..2022-08-31&type=Issues) 248 | 249 | ## 0.0.17 250 | 251 | No merged PRs 252 | 253 | ## 0.0.16 254 | 255 | No merged PRs 256 | 257 | ## 0.0.15 258 | 259 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.14...a5caf65d3a8c88c3be11b509812fae58bad3414a)) 260 | 261 | ### Merged PRs 262 | 263 | - Fix releasing of fps-uvicorn [#74](https://github.com/jupyter-server/fps/pull/74) ([@davidbrochart](https://github.com/davidbrochart)) 264 | 265 | ### Contributors to this release 266 | 267 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-08-29&to=2022-08-29&type=c)) 268 | 269 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-08-29..2022-08-29&type=Issues) 270 | 271 | ## 0.0.14 272 | 273 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.13...1d48dbfa3838e1f5635edcc7f3ced17714901518)) 274 | 275 | ### Merged PRs 276 | 277 | - Store Uvicorn CLI options, add possibility to pass query parameters [#72](https://github.com/jupyter-server/fps/pull/72) ([@davidbrochart](https://github.com/davidbrochart)) 278 | 279 | ### Contributors to this release 280 | 281 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-08-29&to=2022-08-29&type=c)) 282 | 283 | 
[@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-08-29..2022-08-29&type=Issues) 284 | 285 | ## 0.0.13 286 | 287 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.12...166041e3346e3fa6773f8d6f48b08dacee785cb5)) 288 | 289 | ### Merged PRs 290 | 291 | - Fix exit [#70](https://github.com/jupyter-server/fps/pull/70) ([@davidbrochart](https://github.com/davidbrochart)) 292 | - [pre-commit.ci] pre-commit autoupdate [#69](https://github.com/jupyter-server/fps/pull/69) ([@pre-commit-ci](https://github.com/pre-commit-ci)) 293 | - [pre-commit.ci] pre-commit autoupdate [#68](https://github.com/jupyter-server/fps/pull/68) ([@pre-commit-ci](https://github.com/pre-commit-ci)) 294 | 295 | ### Contributors to this release 296 | 297 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-07-14&to=2022-08-29&type=c)) 298 | 299 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-07-14..2022-08-29&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2022-07-14..2022-08-29&type=Issues) 300 | 301 | ## 0.0.12 302 | 303 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.11...724caf852e8952a5e7eb89c604d3d5d6179e4040)) 304 | 305 | ### Merged PRs 306 | 307 | - Rework application plugin [#66](https://github.com/jupyter-server/fps/pull/66) ([@davidbrochart](https://github.com/davidbrochart)) 308 | 309 | ### Contributors to this release 310 | 311 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-07-14&to=2022-07-14&type=c)) 312 | 313 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-07-14..2022-07-14&type=Issues) 314 | 315 | ## 0.0.11 316 | 317 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/v0.0.10...1474ca165a9e6aa8434ed630382fb111d065715b)) 318 | 319 | ### Merged PRs 320 | 321 | - Add application pluggin [#64](https://github.com/jupyter-server/fps/pull/64) ([@davidbrochart](https://github.com/davidbrochart)) 322 | - [pre-commit.ci] pre-commit autoupdate [#63](https://github.com/jupyter-server/fps/pull/63) ([@pre-commit-ci](https://github.com/pre-commit-ci)) 323 | - FPS is not experimental anymore [#62](https://github.com/jupyter-server/fps/pull/62) ([@davidbrochart](https://github.com/davidbrochart)) 324 | 325 | ### Contributors to this release 326 | 327 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2022-05-17&to=2022-07-14&type=c)) 328 | 329 | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2022-05-17..2022-07-14&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2022-05-17..2022-07-14&type=Issues) 330 | 331 | ## 0.0.10 332 | 333 | ([Full Changelog](https://github.com/jupyter-server/fps/compare/fps-0.0.9...189716c887dcd008561292f4d33d6a5f252a920f)) 334 | 335 | ### Merged PRs 336 | 337 | - Prepare for use with Jupyter Releaser [#60](https://github.com/jupyter-server/fps/pull/60) ([@davidbrochart](https://github.com/davidbrochart)) 338 | - Allow startup/shutdown to run in tests [#59](https://github.com/jupyter-server/fps/pull/59) ([@davidbrochart](https://github.com/davidbrochart)) 339 | - 
[pre-commit.ci] pre-commit autoupdate [#57](https://github.com/jupyter-server/fps/pull/57) ([@pre-commit-ci](https://github.com/pre-commit-ci)) 340 | 341 | ### Contributors to this release 342 | 343 | ([GitHub contributors page for this release](https://github.com/jupyter-server/fps/graphs/contributors?from=2021-10-29&to=2022-05-17&type=c)) 344 | 345 | [@adriendelsalle](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Aadriendelsalle+updated%3A2021-10-29..2022-05-17&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Adavidbrochart+updated%3A2021-10-29..2022-05-17&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Afcollonval+updated%3A2021-10-29..2022-05-17&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Ffps+involves%3Apre-commit-ci+updated%3A2021-10-29..2022-05-17&type=Issues) 346 | 347 | 0.0.9 (October 29, 2021) 348 | ======================== 349 | 350 | New features: 351 | - Add root_path to uvicorn plugin (#38 @davidborchart) 352 | - Add enabled_plugins config (#42 @adriendelsalle) 353 | 354 | Improvements: 355 | - Handle list delimiters for disabled_plugins (#41 #52 @adriendelsalle) 356 | - Improve log messages for conflicting routes (#43 @adriendelsalle) 357 | - Catch mounts masking routes (#45 @adriendelsalle) 358 | - Add tests on configuration (#46 @adriendelsalle) 359 | - housekeeping (#47 #51 @adriendelsalle #48 @davidborchart) 360 | 361 | 0.0.8 (September 22, 2021) 362 | ========================== 363 | 364 | New features: 365 | - Add capability to disable plugins (#30 @adriendelsalle) 366 | - Add a new hook to register exception handlers (#31 @adriendelsalle) 367 | - Add a builtin RedirectException (#33 @adriendelsalle) 368 | 369 | Improvements: 370 | - Group router logs (#28 @adriendelsalle) 371 | - Minor improvements on pip recipes (#34 @adriendelsalle) 372 | 373 | Bug fixes: 374 | - Fix interactive API docs (#28 @davidborchart) 375 | 376 | Breaking change: 377 | - Make uvicorn server a plugin (#32 #35 @adriendelsalle) 378 | 379 | 0.0.7 (September 13, 2021) 380 | ========================== 381 | 382 | New features: 383 | - Add testing module and `pytest` generic fixtures (#19 @adriendelsalle) 384 | 385 | Improvements: 386 | - Support `pluggy 1.0.0` and future releases (#15 @adriendelsalle) 387 | - Improve CLI parsing of plugins options (#17 @adriendelsalle) 388 | 389 | Bug fixes: 390 | - Fix `python 3.7` compatibility (#18 @adriendelsalle) 391 | 392 | Documentation: 393 | - Document testing module (#20 #21 @adriendelsalle) 394 | 395 | 0.0.6 (September 8, 2021) 396 | ========================= 397 | 398 | New features: 399 | - Add capability to pass router kwargs when registering it (#10 @davidbrochart) 400 | 401 | Documentation: 402 | - Remove note about CLI limited to FPS config, since plugins ones are now supported (#13 @davidbrochart) 403 | 404 | 0.0.5 (August 6, 2021) 405 | ====================== 406 | 407 | New features: 408 | - Add capability to pass any plugin configuration as a CLI extra argument (#5) 409 | 410 | Bug fixes: 411 | - Allow to pass a negative CLI flag `--no-` for boolean `open-browser` option 412 | - Fix colors based on status code for `uvicorn` logs (#7) 413 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | # Licensing terms 2 | 3 | This project is licensed under the terms of the 
Modified BSD License 4 | (also known as New or Revised or 3-Clause BSD), as follows: 5 | 6 | - Copyright (c) 2021-, Jupyter Development Team 7 | 8 | All rights reserved. 9 | 10 | Redistribution and use in source and binary forms, with or without 11 | modification, are permitted provided that the following conditions are met: 12 | 13 | Redistributions of source code must retain the above copyright notice, this 14 | list of conditions and the following disclaimer. 15 | 16 | Redistributions in binary form must reproduce the above copyright notice, this 17 | list of conditions and the following disclaimer in the documentation and/or 18 | other materials provided with the distribution. 19 | 20 | Neither the name of the Jupyter Development Team nor the names of its 21 | contributors may be used to endorse or promote products derived from this 22 | software without specific prior written permission. 23 | 24 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 25 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 26 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 27 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE 28 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 29 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 30 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 31 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 32 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 33 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 34 | 35 | ## About the Jupyter Development Team 36 | 37 | The Jupyter Development Team is the set of all contributors to the Jupyter project. 38 | This includes all of the Jupyter subprojects. 39 | 40 | The core team that coordinates development on GitHub can be found here: 41 | https://github.com/jupyter/. 42 | 43 | ## Our Copyright Policy 44 | 45 | Jupyter uses a shared copyright model. Each contributor maintains copyright 46 | over their contributions to Jupyter. But, it is important to note that these 47 | contributions are typically only changes to the repositories. Thus, the Jupyter 48 | source code, in its entirety is not the copyright of any single person or 49 | institution. Instead, it is the collective copyright of the entire Jupyter 50 | Development Team. If individual contributors want to maintain a record of what 51 | changes/contributions they have specific copyright on, they should indicate 52 | their copyright in the commit message of the change, when they commit the 53 | change to one of the Jupyter repositories. 54 | 55 | With this in mind, the following banner should be used in any source code file 56 | to indicate the copyright and license terms: 57 | 58 | # Copyright (c) Jupyter Development Team. 59 | # Distributed under the terms of the Modified BSD License. 
60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://github.com/jupyter-server/fps/actions/workflows/test.yml/badge.svg?query=branch%3Amain++)](https://github.com/jupyter-server/fps/actions/workflows/test.yml/badge.svg?query=branch%3Amain++) 2 | [![Code Coverage](https://img.shields.io/badge/coverage-100%25-green)](https://img.shields.io/badge/coverage-100%25-green) 3 | 4 | # FPS 5 | 6 | A system for creating modular, configurable, pluggable and concurrent applications. 7 | -------------------------------------------------------------------------------- /docs/api_reference.md: -------------------------------------------------------------------------------- 1 | # API reference 2 | 3 | ::: fps 4 | options: 5 | inherited_members: true 6 | unwrap_annotated: true 7 | members: 8 | - Context 9 | - Module 10 | - SharedValue 11 | - Value 12 | -------------------------------------------------------------------------------- /docs/guide.md: -------------------------------------------------------------------------------- 1 | ## The simplest application 2 | 3 | Let's create our first FPS application. Enter the following code in a file called `simple.py`: 4 | 5 | ```py 6 | from fps import Module 7 | 8 | class Main(Module): 9 | def __init__(self, name, **kwargs): 10 | super().__init__(name) 11 | self.config = kwargs 12 | 13 | async def start(self): 14 | print(self.config["greeting"]) 15 | 16 | async def stop(self): 17 | print(self.config["farewell"]) 18 | ``` 19 | 20 | And enter in the terminal: 21 | 22 | ```bash 23 | fps simple:Main --set greeting="Hello, World!" --set farewell="See you later!" 24 | ``` 25 | 26 | This should print `Hello, World!` and hang forever, which means that the application is running. To exit, press Ctrl-C. This should now print `See you later!` and return to the terminal prompt. 27 | 28 | What happened? 29 | 30 | - By entering `fps simple:Main`, we told FPS to run the module called `Main` in the `simple.py` file. 31 | - Options `--set greeting="Hello, World!"` and `--set farewell="See you later!"` told FPS to pass parameter keys `greeting` and `farewell` to `Main.__init__`'s keyword arguments, with values `"Hello, World!"` and `"See you later!"`, respectively. 32 | - In its startup phase (`start` method), `Main` prints the `greeting` parameter value. 33 | - After starting, the application runs until it is stopped. Pressing Ctrl-C stops the application, calling its teardown phase. 34 | - In its teardown phase (`stop` method), `Main` prints the `farewell` parameter value. 35 | 36 | ## Sharing objects between modules 37 | 38 | Now let's see how we can share objects between modules. 
Enter the following code in a file called `share.py`:
39 |
40 | ```py
41 | from anyio import Event, sleep
42 | from fps import Module
43 |
44 | class Main(Module):
45 |     def __init__(self, name):
46 |         super().__init__(name)
47 |         self.add_module(Publisher, "publisher")
48 |         self.add_module(Consumer, "consumer")
49 |
50 | class Publisher(Module):
51 |     async def start(self):
52 |         self.shared = Event()  # the object to share
53 |         self.put(self.shared, Event)  # publish the shared object as type Event
54 |         print("Published:", self.shared.is_set())
55 |         await self.shared.wait()  # wait for the shared object to be updated
56 |         self.exit_app()  # force the application to exit
57 |
58 |     async def stop(self):
59 |         print("Got:", self.shared.is_set())
60 |
61 |
62 | class Consumer(Module):
63 |     def __init__(self, name, wait=0):
64 |         super().__init__(name)
65 |         self.wait = float(wait)
66 |
67 |     async def start(self):
68 |         shared = await self.get(Event)  # request an object of type Event
69 |         print("Acquired:", shared.is_set())
70 |         await sleep(self.wait)  # wait before updating the shared object
71 |         shared.set()  # update the shared object
72 |         print("Updated:", shared.is_set())
73 | ```
74 |
75 | And enter in the terminal:
76 |
77 | ```bash
78 | fps share:Main
79 | ```
80 |
81 | You should see in the terminal:
82 | ```
83 | Published: False
84 | Acquired: False
85 | Updated: True
86 | Got: True
87 | ```
88 |
89 | Sharing objects between modules is based on types: a module (`Consumer`) requests an object of a given type (`Event`) with `await self.get`, and it eventually acquires it when another module (`Publisher`) publishes an object of this type with `self.put`. It is the same object that they are sharing, so if `Consumer` changes the object, `Publisher` sees it immediately.
90 |
91 | The `Consumer`'s default value for parameter `wait` is 0, which means that the shared object will be updated right away. If we set it to 0.5 seconds:
92 |
93 | ```bash
94 | fps share:Main --set consumer.wait=0.5
95 | ```
96 |
97 | You should see that the application hangs for half a second after the shared object is acquired. This illustrates that we can configure any nested module in the application, just by providing the path to its parameter in the CLI. If we provide a wrong parameter name, we get a nice error:
98 |
99 | ```bash
100 | fps share:Main --set consumer.wrong_parameter=0.5
101 | ```
102 |
103 | ```
104 | RuntimeError: Cannot instantiate module 'root_module.consumer': Consumer.__init__() got an unexpected keyword argument 'wrong_parameter'
105 | ```
106 |
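As the tree of modules and parameters grows, it can be useful to inspect the configuration from the command line. According to the changelog, the CLI also provides `--show-config` and `--help-all` options; their exact output is not reproduced here, but the intent is roughly the following (treat the pairing of flag and behaviour as an assumption):

```bash
# Show the application configuration:
fps share:Main --set consumer.wait=0.5 --show-config
# Show help for all modules and their parameters:
fps share:Main --help-all
```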
107 | ## A pluggable web server
108 |
109 | FPS comes with a `FastAPIModule` that publishes a `FastAPI` application. This `FastAPI` object can be shared with other modules, which can add routes to it. As part of its startup phase, `FastAPIModule` serves the `FastAPI` application with a web server. Enter the following code in a file called `server.py`:
110 |
111 | ```py
112 | from fastapi import FastAPI
113 | from fps import Module
114 | from fps.web.fastapi import FastAPIModule
115 | from pydantic import BaseModel
116 |
117 | class Main(Module):
118 |     def __init__(self, name):
119 |         super().__init__(name)
120 |         self.add_module(FastAPIModule, "fastapi")
121 |         self.add_module(Router, "router")
122 |
123 | class Router(Module):
124 |     def __init__(self, name, **kwargs):
125 |         super().__init__(name)
126 |         self.config = Config(**kwargs)
127 |
128 |     async def prepare(self):
129 |         app = await self.get(FastAPI)
130 |         @app.get("/")
131 |         def read_root():
132 |             return {self.config.key: self.config.value}
133 |
134 | class Config(BaseModel):
135 |     key: str = "count"
136 |     value: int = 3
137 | ```
138 |
139 | And enter in the terminal:
140 |
141 | ```bash
142 | fps server:Main
143 | ```
144 |
145 | Now if you open a browser at `http://127.0.0.1:8000`, you should see:
146 |
147 | ```json
148 | {"count":3}
149 | ```
150 |
151 | Note that `Router` has a `prepare` method. It is similar to the `start` method, but it is executed just before. Typically, this is used by modules like `FastAPIModule`, which must give every other module a chance to register its routes on the `FastAPI` application before running the server in `start`, because routes cannot be added once the server has started.
152 |
153 | See how `Router` uses a Pydantic model `Config` to validate its configuration. With this, running the application with a value of the wrong type will not work:
154 |
155 | ```bash
156 | fps server:Main --set router.value=foo
157 | # RuntimeError: Cannot instantiate module 'root_module.router': 1 validation error for Config
158 | # value
159 | #   Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='foo', input_type=str]
160 | #     For further information visit https://errors.pydantic.dev/2.10/v/int_parsing
161 | ```
162 |
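With correctly-typed values, the same `--set` mechanism shown in the sharing example should work here too, for instance (hypothetical values):

```bash
fps server:Main --set router.key=total --set router.value=10
```

after which `http://127.0.0.1:8000` would return `{"total":10}`.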
163 | [Jupyverse](https://github.com/jupyter-server/jupyverse) uses `FastAPIModule` in order to compose a Jupyter server from swappable plugins.
164 |
165 | ## A declarative application
166 |
167 | It is possible to configure an application entirely as a Python dictionary or a JSON file. Let's rewrite the previous example in `router.py`, and just keep the code for the `Router` module:
168 |
169 | ```py
170 | from fastapi import FastAPI
171 | from fps import Module
172 | from pydantic import BaseModel
173 |
174 | class Router(Module):
175 |     def __init__(self, name, **kwargs):
176 |         super().__init__(name)
177 |         self.config = Config(**kwargs)
178 |
179 |     async def prepare(self):
180 |         app = await self.get(FastAPI)
181 |         @app.get("/")
182 |         def read_root():
183 |             return {self.config.key: self.config.value}
184 |
185 | class Config(BaseModel):
186 |     key: str = "count"
187 |     value: int = 3
188 | ```
189 |
190 | Now we can write a `config.json` file like so:
191 |
192 | ```json
193 | {
194 |     "main": {
195 |         "type": "fps_module",
196 |         "modules": {
197 |             "fastapi": {
198 |                 "type": "fps.web.fastapi:FastAPIModule"
199 |             },
200 |             "router": {
201 |                 "type": "router:Router",
202 |                 "config": {
203 |                     "value": 7
204 |                 }
205 |             }
206 |         }
207 |     }
208 | }
209 | ```
210 |
211 | And launch our application with:
212 |
213 | ```bash
214 | fps --config config.json
215 | ```
216 |
217 | Note that the `type` field in `config.json` can be a path to a module, like `fps.web.fastapi:FastAPIModule` or `router:Router`, or a module name registered in the `fps.modules` entry-point group, like `fps_module`, which is a base FPS `Module`.
218 |
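For reference, FPS's own `pyproject.toml` registers `fps_module = "fps:Module"` under that group, and a third-party package can do the same to make its modules available by name. A sketch (the distribution and module names below are made up for illustration):

```toml
# pyproject.toml of a hypothetical "my-package" distribution
[project.entry-points."fps.modules"]
my_module = "my_package.modules:MyModule"
```

After installing such a package, `"type": "my_module"` should be usable in `config.json` just like `"type": "fps_module"`.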
219 | ## A note on concurrency
220 |
221 | The following `Module` methods are run as background tasks:
222 |
223 | - `prepare`
224 | - `start`
225 | - `stop`
226 |
227 | FPS will consider each of them to have completed if they run to completion, or if they call `self.done()`. Let's consider the following example:
228 |
229 | ```py
230 | from anyio import sleep
231 | from fps import Module
232 |
233 | class MyModule(Module):
234 |     async def start(self):
235 |         await sleep(float("inf"))
236 | ```
237 |
238 | FPS will notice that this module never completes the startup phase, because its `start` method hangs indefinitely. By default, this will time out after one second. The solution is to launch a background task and then explicitly call `self.done()`, like so:
239 |
240 | ```py
241 | from anyio import create_task_group, sleep
242 | from fps import Module
243 |
244 | class MyModule(Module):
245 |     async def start(self):
246 |         async with create_task_group() as tg:
247 |             tg.start_soon(sleep, float("inf"))
248 |             self.done()
249 | ```
250 |
251 | ## Contexts
252 |
253 | FPS offers a `Context` class that allows sharing objects independently of modules. For instance, say you want to share a file object. Here is how you would do it:
254 |
255 | ```py
256 | from io import TextIOWrapper
257 | from anyio import run
258 | from fps import Context
259 |
260 | async def main():
261 |     async with Context() as context:
262 |         file = open("log.txt", "w")
263 |         print("File opened")
264 |
265 |         def teardown_callback():
266 |             file.close()
267 |             print("File closed")
268 |
269 |         shared_file = context.put(file, teardown_callback=teardown_callback)
270 |         print("File object published")
271 |         acquired_file = await context.get(TextIOWrapper)
272 |         print("File object acquired")
273 |         assert acquired_file.unwrap() is file
274 |
275 |         print("Writing to file")
276 |         acquired_file.unwrap().write("Hello, World!\n")
277 |         acquired_file.drop()
278 |         print("File object dropped")
279 |         await shared_file.freed()
280 |
281 | run(main)
282 | ```
283 |
284 | Running this code will print:
285 |
286 | ```
287 | File opened
288 | File object published
289 | File object acquired
290 | Writing to file
291 | File object dropped
292 | File closed
293 | ```
294 |
295 | Let's see what happened:
296 | - We created an object that we want to share, here `file`. This file has to be closed eventually.
297 | - We published it in the `context`, with `context.put(file, teardown_callback=teardown_callback)`. The `teardown_callback` will be called when the context is closed. We got a `shared_file` handle that we can use to check whether the object is still in use (see below).
298 | - We acquired the file object with `await context.get(TextIOWrapper)`, and we got an `acquired_file` handle that we can use to drop the object when we are done using it. Note that acquiring an object is usually done in some other part of the program, where only the `context` is available.
299 | - We wrote to the file using `acquired_file.unwrap().write("Hello, World!\n")`. Note that we call `unwrap()` to get the actual object, since our handle is a wrapper around the object.
300 | - We dropped the file object with `acquired_file.drop()`, notifying the `shared_file` that we are done using it and that from our point of view it is safe to close it.
301 | - The publisher can check that the published file is not used anymore with `await shared_file.freed()`.
302 | - When the `context` is closed, it waits for every published object to be freed and then proceeds with their teardown, if any.
303 |
304 | Contexts ensure that objects are shared safely by their "owner" and that they are torn down when they are no longer being used, by keeping track of their "borrowers". Borrowers must collaborate by explicitly dropping objects when they are done using them. Owners can explicitly check that their objects are free to be disposed of, although this is optional.
305 |
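Internally, a `Context` stores each published object in a `SharedValue`, which is also part of the public API (see the API reference). Here is a minimal sketch of using `SharedValue` directly, without a `Context`; the exact semantics are best checked against the API reference:

```py
from anyio import run
from fps import SharedValue

async def main():
    # Share a plain string, allowing at most one borrower at a time.
    async with SharedValue("Hello, World!", max_borrowers=1) as shared:
        value = await shared.get()  # borrow the shared value
        print(value.unwrap())       # access the inner object
        value.drop()                # we are done with it
        await shared.freed()        # no borrower left
    # Leaving the `async with` block closes the shared value.

run(main)
```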
306 | ## Signals
307 |
308 | FPS offers a `Signal` class which allows one part of the code to send values that can be received in another part. One can listen to a signal by connecting a callback to it, or simply by iterating over the values it emits.
309 |
310 | The following code uses a callback:
311 |
312 | ```py
313 | from anyio import run
314 | from fps import Signal
315 |
316 | async def main():
317 |     signal = Signal()
318 |
319 |     async def callback(value):
320 |         print("Received:", value)
321 |
322 |     signal.connect(callback)
323 |
324 |     await signal.emit("Hello")
325 |     await signal.emit("World!")
326 |
327 | run(main)
328 |
329 | # prints:
330 | # Received: Hello
331 | # Received: World!
332 | ```
333 |
334 | And the following code uses an iterator:
335 |
336 | ```py
337 | from anyio import TASK_STATUS_IGNORED, create_task_group, run
338 | from anyio.abc import TaskStatus
339 | from fps import Signal
340 |
341 | async def main():
342 |     signal = Signal()
343 |
344 |     async def iterate_signal(*, task_status: TaskStatus[None] = TASK_STATUS_IGNORED):
345 |         async with signal.iterate() as iterator:
346 |             task_status.started()
347 |             async for value in iterator:
348 |                 if not value:
349 |                     return
350 |
351 |                 print("Received:", value)
352 |
353 |
354 |     async with create_task_group() as tg:
355 |         await tg.start(iterate_signal)
356 |
357 |         await signal.emit("Hello")
358 |         await signal.emit("World!")
359 |         await signal.emit("")
360 |
361 | run(main)
362 |
363 | # prints:
364 | # Received: Hello
365 | # Received: World!
366 | ```
367 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | FPS is a Fast Pluggable System. It was originally designed to create [Jupyverse](https://github.com/jupyter-server/jupyverse), a Jupyter server that is composed of plugins. But it is a generic framework that can be used to create any type of application, with the following features:
2 |
3 | - **modularity**: an application is made up of modules that are arranged in a hierarchical tree.
4 | - **configuration**: each module can be configured with a set of parameters accessible from the CLI, and an application can be created declaratively as a Python dictionary or a JSON file.
5 | - **pluggability**: modules can share objects, allowing the use of late binding to connect plugins at runtime.
6 | - **concurrency**: modules have startup and teardown phases for managing asynchronous resources safely.
7 |
--------------------------------------------------------------------------------
/docs/install.md:
--------------------------------------------------------------------------------
1 | FPS can be installed through [PyPI](https://pypi.org) or [conda-forge](https://conda-forge.org).
2 |
3 | ## With `pip`
4 |
5 | ```bash
6 | pip install fps[web]
7 | ```
8 |
9 | ## With `micromamba`
10 |
11 | We recommend using `micromamba` to manage `conda-forge` environments (see `micromamba`'s
12 | [installation instructions](https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html)).
13 | First create an environment, here called `my-env`, and activate it:
14 | ```bash
15 | micromamba create -n my-env
16 | micromamba activate my-env
17 | ```
18 | Then install `fps`:
19 |
20 | ```bash
21 | micromamba install fps
22 | ```
23 |
24 | ## Development install
25 |
26 | You first need to clone the repository:
27 | ```bash
28 | git clone https://github.com/jupyter-server/fps.git
29 | cd fps
30 | ```
31 | We recommend working in a conda environment.
In order to build `fps`, you will need 32 | `pip`: 33 | ```bash 34 | micromamba create -n fps-dev 35 | micromamba activate fps-dev 36 | micromamba install pip 37 | ``` 38 | Then install `fps` in editable mode: 39 | ```bash 40 | pip install -e ".[web,test,docs]" 41 | ``` 42 | -------------------------------------------------------------------------------- /docs/jupyter.svg: -------------------------------------------------------------------------------- 1 | 2 | Group.svg 3 | Created using Figma 0.90 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: FPS 2 | site_description: Fast Pluggable System 3 | repo_url: https://github.com/jupyter-server/fps 4 | 5 | theme: 6 | name: 'material' 7 | palette: 8 | - scheme: default 9 | primary: 'black' 10 | accent: 'black' 11 | toggle: 12 | icon: material/lightbulb-outline 13 | name: Switch to dark mode 14 | - scheme: slate 15 | primary: 'black' 16 | accent: 'black' 17 | toggle: 18 | icon: material/lightbulb 19 | name: Switch to light mode 20 | features: 21 | - navigation.instant 22 | - navigation.top 23 | - navigation.sections 24 | - search.suggest 25 | - search.highlight 26 | - content.code.annotate 27 | - content.code.copy 28 | logo: jupyter.svg 29 | 30 | nav: 31 | - Overview: index.md 32 | - install.md 33 | - guide.md 34 | - api_reference.md 35 | 36 | markdown_extensions: 37 | - admonition 38 | - pymdownx.details 39 | - pymdownx.superfences 40 | 41 | plugins: 42 | - search 43 | - mkdocstrings: 44 | default_handler: python 45 | handlers: 46 | python: 47 | options: 48 | show_source: false 49 | docstring_style: google 50 | find_stubs_package: true 51 | docstring_options: 52 | ignore_init_summary: false 53 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "fps" 7 | dynamic = ["version"] 8 | description = "A system for creating modular, configurable, pluggable and concurrent applications" 9 | readme = "README.md" 10 | authors = [ 11 | { name = "Jupyter Development Team", email = "jupyter@googlegroups.com" }, 12 | ] 13 | license = {file = "LICENSE"} 14 | classifiers = [ 15 | "Development Status :: 4 - Beta", 16 | "Intended Audience :: Developers", 17 | "License :: OSI Approved :: BSD License", 18 | "Typing :: Typed", 19 | "Programming Language :: Python", 20 | "Programming Language :: Python :: 3", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | "Programming Language :: Python :: 3.11", 24 | "Programming Language :: Python :: 3.12", 25 | "Programming Language :: Python :: 3.13", 26 | ] 27 | requires-python = ">= 3.9" 28 | dependencies = [ 29 | "anyio", 30 | "anyioutils >=0.7.0,<0.8.0", 31 | "structlog", 32 | "exceptiongroup; python_version<'3.11'", 33 | "importlib_metadata >=3.6; python_version<'3.10'", 
34 | ] 35 | 36 | [project.optional-dependencies] 37 | click = [ 38 | "click >=8.1.8,<9", 39 | ] 40 | fastapi = [ 41 | "fastapi", 42 | ] 43 | anycorn = [ 44 | "anycorn >=0.18.1,<0.19.0", 45 | ] 46 | 47 | [dependency-groups] 48 | test = [ 49 | "pytest >=8,<9", 50 | "trio >=0.27.0,<0.28", 51 | "mypy", 52 | "ruff", 53 | "coverage[toml] >=7,<8", 54 | "httpx", 55 | "pydantic", 56 | ] 57 | docs = [ 58 | "mkdocs", 59 | "mkdocs-material", 60 | "mkdocstrings[python]", 61 | ] 62 | 63 | [project.urls] 64 | Source = "https://github.com/jupyter-server/fps" 65 | Issues = "https://github.com/jupyter-server/fps/issues" 66 | 67 | [project.scripts] 68 | fps = "fps.cli._cli:main" 69 | 70 | [project.entry-points] 71 | "fps.modules" = {fps_module = "fps:Module"} 72 | 73 | [tool.hatch.version] 74 | path = "src/fps/__init__.py" 75 | 76 | [tool.hatch.build.targets.sdist] 77 | include = ["src/fps"] 78 | 79 | [tool.hatch.build.targets.wheel] 80 | ignore-vcs = true 81 | packages = ["src/fps"] 82 | 83 | [tool.coverage.run] 84 | source = ["fps", "tests"] 85 | 86 | [tool.coverage.report] 87 | show_missing = true 88 | -------------------------------------------------------------------------------- /src/fps/__init__.py: -------------------------------------------------------------------------------- 1 | from ._context import Context as Context 2 | from ._context import SharedValue as SharedValue 3 | from ._context import Value as Value 4 | from ._module import Module as Module 5 | from ._module import initialize as initialize 6 | from ._config import get_root_module as get_root_module 7 | from ._config import merge_config as merge_config 8 | from ._signal import Signal as Signal 9 | 10 | __version__ = "0.4.0" 11 | -------------------------------------------------------------------------------- /src/fps/_config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from copy import deepcopy 4 | from typing import Any 5 | 6 | from ._module import Module 7 | from ._importer import import_from_string 8 | 9 | 10 | def get_root_module(config: dict[str, Any]) -> Module: 11 | for module_name, module_info in config.items(): 12 | module_config = module_info.get("config", {}) 13 | module_type = import_from_string(module_info["type"]) 14 | root_module = module_type(module_name, **module_config) 15 | root_module._config = module_config 16 | submodules = module_info.get("modules", {}) 17 | for submodule_name, submodule_info in submodules.items(): 18 | submodule_config = root_module._uninitialized_modules.setdefault( 19 | submodule_name, {} 20 | ).setdefault("config", {}) 21 | submodule_config.update(submodule_info.get("config", {})) 22 | submodule_type = submodule_info.get("type") 23 | if submodule_type is not None: 24 | root_module._uninitialized_modules[submodule_name]["type"] = ( 25 | submodule_type 26 | ) 27 | root_module._uninitialized_modules[submodule_name]["modules"] = ( 28 | submodule_info.get("modules", {}) 29 | ) 30 | break 31 | return root_module 32 | 33 | 34 | def merge_config( 35 | config: dict[str, Any], override: dict[str, Any], root: bool = True 36 | ) -> dict[str, Any]: 37 | if root: 38 | config = deepcopy(config) 39 | for key, val in override.items(): 40 | if key in config: 41 | if isinstance(val, dict): 42 | config[key] = merge_config(config[key], override[key], root=False) 43 | else: 44 | config[key] = override[key] 45 | else: 46 | config[key] = val 47 | return config 48 | 49 | 50 | def dump_config(config: dict[str, Any]) -> str: 51 | 
config_lines: list[str] = [] 52 | _dump_config(config_lines, config, "") 53 | return "\n".join(config_lines) 54 | 55 | 56 | def _dump_config(config_lines: list[str], config: dict[str, Any], path: str) -> None: 57 | if path: 58 | path += "." 59 | for name, info in config.items(): 60 | _config = info.get("config", {}) 61 | for param, value in _config.items(): 62 | config_lines.append(f"{path}{name}.{param}={value}") 63 | for module_name, module_info in info.get("modules", {}).items(): 64 | _dump_config(config_lines, {module_name: module_info}, f"{path}{name}") 65 | 66 | 67 | def get_config_description(root_module: Module) -> str: 68 | description_lines: list[str] = [] 69 | _get_config_description(description_lines, root_module) 70 | return "\n".join(description_lines) 71 | 72 | 73 | def _get_config_description(description_lines: list[str], module: Module) -> None: 74 | path = module.get_path(root=False) 75 | if path: 76 | path += "." 77 | if module.config is not None: 78 | for key, value in module.config.model_fields.items(): 79 | title = "" if value.title is None else f" {value.title}" 80 | description_lines.append(f"{path}{key}:{title}") 81 | description_lines.append(f" Default: {value.default}") 82 | description_lines.append(f" Type: {value.annotation}") 83 | description_lines.append(f" Description: {value.description}") 84 | for submodule in module.modules.values(): 85 | _get_config_description(description_lines, submodule) 86 | -------------------------------------------------------------------------------- /src/fps/_context.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from collections.abc import Callable, Awaitable 4 | from contextlib import AsyncExitStack, ExitStack 5 | from functools import lru_cache, partial 6 | from inspect import isawaitable, signature 7 | from typing import ( 8 | Any, 9 | AsyncContextManager, 10 | ContextManager, 11 | Generic, 12 | Iterable, 13 | TypeVar, 14 | cast, 15 | ) 16 | 17 | from anyio import Event, create_task_group, fail_after, move_on_after 18 | 19 | T = TypeVar("T") 20 | 21 | 22 | class Value(Generic[T]): 23 | """ 24 | A `Value` can be obtained from a shared value by calling `await shared_value.get()`, 25 | and can be dropped by calling `value.drop()`. The inner value can be accessed by 26 | calling `value.unwrap()`, unless it was already dropped. 27 | """ 28 | 29 | def __init__(self, shared_value: SharedValue[T]) -> None: 30 | """ 31 | Args: 32 | shared_value: The shared value this `Value` refers to. 33 | """ 34 | self._shared_value = shared_value 35 | 36 | def unwrap(self) -> T: 37 | """ 38 | Get the inner value that is shared. 39 | 40 | Raises: 41 | RuntimeError: If the value was already dropped. 42 | 43 | Returns: 44 | The inner value. 45 | """ 46 | if self not in self._shared_value._borrowers: 47 | raise RuntimeError("Already dropped") 48 | 49 | return self._shared_value._value 50 | 51 | def drop(self) -> None: 52 | """ 53 | Drop the value. 54 | """ 55 | self._shared_value._drop(self) 56 | 57 | 58 | class SharedValue(Generic[T]): 59 | """ 60 | A value that can be shared with so-called borrowers. A borrower borrows a shared value by 61 | calling `await shared_value.get()`, which returns a `Value`. The shared value can be borrowed 62 | any number of times at the same time, unless specified by `max_borrowers`. All borrowers must 63 | drop their `Value` before the shared value can be closed. 
The shared value can be closed 64 | explicitly by calling `await shared_value.aclose()`, or by using an async context manager. 65 | """ 66 | 67 | def __init__( 68 | self, 69 | value: T, 70 | max_borrowers: float = float("inf"), 71 | manage: bool = False, 72 | teardown_callback: Callable[..., Any] 73 | | Callable[..., Awaitable[Any]] 74 | | None = None, 75 | close_timeout: float | None = None, 76 | ) -> None: 77 | """ 78 | Args: 79 | value: The inner value that is shared. 80 | max_borrowers: The number of times the shared value can be borrowed at the same time. 81 | manage: Whether to use the (async) context manager of the inner value 82 | for setup/teardown. 83 | teardown_callback: The callback to call when closing the shared value. 84 | close_timeout: The timeout to use when closing the shared value. 85 | """ 86 | self._value = value 87 | self._max_borrowers = max_borrowers 88 | self._manage = manage 89 | self._teardown_callback = teardown_callback 90 | self._close_timeout = close_timeout 91 | self._borrowers: set[Value] = set() 92 | self._dropped = Event() 93 | self._exit_stack: ExitStack | None = None 94 | self._async_exit_stack: AsyncExitStack | None = None 95 | self._opened = False 96 | 97 | def _drop(self, borrower: Value) -> None: 98 | if borrower in self._borrowers: 99 | self._borrowers.remove(borrower) 100 | self._dropped.set() 101 | self._dropped = Event() 102 | 103 | async def __aenter__(self) -> SharedValue: 104 | await self._maybe_open() 105 | return self 106 | 107 | async def __aexit__(self, exc_type, exc_value, exc_tb): 108 | await self.aclose(_exc_type=exc_type, _exc_value=exc_value, _exc_tb=exc_tb) 109 | 110 | async def get(self, timeout: float = float("inf")) -> Value: 111 | """ 112 | Borrow the shared value. 113 | 114 | Args: 115 | timeout: The time to wait for the value to be dropped. 116 | 117 | Returns: 118 | The borrowed value. 119 | 120 | Raises: 121 | TimeoutError: If the value could not be borrowed in time. 122 | """ 123 | await self._maybe_open() 124 | value = Value(self) 125 | with fail_after(timeout): 126 | while True: 127 | if len(self._borrowers) < self._max_borrowers: 128 | self._borrowers.add(value) 129 | return value 130 | await self.freed() 131 | 132 | async def freed(self, timeout: float = float("inf")) -> None: 133 | """ 134 | Wait for all borrowers to drop their value. 135 | 136 | Args: 137 | timeout: The time to wait for all borrowers to drop their value. 138 | 139 | Raises: 140 | TimeoutError: If the shared value was not freed in time. 
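        For example (a minimal sketch, adapted from this project's test suite):
        ```py
        shared_value = SharedValue("foo")
        value = await shared_value.get()  # borrow the inner value
        value.drop()                      # release the borrow
        await shared_value.freed()        # returns immediately: no borrowers are left
        ```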
141 | """ 142 | with fail_after(timeout): 143 | while True: 144 | if not self._borrowers: 145 | return 146 | await self._dropped.wait() 147 | 148 | async def _maybe_open(self) -> None: 149 | if not self._manage or self._opened: 150 | return 151 | 152 | self._opened = True 153 | 154 | if hasattr(self._value, "__aenter__"): 155 | async with AsyncExitStack() as async_exit_stack: 156 | self._value = await async_exit_stack.enter_async_context( 157 | cast(AsyncContextManager, self._value) 158 | ) 159 | self._async_exit_stack = async_exit_stack.pop_all() 160 | return 161 | elif hasattr(self._value, "__enter__"): 162 | with ExitStack() as exit_stack: 163 | self._value = exit_stack.enter_context( 164 | cast(ContextManager, self._value) 165 | ) 166 | self._exit_stack = exit_stack.pop_all() 167 | return 168 | 169 | async def aclose( 170 | self, 171 | *, 172 | timeout: float | None = None, 173 | _exc_type=None, 174 | _exc_value: BaseException | None = None, 175 | _exc_tb=None, 176 | ) -> None: 177 | """ 178 | Wait for all borrowers to drop their value, and tear down the shared value. 179 | 180 | Args: 181 | timeout: The time to wait for all borrowers to drop their value. 182 | 183 | Raises: 184 | TimeoutError: If the shared value could not be closed in time. 185 | """ 186 | if timeout is None: 187 | timeout = self._close_timeout 188 | if timeout is None: 189 | timeout = float("inf") 190 | with move_on_after(timeout) as scope: 191 | await self.freed() 192 | 193 | if self._async_exit_stack is not None: 194 | await self._async_exit_stack.__aexit__(_exc_type, _exc_value, _exc_tb) 195 | self._async_exit_stack = None 196 | if self._exit_stack is not None: 197 | self._exit_stack.__exit__(_exc_type, _exc_value, _exc_tb) 198 | self._exit_stack = None 199 | 200 | if self._teardown_callback is not None: 201 | await call(self._teardown_callback, _exc_value) 202 | 203 | if scope.cancelled_caught: 204 | raise TimeoutError 205 | 206 | 207 | class Context: 208 | """ 209 | A context allows to share values. When a shared value is put in a context, 210 | it can be borrowed by calling `await context.get(value_type)`, where `value_type` 211 | is the type of the desired value. 212 | """ 213 | 214 | def __init__(self, *, close_timeout: float | None = None): 215 | """ 216 | Args: 217 | close_timeout: The timeout to use when closing the context. 218 | """ 219 | self._close_timeout = close_timeout 220 | self._context: dict[int, SharedValue] = {} 221 | self._value_added = Event() 222 | self._closed = False 223 | self._teardown_callbacks: list[ 224 | Callable[..., Any] | Callable[..., Awaitable[Any]] 225 | ] = [] 226 | 227 | async def __aenter__(self): 228 | return self 229 | 230 | async def __aexit__(self, exc_type, exc_value, exc_tb): 231 | await self.aclose(_exc_type=exc_type, _exc_value=exc_value, _exc_tb=exc_tb) 232 | 233 | def _get_value_types( 234 | self, value: Any, types: Iterable | Any | None = None 235 | ) -> Iterable: 236 | types = types if types is not None else [type(value)] 237 | try: 238 | for value_type in types: 239 | break 240 | except TypeError: 241 | types = [types] 242 | return types 243 | 244 | def _check_closed(self): 245 | if self._closed: 246 | raise RuntimeError("Context is closed") 247 | 248 | def add_teardown_callback( 249 | self, 250 | teardown_callback: Callable[..., Any] | Callable[..., Awaitable[Any]], 251 | ) -> None: 252 | """ 253 | Register a callback that will be called at context teardown. The callbacks 254 | will be called in the inverse order than they were added. 
255 | 256 | Args: 257 | teardown_callback: The callback to add. 258 | """ 259 | self._teardown_callbacks.append(teardown_callback) 260 | 261 | def put( 262 | self, 263 | value: T, 264 | types: Iterable | Any | None = None, 265 | max_borrowers: float = float("inf"), 266 | manage: bool = False, 267 | teardown_callback: Callable[..., Any] 268 | | Callable[..., Awaitable[Any]] 269 | | None = None, 270 | shared_value: SharedValue[T] | None = None, 271 | ) -> SharedValue[T]: 272 | """ 273 | Put a value in the context so that it can be shared. 274 | 275 | Args: 276 | value: The value to put in the context. 277 | types: The type(s) to register the value as. If not 278 | provided, the value type will be used. 279 | max_borrowers: The number of times the shared value can be borrowed at the same time. 280 | manage: Whether to use the (async) context manager of the value 281 | for setup/teardown. 282 | teardown_callback: An optional callback to call when the context is closed. 283 | 284 | Returns: 285 | The shared value. 286 | """ 287 | self._check_closed() 288 | if shared_value is not None: 289 | _shared_value = shared_value 290 | value = _shared_value._value 291 | else: 292 | _shared_value = SharedValue( 293 | value, 294 | max_borrowers=max_borrowers, 295 | manage=manage, 296 | teardown_callback=teardown_callback, 297 | ) 298 | _types = self._get_value_types(value, types) 299 | for value_type in _types: 300 | value_type_id = id(value_type) 301 | if value_type_id in self._context: 302 | raise RuntimeError(f'Value type "{value_type}" already exists') 303 | self._context[value_type_id] = _shared_value 304 | self._value_added.set() 305 | self._value_added = Event() 306 | return _shared_value 307 | 308 | async def get(self, value_type: type[T], timeout: float = float("inf")) -> Value[T]: 309 | """ 310 | Get a value from the context, with the given type. 311 | The value will be returned if/when it is put in the context and when it accepts 312 | to be borrowed (borrowing can be limited with a maximum number of borrowers). 313 | 314 | Args: 315 | value_type: The type of the value to get. 316 | timeout: The time to wait to get the value. 317 | 318 | Returns: 319 | The borrowed `Value`. 320 | 321 | Raises: 322 | TimeoutError: If the value could not be borrowed in time. 323 | """ 324 | self._check_closed() 325 | value_type_id = id(value_type) 326 | while True: 327 | if value_type_id in self._context: 328 | shared_value = self._context[value_type_id] 329 | return await shared_value.get(timeout) 330 | await self._value_added.wait() 331 | 332 | async def aclose( 333 | self, 334 | *, 335 | timeout: float | None = None, 336 | _exc_type=None, 337 | _exc_value: BaseException | None = None, 338 | _exc_tb=None, 339 | ) -> None: 340 | """ 341 | Close the context, after all shared values that were borrowed have been dropped. 342 | The shared values will be torn down, if applicable. 343 | 344 | Args: 345 | timeout: The time to wait for all shared values to be freed. 346 | 347 | Raises: 348 | TimeoutError: If the context could not be closed in time. 
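        For example (a minimal sketch, adapted from this project's test suite; the
        async context manager calls `aclose()` on exit):
        ```py
        async with Context() as context:
            context.put("foo")
            value = await context.get(str)
            assert value.unwrap() == "foo"
            value.drop()  # every borrowed value must be dropped before the context can close
        ```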
349 | """ 350 | if timeout is None: 351 | timeout = self._close_timeout 352 | if timeout is None: 353 | timeout = float("inf") 354 | with fail_after(timeout): 355 | async with create_task_group() as tg: 356 | for shared_value in self._context.values(): 357 | tg.start_soon( 358 | partial( 359 | shared_value.aclose, 360 | _exc_type=_exc_type, 361 | _exc_value=_exc_value, 362 | _exc_tb=_exc_tb, 363 | ) 364 | ) 365 | for callback in self._teardown_callbacks[::-1]: 366 | await call(callback, _exc_value) 367 | self._closed = True 368 | 369 | 370 | @lru_cache(maxsize=1024) 371 | def count_parameters(func: Callable) -> int: 372 | """Count the number of parameters in a callable""" 373 | return len(signature(func).parameters) 374 | 375 | 376 | async def call( 377 | callback: Callable[..., Any] | Callable[..., Awaitable[Any]], 378 | exc_value: BaseException | None, 379 | ) -> None: 380 | param_nb = count_parameters(callback) 381 | params = (exc_value,) 382 | res = callback(*params[:param_nb]) 383 | if isawaitable(res): 384 | await res 385 | -------------------------------------------------------------------------------- /src/fps/_importer.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import importlib 4 | import sys 5 | from typing import Any 6 | 7 | if sys.version_info < (3, 10): # pragma: nocover 8 | from importlib_metadata import entry_points 9 | else: 10 | from importlib.metadata import entry_points 11 | 12 | 13 | class ImportFromStringError(Exception): 14 | pass 15 | 16 | 17 | def import_from_string(import_str: Any) -> Any: 18 | if not isinstance(import_str, str): 19 | return import_str 20 | 21 | if ":" not in import_str: 22 | # this is an entry-point in the "fps.modules" group 23 | for ep in entry_points(group="fps.modules"): 24 | if ep.name == import_str: 25 | return ep.load() 26 | raise RuntimeError( 27 | f'Module could not be found in entry-point group "fps.modules": {import_str}' 28 | ) 29 | 30 | module_str, _, attrs_str = import_str.partition(":") 31 | try: 32 | module = importlib.import_module(module_str) 33 | except ModuleNotFoundError as exc: 34 | if exc.name != module_str: # pragma: nocover 35 | raise exc from None 36 | message = 'Could not import module "{module_str}".' 37 | raise ImportFromStringError(message.format(module_str=module_str)) 38 | 39 | instance = module 40 | try: 41 | for attr_str in attrs_str.split("."): 42 | instance = getattr(instance, attr_str) 43 | except AttributeError: 44 | message = 'Attribute "{attrs_str}" not found in module "{module_str}".' 
45 | raise ImportFromStringError( 46 | message.format(attrs_str=attrs_str, module_str=module_str) 47 | ) 48 | 49 | return instance 50 | -------------------------------------------------------------------------------- /src/fps/_module.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | import sys 5 | 6 | from collections.abc import Callable, Awaitable 7 | from contextlib import AsyncExitStack 8 | from inspect import isawaitable, signature, _empty 9 | from typing import TypeVar, Any, Iterable, cast 10 | 11 | import anyio 12 | import structlog 13 | from anyio import Event, create_task_group, fail_after, move_on_after 14 | from anyioutils import create_task, wait, FIRST_COMPLETED 15 | 16 | from ._context import Context, SharedValue, Value 17 | from ._importer import import_from_string 18 | 19 | 20 | if sys.version_info < (3, 11): 21 | from exceptiongroup import BaseExceptionGroup, ExceptionGroup # pragma: no cover 22 | 23 | log = structlog.get_logger() 24 | structlog.stdlib.recreate_defaults(log_level=logging.INFO) 25 | 26 | T_Value = TypeVar("T_Value") 27 | 28 | 29 | class Module: 30 | """ 31 | A module allows to: 32 | 33 | - run services, 34 | - share those services with other modules, 35 | - request services from other modules. 36 | 37 | The services are represented by values that can be published by producers 38 | and borrowed by consumers. Consumers notify producers that their services 39 | are not used anymore by dropping the corresponding borrowed values. Producers 40 | are responsible for tearing down their services when stopping the application. 41 | 42 | Modules can be configured through their [`__init__`][fps.Module.__init__] method's 43 | keyword arguments. Modules have three phases: 44 | 45 | - [`prepare`][fps.Module.prepare]: called before the "start" phase. 46 | - [`start`][fps.Module.start]: called before running the application. 47 | - [`stop`][fps.Module.stop]: called when shutting down the application. 48 | """ 49 | 50 | _exit: Event 51 | _exceptions: list[Exception] 52 | 53 | def __init__( 54 | self, 55 | name: str, 56 | prepare_timeout: float = 1, 57 | start_timeout: float = 1, 58 | stop_timeout: float = 1, 59 | ): 60 | """ 61 | Args: 62 | name: The name to give to the module. 63 | prepare_timeout: The time to wait (in seconds) for the "prepare" phase to complete. 64 | start_timeout: The time to wait (in seconds) for the "start" phase to complete. 65 | stop_timeout: The time to wait (in seconds) for the "stop" phase to complete. 66 | """ 67 | self._initialized = False 68 | self._prepare_timeout = prepare_timeout 69 | self._start_timeout = start_timeout 70 | self._stop_timeout = stop_timeout 71 | self._parent: Module | None = None 72 | self._context = Context() 73 | self._prepared = Event() 74 | self._started = Event() 75 | self._stopped = Event() 76 | self._is_stopping = False 77 | self._name = name 78 | self._path: list[str] = [] 79 | self._uninitialized_modules: dict[str, Any] = {} 80 | self._modules: dict[str, Module] = {} 81 | self._published_values: dict[int, SharedValue] = {} 82 | self._acquired_values: dict[int, Value] = {} 83 | self._context_manager_exits: list[Callable] = [] 84 | self._config: dict[str, Any] = {} 85 | self.config: Any = None 86 | 87 | @property 88 | def parent(self) -> Module | None: 89 | """ 90 | Returns: 91 | The module's parent, unless this is the root module which has no parent. 
92 | """ 93 | return self._parent 94 | 95 | @parent.setter 96 | def parent(self, value: Module) -> None: 97 | """ 98 | Args: 99 | value: The module's parent. 100 | """ 101 | self._parent = value 102 | self._exit = value._exit 103 | self._path = value._path + [value._name] 104 | 105 | @property 106 | def name(self) -> str: 107 | """ 108 | Returns: 109 | The module's name. 110 | """ 111 | return self._name 112 | 113 | @property 114 | def path(self) -> str: 115 | """ 116 | Returns: 117 | The module's path, as a period-separated sequence of module names. 118 | """ 119 | return ".".join(self._path + [self._name]) 120 | 121 | @property 122 | def started(self) -> Event: 123 | """ 124 | Returns: 125 | An `Event` that is set when the module has started. 126 | """ 127 | return self._started 128 | 129 | @property 130 | def exceptions(self) -> list[Exception]: 131 | return self._exceptions 132 | 133 | def get_path(self, root: bool = True) -> str: 134 | path = self.path 135 | if not root: 136 | idx = path.find(".") 137 | if idx == -1: 138 | path = "" 139 | else: 140 | path = path[idx + 1 :] 141 | return path 142 | 143 | def _check_init(self): 144 | try: 145 | self._initialized 146 | except AttributeError: 147 | raise RuntimeError( 148 | "You must call super().__init__() in the __init__ method of your module" 149 | ) 150 | 151 | @property 152 | def modules(self) -> dict[str, Module]: 153 | """ 154 | Returns: 155 | The modules added by the current module, as a `dict` of module name to module instance. 156 | """ 157 | return self._modules 158 | 159 | def exit_app(self): 160 | """ 161 | Force the application to exit. This can be called from any module. 162 | """ 163 | self._exit.set() 164 | 165 | def add_module( 166 | self, 167 | module_type: type["Module"] | str, 168 | name: str, 169 | **config, 170 | ) -> None: 171 | """ 172 | Add a module as a child of the current module. 173 | 174 | Args: 175 | module_type: A [Module][fps.Module] type or a string pointing to a module type. 176 | name: The name to give to the module. 177 | config: The module configuration. 178 | """ 179 | self._check_init() 180 | if name in self._uninitialized_modules: 181 | raise RuntimeError(f"Module name already exists: {name}") 182 | module_type = import_from_string(module_type) 183 | self._uninitialized_modules[name] = { 184 | "type": module_type, 185 | "config": config, 186 | "modules": {}, 187 | } 188 | log.debug("Module added", path=self.path, name=name, module_type=module_type) 189 | 190 | async def freed(self, value: Any) -> None: 191 | """ 192 | Wait for a published value to be free, meaning that all borrowers have dropped 193 | the value. 194 | 195 | Args: 196 | value: The value to be freed. 197 | """ 198 | value_id = id(value) 199 | await self._published_values[value_id].freed() 200 | 201 | async def all_freed(self) -> None: 202 | """ 203 | Wait for all published values to be freed, meaning that all borrowers have 204 | dropped their values. 205 | """ 206 | for value in self._published_values.values(): 207 | await value.freed() 208 | 209 | def drop_all(self) -> None: 210 | """ 211 | Drop all borrowed values. 212 | """ 213 | for value in self._acquired_values.values(): 214 | value.drop() 215 | 216 | def drop(self, value: Any) -> None: 217 | """ 218 | Drop a borrowed value, meaning that this module doesn't use it anymore. 219 | 220 | Args: 221 | value: The value to drop. 
222 | """ 223 | value_id = id(value) 224 | self._acquired_values[value_id].drop() 225 | 226 | def add_teardown_callback( 227 | self, 228 | teardown_callback: Callable[..., Any] | Callable[..., Awaitable[Any]], 229 | ) -> None: 230 | """ 231 | Register a callback that will be called when stopping the module. The callbacks 232 | will be called in the inverse order than they were added. 233 | 234 | Args: 235 | teardown_callback: The callback to add. 236 | """ 237 | self._context.add_teardown_callback(teardown_callback) 238 | 239 | def put( 240 | self, 241 | value: T_Value, 242 | types: Iterable | Any | None = None, 243 | max_borrowers: float = float("inf"), 244 | teardown_callback: Callable[..., Any] 245 | | Callable[..., Awaitable[Any]] 246 | | None = None, 247 | manage: bool = False, 248 | ) -> None: 249 | """ 250 | Publish a value in the current module context and its parent's (if any). 251 | 252 | Args: 253 | value: The value to publish. 254 | types: The type(s) to publish the value as. If not provided, the type is inferred 255 | from the value. 256 | max_borrowers: The maximum number of simultaneous borrowers of the published value. 257 | teardown_callback: A callback to call when the value is torn down. 258 | manage: Whether to use the (async) context manager of the value for its setup/teardown. 259 | """ 260 | value_id = id(value) 261 | shared_value = self._context.put( 262 | value, 263 | types, 264 | max_borrowers=max_borrowers, 265 | manage=manage, 266 | teardown_callback=teardown_callback, 267 | ) 268 | self._published_values[value_id] = shared_value 269 | if self.parent is not None: 270 | self.parent._context.put( 271 | value, 272 | types, 273 | max_borrowers=max_borrowers, 274 | manage=manage, 275 | teardown_callback=teardown_callback, 276 | shared_value=shared_value, 277 | ) 278 | log.debug("Module added value", path=self.path, types=types) 279 | 280 | async def get( 281 | self, value_type: type[T_Value], timeout: float = float("inf") 282 | ) -> T_Value: 283 | """ 284 | Borrow a value from the current module's context or its parent's (if any). 285 | 286 | Args: 287 | value_type: The type of the value to borrow. 288 | timeout: The time to wait for the value to be published. 289 | 290 | Returns: 291 | The borrowed value. 
292 | """ 293 | log.debug("Module getting value", path=self.path, value_type=value_type) 294 | tasks = [create_task(self._context.get(value_type), self._task_group)] 295 | if self.parent is not None: 296 | tasks.append( 297 | create_task(self.parent._context.get(value_type), self._task_group) 298 | ) 299 | with fail_after(timeout): 300 | done, pending = await wait( 301 | tasks, self._task_group, return_when=FIRST_COMPLETED 302 | ) 303 | for task in pending: 304 | task.cancel() 305 | for task in done: 306 | break 307 | value = await task.wait() 308 | value = cast(Value, value) 309 | value_id = id(value.unwrap()) 310 | self._acquired_values[value_id] = value 311 | log.debug("Module got value", path=self.path, value_type=value_type) 312 | return value.unwrap() 313 | 314 | async def __aenter__(self) -> Module: 315 | self._check_init() 316 | log.debug("Running root module", name=self.path) 317 | initialize(self) 318 | async with AsyncExitStack() as exit_stack: 319 | self._task_group = await exit_stack.enter_async_context(create_task_group()) 320 | self._exceptions = [] 321 | self._phase = "preparing" 322 | with move_on_after(self._prepare_timeout) as scope: 323 | self._task_group.start_soon(self._prepare, name=f"{self.path} _prepare") 324 | await self._all_prepared() 325 | if scope.cancelled_caught: 326 | self._get_all_prepare_timeout() 327 | if self._exceptions: 328 | self._exit.set() 329 | else: 330 | self._phase = "starting" 331 | with move_on_after(self._start_timeout) as scope: 332 | self._task_group.start_soon(self._start, name=f"{self.path} start") 333 | await self._all_started() 334 | if scope.cancelled_caught: 335 | self._get_all_start_timeout() 336 | if self._exceptions: 337 | self._exit.set() 338 | if not self._exit.is_set(): 339 | log.debug("Application running") 340 | self._exit_stack = exit_stack.pop_all() 341 | return self 342 | 343 | async def __aexit__(self, exc_type, exc_value, exc_tb): 344 | self._phase = "stopping" 345 | with move_on_after(self._stop_timeout) as scope: 346 | self._task_group.start_soon(self._stop, name=f"{self.path} stop") 347 | await self._all_stopped() 348 | self._exit.set() 349 | if scope.cancelled_caught: 350 | self._get_all_stop_timeout() 351 | self._task_group.cancel_scope.cancel() 352 | try: 353 | await self._exit_stack.aclose() 354 | except anyio.get_cancelled_exc_class(): # pragma: nocover 355 | pass 356 | exceptions = [] 357 | for exc in self._exceptions: 358 | while isinstance(exc, ExceptionGroup): 359 | exc = exc.exceptions[0] 360 | exceptions.append(exc) 361 | if exceptions: 362 | log.critical("Application failed") 363 | for exception in exceptions: 364 | log.critical("Exception", exc_info=exception) 365 | log.debug("Application stopped") 366 | 367 | def context_manager(self, value): 368 | self._context_manager_exits.append(value.__exit__) 369 | return value.__enter__() 370 | 371 | async def async_context_manager(self, value): 372 | self._context_manager_exits.append(value.__aexit__) 373 | return await value.__aenter__() 374 | 375 | def _get_all_prepare_timeout(self): 376 | for module in self._modules.values(): 377 | module._get_all_prepare_timeout() 378 | if not self._prepared.is_set(): 379 | self._exceptions.append( 380 | TimeoutError(f"Module timed out while preparing: {self.path}") 381 | ) 382 | 383 | def _get_all_start_timeout(self): 384 | for module in self._modules.values(): 385 | module._get_all_start_timeout() 386 | if not self._started.is_set(): 387 | self._exceptions.append( 388 | TimeoutError(f"Module timed out while starting: 
{self.path}") 389 | ) 390 | 391 | def _get_all_stop_timeout(self): 392 | for module in self._modules.values(): 393 | module._get_all_stop_timeout() 394 | if not self._stopped.is_set(): 395 | self._exceptions.append( 396 | TimeoutError(f"Module timed out while stopping: {self.path}") 397 | ) 398 | 399 | async def _all_prepared(self): 400 | for module in self._modules.values(): 401 | await module._all_prepared() 402 | await self._prepared.wait() 403 | 404 | async def _all_started(self): 405 | for module in self._modules.values(): 406 | await module._all_started() 407 | await self._started.wait() 408 | 409 | async def _all_stopped(self): 410 | for module in self._modules.values(): 411 | await module._all_stopped() 412 | await self._stopped.wait() 413 | 414 | def done(self) -> None: 415 | """ 416 | Notify that the current phase is done. This is especially useful when launching 417 | background tasks, as otherwise the current phase would not complete: 418 | ```py 419 | from anyio import create_task_group 420 | from fps import Module 421 | 422 | class MyModule(Module): 423 | async def start(self): 424 | async with create_task_group() as tg: 425 | tg.start_toon(my_async_func) 426 | tg.start_toon(other_async_func) 427 | self.done() 428 | ``` 429 | """ 430 | if self._phase == "preparing": 431 | self._prepared.set() 432 | log.debug("Module prepared", path=self.path) 433 | elif self._phase == "starting": 434 | self._started.set() 435 | log.debug("Module started", path=self.path) 436 | else: 437 | self._is_stopping = True 438 | self._task_group.start_soon(self._finish) 439 | 440 | async def _finish(self): 441 | tasks = ( 442 | create_task(self._drop_and_wait_values(), self._task_group), 443 | create_task(self._exit.wait(), self._task_group), 444 | ) 445 | done, pending = await wait(tasks, self._task_group, return_when=FIRST_COMPLETED) 446 | for task in pending: 447 | task.cancel() 448 | 449 | async def _drop_and_wait_values(self): 450 | self.drop_all() 451 | await self._context.aclose() 452 | self._stopped.set() 453 | log.debug("Module stopped", path=self.path) 454 | 455 | async def _prepare(self) -> None: 456 | log.debug("Preparing module", path=self.path) 457 | try: 458 | async with create_task_group() as tg: 459 | for module in self._modules.values(): 460 | module._task_group = tg 461 | module._phase = self._phase 462 | module._exceptions = self._exceptions 463 | tg.start_soon(module._prepare, name=f"{module.path} _prepare") 464 | tg.start_soon( 465 | self._prepare_and_done, name=f"{self.path} _prepare_and_done" 466 | ) 467 | except ExceptionGroup as exc: 468 | self._exceptions.append(*exc.exceptions) 469 | self._prepared.set() 470 | self._exit.set() 471 | log.critical("Module failed while preparing", path=self.path) 472 | 473 | async def _prepare_and_done(self) -> None: 474 | await self.prepare() 475 | if not self._prepared.is_set(): 476 | self.done() 477 | 478 | async def prepare(self) -> None: 479 | """ 480 | The "prepare" phase occurs before the "start" phase. 
481 | """ 482 | pass 483 | 484 | async def _start(self) -> None: 485 | log.debug("Starting module", path=self.path) 486 | try: 487 | async with create_task_group() as tg: 488 | for module in self._modules.values(): 489 | module._task_group = tg 490 | module._phase = self._phase 491 | tg.start_soon(module._start, name=f"{module.path} _start") 492 | tg.start_soon(self._start_and_done, name=f"{self.path} _start_and_done") 493 | except ExceptionGroup as exc: 494 | self._exceptions.append(*exc.exceptions) 495 | self._started.set() 496 | self._exit.set() 497 | log.critical("Module failed while starting", path=self.path) 498 | 499 | async def _start_and_done(self) -> None: 500 | await self.start() 501 | if not self._started.is_set(): 502 | self.done() 503 | 504 | async def start(self) -> None: 505 | """ 506 | The "start" phase occurs after the "prepare" phase. This is usually where 507 | services are started and published as values, and other services are requested 508 | and borrowed as values. 509 | """ 510 | pass 511 | 512 | async def _stop(self) -> None: 513 | log.debug("Stopping module", path=self.path) 514 | try: 515 | async with create_task_group() as tg: 516 | for module in self._modules.values(): 517 | module._task_group = tg 518 | module._phase = self._phase 519 | tg.start_soon(module._stop, name=f"{module.path} _stop") 520 | for context_manager_exit in self._context_manager_exits[::-1]: 521 | res = context_manager_exit(None, None, None) 522 | if isawaitable(res): 523 | await res 524 | tg.start_soon(self._stop_and_done, name=f"{self.path} _stop_and_done") 525 | except ExceptionGroup as exc: 526 | self._exceptions.append(*exc.exceptions) 527 | self._stopped.set() 528 | self._exit.set() 529 | log.critical("Module failed while stoping", path=self.path) 530 | 531 | async def _stop_and_done(self) -> None: 532 | await self.stop() 533 | if not self._is_stopping: 534 | self.done() 535 | 536 | async def stop(self) -> None: 537 | """ 538 | The "stop" phase occurs when the application is torn down. 539 | """ 540 | pass 541 | 542 | async def _main(self) -> None: # pragma: no cover 543 | async with self: 544 | await self._exit.wait() 545 | 546 | def run(self, backend: str = "asyncio") -> None: # pragma: no cover 547 | """ 548 | Run the root module. 549 | 550 | Args: 551 | backend: The backend used to run ("asyncio" or "trio"). 552 | """ 553 | try: 554 | anyio.run(self._main, backend=backend) 555 | except BaseException as exc: 556 | if isinstance(exc, KeyboardInterrupt): 557 | # on asyncio 558 | return 559 | if isinstance(exc, BaseExceptionGroup): 560 | if isinstance(exc.exceptions[0], KeyboardInterrupt): 561 | # on trio 562 | return 563 | raise 564 | 565 | 566 | def initialize(root_module: Module) -> dict[str, Any] | None: 567 | """ 568 | Initialize the root module and all its submodules recursively. 569 | 570 | Args: 571 | root_module: The root module to initialize. 572 | 573 | Returns: 574 | The configuration of the application. 
575 | """ 576 | if root_module._initialized: 577 | return None 578 | 579 | root_module._exit = Event() 580 | _config = get_kwargs_with_default(type(root_module).__init__) 581 | _config.update(root_module._config) 582 | config = {root_module.name: {"modules": {}, "config": _config}} 583 | _initialize( 584 | root_module._uninitialized_modules, 585 | root_module, 586 | root_module._uninitialized_modules, 587 | config[root_module.name]["modules"], 588 | ) 589 | root_module._uninitialized_modules = {} 590 | root_module._initialized = True 591 | root_module._config = {} 592 | return config 593 | 594 | 595 | def _initialize( 596 | submodules: dict[str, Any], 597 | parent_module: Module, 598 | root_module_modules: dict[str, Any], 599 | config: dict[str, Any], 600 | ) -> None: 601 | for name, info in root_module_modules.items(): 602 | if name in submodules: 603 | if info.get("type") is not None: 604 | submodules[name]["type"] = info["type"] 605 | else: 606 | submodules[name] = info 607 | for name, info in submodules.items(): 608 | submodule_config = info.get("config", {}) 609 | submodule_config.update(root_module_modules.get(name, {}).get("config", {})) 610 | if "type" not in info: 611 | raise RuntimeError(f"Module not found: {name}") 612 | module_type = import_from_string(info["type"]) 613 | _config = get_kwargs_with_default(module_type.__init__) 614 | config[name] = {"config": _config, "modules": {}} 615 | _config.update(submodule_config) 616 | try: 617 | submodule_instance: Module = module_type(name, **submodule_config) 618 | except Exception as e: 619 | raise RuntimeError( 620 | f"Cannot instantiate module '{parent_module.path}.{name}': {e}" 621 | ) 622 | submodule_instance.parent = parent_module 623 | parent_module._modules[name] = submodule_instance 624 | _initialize( 625 | submodule_instance._uninitialized_modules, 626 | submodule_instance, 627 | root_module_modules.get(name, {}).get("modules", {}), 628 | config[name]["modules"], 629 | ) 630 | submodule_instance._uninitialized_modules = {} 631 | 632 | 633 | def get_kwargs_with_default(function: Callable[..., Any]) -> dict[str, Any]: 634 | """ 635 | Get the keyword arguments which have a default value from a function. 636 | 637 | Args: 638 | function: The function from which to get the keyword arguments. 639 | 640 | Returns: 641 | The keyword arguments of the function, with their default values. 
642 | """ 643 | if function is Module.__init__: 644 | return {} 645 | 646 | sig = signature(function) 647 | return { 648 | param.name: param.default 649 | for param in sig.parameters.values() 650 | if param.default != _empty 651 | } 652 | -------------------------------------------------------------------------------- /src/fps/_signal.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Callable 2 | from inspect import iscoroutinefunction 3 | from typing import Generic, TypeVar 4 | 5 | from anyio import BrokenResourceError, create_memory_object_stream, create_task_group 6 | from anyio.streams.memory import MemoryObjectSendStream, MemoryObjectReceiveStream 7 | 8 | 9 | T = TypeVar("T") 10 | 11 | 12 | class Signal(Generic[T]): 13 | def __init__(self) -> None: 14 | self._callbacks: set[Callable[[T], None]] = set() 15 | self._send_streams: set[MemoryObjectSendStream[T]] = set() 16 | 17 | def iterate(self) -> MemoryObjectReceiveStream[T]: 18 | send_stream, receive_stream = create_memory_object_stream[T]() 19 | self._send_streams.add(send_stream) 20 | return receive_stream 21 | 22 | def connect(self, callback: Callable[[T], None]) -> None: 23 | self._callbacks.add(callback) 24 | 25 | def disconnect(self, callback: Callable[[T], None]) -> None: 26 | self._callbacks.remove(callback) 27 | 28 | async def emit(self, value: T) -> None: 29 | to_remove: list[MemoryObjectSendStream[T]] = [] 30 | 31 | async with create_task_group() as tg: 32 | for callback in self._callbacks: 33 | if iscoroutinefunction(callback): 34 | tg.start_soon(callback, value) 35 | else: 36 | callback(value) 37 | 38 | for send_stream in self._send_streams: 39 | tg.start_soon(self._send, send_stream, value, to_remove) 40 | 41 | for send_stream in to_remove: 42 | self._send_streams.remove(send_stream) 43 | 44 | async def _send( 45 | self, 46 | send_stream: MemoryObjectSendStream[T], 47 | value: T, 48 | to_remove: list[MemoryObjectSendStream[T]], 49 | ) -> None: 50 | try: 51 | await send_stream.send(value) 52 | except BrokenResourceError: 53 | to_remove.append(send_stream) 54 | -------------------------------------------------------------------------------- /src/fps/cli/_cli.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | import sys 5 | import click 6 | import structlog 7 | from typing import TextIO 8 | 9 | from .._config import dump_config, get_config_description, get_root_module 10 | from .._module import initialize 11 | from .._importer import import_from_string 12 | 13 | 14 | sys.path.insert(0, "") 15 | 16 | log = structlog.get_logger() 17 | CONFIG = None 18 | TEST = False 19 | 20 | 21 | @click.command() 22 | @click.option("--config", type=click.File(), help="The path to the configuration file.") 23 | @click.option( 24 | "--show-config", 25 | is_flag=True, 26 | show_default=True, 27 | default=False, 28 | help="Show the actual configuration.", 29 | ) 30 | @click.option( 31 | "--help-all", 32 | is_flag=True, 33 | show_default=True, 34 | default=False, 35 | help="Show the configuration description.", 36 | ) 37 | @click.option( 38 | "--set", "set_", multiple=True, help="The assignment to the module parameter." 
39 | ) 40 | @click.option( 41 | "--backend", 42 | show_default=True, 43 | default="asyncio", 44 | help="The name of the event loop to use (asyncio or trio).", 45 | ) 46 | @click.argument("module", default="") 47 | def main( 48 | module: str, 49 | config: TextIO | None = None, 50 | show_config: bool = False, 51 | help_all: bool = False, 52 | set_: list[str] | None = None, 53 | backend: str = "asyncio", 54 | ): 55 | global CONFIG 56 | if config is None: 57 | module_type = import_from_string(module) 58 | root_module_name = "root_module" 59 | config_dict = { 60 | root_module_name: { 61 | "type": module_type, 62 | } 63 | } 64 | else: 65 | config_dict = json.loads(config.read()) 66 | if module: 67 | config_dict = {module: config_dict[module]} 68 | root_module_name = module 69 | else: 70 | for root_module_name in config_dict: 71 | break 72 | for _set in set_ or []: 73 | if "=" not in _set: 74 | raise click.ClickException( 75 | f"No '=' while setting a module parameter: {_set}" 76 | ) 77 | 78 | key, value = _set.split("=", 1) 79 | path = key.split(".") 80 | modules = config_dict[root_module_name] 81 | for module_name in path[:-1]: 82 | modules = modules.setdefault("modules", {}) 83 | modules = modules.setdefault(module_name, {}) 84 | _config = modules.setdefault("config", {}) 85 | _config[path[-1]] = value 86 | if TEST: 87 | CONFIG = config_dict 88 | return 89 | root_module = get_root_module(config_dict) 90 | actual_config = initialize(root_module) 91 | if help_all: 92 | click.echo(get_config_description(root_module)) 93 | return 94 | if show_config: 95 | assert actual_config is not None 96 | config_str = dump_config(actual_config) 97 | for line in config_str.splitlines(): 98 | param_path, param_value = line.split("=") 99 | kwargs = {param_path: param_value} 100 | log.info("Configuration", **kwargs) 101 | root_module.run(backend=backend) 102 | 103 | 104 | def get_config(): 105 | return CONFIG 106 | -------------------------------------------------------------------------------- /src/fps/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyter-server/fps/df19fa7eb34241c65be59b81b946b8cb4c86846c/src/fps/py.typed -------------------------------------------------------------------------------- /src/fps/web/fastapi.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from fastapi import FastAPI 4 | 5 | from fps import Module 6 | 7 | 8 | class FastAPIModule(Module): 9 | def __init__( 10 | self, 11 | name: str, 12 | *, 13 | app: FastAPI | None = None, 14 | debug: bool | None = None, 15 | ) -> None: 16 | super().__init__(name) 17 | debug = debug if debug is not None else __debug__ 18 | self.app = app if app is not None else FastAPI(debug=debug) 19 | 20 | async def prepare(self) -> None: 21 | self.put(self.app) 22 | -------------------------------------------------------------------------------- /src/fps/web/server.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from functools import partial 4 | 5 | from anyio import Event, connect_tcp, create_task_group 6 | from anyioutils import start_task 7 | from anycorn import Config, serve 8 | from fastapi import FastAPI 9 | 10 | from fps import Module 11 | 12 | 13 | class ServerModule(Module): 14 | def __init__( 15 | self, 16 | name: str, 17 | *, 18 | host: str = "127.0.0.1", 19 | port: int = 8000, 20 | ) -> None: 21 | 
super().__init__(name) 22 | self.host = host 23 | self.port = port 24 | self.shutdown_event = Event() 25 | 26 | async def start(self) -> None: 27 | app = await self.get(FastAPI) 28 | config = Config() 29 | config.bind = [f"{self.host}:{self.port}"] 30 | config.loglevel = "WARN" 31 | async with create_task_group() as tg: 32 | self.server_task = start_task( 33 | partial( 34 | serve, 35 | app, # type: ignore[arg-type] 36 | config, 37 | shutdown_trigger=self.shutdown_event.wait, 38 | mode="asgi", 39 | ), 40 | tg, 41 | ) 42 | while True: 43 | try: 44 | await connect_tcp(self.host, self.port) 45 | except OSError: 46 | pass 47 | else: 48 | break 49 | self.done() 50 | 51 | async def stop(self) -> None: 52 | self.shutdown_event.set() 53 | await self.server_task.wait() 54 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from socket import socket 2 | 3 | import pytest 4 | 5 | 6 | @pytest.fixture 7 | def unused_tcp_port() -> int: 8 | with socket() as sock: 9 | sock.bind(("127.0.0.1", 0)) 10 | return sock.getsockname()[1] 11 | -------------------------------------------------------------------------------- /tests/test_app.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyter-server/fps/df19fa7eb34241c65be59b81b946b8cb4c86846c/tests/test_app.py -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import fps 4 | from click.testing import CliRunner 5 | from fps import Module 6 | from fps.cli._cli import get_config, main 7 | from pydantic import BaseModel, Field 8 | from structlog.testing import capture_logs 9 | 10 | 11 | class MyConfig(BaseModel): 12 | param0: str = Field( 13 | default="foo", title="a parameter", description="the first parameter" 14 | ) 15 | param1: str = Field( 16 | default="bar", title="another parameter", description="the second parameter" 17 | ) 18 | 19 | 20 | class MyModule(Module): 21 | def __init__(self, name, param0="param0", param1="param1", add_modules=True): 22 | super().__init__(name) 23 | self.config = MyConfig(param0=param0, param1=param1) 24 | if add_modules: 25 | self.add_module(MyModule, "module0", add_modules=False) 26 | self.add_module(MyModule, "module1", add_modules=False) 27 | 28 | async def start(self): 29 | self.exit_app() 30 | 31 | 32 | class UselessModule(Module): 33 | async def start(self): 34 | self.exit_app() 35 | 36 | 37 | def test_wrong_cli_1(): 38 | runner = CliRunner() 39 | result = runner.invoke( 40 | main, 41 | [ 42 | "fps:Module", 43 | "--set", 44 | "param", 45 | ], 46 | ) 47 | assert result.exit_code == 1 48 | 49 | 50 | def test_wrong_cli_2(): 51 | runner = CliRunner() 52 | result = runner.invoke( 53 | main, 54 | [ 55 | "fps.Module", 56 | ], 57 | ) 58 | assert result.exit_code == 1 59 | 60 | 61 | def test_wrong_cli_3(): 62 | runner = CliRunner() 63 | result = runner.invoke( 64 | main, 65 | [ 66 | "fps:WrongModule", 67 | ], 68 | ) 69 | assert result.exit_code == 1 70 | 71 | 72 | def test_wrong_cli_4(): 73 | runner = CliRunner() 74 | result = runner.invoke( 75 | main, 76 | [ 77 | "wrong_module:Module", 78 | ], 79 | ) 80 | assert result.exit_code == 1 81 | 82 | 83 | def test_cli(): 84 | runner = CliRunner() 85 | fps.cli._cli.TEST = True 86 | result = runner.invoke( 87 | main, 88 | [ 89 | 
"fps_module", 90 | "--set", 91 | "param=-1", 92 | "--set", 93 | "module0.param0=foo", 94 | "--set", 95 | "module1.param1=bar", 96 | "--set", 97 | "module2.param2=baz", 98 | "--set", 99 | "module2.module3.param3=123", 100 | ], 101 | ) 102 | assert result.exit_code == 0 103 | config = get_config() 104 | fps.cli._cli.TEST = False 105 | assert config == { 106 | "root_module": { 107 | "type": Module, 108 | "config": {"param": "-1"}, 109 | "modules": { 110 | "module0": {"config": {"param0": "foo"}}, 111 | "module1": {"config": {"param1": "bar"}}, 112 | "module2": { 113 | "config": {"param2": "baz"}, 114 | "modules": {"module3": {"config": {"param3": "123"}}}, 115 | }, 116 | }, 117 | } 118 | } 119 | 120 | 121 | def test_cli_show_config(): 122 | runner = CliRunner() 123 | fps.cli._cli.TEST = False 124 | 125 | with capture_logs() as cap_logs: 126 | result = runner.invoke( 127 | main, 128 | [ 129 | "test_cli:MyModule", 130 | "--show-config", 131 | "--set", 132 | "param0=-1", 133 | "--set", 134 | "module0.param0=foo", 135 | "--set", 136 | "module1.param1=bar", 137 | ], 138 | ) 139 | 140 | assert result.exit_code == 0 141 | config = [] 142 | for log in cap_logs: 143 | if log["event"] == "Configuration": 144 | del log["event"] 145 | del log["log_level"] 146 | config.append(log) 147 | 148 | assert config == [ 149 | {"root_module.param0": "-1"}, 150 | {"root_module.param1": "param1"}, 151 | {"root_module.add_modules": "True"}, 152 | {"root_module.module0.param0": "foo"}, 153 | {"root_module.module0.param1": "param1"}, 154 | {"root_module.module0.add_modules": "False"}, 155 | {"root_module.module1.param0": "param0"}, 156 | {"root_module.module1.param1": "bar"}, 157 | {"root_module.module1.add_modules": "False"}, 158 | ] 159 | 160 | 161 | def test_cli_help_all(): 162 | runner = CliRunner() 163 | fps.cli._cli.TEST = False 164 | 165 | result = runner.invoke( 166 | main, 167 | [ 168 | "test_cli:MyModule", 169 | "--help-all", 170 | ], 171 | ) 172 | 173 | assert result.exit_code == 0 174 | assert ( 175 | result.output 176 | == """\ 177 | param0: a parameter 178 | Default: foo 179 | Type: 180 | Description: the first parameter 181 | param1: another parameter 182 | Default: bar 183 | Type: 184 | Description: the second parameter 185 | module0.param0: a parameter 186 | Default: foo 187 | Type: 188 | Description: the first parameter 189 | module0.param1: another parameter 190 | Default: bar 191 | Type: 192 | Description: the second parameter 193 | module1.param0: a parameter 194 | Default: foo 195 | Type: 196 | Description: the first parameter 197 | module1.param1: another parameter 198 | Default: bar 199 | Type: 200 | Description: the second parameter 201 | """ 202 | ) 203 | 204 | 205 | def test_cli_with_config_file(tmp_path): 206 | config_dict = { 207 | "root_module": { 208 | "type": "fps_module", 209 | "config": {"param": 3}, 210 | "modules": { 211 | "module0": { 212 | "type": "fps_module", 213 | "config": { 214 | "param0": 0, 215 | "param1": 1, 216 | }, 217 | }, 218 | }, 219 | }, 220 | } 221 | with (tmp_path / "config.json").open("w") as f: 222 | json.dump(config_dict, f) 223 | 224 | runner = CliRunner() 225 | fps.cli._cli.TEST = True 226 | result = runner.invoke( 227 | main, 228 | [ 229 | "--config", 230 | str(tmp_path / "config.json"), 231 | "--set", 232 | "module0.param1=foo", 233 | "--set", 234 | "param=bar", 235 | ], 236 | ) 237 | assert result.exit_code == 0 238 | config = get_config() 239 | fps.cli._cli.TEST = False 240 | assert config == { 241 | "root_module": { 242 | "type": "fps_module", 243 | 
"config": {"param": "bar"}, 244 | "modules": { 245 | "module0": { 246 | "type": "fps_module", 247 | "config": { 248 | "param0": 0, 249 | "param1": "foo", 250 | }, 251 | }, 252 | }, 253 | } 254 | } 255 | 256 | 257 | def test_cli_with_config_file_and_module(tmp_path): 258 | config_dict = { 259 | "root_module": { 260 | "type": "fps_module", 261 | "config": {"param": 3}, 262 | "modules": { 263 | "module0": { 264 | "type": "fps_module", 265 | "config": { 266 | "param0": 0, 267 | "param1": 1, 268 | }, 269 | }, 270 | }, 271 | }, 272 | } 273 | with (tmp_path / "config.json").open("w") as f: 274 | json.dump(config_dict, f) 275 | 276 | runner = CliRunner() 277 | fps.cli._cli.TEST = True 278 | result = runner.invoke( 279 | main, 280 | [ 281 | "root_module", 282 | "--config", 283 | str(tmp_path / "config.json"), 284 | "--set", 285 | "module0.param1=foo", 286 | "--set", 287 | "param=bar", 288 | ], 289 | ) 290 | assert result.exit_code == 0 291 | config = get_config() 292 | fps.cli._cli.TEST = False 293 | assert config == { 294 | "root_module": { 295 | "type": "fps_module", 296 | "config": {"param": "bar"}, 297 | "modules": { 298 | "module0": { 299 | "type": "fps_module", 300 | "config": { 301 | "param0": 0, 302 | "param1": "foo", 303 | }, 304 | }, 305 | }, 306 | } 307 | } 308 | 309 | 310 | def test_cli_run_module(): 311 | fps.cli._cli.TEST = False 312 | runner = CliRunner() 313 | result = runner.invoke( 314 | main, 315 | [ 316 | "test_cli:UselessModule", 317 | ], 318 | ) 319 | assert result.exit_code == 0 320 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from fps import Module, get_root_module, initialize, merge_config 4 | 5 | 6 | def test_config_override(): 7 | class Submodule0(Module): 8 | def __init__(self, name, param0="param0", param1="param1"): 9 | super().__init__(name) 10 | self.param0 = param0 11 | self.param1 = param1 12 | 13 | class Submodule1(Module): 14 | def __init__(self, name, param0="param0"): 15 | super().__init__(name) 16 | self.param0 = param0 17 | 18 | class Submodule2(Module): 19 | def __init__(self, name, param2="param2"): 20 | super().__init__(name) 21 | self.add_module(Submodule3, "submodule3", param3="param3*") 22 | self.param2 = param2 23 | 24 | class Submodule3(Module): 25 | def __init__(self, name, param3="param3"): 26 | super().__init__(name) 27 | self.add_module(Submodule4, "submodule4", param4="param4*") 28 | self.param3 = param3 29 | 30 | class Submodule4(Module): 31 | def __init__(self, name, param4="param4"): 32 | super().__init__(name) 33 | self.param4 = param4 34 | 35 | class Module0(Module): 36 | def __init__(self, name, param0="param0"): 37 | super().__init__(name) 38 | self.add_module(Submodule0, "submodule0", param0="param0*") 39 | self.add_module(Submodule1, "submodule1") 40 | self.add_module(Submodule2, "submodule2", param2="param2*") 41 | self.param0 = param0 42 | 43 | module0 = Module0("module0", param0="bar") 44 | initialize(module0) 45 | initialize(module0) 46 | assert module0.param0 == "bar" 47 | assert module0.modules["submodule0"].param0 == "param0*" 48 | assert module0.modules["submodule0"].param1 == "param1" 49 | assert module0.modules["submodule1"].param0 == "param0" 50 | assert module0.modules["submodule2"].param2 == "param2*" 51 | assert module0.modules["submodule2"].modules["submodule3"].param3 == "param3*" 52 | assert ( 53 | 
module0.modules["submodule2"].modules["submodule3"].modules["submodule4"].param4 54 | == "param4*" 55 | ) 56 | 57 | 58 | def test_config_from_dict(): 59 | class Submodule0(Module): 60 | def __init__(self, name, param0="param0", param1="param1"): 61 | super().__init__(name) 62 | self.param0 = param0 63 | self.param1 = param1 64 | 65 | class Submodule1(Module): 66 | def __init__(self, name, param0="param0"): 67 | super().__init__(name) 68 | self.param0 = param0 69 | 70 | class Submodule2(Module): 71 | def __init__(self, name, param2="param2"): 72 | super().__init__(name) 73 | self.add_module(Submodule3, "submodule3") 74 | self.param2 = param2 75 | 76 | class Submodule3(Module): 77 | def __init__(self, name, param3="param3"): 78 | super().__init__(name) 79 | self.add_module(Submodule4, "submodule4", param4="foo") 80 | self.param3 = param3 81 | 82 | class Submodule4(Module): 83 | def __init__(self, name, param4="param4"): 84 | super().__init__(name) 85 | self.param4 = param4 86 | 87 | class Module0(Module): 88 | def __init__(self, name, param0="param0"): 89 | super().__init__(name) 90 | self.add_module(Submodule0, "submodule0", param0="foo") 91 | self.add_module(Submodule1, "submodule1") 92 | self.add_module(Submodule2, "submodule2") 93 | self.param0 = param0 94 | 95 | config = { 96 | "module0": { 97 | "type": Module0, 98 | "config": { 99 | "param0": "bar", 100 | }, 101 | "modules": { 102 | "submodule0": { 103 | "config": { 104 | "param0": "foo2", 105 | }, 106 | }, 107 | "submodule1": { 108 | "config": { 109 | "param0": "baz", 110 | }, 111 | }, 112 | "submodule2": { 113 | "config": { 114 | "param2": "param2*", 115 | }, 116 | "modules": { 117 | "submodule3": { 118 | "config": { 119 | "param3": "param3*", 120 | }, 121 | "modules": { 122 | "submodule4": { 123 | "config": { 124 | "param4": "param4*", 125 | }, 126 | }, 127 | }, 128 | }, 129 | }, 130 | }, 131 | }, 132 | }, 133 | } 134 | 135 | module0 = get_root_module(config) 136 | initialize(module0) 137 | assert module0.param0 == "bar" 138 | assert module0.modules["submodule0"].param0 == "foo2" 139 | assert module0.modules["submodule0"].param1 == "param1" 140 | assert module0.modules["submodule1"].param0 == "baz" 141 | assert module0.modules["submodule2"].param2 == "param2*" 142 | assert module0.modules["submodule2"].modules["submodule3"].param3 == "param3*" 143 | assert ( 144 | module0.modules["submodule2"].modules["submodule3"].modules["submodule4"].param4 145 | == "param4*" 146 | ) 147 | 148 | 149 | def test_config_from_dict_with_type_as_str(): 150 | config = { 151 | "module0": { 152 | "type": "fps:Module", 153 | "modules": { 154 | "module0": { 155 | "type": "fps:Module", 156 | "modules": { 157 | "module00": { 158 | "type": "fps:Module", 159 | }, 160 | }, 161 | }, 162 | }, 163 | }, 164 | } 165 | 166 | module0 = get_root_module(config) 167 | initialize(module0) 168 | assert module0.modules["module0"].modules["module00"] 169 | 170 | 171 | def test_wrong_config_from_dict_1(): 172 | class Module0(Module): 173 | pass 174 | 175 | config = { 176 | "module0": { 177 | "type": Module0, 178 | "modules": { 179 | "submodule0": { 180 | "config": { 181 | "param0": "foo", 182 | }, 183 | }, 184 | }, 185 | }, 186 | } 187 | 188 | module0 = get_root_module(config) 189 | 190 | with pytest.raises(RuntimeError) as excinfo: 191 | initialize(module0) 192 | 193 | assert str(excinfo.value) == "Module not found: submodule0" 194 | 195 | 196 | def test_wrong_config_from_dict_2(): 197 | class Submodule0(Module): 198 | def __init__(self, name, param0="param0", 
param1="param1"): 199 | super().__init__(name) 200 | self.param0 = param0 201 | self.param1 = param1 202 | 203 | class Module0(Module): 204 | def __init__(self, name, param0="param0"): 205 | super().__init__(name) 206 | self.add_module(Submodule0, "submodule0", param0="foo") 207 | 208 | config = { 209 | "module0": { 210 | "type": Module0, 211 | "modules": { 212 | "submodule0": { 213 | "config": { 214 | "param0": "foo", 215 | }, 216 | "modules": { 217 | "submodule1": { 218 | "config": { 219 | "param1": "bar", 220 | }, 221 | }, 222 | }, 223 | }, 224 | }, 225 | }, 226 | } 227 | 228 | module0 = get_root_module(config) 229 | 230 | with pytest.raises(RuntimeError) as excinfo: 231 | initialize(module0) 232 | 233 | assert str(excinfo.value) == "Module not found: submodule1" 234 | 235 | 236 | def test_config_from_dict_add_submodules(): 237 | class Submodule0(Module): 238 | def __init__(self, name, param0="param0", param1="param1"): 239 | super().__init__(name) 240 | self.param0 = param0 241 | self.param1 = param1 242 | 243 | class Submodule1(Module): 244 | def __init__(self, name, param0="param0", param1="param1"): 245 | super().__init__(name) 246 | self.param0 = param0 247 | self.param1 = param1 248 | 249 | class Submodule10(Module): 250 | def __init__(self, name, param0="param0", param1="param1"): 251 | super().__init__(name) 252 | self.param0 = param0 253 | self.param1 = param1 254 | 255 | class Module0(Module): 256 | pass 257 | 258 | config = { 259 | "module0": { 260 | "type": Module0, 261 | "modules": { 262 | "submodule0": { 263 | "type": Submodule0, 264 | "config": { 265 | "param0": "foo", 266 | }, 267 | }, 268 | "submodule1": { 269 | "type": Submodule1, 270 | "config": { 271 | "param1": "bar", 272 | }, 273 | "modules": { 274 | "submodule10": { 275 | "type": Submodule10, 276 | "config": { 277 | "param0": "baz", 278 | }, 279 | }, 280 | }, 281 | }, 282 | }, 283 | }, 284 | } 285 | 286 | module0 = get_root_module(config) 287 | initialize(module0) 288 | assert list(module0.modules.keys()) == ["submodule0", "submodule1"] 289 | assert list(module0.modules["submodule0"].modules.keys()) == [] 290 | assert list(module0.modules["submodule1"].modules.keys()) == ["submodule10"] 291 | assert ( 292 | list(module0.modules["submodule1"].modules["submodule10"].modules.keys()) == [] 293 | ) 294 | assert module0.modules["submodule0"].param0 == "foo" 295 | assert module0.modules["submodule0"].param1 == "param1" 296 | assert module0.modules["submodule1"].param0 == "param0" 297 | assert module0.modules["submodule1"].param1 == "bar" 298 | assert module0.modules["submodule1"].modules["submodule10"].param0 == "baz" 299 | assert module0.modules["submodule1"].modules["submodule10"].param1 == "param1" 300 | 301 | 302 | def test_merge_config(): 303 | d0 = { 304 | "module0": { 305 | "type": "Module0", 306 | "config": { 307 | "param0": 0, 308 | "param1": 1, 309 | }, 310 | "modules": { 311 | "module1": { 312 | "type": "Module1", 313 | "modules": { 314 | "module2": { 315 | "type": "Module2", 316 | "config": { 317 | "param2": 2, 318 | "param3": 3, 319 | }, 320 | } 321 | }, 322 | } 323 | }, 324 | } 325 | } 326 | 327 | d1 = { 328 | "module0": { 329 | "config": { 330 | "param1": 11, 331 | }, 332 | "modules": { 333 | "module1": { 334 | "modules": { 335 | "module2": { 336 | "config": { 337 | "param2": 22, 338 | }, 339 | }, 340 | "module3": { 341 | "type": "Module3", 342 | "config": { 343 | "param4": 4, 344 | }, 345 | }, 346 | } 347 | } 348 | }, 349 | } 350 | } 351 | 352 | d = merge_config(d0, d1) 353 | assert d == { 354 | 
"module0": { 355 | "type": "Module0", 356 | "config": { 357 | "param0": 0, 358 | "param1": 11, 359 | }, 360 | "modules": { 361 | "module1": { 362 | "type": "Module1", 363 | "modules": { 364 | "module2": { 365 | "type": "Module2", 366 | "config": { 367 | "param2": 22, 368 | "param3": 3, 369 | }, 370 | }, 371 | "module3": { 372 | "type": "Module3", 373 | "config": { 374 | "param4": 4, 375 | }, 376 | }, 377 | }, 378 | } 379 | }, 380 | } 381 | } 382 | 383 | 384 | def test_dump_config(): 385 | class Submodule0(Module): 386 | def __init__(self, name, param0="param0_0_0", param1="param0_0_1"): 387 | super().__init__(name) 388 | 389 | class Submodule1(Module): 390 | def __init__(self, name, param0="param0_1_0", param1="param0_1_1"): 391 | super().__init__(name) 392 | self.add_module(Submodule1_0, "submodule1_0") 393 | 394 | class Submodule1_0(Module): 395 | def __init__(self, name, param0="param0_1_0_0", param1="param0_1_0_1"): 396 | super().__init__(name) 397 | 398 | class Module0(Module): 399 | def __init__(self, name, param0="param0", param1="param1"): 400 | super().__init__(name) 401 | self.add_module(Submodule0, "submodule0", param1="foo") 402 | self.add_module(Submodule1, "submodule1") 403 | 404 | config = { 405 | "module0": { 406 | "type": Module0, 407 | "config": {"param1": "ooo"}, 408 | "modules": { 409 | "submodule1": { 410 | "config": { 411 | "param1": "bar", 412 | }, 413 | "modules": { 414 | "submodule1_0": { 415 | "config": { 416 | "param0": "baz", 417 | }, 418 | }, 419 | }, 420 | }, 421 | }, 422 | }, 423 | } 424 | 425 | module0 = get_root_module(config) 426 | actual_config = initialize(module0) 427 | 428 | assert actual_config == { 429 | "module0": { 430 | "config": { 431 | "param0": "param0", 432 | "param1": "ooo", 433 | }, 434 | "modules": { 435 | "submodule0": { 436 | "config": { 437 | "param0": "param0_0_0", 438 | "param1": "foo", 439 | }, 440 | "modules": {}, 441 | }, 442 | "submodule1": { 443 | "config": { 444 | "param0": "param0_1_0", 445 | "param1": "bar", 446 | }, 447 | "modules": { 448 | "submodule1_0": { 449 | "config": { 450 | "param0": "baz", 451 | "param1": "param0_1_0_1", 452 | }, 453 | "modules": {}, 454 | } 455 | }, 456 | }, 457 | }, 458 | } 459 | } 460 | -------------------------------------------------------------------------------- /tests/test_context.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from anyio import fail_after 4 | from fps import Context, SharedValue 5 | 6 | pytestmark = pytest.mark.anyio 7 | 8 | 9 | async def test_context(): 10 | context = Context() 11 | published_value_0 = 1 12 | published_value_1 = "foo" 13 | shared_value_0 = context.put(published_value_0) 14 | shared_value_1 = context.put(published_value_1) 15 | acquired_value_0 = await context.get(int) 16 | acquired_value_1 = await context.get(int) 17 | assert published_value_0 is acquired_value_0.unwrap() 18 | assert published_value_0 is acquired_value_1.unwrap() 19 | 20 | with fail_after(0.1): 21 | await shared_value_1.freed() 22 | 23 | with pytest.raises(TimeoutError): 24 | await shared_value_0.freed(timeout=0.1) 25 | 26 | with pytest.raises(TimeoutError): 27 | await context.aclose(timeout=0.1) 28 | 29 | acquired_value_0.drop() 30 | 31 | with pytest.raises(RuntimeError) as excinfo: 32 | acquired_value_0.unwrap() 33 | assert str(excinfo.value) == "Already dropped" 34 | 35 | with pytest.raises(TimeoutError): 36 | await shared_value_0.freed(timeout=0.1) 37 | 38 | with pytest.raises(TimeoutError): 39 | await 
context.aclose(timeout=0.1) 40 | 41 | acquired_value_1.drop() 42 | 43 | with fail_after(0.1): 44 | await shared_value_0.freed() 45 | 46 | with fail_after(0.1): 47 | await context.aclose() 48 | 49 | with pytest.raises(RuntimeError) as excinfo: 50 | context.put("foo") 51 | assert str(excinfo.value) == "Context is closed" 52 | 53 | with pytest.raises(RuntimeError) as excinfo: 54 | await context.get(int) 55 | assert str(excinfo.value) == "Context is closed" 56 | 57 | 58 | async def test_context_cm(): 59 | async with Context() as context: 60 | context.put("foo") 61 | value = await context.get(str) 62 | value.drop() 63 | 64 | 65 | async def test_value_teardown_callback(): 66 | with pytest.raises(RuntimeError) as excinfo: 67 | async with Context() as context: 68 | value = ["start"] 69 | 70 | async def callback(exception): 71 | value.append(exception) 72 | 73 | context.put(value, teardown_callback=callback) 74 | error = RuntimeError() 75 | raise error 76 | 77 | assert excinfo.value == error 78 | assert value == ["start", error] 79 | 80 | 81 | async def test_shared_value_cm(): 82 | async with SharedValue("foo") as shared_value: 83 | acquired_value = await shared_value.get() 84 | value = acquired_value.unwrap() 85 | acquired_value.drop() 86 | assert value == "foo" 87 | 88 | 89 | async def test_shared_value_timeout(): 90 | shared_value = SharedValue("foo") 91 | acquired_value = await shared_value.get() 92 | value = acquired_value.unwrap() 93 | assert value == "foo" 94 | 95 | with pytest.raises(TimeoutError): 96 | await shared_value.aclose(timeout=0.1) 97 | 98 | 99 | @pytest.mark.parametrize("manage", (False, True)) 100 | @pytest.mark.parametrize("async_", (False, True)) 101 | async def test_shared_value_manage(manage: bool, async_: bool): 102 | class Foo: 103 | def __init__(self): 104 | self.entered = False 105 | self.exited = False 106 | self.aentered = False 107 | self.aexited = False 108 | 109 | if async_: 110 | 111 | async def __aenter__(self): 112 | self.aentered = True 113 | return self 114 | 115 | async def __aexit__(self, exc_type, exc_value, exc_tb): 116 | self.aexited = True 117 | else: 118 | 119 | def __enter__(self): 120 | self.entered = True 121 | return self 122 | 123 | def __exit__(self, exc_type, exc_value, exc_tb): 124 | self.exited = True 125 | 126 | foo = Foo() 127 | 128 | async with SharedValue(foo, manage=manage) as shared_value: 129 | acquired_value = await shared_value.get() 130 | value = acquired_value.unwrap() 131 | acquired_value.drop() 132 | 133 | assert value == foo 134 | assert foo.entered == (manage and not async_) 135 | assert foo.exited == (manage and not async_) 136 | assert foo.aentered == (manage and async_) 137 | assert foo.aexited == (manage and async_) 138 | 139 | 140 | async def test_context_teardown_callback(): 141 | called = [] 142 | 143 | async def cb0(): 144 | called.append("cb0") 145 | 146 | def cb1(): 147 | called.append("cb1") 148 | 149 | async with Context() as context: 150 | context.add_teardown_callback(cb0) 151 | context.add_teardown_callback(cb1) 152 | 153 | assert called == ["cb1", "cb0"] 154 | -------------------------------------------------------------------------------- /tests/test_exceptions.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from fps import Module 4 | 5 | pytestmark = pytest.mark.anyio 6 | 7 | 8 | async def test_exception_prepare(): 9 | outputs = [] 10 | error = RuntimeError("prepare0") 11 | 12 | class Module0(Module): 13 | async def prepare(self): 14 | 
outputs.append("prepare0") 15 | raise error 16 | 17 | async def start(self): 18 | # should not be called, since prepare failed 19 | outputs.append("start0") # pragma: no cover 20 | 21 | async def stop(self): 22 | # should always be called 23 | outputs.append("stop0") 24 | 25 | async with Module0("module0") as module0: 26 | pass 27 | 28 | assert module0.exceptions == [error] 29 | assert outputs == ["prepare0", "stop0"] 30 | 31 | 32 | async def test_exception_start(): 33 | outputs = [] 34 | error = RuntimeError("start0") 35 | 36 | class Module0(Module): 37 | async def prepare(self): 38 | outputs.append("prepare0") 39 | 40 | async def start(self): 41 | outputs.append("start0") 42 | raise error 43 | 44 | async def stop(self): 45 | # should always be called 46 | outputs.append("stop0") 47 | 48 | async with Module0("module0") as module0: 49 | pass 50 | 51 | assert module0.exceptions == [error] 52 | 53 | 54 | async def test_exception_stop(): 55 | outputs = [] 56 | error = RuntimeError("stop0") 57 | 58 | class Module0(Module): 59 | async def prepare(self): 60 | outputs.append("prepare0") 61 | 62 | async def start(self): 63 | outputs.append("start0") 64 | 65 | async def stop(self): 66 | outputs.append("stop0") 67 | raise error 68 | 69 | async with Module0("module0") as module0: 70 | pass 71 | 72 | assert module0.exceptions == [error] 73 | assert outputs == ["prepare0", "start0", "stop0"] 74 | 75 | 76 | async def test_exception_prepare_stop(): 77 | outputs = [] 78 | error_prepare0 = RuntimeError("prepare0") 79 | error_stop0 = RuntimeError("stop0") 80 | 81 | class Module0(Module): 82 | async def prepare(self): 83 | outputs.append("prepare0") 84 | raise error_prepare0 85 | 86 | async def start(self): 87 | outputs.append("start0") # pragma: no cover 88 | 89 | async def stop(self): 90 | outputs.append("stop0") 91 | raise error_stop0 92 | 93 | async with Module0("module0") as module0: 94 | pass 95 | 96 | assert module0.exceptions == [error_prepare0, error_stop0] 97 | assert outputs == ["prepare0", "stop0"] 98 | 99 | 100 | async def test_exception_start_stop(): 101 | outputs = [] 102 | error_start0 = RuntimeError("start0") 103 | error_stop0 = RuntimeError("stop0") 104 | 105 | class Module0(Module): 106 | async def prepare(self): 107 | outputs.append("prepare0") 108 | 109 | async def start(self): 110 | outputs.append("start0") 111 | raise error_start0 112 | 113 | async def stop(self): 114 | outputs.append("stop0") 115 | raise error_stop0 116 | 117 | async with Module0("module0") as module0: 118 | pass 119 | 120 | assert module0.exceptions == [error_start0, error_stop0] 121 | assert outputs == ["prepare0", "start0", "stop0"] 122 | 123 | 124 | async def test_exception_submodule(): 125 | outputs = [] 126 | error_sub_start0 = RuntimeError("sub start0") 127 | error_sub_stop0 = RuntimeError("sub stop0") 128 | 129 | class Submodule0(Module): 130 | async def prepare(self): 131 | outputs.append("sub prepare0") 132 | 133 | async def start(self): 134 | outputs.append("sub start0") 135 | raise error_sub_start0 136 | 137 | async def stop(self): 138 | outputs.append("sub stop0") 139 | raise error_sub_stop0 140 | 141 | class Module0(Module): 142 | def __init__(self, name): 143 | super().__init__(name) 144 | self.add_module(Submodule0, "submodule0") 145 | 146 | async def prepare(self): 147 | outputs.append("prepare0") 148 | 149 | async def start(self): 150 | outputs.append("start0") 151 | 152 | async def stop(self): 153 | outputs.append("stop0") 154 | 155 | async with Module0("module0") as module0: 156 | pass 157 | 158 | 
assert module0.exceptions == [error_sub_start0, error_sub_stop0] 159 | assert outputs == [ 160 | "prepare0", 161 | "sub prepare0", 162 | "start0", 163 | "sub start0", 164 | "stop0", 165 | "sub stop0", 166 | ] 167 | -------------------------------------------------------------------------------- /tests/test_module.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from fps import Module, initialize 4 | 5 | pytestmark = pytest.mark.anyio 6 | 7 | 8 | def test_buggy_module(): 9 | class BuggyModule(Module): 10 | def __init__(self, name): 11 | raise RuntimeError("foo") 12 | 13 | class Module0(Module): 14 | def __init__(self, name): 15 | super().__init__(name) 16 | self.add_module(BuggyModule, "buggy_module") 17 | 18 | with pytest.raises(RuntimeError) as excinfo: 19 | module0 = Module0("module0") 20 | initialize(module0) 21 | 22 | assert str(excinfo.value) == "Cannot instantiate module 'module0.buggy_module': foo" 23 | 24 | 25 | async def test_module(): 26 | outputs = [] 27 | 28 | class Submodule0(Module): 29 | async def start(self): 30 | outputs.append("started0") 31 | 32 | async def stop(self): 33 | outputs.append("stopped0") 34 | 35 | class Submodule1(Module): 36 | async def start(self): 37 | outputs.append("started1") 38 | 39 | async def stop(self): 40 | outputs.append("stopped1") 41 | 42 | class Module0(Module): 43 | def __init__(self, name): 44 | super().__init__(name) 45 | self.add_module(Submodule0, "submodule0") 46 | self.add_module(Submodule1, "submodule1") 47 | 48 | async with Module0("module0") as module0: 49 | pass 50 | 51 | assert module0.started 52 | assert outputs in ( 53 | ["started0", "started1", "stopped0", "stopped1"], 54 | ["started0", "started1", "stopped1", "stopped0"], 55 | ["started1", "started0", "stopped1", "stopped0"], 56 | ["started1", "started0", "stopped0", "stopped1"], 57 | ) 58 | 59 | 60 | async def test_add_module_str(): 61 | class Module0(Module): 62 | def __init__(self, name): 63 | super().__init__(name) 64 | self.add_module("fps:Module", "submodule0") 65 | 66 | async with Module0("module0") as module0: 67 | pass 68 | 69 | assert type(module0.modules["submodule0"]) is Module 70 | 71 | 72 | async def test_add_same_module_name(): 73 | class Submodule0(Module): 74 | pass 75 | 76 | class Module0(Module): 77 | def __init__(self, name): 78 | super().__init__(name) 79 | self.add_module(Submodule0, "submodule0") 80 | self.add_module(Submodule0, "submodule0") 81 | 82 | with pytest.raises(RuntimeError) as excinfo: 83 | Module0("module0") 84 | 85 | assert str(excinfo.value) == "Module name already exists: submodule0" 86 | 87 | 88 | async def test_module_not_initialized(): 89 | class Module0(Module): 90 | def __init__(self, name): 91 | pass 92 | 93 | module0 = Module0("module0") 94 | 95 | with pytest.raises(RuntimeError) as excinfo: 96 | async with module0: 97 | pass # pragma: no cover 98 | 99 | assert ( 100 | str(excinfo.value) 101 | == "You must call super().__init__() in the __init__ method of your module" 102 | ) 103 | 104 | 105 | async def test_module_teardown_callback(): 106 | called = [] 107 | 108 | async def cb0(): 109 | called.append("cb0") 110 | 111 | def cb1(): 112 | called.append("cb1") 113 | 114 | class Module0(Module): 115 | async def start(self): 116 | self.add_teardown_callback(cb0) 117 | self.add_teardown_callback(cb1) 118 | 119 | async with Module0(name="module0"): 120 | pass 121 | 122 | assert called == ["cb1", "cb0"] 123 | 
-------------------------------------------------------------------------------- /tests/test_signal.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from anyio import TASK_STATUS_IGNORED, create_task_group 4 | from anyio.abc import TaskStatus 5 | from fps import Signal 6 | 7 | pytestmark = pytest.mark.anyio 8 | 9 | 10 | async def test_signal_callback(): 11 | signal = Signal() 12 | values0 = [] 13 | values1 = [] 14 | 15 | async def acallback(value): 16 | values0.append(f"a{value}") 17 | 18 | def callback(value): 19 | values1.append(value) 20 | 21 | signal.connect(acallback) 22 | signal.connect(callback) 23 | 24 | await signal.emit("foo") 25 | await signal.emit("bar") 26 | 27 | assert values0 == ["afoo", "abar"] 28 | assert values1 == ["foo", "bar"] 29 | 30 | signal.disconnect(acallback) 31 | 32 | await signal.emit("baz") 33 | 34 | assert values0 == ["afoo", "abar"] 35 | assert values1 == ["foo", "bar", "baz"] 36 | 37 | 38 | async def test_signal_iterator(): 39 | signal = Signal() 40 | values0 = [] 41 | values1 = [] 42 | 43 | async def task(values, idx, *, task_status: TaskStatus[None] = TASK_STATUS_IGNORED): 44 | async with signal.iterate() as iterator: 45 | task_status.started() 46 | async for value in iterator: 47 | values.append(f"{value}{idx}") 48 | if idx == 0: 49 | if value == "bar": 50 | return 51 | else: 52 | if value == "baz": 53 | return 54 | 55 | async with create_task_group() as tg: 56 | await tg.start(task, values0, 0) 57 | await tg.start(task, values1, 1) 58 | 59 | await signal.emit("foo") 60 | await signal.emit("bar") 61 | await signal.emit("baz") 62 | 63 | assert values0 == ["foo0", "bar0"] 64 | assert values1 == ["foo1", "bar1", "baz1"] 65 | -------------------------------------------------------------------------------- /tests/test_start_stop.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from anyio import sleep 4 | from fps import Module 5 | 6 | pytestmark = pytest.mark.anyio 7 | 8 | 9 | async def test_prepare(): 10 | class Module0(Module): 11 | async def prepare(self): 12 | # will never prepare 13 | await sleep(1) 14 | 15 | async with Module0("module0", prepare_timeout=0.1) as module0: 16 | pass 17 | 18 | assert len(module0.exceptions) == 1 19 | assert str(module0.exceptions[0]) == "Module timed out while preparing: module0" 20 | 21 | 22 | async def test_nested_prepare(): 23 | class Submodule0(Module): 24 | async def prepare(self): 25 | # will never prepare 26 | await sleep(1) 27 | 28 | class Module0(Module): 29 | def __init__(self, *args, **kwargs): 30 | super().__init__(*args, **kwargs) 31 | self.submodule0 = self.add_module(Submodule0, "submodule0") 32 | 33 | async with Module0("module0", prepare_timeout=0.1) as module0: 34 | pass 35 | 36 | assert len(module0.exceptions) == 1 37 | assert ( 38 | str(module0.exceptions[0]) 39 | == "Module timed out while preparing: module0.submodule0" 40 | ) 41 | 42 | 43 | async def test_start(): 44 | class Module0(Module): 45 | async def start(self): 46 | # will never start 47 | await sleep(1) 48 | 49 | async with Module0("module0", start_timeout=0.1) as module0: 50 | pass 51 | 52 | assert len(module0.exceptions) == 1 53 | assert str(module0.exceptions[0]) == "Module timed out while starting: module0" 54 | 55 | 56 | async def test_nested_start(): 57 | class Submodule0(Module): 58 | async def start(self): 59 | # will never start 60 | await sleep(1) 61 | 62 | class Module0(Module): 63 | def __init__(self, name, 
start_timeout): 64 | super().__init__(name, start_timeout=start_timeout) 65 | self.submodule0 = self.add_module(Submodule0, "submodule0") 66 | 67 | async with Module0("module0", start_timeout=0.1) as module0: 68 | pass 69 | 70 | assert len(module0.exceptions) == 1 71 | assert ( 72 | str(module0.exceptions[0]) 73 | == "Module timed out while starting: module0.submodule0" 74 | ) 75 | 76 | 77 | async def test_stop(): 78 | class Module0(Module): 79 | async def stop(self): 80 | # will never stop 81 | await sleep(1) 82 | 83 | module0 = Module0("module0", stop_timeout=0.1) 84 | 85 | async with module0: 86 | pass 87 | 88 | assert len(module0.exceptions) == 1 89 | assert str(module0.exceptions[0]) == "Module timed out while stopping: module0" 90 | 91 | 92 | async def test_nested_stop(): 93 | class Submodule0(Module): 94 | async def stop(self): 95 | # will never stop 96 | await sleep(1) 97 | 98 | class Module0(Module): 99 | def __init__(self, name, stop_timeout): 100 | super().__init__(name, stop_timeout=stop_timeout) 101 | self.submodule0 = self.add_module(Submodule0, "submodule0") 102 | 103 | async with Module0("module0", stop_timeout=0.1) as module0: 104 | pass 105 | 106 | assert len(module0.exceptions) == 1 107 | assert ( 108 | str(module0.exceptions[0]) 109 | == "Module timed out while stopping: module0.submodule0" 110 | ) 111 | -------------------------------------------------------------------------------- /tests/test_tasks.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import pytest 4 | 5 | from anyio import TASK_STATUS_IGNORED, create_task_group, sleep 6 | from anyio.abc import TaskStatus 7 | from fps import Module 8 | 9 | if sys.version_info < (3, 11): 10 | from exceptiongroup import ExceptionGroup # pragma: no cover 11 | 12 | pytestmark = pytest.mark.anyio 13 | 14 | 15 | async def test_task1(): 16 | outputs = [] 17 | 18 | class Value0: 19 | pass 20 | 21 | async def task( 22 | message: str, *, task_status: TaskStatus[None] = TASK_STATUS_IGNORED 23 | ) -> None: 24 | outputs.append(message) 25 | task_status.started() 26 | await sleep(float("inf")) 27 | 28 | class Submodule0(Module): 29 | async def prepare(self): 30 | async with create_task_group() as self.tg1: 31 | await self.tg1.start(task, "prepare0", name="prepare0") 32 | self.done() 33 | outputs.append("prepared0") 34 | 35 | async def start(self): 36 | async with create_task_group() as self.tg2: 37 | await self.tg2.start(task, "start0", name="start0") 38 | self.done() 39 | outputs.append("started0") 40 | self.value0 = Value0() 41 | self.put(self.value0) 42 | 43 | async def stop(self): 44 | await self.freed(self.value0) 45 | self.tg1.cancel_scope.cancel() 46 | self.tg2.cancel_scope.cancel() 47 | outputs.append("stopped0") 48 | 49 | class Submodule1(Module): 50 | async def start(self): 51 | self.value0 = await self.get(Value0) 52 | async with create_task_group() as self.tg: 53 | await self.tg.start(task, "start1", name="start1") 54 | self.done() 55 | outputs.append("started1") 56 | 57 | async def stop(self): 58 | self.tg.cancel_scope.cancel() 59 | outputs.append("stopped1") 60 | self.drop(self.value0) 61 | 62 | class Module0(Module): 63 | def __init__(self, name): 64 | super().__init__(name) 65 | self.add_module(Submodule0, "submodule0") 66 | self.add_module(Submodule1, "submodule1") 67 | 68 | module0 = Module0("module0") 69 | 70 | async with module0: 71 | pass 72 | 73 | assert outputs == [ 74 | "prepare0", 75 | "prepared0", 76 | "start0", 77 | "started0", 78 | "start1", 79 | 
"started1", 80 | "stopped1", 81 | "stopped0", 82 | ] 83 | 84 | 85 | async def test_task2(): 86 | outputs = [] 87 | dt = 0.02 88 | 89 | async def counter(name, number, delay): 90 | await sleep(delay) 91 | for i in range(number): 92 | outputs.append(f"{name} {i}") 93 | await sleep(dt * 10) 94 | 95 | class Submodule0(Module): 96 | def __init__(self, name): 97 | super().__init__(name) 98 | self.add_module(Submodule0_0, "submodule0_0") 99 | 100 | async def prepare(self): 101 | async with create_task_group() as tg: 102 | tg.start_soon(counter, f"{self.name} prepare", 2, dt * 4) 103 | self.done() 104 | 105 | async def start(self): 106 | async with create_task_group() as tg: 107 | tg.start_soon(counter, f"{self.name} start", 2, dt * 6) 108 | self.done() 109 | 110 | async def stop(self): 111 | async with create_task_group() as tg: 112 | tg.start_soon(counter, f"{self.name} stop", 2, dt * 4) 113 | 114 | class Submodule0_0(Module): 115 | async def prepare(self): 116 | async with create_task_group() as tg: 117 | tg.start_soon(counter, f"{self.name} prepare", 3, 0) 118 | self.done() 119 | 120 | async def start(self): 121 | async with create_task_group() as tg: 122 | tg.start_soon(counter, f"{self.name} start", 3, dt * 2) 123 | self.done() 124 | 125 | async def stop(self): 126 | async with create_task_group() as tg: 127 | tg.start_soon(counter, f"{self.name} stop", 3, 0) 128 | 129 | class Submodule1(Module): 130 | async def start(self): 131 | async with create_task_group() as tg: 132 | tg.start_soon(counter, f"{self.name} start", 1, dt * 8) 133 | self.done() 134 | 135 | async def stop(self): 136 | async with create_task_group() as tg: 137 | tg.start_soon(counter, f"{self.name} stop", 1, dt * 8) 138 | 139 | class Module0(Module): 140 | def __init__(self, name): 141 | super().__init__(name) 142 | self.add_module(Submodule0, "submodule0") 143 | self.add_module(Submodule1, "submodule1") 144 | 145 | async with Module0("module0"): 146 | await sleep(dt * 24) 147 | 148 | assert outputs == [ 149 | "submodule0_0 prepare 0", 150 | "submodule0_0 start 0", 151 | "submodule0 prepare 0", 152 | "submodule0 start 0", 153 | "submodule1 start 0", 154 | "submodule0_0 prepare 1", 155 | "submodule0_0 start 1", 156 | "submodule0 prepare 1", 157 | "submodule0 start 1", 158 | "submodule0_0 prepare 2", 159 | "submodule0_0 start 2", 160 | "submodule0_0 stop 0", 161 | "submodule0 stop 0", 162 | "submodule1 stop 0", 163 | "submodule0_0 stop 1", 164 | "submodule0 stop 1", 165 | "submodule0_0 stop 2", 166 | ] 167 | 168 | 169 | async def test_failing_task(): 170 | outputs = [] 171 | 172 | async def failing_task(): 173 | await sleep(0.05) 174 | raise RuntimeError("start0") 175 | 176 | class Submodule0(Module): 177 | async def start(self): 178 | async with create_task_group() as self.tg: 179 | self.tg.start_soon(failing_task) 180 | outputs.append("started0") 181 | 182 | class Submodule1(Module): 183 | pass 184 | 185 | class Module0(Module): 186 | def __init__(self, name): 187 | super().__init__(name) 188 | self.add_module(Submodule0, "submodule0") 189 | self.add_module(Submodule1, "submodule1") 190 | 191 | async with Module0("module0") as module0: 192 | await sleep(0.1) 193 | 194 | assert len(module0.exceptions) == 1 195 | assert type(module0.exceptions[0]) is ExceptionGroup 196 | assert str(module0.exceptions[0].exceptions[0]) == "start0" 197 | 198 | assert outputs == [ 199 | "started0", 200 | ] 201 | -------------------------------------------------------------------------------- /tests/test_value.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from anyio import sleep 4 | from fps import Module 5 | 6 | pytestmark = pytest.mark.anyio 7 | 8 | 9 | async def test_value(): 10 | value0 = None 11 | value1 = None 12 | 13 | class Value0: 14 | pass 15 | 16 | class Value1: 17 | pass 18 | 19 | class Submodule0(Module): 20 | async def start(self): 21 | nonlocal value1 22 | self.value0 = Value0() 23 | self.put(self.value0) 24 | self.value1 = value1 = await self.get(Value1) 25 | 26 | async def stop(self): 27 | self.drop(self.value1) 28 | await self.freed(self.value0) 29 | 30 | class Submodule1(Module): 31 | async def start(self): 32 | nonlocal value0 33 | self.value0 = value0 = await self.get(Value0) 34 | self.value1 = Value1() 35 | self.put(self.value1) 36 | 37 | async def stop(self): 38 | await self.freed(self.value1) 39 | self.drop(self.value0) 40 | 41 | class Module0(Module): 42 | def __init__(self, name): 43 | super().__init__(name) 44 | self.add_module(Submodule0, "submodule0") 45 | self.add_module(Submodule1, "submodule1") 46 | 47 | async with Module0("module0") as module0: 48 | pass 49 | 50 | assert type(module0.modules["submodule1"].value0) is Value0 51 | assert type(module0.modules["submodule0"].value1) is Value1 52 | assert module0.modules["submodule1"].value0 == value0 53 | assert module0.modules["submodule0"].value1 == value1 54 | 55 | 56 | async def test_value_level(): 57 | class Value0: 58 | pass 59 | 60 | class Value1: 61 | pass 62 | 63 | class Value2: 64 | pass 65 | 66 | class Module0(Module): 67 | def __init__(self, name): 68 | super().__init__(name) 69 | self.add_module(Module1, "module1") 70 | 71 | async def start(self): 72 | self.value0 = Value0() 73 | self.put(self.value0) 74 | self.value1 = await self.get(Value1, timeout=0.1) 75 | try: 76 | self.value2 = await self.get(Value2, timeout=0.1) 77 | except TimeoutError: 78 | self.value2 = None 79 | 80 | class Module1(Module): 81 | def __init__(self, name): 82 | super().__init__(name) 83 | self.add_module(Module2, "module2") 84 | 85 | async def start(self): 86 | self.value1 = Value1() 87 | self.put(self.value1) 88 | self.value0 = await self.get(Value0, timeout=0.1) 89 | self.value2 = await self.get(Value2, timeout=0.1) 90 | 91 | class Module2(Module): 92 | async def start(self): 93 | self.value2 = Value2() 94 | self.put(self.value2) 95 | try: 96 | self.value0 = await self.get(Value0, timeout=0.1) 97 | except TimeoutError: 98 | self.value0 = None 99 | self.value1 = await self.get(Value1, timeout=0.1) 100 | 101 | async with Module0("module0") as module0: 102 | pass 103 | 104 | module1 = module0.modules["module1"] 105 | module2 = module1.modules["module2"] 106 | assert module0.value1 == module1.value1 107 | assert module0.value2 is None 108 | assert module1.value0 == module0.value0 109 | assert module1.value2 == module2.value2 110 | assert module2.value0 is None 111 | assert module2.value1 == module1.value1 112 | 113 | 114 | async def test_get_timeout(): 115 | class Module0(Module): 116 | async def start(self): 117 | try: 118 | self.value0 = await self.get(str, timeout=0) 119 | except TimeoutError: 120 | self.value0 = None 121 | 122 | async with Module0("module0") as module0: 123 | pass 124 | 125 | assert module0.value0 is None 126 | 127 | 128 | async def test_value_with_context_manager(): 129 | outputs = [] 130 | 131 | class Value0: 132 | async def __aenter__(self): 133 | outputs.append("aenter") 134 | return self 135 | 136 | async def __aexit__(self, exc_type, exc_value, 
exc_tb): 137 | outputs.append("aexit") 138 | 139 | def __enter__(self): 140 | outputs.append("enter") 141 | return self 142 | 143 | def __exit__(self, exc_type, exc_value, exc_tb): 144 | outputs.append("exit") 145 | 146 | class Module0(Module): 147 | async def start(self): 148 | outputs.append("start") 149 | value0 = Value0() 150 | await self.async_context_manager(value0) 151 | self.context_manager(value0) 152 | 153 | async def stop(self): 154 | outputs.append("stop") 155 | 156 | async with Module0("module0"): 157 | pass 158 | 159 | assert outputs == [ 160 | "start", 161 | "aenter", 162 | "enter", 163 | "exit", 164 | "aexit", 165 | "stop", 166 | ] 167 | 168 | 169 | async def test_put_with_type(): 170 | class Submodule0(Module): 171 | async def start(self): 172 | self.put(0, types=int) 173 | 174 | class Module0(Module): 175 | def __init__(self, name): 176 | super().__init__(name) 177 | self.add_module(Submodule0, "submodule0") 178 | 179 | async def start(self): 180 | self.value = await self.get(int) 181 | assert self.value == 0 182 | 183 | async def stop(self): 184 | self.drop(self.value) 185 | 186 | async with Module0("module0"): 187 | pass 188 | 189 | 190 | async def test_put_same_value_type(): 191 | class Module0(Module): 192 | async def start(self): 193 | self.put(0) 194 | self.put(0) 195 | 196 | async with Module0("module0") as module0: 197 | pass 198 | 199 | assert len(module0.exceptions) == 1 200 | assert type(module0.exceptions[0]) is RuntimeError 201 | assert str(module0.exceptions[0]) == """Value type "<class 'int'>" already exists""" 202 | 203 | 204 | async def test_put_exclusive_value(): 205 | outputs = [] 206 | 207 | class Submodule0(Module): 208 | async def start(self): 209 | self.put(0, max_borrowers=1) 210 | 211 | class Submodule1(Module): 212 | async def start(self): 213 | value = await self.get(int) 214 | outputs.append("get") 215 | await sleep(0.1) 216 | outputs.append("drop") 217 | self.drop(value) 218 | 219 | class Module0(Module): 220 | def __init__(self, name): 221 | super().__init__(name) 222 | self.add_module(Submodule0, "submodule0") 223 | self.add_module(Submodule1, "submodule1") 224 | self.add_module(Submodule1, "submodule2") 225 | 226 | async with Module0("module0"): 227 | pass 228 | 229 | assert outputs == [ 230 | "get", 231 | "drop", 232 | "get", 233 | "drop", 234 | ] 235 | 236 | 237 | async def test_not_freed(): 238 | class Submodule0(Module): 239 | async def start(self): 240 | self.put(0, types=int) 241 | 242 | class Module0(Module): 243 | def __init__(self, *args, **kwargs): 244 | super().__init__(*args, **kwargs) 245 | self.add_module(Submodule0, "submodule0") 246 | 247 | async def start(self): 248 | self.value = await self.get(int) 249 | assert self.value == 0 250 | 251 | async def stop(self): 252 | await sleep(1) 253 | 254 | async with Module0("module0", stop_timeout=0.1) as module0: 255 | pass 256 | 257 | assert len(module0.exceptions) == 2 258 | assert ( 259 | str(module0.exceptions[0]) 260 | == "Module timed out while stopping: module0.submodule0" 261 | ) 262 | assert str(module0.exceptions[1]) == "Module timed out while stopping: module0" 263 | 264 | 265 | async def test_all_freed(): 266 | outputs = [] 267 | 268 | class Submodule0(Module): 269 | async def start(self): 270 | self.put(0, types=int) 271 | 272 | async def stop(self): 273 | await self.all_freed() 274 | outputs.append("all freed") 275 | 276 | class Module0(Module): 277 | def __init__(self, name): 278 | super().__init__(name) 279 | self.add_module(Submodule0, "submodule0") 280 | 281 | async def
start(self): 282 | self.value = await self.get(int) 283 | assert self.value == 0 284 | 285 | async def stop(self): 286 | self.drop(self.value) 287 | outputs.append("dropped") 288 | 289 | async with Module0("module0"): 290 | pass 291 | 292 | assert outputs == [ 293 | "dropped", 294 | "all freed", 295 | ] 296 | -------------------------------------------------------------------------------- /tests/test_web.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import httpx 4 | from fastapi import FastAPI 5 | 6 | from fps import Module 7 | from fps.web.fastapi import FastAPIModule 8 | from fps.web.server import ServerModule 9 | 10 | pytestmark = pytest.mark.anyio 11 | 12 | 13 | async def test_web(unused_tcp_port): 14 | class Submodule0(Module): 15 | async def prepare(self): 16 | app = await self.get(FastAPI) 17 | 18 | @app.get("/") 19 | def read_root(): 20 | return {"Hello": "World"} 21 | 22 | class Module0(Module): 23 | def __init__(self, name): 24 | super().__init__(name) 25 | self.add_module(FastAPIModule, "fastapi_module") 26 | self.add_module(ServerModule, "server_module", port=unused_tcp_port) 27 | self.add_module(Submodule0, "submodule0") 28 | 29 | async with Module0("module0"): 30 | async with httpx.AsyncClient() as client: 31 | response = await client.get(f"http://127.0.0.1:{unused_tcp_port}") 32 | 33 | assert response.json() == {"Hello": "World"} 34 | --------------------------------------------------------------------------------