├── .codeclimate.yml ├── .dockerignore ├── .github ├── CODEOWNERS └── workflows │ ├── ci-pipeline.yml │ ├── github-pages.yml │ ├── python-code-style.yml │ ├── python-lint.yml │ ├── python-quality.yml │ ├── python-tests.yml │ └── python-typing.yml ├── .gitignore ├── .gitlab-ci.yml ├── .gitlab_ci ├── _templates.yml ├── base.yml ├── build.yml ├── deploy.yml └── test.yml ├── .idea ├── bootstrap-python-fastapi.iml ├── dataSources.xml ├── inspectionProfiles │ └── profiles_settings.xml ├── misc.xml ├── modules.xml ├── runConfigurations │ ├── Dev_Dependencies.xml │ ├── Dev_Stack.xml │ ├── FastAPI_app.xml │ ├── Socket_io_app.xml │ └── Tests.xml ├── ryecharm-overrides.xml ├── ryecharm.xml └── vcs.xml ├── Dockerfile ├── Makefile ├── README.md ├── alembic.ini ├── auth_volumes ├── kratos │ ├── .gitignore │ ├── identity.schema.json │ ├── kratos.yml │ └── user_registered.jsonnet └── oathkeeper │ ├── access-rules.yml │ ├── id_token.jwks.json │ └── oathkeeper.yml ├── config.alloy ├── credentials.env.template ├── docker-compose.yaml ├── docker.env ├── docs ├── .pages ├── adr │ ├── .markdownlint │ ├── .pages │ ├── 0001-record-architecture-decisions.md │ ├── adr-template.md │ └── summary.md ├── api-documentation.md ├── architecture.md ├── dockerfile.md ├── index.md ├── inversion-of-control.md ├── packages │ ├── alembic.md │ ├── bootstrap.md │ ├── domains.md │ ├── dramatiq_worker.md │ ├── gateways.md │ └── http_app.md └── zero_trust.md ├── mkdocs.yml ├── pyproject.toml ├── renovate.json ├── src ├── alembic.ini ├── common │ ├── __init__.py │ ├── asyncapi.py │ ├── bootstrap.py │ ├── config.py │ ├── di_container.py │ ├── dramatiq.py │ ├── logs │ │ ├── __init__.py │ │ └── processors.py │ ├── storage │ │ ├── SQLAlchemy │ │ │ ├── __init__.py │ │ │ └── default_bind_tables.py │ │ └── __init__.py │ ├── telemetry.py │ └── utils.py ├── domains │ ├── __init__.py │ └── books │ │ ├── __init__.py │ │ ├── _gateway_interfaces.py │ │ ├── _models.py │ │ ├── _service.py │ │ ├── _tasks.py │ │ ├── dto.py │ 
│ ├── events.py │ │ └── interfaces.py ├── dramatiq_worker │ └── __init__.py ├── gateways │ ├── __init__.py │ └── event.py ├── http_app │ ├── __init__.py │ ├── __main__.py │ ├── context.py │ ├── dependencies.py │ ├── dev_server.py │ ├── jinja_templates │ │ └── hello.html │ ├── routes │ │ ├── README.md │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ └── books.py │ │ ├── asyncapi.py │ │ ├── auth.py │ │ ├── events.py │ │ ├── graphql │ │ │ ├── __init__.py │ │ │ ├── query.py │ │ │ ├── resolvers.py │ │ │ └── types.py │ │ ├── hello.py │ │ ├── ping.py │ │ └── user_registered_hook.py │ └── templates.py ├── migrations │ ├── __init__.py │ ├── env.py │ ├── fixtures │ │ ├── __init__.py │ │ └── books_example.py │ ├── script.py.mako │ └── versions │ │ ├── 2025-01-26-212326-52b1246eda46_initialize_fixture_tables.py │ │ └── 2025-01-26-212826-bd73bd8a2ac4_create_books_table.py └── socketio_app │ ├── __init__.py │ ├── __main__.py │ ├── dev_server.py │ ├── namespaces │ ├── __init__.py │ └── chat.py │ └── web_routes │ ├── __init__.py │ └── docs.py ├── test-cross-domain-imports.sh ├── tests ├── __init__.py ├── common │ ├── __init__.py │ ├── test_asyncapi.py │ ├── test_dramatiq.py │ ├── test_telemetry.py │ └── test_utils.py ├── conftest.py ├── domains │ ├── __init__.py │ └── books │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── test_book_service.py │ │ └── test_book_tasks.py ├── http_app │ ├── __init__.py │ ├── conftest.py │ ├── routes │ │ ├── __init__.py │ │ ├── books │ │ │ ├── __init__.py │ │ │ ├── conftest.py │ │ │ ├── graphql │ │ │ │ ├── __init__.py │ │ │ │ └── test_query_books.py │ │ │ ├── test_create_book.py │ │ │ └── test_list_books.py │ │ ├── test_asyncapi.py │ │ ├── test_auth.py │ │ ├── test_events.py │ │ ├── test_hello.py │ │ └── test_ping.py │ ├── test_dependencies.py │ ├── test_exception_handlers.py │ └── test_factory.py ├── socketio_app │ ├── __init__.py │ ├── conftest.py │ ├── namespaces │ │ ├── __init__.py │ │ └── test_chat.py │ ├── test_app_factory.py │ └── 
web_routes │ │ ├── __init__.py │ │ └── test_docs.py └── storage │ ├── __init__.py │ ├── conftest.py │ ├── tables │ ├── __init__.py │ └── test_book_table.py │ └── test_sqlalchemy_init.py └── uv.lock /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | plugins: 3 | sonar-python: 4 | enabled: true 5 | exclude_patterns: 6 | - "spec/" 7 | - "!spec/support/helpers" 8 | - "config/" 9 | - "src/migrations/" 10 | - "db/" 11 | - "dist/" 12 | - "features/" 13 | - "**/node_modules/" 14 | - "script/" 15 | - "**/spec/" 16 | - "**/test/" 17 | - "**/tests/" 18 | - "Tests/" 19 | - "**/vendor/" 20 | - "**/*_test.go" 21 | - "**/*.d.ts" 22 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @febus982 -------------------------------------------------------------------------------- /.github/workflows/github-pages.yml: -------------------------------------------------------------------------------- 1 | name: Deploy static content to Pages 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | # Allows you to run this workflow manually from the Actions tab 7 | workflow_dispatch: 8 | 9 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 10 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 
11 | concurrency: 12 | group: "pages" 13 | cancel-in-progress: false 14 | 15 | jobs: 16 | build: 17 | 18 | permissions: 19 | contents: read 20 | 21 | environment: 22 | name: github-pages 23 | 24 | runs-on: ubuntu-latest 25 | 26 | steps: 27 | - name: Checkout 28 | uses: actions/checkout@v4 29 | 30 | - name: Set up Python 3.13 31 | uses: actions/setup-python@v5 32 | with: 33 | python-version: "3.13" 34 | 35 | - name: Install uv 36 | uses: astral-sh/setup-uv@v6 37 | 38 | - name: Install dependencies 39 | run: make dev-dependencies 40 | 41 | - name: Build static pages 42 | run: make docs-build 43 | 44 | - name: Setup Pages 45 | uses: actions/configure-pages@v5 46 | 47 | - name: Upload artifact 48 | uses: actions/upload-pages-artifact@v3 49 | with: 50 | path: './site' 51 | 52 | deploy: 53 | needs: build 54 | 55 | # Grant GITHUB_TOKEN the permissions required to make a Pages deployment 56 | permissions: 57 | pages: write # to deploy to Pages 58 | id-token: write # to verify the deployment originates from an appropriate source 59 | 60 | environment: 61 | name: github-pages 62 | url: ${{ steps.deployment.outputs.page_url }} 63 | 64 | runs-on: ubuntu-latest 65 | 66 | steps: 67 | - name: Deploy to GitHub Pages 68 | id: deployment 69 | uses: actions/deploy-pages@v4 70 | -------------------------------------------------------------------------------- /.github/workflows/python-code-style.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python code style 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | # Allows you to run this workflow manually from the Actions tab 12 | workflow_dispatch: 13 | 14 | jobs: 15 | format: 16 | runs-on: ubuntu-latest 17 | 18 | 
steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python 3.13 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.13" 24 | - name: Install uv 25 | uses: astral-sh/setup-uv@v6 26 | - name: Install dependencies 27 | run: make dev-dependencies 28 | - name: Check code style 29 | run: make format 30 | -------------------------------------------------------------------------------- /.github/workflows/python-lint.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python lint 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | # Allows you to run this workflow manually from the Actions tab 12 | workflow_dispatch: 13 | 14 | jobs: 15 | lint: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python 3.13 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.13" 24 | - name: Install uv 25 | uses: astral-sh/setup-uv@v6 26 | - name: Install dependencies 27 | run: make dev-dependencies 28 | - name: Lint with ruff 29 | run: make lint 30 | -------------------------------------------------------------------------------- /.github/workflows/python-quality.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python code quality 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | # Allows you to run this workflow manually from the Actions tab 12 | workflow_dispatch: 
13 | 14 | jobs: 15 | quality: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python 3.13 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.13" 24 | - name: Install uv 25 | uses: astral-sh/setup-uv@v6 26 | - name: Install dependencies 27 | run: make dev-dependencies 28 | - name: Test & publish code coverage 29 | uses: paambaati/codeclimate-action@v9.0.0 30 | env: 31 | CC_TEST_REPORTER_ID: ${{ secrets.CODECLIMATE_REPORTER_ID }} 32 | with: 33 | coverageCommand: make ci-coverage 34 | coverageLocations: | 35 | ${{github.workspace}}/coverage.lcov:lcov 36 | debug: true 37 | -------------------------------------------------------------------------------- /.github/workflows/python-tests.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python tests 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | # Allows you to run this workflow manually from the Actions tab 12 | workflow_dispatch: 13 | 14 | jobs: 15 | test: 16 | strategy: 17 | matrix: 18 | version: ["3.10", "3.11", "3.12", "3.13"] 19 | os: [ubuntu-latest] 20 | runs-on: ${{ matrix.os }} 21 | steps: 22 | - uses: actions/checkout@v4 23 | - name: Set up Python ${{ matrix.version }} 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: "${{ matrix.version }}" 27 | - name: Install uv 28 | uses: astral-sh/setup-uv@v6 29 | - name: Install dependencies 30 | run: make dev-dependencies 31 | - name: Test with pytest 32 | run: | 33 | make ci-test 34 | - name: Check typing 35 | run: | 36 | make typing 37 | -------------------------------------------------------------------------------- /.github/workflows/python-typing.yml: 
-------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python typing 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | # Allows you to run this workflow manually from the Actions tab 12 | workflow_dispatch: 13 | 14 | jobs: 15 | typing: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python 3.13 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.13" 24 | - name: Install uv 25 | uses: astral-sh/setup-uv@v6 26 | - name: Install dependencies 27 | run: make dev-dependencies 28 | - name: Check typing 29 | run: make typing 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### JetBrains template 2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 3 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 4 | 5 | # SQlite DBs 6 | *.db 7 | 8 | # User-specific stuff 9 | .idea/**/workspace.xml 10 | .idea/**/tasks.xml 11 | .idea/**/usage.statistics.xml 12 | .idea/**/dictionaries 13 | .idea/**/shelf 14 | 15 | # Generated files 16 | .idea/**/contentModel.xml 17 | 18 | # Sensitive or high-churn files 19 | .idea/**/dataSources/ 20 | .idea/**/dataSources.ids 21 | .idea/**/dataSources.local.xml 22 | .idea/**/sqlDataSources.xml 23 | .idea/**/dynamic.xml 24 | .idea/**/uiDesigner.xml 25 | .idea/**/dbnavigator.xml 26 | 27 | # Gradle 28 | .idea/**/gradle.xml 29 | .idea/**/libraries 30 | 31 | # Gradle and Maven with auto-import 32 | # When using Gradle or Maven with auto-import, you should 
exclude module files, 33 | # since they will be recreated, and may cause churn. Uncomment if using 34 | # auto-import. 35 | # .idea/artifacts 36 | # .idea/compiler.xml 37 | # .idea/jarRepositories.xml 38 | # .idea/modules.xml 39 | # .idea/*.iml 40 | # .idea/modules 41 | # *.iml 42 | # *.ipr 43 | 44 | # CMake 45 | cmake-build-*/ 46 | 47 | # Mongo Explorer plugin 48 | .idea/**/mongoSettings.xml 49 | 50 | # File-based project format 51 | *.iws 52 | 53 | # IntelliJ 54 | out/ 55 | 56 | # mpeltonen/sbt-idea plugin 57 | .idea_modules/ 58 | 59 | # JIRA plugin 60 | atlassian-ide-plugin.xml 61 | 62 | # Cursive Clojure plugin 63 | .idea/replstate.xml 64 | 65 | # Crashlytics plugin (for Android Studio and IntelliJ) 66 | com_crashlytics_export_strings.xml 67 | crashlytics.properties 68 | crashlytics-build.properties 69 | fabric.properties 70 | 71 | # Editor-based Rest Client 72 | .idea/httpRequests 73 | 74 | # Android studio 3.1+ serialized cache file 75 | .idea/caches/build_file_checksums.ser 76 | 77 | ### Linux template 78 | *~ 79 | 80 | # temporary files which can be created if a process still has a handle open of a deleted file 81 | .fuse_hidden* 82 | 83 | # KDE directory preferences 84 | .directory 85 | 86 | # Linux trash folder which might appear on any partition or disk 87 | .Trash-* 88 | 89 | # .nfs files are created when an open file is removed but is still being accessed 90 | .nfs* 91 | 92 | ### Windows template 93 | # Windows thumbnail cache files 94 | Thumbs.db 95 | Thumbs.db:encryptable 96 | ehthumbs.db 97 | ehthumbs_vista.db 98 | 99 | # Dump file 100 | *.stackdump 101 | 102 | # Folder config file 103 | [Dd]esktop.ini 104 | 105 | # Recycle Bin used on file shares 106 | $RECYCLE.BIN/ 107 | 108 | # Windows Installer files 109 | *.cab 110 | *.msi 111 | *.msix 112 | *.msm 113 | *.msp 114 | 115 | # Windows shortcuts 116 | *.lnk 117 | 118 | ### macOS template 119 | # General 120 | .DS_Store 121 | .AppleDouble 122 | .LSOverride 123 | 124 | # Icon must end with two \r 125 
| Icon 126 | 127 | # Thumbnails 128 | ._* 129 | 130 | # Files that might appear in the root of a volume 131 | .DocumentRevisions-V100 132 | .fseventsd 133 | .Spotlight-V100 134 | .TemporaryItems 135 | .Trashes 136 | .VolumeIcon.icns 137 | .com.apple.timemachine.donotpresent 138 | 139 | # Directories potentially created on remote AFP share 140 | .AppleDB 141 | .AppleDesktop 142 | Network Trash Folder 143 | Temporary Items 144 | .apdisk 145 | 146 | ### Python template 147 | # Byte-compiled / optimized / DLL files 148 | __pycache__/ 149 | *.py[cod] 150 | *$py.class 151 | 152 | # C extensions 153 | *.so 154 | 155 | # Distribution / packaging 156 | .Python 157 | build/ 158 | develop-eggs/ 159 | dist/ 160 | downloads/ 161 | eggs/ 162 | .eggs/ 163 | lib/ 164 | lib64/ 165 | parts/ 166 | sdist/ 167 | var/ 168 | wheels/ 169 | share/python-wheels/ 170 | *.egg-info/ 171 | .installed.cfg 172 | *.egg 173 | MANIFEST 174 | 175 | # PyInstaller 176 | # Usually these files are written by a python script from a template 177 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
178 | *.manifest 179 | *.spec 180 | 181 | # Installer logs 182 | pip-log.txt 183 | pip-delete-this-directory.txt 184 | 185 | # Unit test / coverage reports 186 | htmlcov/ 187 | .tox/ 188 | .nox/ 189 | .coverage 190 | .coverage.* 191 | .cache 192 | nosetests.xml 193 | coverage.xml 194 | *.cover 195 | *.py,cover 196 | .hypothesis/ 197 | .pytest_cache/ 198 | cover/ 199 | 200 | # Translations 201 | *.mo 202 | *.pot 203 | 204 | # Django stuff: 205 | *.log 206 | local_settings.py 207 | db.sqlite3 208 | db.sqlite3-journal 209 | 210 | # Flask stuff: 211 | instance/ 212 | .webassets-cache 213 | 214 | # Scrapy stuff: 215 | .scrapy 216 | 217 | # Sphinx documentation 218 | docs/_build/ 219 | 220 | # PyBuilder 221 | .pybuilder/ 222 | target/ 223 | 224 | # Jupyter Notebook 225 | .ipynb_checkpoints 226 | 227 | # IPython 228 | profile_default/ 229 | ipython_config.py 230 | 231 | # pyenv 232 | # For a library or package, you might want to ignore these files since the code is 233 | # intended to run in multiple environments; otherwise, check them in: 234 | # .python-version 235 | 236 | # pipenv 237 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 238 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 239 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 240 | # install all needed dependencies. 241 | #Pipfile.lock 242 | 243 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 244 | __pypackages__/ 245 | 246 | # Celery stuff 247 | celerybeat-schedule 248 | celerybeat.pid 249 | 250 | # SageMath parsed files 251 | *.sage.py 252 | 253 | # Environments 254 | .env 255 | .venv 256 | env/ 257 | venv/ 258 | ENV/ 259 | env.bak/ 260 | venv.bak/ 261 | 262 | # Spyder project settings 263 | .spyderproject 264 | .spyproject 265 | 266 | # Rope project settings 267 | .ropeproject 268 | 269 | # mkdocs documentation 270 | /site 271 | 272 | # mypy 273 | .mypy_cache/ 274 | .dmypy.json 275 | dmypy.json 276 | 277 | # Pyre type checker 278 | .pyre/ 279 | 280 | # pytype static type analyzer 281 | .pytype/ 282 | 283 | # Cython debug symbols 284 | cython_debug/ 285 | 286 | credentials.env 287 | volumes/ 288 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | include: 2 | - local: .gitlab_ci/base.yml 3 | -------------------------------------------------------------------------------- /.gitlab_ci/_templates.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | DOCKER_IMAGE_TAG: $CI_COMMIT_SHA 3 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME 4 | DOCKER_IMAGE_FULL_TAG: $CI_REGISTRY_IMAGE/$DOCKER_IMAGE_NAME:$DOCKER_IMAGE_TAG 5 | DOCKER_VERSION: 27.4 6 | 7 | .docker-gitlab-login: &docker-gitlab-login 8 | - echo $CI_REGISTRY_PASSWORD | docker login -u $CI_REGISTRY_USER --password-stdin $CI_REGISTRY 9 | 10 | # Build Docker image for test 11 | # TODO: Sign image using Cosign 12 | .build-and-push-gitlab: 13 | image: docker:$DOCKER_VERSION 14 | services: 15 | - docker:$DOCKER_VERSION-dind 16 | variables: 17 | DOCKER_BUILDKIT: 1 18 | DOCKER_PLATFORM: "" 19 | DOCKER_TARGET: "" 20 | DOCKER_CACHE_FULL_TAG: $CI_REGISTRY_IMAGE/$DOCKER_IMAGE_NAME:cache 21 | before_script: 22 | - apk add --no-cache bash git 23 | script: 24 | - docker buildx create --use 25 | - docker buildx 
inspect --bootstrap 26 | - *docker-gitlab-login 27 | - echo "Building $DOCKER_IMAGE_FULL_TAG - Cache from $DOCKER_CACHE_FULL_TAG" 28 | - if [[ -n "$DOCKER_TARGET" ]]; then export TARGET_ARG="--target $DOCKER_TARGET"; fi; 29 | - if [[ -n "$DOCKER_PLATFORM" ]]; then export PLATFORM_ARG="--platform $DOCKER_PLATFORM"; fi; 30 | - if [[ -n "$DOCKER_PLATFORM" ]]; then export PLATFORM_SUFFIX="-$(echo $DOCKER_PLATFORM | sed 's/\///')"; fi; 31 | # remove \ from platform variable 32 | - export SUFFIX=$(echo $DOCKER_PLATFORM | sed 's/\///') 33 | - docker buildx build --push 34 | $TARGET_ARG 35 | --tag $DOCKER_IMAGE_FULL_TAG$PLATFORM_SUFFIX 36 | $PLATFORM_ARG 37 | --cache-from type=registry,ref=$DOCKER_CACHE_FULL_TAG 38 | --cache-to type=registry,ref=$DOCKER_CACHE_FULL_TAG 39 | . 40 | 41 | # Architectures are hardcoded for multiarch, need to make this better 42 | .multiarch-manifest-gitlab: 43 | image: docker:$DOCKER_VERSION 44 | services: 45 | - docker:$DOCKER_VERSION-dind 46 | script: 47 | - *docker-gitlab-login 48 | - echo "Building $DOCKER_IMAGE_FULL_TAG multiarch manifest" 49 | - docker buildx imagetools create 50 | --tag $DOCKER_IMAGE_FULL_TAG 51 | $DOCKER_IMAGE_FULL_TAG-linuxamd64 52 | $DOCKER_IMAGE_FULL_TAG-linuxarm64 53 | 54 | .promote-image: 55 | image: docker:$DOCKER_VERSION 56 | variables: 57 | PROMOTED_ENVIRONMENT: "dev" 58 | DOCKER_BUILDKIT: 1 59 | services: 60 | - docker:$DOCKER_VERSION-dind 61 | script: 62 | - *docker-gitlab-login 63 | # Remove the UTC offset, not supported by `date` in docker image (busybox) 64 | - export CLEAN_DATETIME=$(echo "$CI_JOB_STARTED_AT" | sed 's/+00:00//' | sed 's/Z//') 65 | # Transform in unix timestamp 66 | - export UNIX_TIMESTAMP=$(date -d "$CLEAN_DATETIME" -D "%Y-%m-%dT%H:%M:%S" +%s) 67 | - echo "Unix timestamp - $UNIX_TIMESTAMP" 68 | - echo "Tagging $CI_REGISTRY_IMAGE/$DOCKER_IMAGE_NAME:$PROMOTED_ENVIRONMENT-$UNIX_TIMESTAMP from $DOCKER_IMAGE_FULL_TAG" 69 | - docker buildx imagetools create 70 | --annotation 
index:org.opencontainers.image.version=$CI_COMMIT_SHORT_SHA 71 | --annotation index:org.opencontainers.image.revision=$CI_COMMIT_SHA 72 | --annotation index:org.opencontainers.image.source=$CI_PROJECT_URL 73 | --annotation index:org.opencontainers.image.created=$CI_JOB_STARTED_AT 74 | --tag $CI_REGISTRY_IMAGE/$DOCKER_IMAGE_NAME:$PROMOTED_ENVIRONMENT-$UNIX_TIMESTAMP 75 | $DOCKER_IMAGE_FULL_TAG 76 | 77 | .python-typing: 78 | image: $DOCKER_IMAGE_FULL_TAG 79 | script: 80 | - make typing 81 | 82 | .python-lint: 83 | image: $DOCKER_IMAGE_FULL_TAG 84 | script: 85 | - make lint 86 | 87 | .python-format: 88 | image: $DOCKER_IMAGE_FULL_TAG 89 | script: 90 | - make format 91 | 92 | .python-tests: 93 | image: $DOCKER_IMAGE_FULL_TAG 94 | script: 95 | - make test 96 | 97 | -------------------------------------------------------------------------------- /.gitlab_ci/base.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | # Use docker.io for Docker Hub if empty 3 | REGISTRY: registry.gitlab.com 4 | # IMAGE_NAME is defined as / in GitLab CI/CD 5 | IMAGE_NAME: $CI_REGISTRY_IMAGE 6 | TEST_TAG: $REGISTRY/$CI_PROJECT_PATH:test 7 | 8 | stages: 9 | - build 10 | - test 11 | - deploy 12 | 13 | include: 14 | - local: /.gitlab_ci/_templates.yml 15 | - local: /.gitlab_ci/build.yml 16 | - local: /.gitlab_ci/test.yml 17 | - local: /.gitlab_ci/deploy.yml 18 | -------------------------------------------------------------------------------- /.gitlab_ci/build.yml: -------------------------------------------------------------------------------- 1 | # Build Docker image for test 2 | build-test: 3 | stage: build 4 | variables: 5 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-test 6 | DOCKER_TARGET: dev 7 | rules: 8 | # We run the pipeline only on merge requests or the `main` branch 9 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 10 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 11 | extends: 12 | - .build-and-push-gitlab 13 | 14 | # TODO: Make the 
multi-arch build in a single job (perhaps with a nested workflow) 15 | build-http-app-amd64: 16 | stage: build 17 | variables: 18 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-http 19 | DOCKER_PLATFORM: "linux/amd64" 20 | DOCKER_TARGET: http 21 | tags: 22 | - saas-linux-small-amd64 23 | rules: 24 | # We run the pipeline only on merge requests or the `main` branch 25 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 26 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 27 | extends: 28 | - .build-and-push-gitlab 29 | 30 | build-http-app-arm64: 31 | stage: build 32 | variables: 33 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-http 34 | DOCKER_PLATFORM: "linux/arm64" 35 | DOCKER_TARGET: http 36 | tags: 37 | - saas-linux-small-arm64 38 | rules: 39 | # We run the pipeline only on merge requests or the `main` branch 40 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 41 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 42 | extends: 43 | - .build-and-push-gitlab 44 | 45 | aggregate-http-manifests: 46 | stage: build 47 | needs: 48 | - build-http-app-amd64 49 | - build-http-app-arm64 50 | variables: 51 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-http 52 | rules: 53 | # We run the pipeline only on merge requests or the `main` branch 54 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 55 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 56 | extends: 57 | - .multiarch-manifest-gitlab 58 | 59 | -------------------------------------------------------------------------------- /.gitlab_ci/deploy.yml: -------------------------------------------------------------------------------- 1 | promote-dev: 2 | stage: deploy 3 | variables: 4 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-http 5 | rules: 6 | # We run the pipeline only on merge requests or the `main` branch 7 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 8 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 9 | extends: 10 | - .promote-image 11 | when: manual 12 | 
-------------------------------------------------------------------------------- /.gitlab_ci/test.yml: -------------------------------------------------------------------------------- 1 | # Test Docker image 2 | typing: 3 | stage: test 4 | variables: 5 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-test 6 | rules: 7 | # We run the pipeline only on merge requests or the `main` branch 8 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 9 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 10 | extends: 11 | - .python-typing 12 | 13 | lint: 14 | stage: test 15 | variables: 16 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-test 17 | rules: 18 | # We run the pipeline only on merge requests or the `main` branch 19 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 20 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 21 | extends: 22 | - .python-lint 23 | 24 | format: 25 | stage: test 26 | variables: 27 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-test 28 | rules: 29 | # We run the pipeline only on merge requests or the `main` branch 30 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 31 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 32 | extends: 33 | - .python-format 34 | 35 | tests: 36 | stage: test 37 | variables: 38 | DOCKER_IMAGE_NAME: $CI_PROJECT_NAME-test 39 | rules: 40 | # We run the pipeline only on merge requests or the `main` branch 41 | - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH 42 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 43 | extends: 44 | - .python-tests 45 | -------------------------------------------------------------------------------- /.idea/bootstrap-python-fastapi.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 16 | 17 | 19 | -------------------------------------------------------------------------------- /.idea/dataSources.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | mariadb 6 | true 7 | 
org.mariadb.jdbc.Driver 8 | jdbc:mariadb://localhost:3306/backend 9 | 10 | 11 | 12 | 13 | 14 | 15 | $ProjectFileDir$ 16 | 17 | 18 | -------------------------------------------------------------------------------- /.idea/inspectionProfiles/profiles_settings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /.idea/runConfigurations/Dev_Dependencies.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 13 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /.idea/runConfigurations/Dev_Stack.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 13 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /.idea/runConfigurations/FastAPI_app.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 35 | -------------------------------------------------------------------------------- /.idea/runConfigurations/Socket_io_app.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 35 | -------------------------------------------------------------------------------- /.idea/runConfigurations/Tests.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 20 | 
-------------------------------------------------------------------------------- /.idea/ryecharm-overrides.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 10 | 11 | -------------------------------------------------------------------------------- /.idea/ryecharm.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION=3.13 2 | FROM python:$PYTHON_VERSION-slim AS base 3 | ARG UID=2000 4 | ARG GID=2000 5 | RUN addgroup --gid $GID nonroot && \ 6 | adduser --uid $UID --gid $GID --disabled-password --gecos "" nonroot 7 | WORKDIR /app 8 | RUN chown nonroot:nonroot /app 9 | 10 | # Creating a separate directory for venvs allows to easily 11 | # copy them from the builder and to mount the application 12 | # for local development 13 | RUN mkdir /venv && chown nonroot:nonroot /venv 14 | ENV PATH="/venv/bin:$PATH" 15 | 16 | # Install necessary runtime libraries (e.g. libmysql) 17 | RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ 18 | --mount=type=cache,target=/var/lib/apt,sharing=locked \ 19 | apt-get update \ 20 | && apt-get install -y --no-install-recommends \ 21 | make 22 | 23 | FROM base AS base_builder 24 | ENV UV_PROJECT_ENVIRONMENT=/venv 25 | # Enable bytecode compilation 26 | ENV UV_COMPILE_BYTECODE=1 27 | ENV UV_LINK_MODE=copy 28 | 29 | # Install build system requirements (gcc, library headers, etc.) 
30 | # for compiled Python requirements like psycopg2 31 | RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ 32 | --mount=type=cache,target=/var/lib/apt,sharing=locked \ 33 | apt-get update \ 34 | && apt-get install -y --no-install-recommends \ 35 | build-essential gcc git 36 | 37 | COPY --from=ghcr.io/astral-sh/uv:0.7.9 /uv /uvx /bin/ 38 | 39 | # From here we shouldn't need a root user anymore 40 | # Switch to nonroot and config uv 41 | USER nonroot 42 | 43 | COPY --chown=nonroot:nonroot pyproject.toml . 44 | COPY --chown=nonroot:nonroot uv.lock . 45 | COPY --chown=nonroot:nonroot Makefile . 46 | 47 | # Dev image, contains all files and dependencies 48 | FROM base_builder AS dev 49 | COPY --chown=nonroot:nonroot . . 50 | RUN --mount=type=cache,target=/home/nonroot/.cache/uv,sharing=locked,uid=$UID,gid=$GID \ 51 | make dev-dependencies 52 | 53 | # Note that opentelemetry doesn't play well together with uvicorn reloader 54 | # when signals are propagated, we disable it in dev image default CMD 55 | CMD ["uvicorn", "http_app:create_app", "--host", "0.0.0.0", "--port", "8000", "--factory", "--reload"] 56 | 57 | # Installs requirements to run production dramatiq application 58 | FROM base_builder AS dramatiq_builder 59 | RUN --mount=type=cache,target=/home/nonroot/.cache/uv,sharing=locked,uid=$UID,gid=$GID \ 60 | uv sync --no-dev --no-install-project --frozen --no-editable 61 | 62 | # Installs requirements to run production http application 63 | FROM base_builder AS http_builder 64 | RUN --mount=type=cache,target=/home/nonroot/.cache/uv,sharing=locked,uid=$UID,gid=$GID \ 65 | uv sync --no-dev --group http --no-install-project --frozen --no-editable 66 | 67 | # Installs requirements to run production socketio application 68 | FROM base_builder AS socketio_builder 69 | RUN --mount=type=cache,target=/home/nonroot/.cache/uv,sharing=locked,uid=$UID,gid=$GID \ 70 | uv sync --no-dev --group socketio --no-install-project --frozen --no-editable 71 | 72 | # Installs 
requirements to run production migrations application 73 | FROM base_builder AS migrations_builder 74 | RUN --mount=type=cache,target=/home/nonroot/.cache/uv,sharing=locked,uid=$UID,gid=$GID \ 75 | uv sync --no-dev --group migrations --no-install-project --frozen --no-editable 76 | 77 | # Create the base app with the common python packages 78 | FROM base AS base_app 79 | USER nonroot 80 | COPY --chown=nonroot:nonroot src/common ./common 81 | COPY --chown=nonroot:nonroot src/domains ./domains 82 | COPY --chown=nonroot:nonroot src/gateways ./gateways 83 | 84 | # Copy the http python package and requirements from relevant builder 85 | FROM base_app AS http 86 | COPY --from=http_builder /venv /venv 87 | COPY --chown=nonroot:nonroot src/http_app ./http_app 88 | # Run CMD using array syntax, so it uses `exec` and runs as PID1 89 | CMD ["python", "-m", "http_app"] 90 | 91 | # Copy the socketio python package and requirements from relevant builder 92 | FROM base_app AS socketio 93 | COPY --from=socketio_builder /venv /venv 94 | COPY --chown=nonroot:nonroot src/socketio_app ./socketio_app 95 | # Run CMD using array syntax, so it uses `exec` and runs as PID1 96 | CMD ["python", "-m", "socketio_app"] 97 | 98 | # Copy the migrations python package and requirements from relevant builder 99 | FROM base_app AS migrations 100 | COPY --from=migrations_builder /venv /venv 101 | COPY --chown=nonroot:nonroot src/migrations ./migrations 102 | COPY --chown=nonroot:nonroot src/alembic.ini . 
103 | # Run CMD using array syntax, so it uses `exec` and runs as PID1 104 | CMD ["alembic", "upgrade", "heads"] 105 | 106 | # Copy the dramatiq python package and requirements from relevant builder 107 | FROM base_app AS dramatiq 108 | COPY --from=dramatiq_builder /venv /venv 109 | COPY --chown=nonroot:nonroot src/dramatiq_worker ./dramatiq_worker 110 | # Run CMD using array syntax, so it uses `exec` and runs as PID1 111 | # TODO: Review processes/threads 112 | CMD ["dramatiq", "-p", "1", "-t", "1", "dramatiq_worker"] 113 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: docs docs-build adr 2 | 3 | containers: 4 | docker compose build --build-arg UID=`id -u` 5 | 6 | dev-http: 7 | docker compose up dev-http 8 | 9 | dev-socketio: 10 | docker compose up dev-socketio 11 | 12 | migrate: 13 | docker compose run --rm migrate 14 | 15 | autogenerate-migration: 16 | docker compose run --rm autogenerate-migration 17 | 18 | test: 19 | uv run pytest -n auto --cov 20 | 21 | ci-test: 22 | uv run pytest -n 0 23 | 24 | ci-coverage: 25 | uv run pytest -n 0 --cov --cov-report lcov 26 | 27 | typing: 28 | uv run mypy 29 | 30 | install-dependencies: 31 | uv sync --all-groups --no-dev --no-install-project --frozen 32 | 33 | dev-dependencies: 34 | uv sync --all-groups --frozen 35 | 36 | update-dependencies: 37 | uv lock --upgrade 38 | uv sync --all-groups --frozen 39 | 40 | format: 41 | uv run ruff format --check . 42 | 43 | lint: 44 | uv run ruff check . 45 | 46 | fix: 47 | uv run ruff format . 48 | uv run ruff check . --fix 49 | uv run ruff format . 
50 | 51 | check: lint format typing test 52 | 53 | docs: 54 | uv run mkdocs serve 55 | 56 | docs-build: 57 | uv run mkdocs build 58 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Bootstrap python service 2 | [![CI Pipeline](https://github.com/febus982/bootstrap-python-fastapi/actions/workflows/ci-pipeline.yml/badge.svg)](https://github.com/febus982/bootstrap-python-fastapi/actions/workflows/ci-pipeline.yml) 3 | [![Python tests](https://github.com/febus982/bootstrap-python-fastapi/actions/workflows/python-tests.yml/badge.svg?branch=main)](https://github.com/febus982/bootstrap-python-fastapi/actions/workflows/python-tests.yml) 4 | [![Test Coverage](https://api.codeclimate.com/v1/badges/a2ab183e64778e21ae14/test_coverage)](https://codeclimate.com/github/febus982/bootstrap-python-fastapi/test_coverage) 5 | [![Maintainability](https://api.codeclimate.com/v1/badges/a2ab183e64778e21ae14/maintainability)](https://codeclimate.com/github/febus982/bootstrap-python-fastapi/maintainability) 6 | 7 | [![Checked with mypy](https://www.mypy-lang.org/static/mypy_badge.svg)](https://mypy-lang.org/) 8 | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) 9 | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 10 | [![security: bandit](https://img.shields.io/badge/security-bandit-yellow.svg)](https://github.com/PyCQA/bandit) 11 | 12 | This is an example implementation of a python application applying 13 | concepts from [Clean Architecture](https://blog.cleancoder.com/uncle-bob/2012/08/13/the-clean-architecture.html) 14 | and [SOLID principles](https://en.wikipedia.org/wiki/SOLID). 
15 | 16 | * The repository classes are isolated behind interfaces, enforcing the [Interface Segregation principle](https://en.wikipedia.org/wiki/Interface_segregation_principle) 17 | and the [Inversion of Control](https://en.wikipedia.org/wiki/Inversion_of_control) design pattern 18 | * The application frameworks are decoupled from the domain logic 19 | * The storage layer is decoupled from the domain logic 20 | 21 | This template provides out of the box some commonly used functionalities: 22 | 23 | * Sync and Async API Documentation using [FastAPI](https://fastapi.tiangolo.com/) and [AsyncAPI](https://www.asyncapi.com/en) 24 | * Async tasks execution using [Dramatiq](https://dramatiq.io/index.html) 25 | * Websocket application using [Socket.io](https://python-socketio.readthedocs.io/en/stable/index.html) 26 | * Repository pattern for databases using [SQLAlchemy](https://www.sqlalchemy.org/) and [SQLAlchemy bind manager](https://febus982.github.io/sqlalchemy-bind-manager/stable/) 27 | * Database migrations using [Alembic](https://alembic.sqlalchemy.org/en/latest/) (configured supporting both sync and async SQLAlchemy engines) 28 | * Database fixtures support using customized [Alembic](https://alembic.sqlalchemy.org/en/latest/) configuration 29 | * Authentication and Identity Provider using [ORY Zero Trust architecture](https://www.ory.sh/docs/kratos/guides/zero-trust-iap-proxy-identity-access-proxy) 30 | * Full observability setup using [OpenTelemetry](https://opentelemetry.io/) (Metrics, Traces and Logs) 31 | * Example CI/CD deployment pipeline for GitLab (The focus for this repository is still GitHub but, in case you want to use GitLab 🤷) 32 | * [TODO] Producer and consumer to emit and consume events using [CloudEvents](https://cloudevents.io/) format using HTTP, to be used with [Knative Eventing](https://knative.dev/docs/eventing/) 33 | 34 | ## Documentation 35 | 36 | The detailed documentation is available: 37 | 38 | * Online on [GitHub 
pages](https://febus982.github.io/bootstrap-python-fastapi/) 39 | * Offline by running `make docs` after installing dependencies with `make dev-dependencies` 40 | 41 | ## How to use 42 | 43 | Create your GitHub repository using this template (The big green `Use this template` button). 44 | Optionally tweak name and authors in the `pyproject.toml` file, however the metadata 45 | are not used when building the application, nor are referenced anywhere in the code. 46 | 47 | Before running any commands, install `uv` and `Docker`: 48 | 49 | - You can install `uv` on Mac using `brew`: `brew install uv` 50 | - Download and install Docker: https://www.docker.com/products/docker-desktop/ 51 | 52 | Using Docker: 53 | 54 | * `make containers`: Build containers 55 | * `docker compose up dev-http`: Run HTTP application with hot reload 56 | * `docker compose up dev-socketio`: Run Socket.io application with hot reload 57 | * `docker compose up dramatiq-worker`: Run the dramatiq worker 58 | * `docker compose run --rm test`: Run test suite 59 | * `docker compose run --rm migrate`: Run database migrations 60 | * `docker compose run --rm autogenerate-migration`: Generate a new migration file 61 | 62 | Using Make (you still need Docker for most of them): 63 | 64 | * `make install-dependencies`: Install requirements 65 | * `make dev-dependencies`: Install dev requirements 66 | * `make update-dependencies`: Updates requirements 67 | * `make dev-http`: Run HTTP application with hot reload 68 | * `make dev-socketio`: Run Socket.io application with hot reload 69 | * `make test`: Run test suite 70 | * `make migrate`: Run database migrations 71 | * `make autogenerate-migration`: Generate a new migration file 72 | 73 | ## Other commands for development 74 | 75 | * `make check`: Run tests, code style and lint checks 76 | * `make fix`: Run tests, code style and lint checks with automatic fixes (where possible) 77 | 78 | ## Multistage dockerfile configuration 79 | 80 | Python docker images tend to become
large after installing the application requirements 81 | (the slim base is ~150 MB uncompressed), therefore it's important to spend efforts 82 | to minimise the image size, even if it produces a slightly more complex multistage 83 | Dockerfile. 84 | 85 | The following setup makes sure the production image will keep to a minimal size ("only" 360MB): 86 | * 150MB base image 87 | * 210MB python installed dependencies 88 | 89 | Using the following pipeline the "test" image is instead ~850MB, more than 400MB that would 90 | end up as a cost in traffic on each image pull. 91 | 92 | ![](docs/puml/docker-container.png) 93 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # a multi-database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = src/migrations 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d%%(minute).2d%%(second).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | #prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 
19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to alembic/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 53 | 54 | # the output encoding used when revision files 55 | # are written from script.py.mako 56 | # output_encoding = utf-8 57 | 58 | # We inject db names and config using env.py in alembic directory 59 | 60 | [post_write_hooks] 61 | # post_write_hooks defines scripts or Python functions that are run 62 | # on newly generated revision scripts. 
See the documentation for further 63 | # detail and examples 64 | 65 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 66 | # hooks = black 67 | # black.type = console_scripts 68 | # black.entrypoint = black 69 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 70 | hooks = ruff 71 | ruff.type = exec 72 | ruff.executable = %(here)s/.venv/bin/ruff 73 | ruff.options = format REVISION_SCRIPT_FILENAME 74 | -------------------------------------------------------------------------------- /auth_volumes/kratos/.gitignore: -------------------------------------------------------------------------------- 1 | db.sqlite 2 | -------------------------------------------------------------------------------- /auth_volumes/kratos/identity.schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "title": "Person", 4 | "type": "object", 5 | "properties": { 6 | "traits": { 7 | "type": "object", 8 | "properties": { 9 | "email": { 10 | "type": "string", 11 | "format": "email", 12 | "title": "E-Mail", 13 | "minLength": 3, 14 | "ory.sh/kratos": { 15 | "credentials": { 16 | "password": { 17 | "identifier": true 18 | } 19 | }, 20 | "verification": { 21 | "via": "email" 22 | }, 23 | "recovery": { 24 | "via": "email" 25 | } 26 | } 27 | }, 28 | "name": { 29 | "type": "object", 30 | "properties": { 31 | "first": { 32 | "title": "First Name", 33 | "type": "string" 34 | }, 35 | "last": { 36 | "title": "Last Name", 37 | "type": "string" 38 | } 39 | } 40 | } 41 | }, 42 | "required": [ 43 | "email" 44 | ], 45 | "additionalProperties": false 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /auth_volumes/kratos/kratos.yml: -------------------------------------------------------------------------------- 1 | serve: 2 | public: 3 | # This URL has to match the one in oathkeeper rules config 4 | # we'll 
probably want to remove references to ory and kratos 5 | base_url: http://127.0.0.1:8080/.ory/kratos/public/ 6 | # We're proxying the requests through oathkeeper, need CORS 7 | cors: 8 | enabled: true 9 | allowed_origins: 10 | - http://127.0.0.1:8080 11 | allowed_methods: 12 | - POST 13 | - GET 14 | - PUT 15 | - PATCH 16 | - DELETE 17 | allowed_headers: 18 | - Authorization 19 | - Cookie 20 | - Content-Type 21 | exposed_headers: 22 | - Content-Type 23 | - Set-Cookie 24 | admin: 25 | # This is the internal URL, we'll be accessing using docker network 26 | # mainly to get the JWKS endpoint and do token validation 27 | base_url: http://kratos:4434/ 28 | 29 | selfservice: 30 | # URLs are using the Oathkeeper 31 | default_browser_return_url: http://127.0.0.1:8080/ 32 | allowed_return_urls: 33 | - http://127.0.0.1:8080 34 | - http://localhost:19006/Callback 35 | - exp://localhost:8081/--/Callback 36 | 37 | methods: 38 | password: 39 | enabled: true 40 | # totp: 41 | # config: 42 | # issuer: Kratos 43 | # enabled: true 44 | # lookup_secret: 45 | # enabled: true 46 | # link: 47 | # enabled: true 48 | # code: 49 | # enabled: true 50 | 51 | flows: 52 | error: 53 | ui_url: http://127.0.0.1:8080/error 54 | 55 | settings: 56 | ui_url: http://127.0.0.1:8080/settings 57 | privileged_session_max_age: 15m 58 | required_aal: highest_available 59 | 60 | # If we enable recovery or verification we need also 61 | # MailSlurper in the docker-compose file 62 | recovery: 63 | enabled: false 64 | ui_url: http://127.0.0.1:8080/recovery 65 | use: code 66 | verification: 67 | enabled: false 68 | ui_url: http://127.0.0.1:8080/verification 69 | use: code 70 | after: 71 | default_browser_return_url: http://127.0.0.1:8080/ 72 | 73 | logout: 74 | after: 75 | default_browser_return_url: http://127.0.0.1:8080/login 76 | 77 | login: 78 | ui_url: http://127.0.0.1:8080/login 79 | lifespan: 10m 80 | 81 | registration: 82 | lifespan: 10m 83 | ui_url: http://127.0.0.1:8080/registration 84 | 85 | after: 86 
| password: 87 | hooks: 88 | - hook: web_hook 89 | config: 90 | url: http://dev:8000/user_registered/ 91 | method: "POST" 92 | body: file:///etc/config/kratos/user_registered.jsonnet 93 | can_interrupt: true 94 | emit_analytics_event: true 95 | - hook: session 96 | 97 | log: 98 | level: info 99 | format: text 100 | leak_sensitive_values: true 101 | 102 | secrets: 103 | cookie: 104 | - PLEASE-CHANGE-ME-I-AM-VERY-INSECURE 105 | cipher: 106 | - 32-LONG-SECRET-NOT-SECURE-AT-ALL 107 | 108 | ciphers: 109 | algorithm: xchacha20-poly1305 110 | 111 | hashers: 112 | algorithm: bcrypt 113 | bcrypt: 114 | cost: 8 115 | 116 | identity: 117 | default_schema_id: default 118 | schemas: 119 | - id: default 120 | url: file:///etc/config/kratos/identity.schema.json 121 | 122 | courier: 123 | smtp: 124 | connection_uri: smtps://test:test@mailslurper:1025/?skip_ssl_verify=true 125 | 126 | feature_flags: 127 | use_continue_with_transitions: true 128 | -------------------------------------------------------------------------------- /auth_volumes/kratos/user_registered.jsonnet: -------------------------------------------------------------------------------- 1 | function(ctx) { 2 | user_id: ctx.identity.id, 3 | email: ctx.identity.traits.email, 4 | } 5 | -------------------------------------------------------------------------------- /auth_volumes/oathkeeper/access-rules.yml: -------------------------------------------------------------------------------- 1 | # Kratos public API for authorized and unauthorized traffic 2 | - id: "ory:kratos:public" 3 | upstream: 4 | preserve_host: true 5 | url: "http://kratos:4433" 6 | strip_path: /.ory/kratos/public 7 | match: 8 | # This URL has to match serve.public.base_url in kratos config 9 | # we'll probably want to remove references to ory and kratos 10 | url: "http://127.0.0.1:8080/.ory/kratos/public/<**>" 11 | methods: 12 | - GET 13 | - POST 14 | - PUT 15 | - DELETE 16 | - PATCH 17 | authenticators: 18 | - handler: noop 19 | authorizer: 20 | 
handler: allow 21 | mutators: 22 | - handler: noop 23 | 24 | # UI Access for anonymous traffic (Home page) 25 | - id: "ory:auth-ui:anonymous" 26 | upstream: 27 | preserve_host: true 28 | url: "http://auth-ui:3000" 29 | match: 30 | url: "http://127.0.0.1:8080/" 31 | methods: 32 | - GET 33 | authenticators: 34 | - handler: anonymous 35 | authorizer: 36 | handler: allow 37 | mutators: 38 | - handler: noop 39 | 40 | # UI Access for anonymous traffic (Other pages) 41 | - id: "ory:auth-ui-home:anonymous" 42 | upstream: 43 | preserve_host: true 44 | url: "http://auth-ui:3000" 45 | match: 46 | url: "http://127.0.0.1:8080/<{registration,welcome,recovery,verification,login,error,health/{alive,ready},**.css,**.js,**.png,**.svg,**.woff*}>" 47 | methods: 48 | - GET 49 | authenticators: 50 | - handler: anonymous 51 | authorizer: 52 | handler: allow 53 | mutators: 54 | - handler: noop 55 | 56 | # UI Access for logged-in only pages 57 | - id: "ory:kratos-selfservice-ui-node:protected" 58 | upstream: 59 | preserve_host: true 60 | url: "http://auth-ui:3000" 61 | match: 62 | url: "http://127.0.0.1:8080/<{sessions,settings}>" 63 | methods: 64 | - GET 65 | authenticators: 66 | - handler: cookie_session 67 | authorizer: 68 | handler: allow 69 | mutators: 70 | - handler: id_token 71 | errors: 72 | - handler: redirect 73 | config: 74 | to: http://127.0.0.1:8080/login 75 | 76 | # Dev container access to protected /api/* endpoints, to the dev container 77 | - id: "http_app:protected" 78 | upstream: 79 | preserve_host: true 80 | url: "http://dev:8000" 81 | strip_path: /api 82 | match: 83 | url: "http://127.0.0.1:8080/<{api/,api/**,openapi.json}>" 84 | methods: 85 | - GET 86 | authenticators: 87 | # Get opaque token from cookie 88 | - handler: cookie_session 89 | 90 | # Or from bearer token 91 | # Note this is not a secure way to do authentication but 92 | # but we can use it for local development (i.e. 
Postman) 93 | # Refer to: https://www.ory.sh/docs/kratos/self-service/flows/user-login#login-for-api-clients-and-clients-without-browsers 94 | - handler: bearer_token 95 | authorizer: 96 | handler: allow 97 | mutators: 98 | - handler: id_token 99 | errors: 100 | - handler: redirect 101 | config: 102 | to: http://127.0.0.1:8080/login 103 | -------------------------------------------------------------------------------- /auth_volumes/oathkeeper/id_token.jwks.json: -------------------------------------------------------------------------------- 1 | { 2 | "keys": [ 3 | { 4 | "use": "sig", 5 | "kty": "RSA", 6 | "kid": "a2aa9739-d753-4a0d-87ee-61f101050277", 7 | "alg": "RS256", 8 | "n": "zpjSl0ySsdk_YC4ZJYYV-cSznWkzndTo0lyvkYmeBkW60YHuHzXaviHqonY_DjFBdnZC0Vs_QTWmBlZvPzTp4Oni-eOetP-Ce3-B8jkGWpKFOjTLw7uwR3b3jm_mFNiz1dV_utWiweqx62Se0SyYaAXrgStU8-3P2Us7_kz5NnBVL1E7aEP40aB7nytLvPhXau-YhFmUfgykAcov0QrnNY0DH0eTcwL19UysvlKx6Uiu6mnbaFE1qx8X2m2xuLpErfiqj6wLCdCYMWdRTHiVsQMtTzSwuPuXfH7J06GTo3I1cEWN8Mb-RJxlosJA_q7hEd43yYisCO-8szX0lgCasw", 9 | "e": "AQAB", 10 | "d": "x3dfY_rna1UQTmFToBoMn6Edte47irhkra4VSNPwwaeTTvI-oN2TO51td7vo91_xD1nw-0c5FFGi4V2UfRcudBv9LD1rHt_O8EPUh7QtAUeT3_XXgjx1Xxpqu5goMZpkTyGZ-B6JzOY3L8lvWQ_Qeia1EXpvxC-oTOjJnKZeuwIPlcoNKMRU-mIYOnkRFfnUvrDm7N9UZEp3PfI3vhE9AquP1PEvz5KTUYkubsfmupqqR6FmMUm6ulGT7guhBw9A3vxIYbYGKvXLdBvn68mENrEYxXrwmu6ITMh_y208M5rC-hgEHIAIvMu1aVW6jNgyQTunsGST3UyrSbwjI0K9UQ", 11 | "p": "77fDvnfHRFEgyi7mh0c6fAdtMEMJ05W8NwTG_D-cSwfWipfTwJJrroWoRwEgdAg5AWGq-MNUzrubTVXoJdC2T4g1o-VRZkKKYoMvav3CvOIMzCBxBs9I_GAKr5NCSk7maksMqiCTMhmkoZ5RPuMYMY_YzxKNAbjBd9qFLfaVAqs", 12 | "q": "3KEmPA2XQkf7dvtpY1Xkp1IfMV_UBdmYk7J6dB5BYqzviQWdEFvWaSATJ_7qV1dw0JDZynOgipp8gvoL-RepfjtArhPz41wB3J2xmBYrBr1sJ-x5eqAvMkQk2bd5KTor44e79TRIkmkFYAIdUQ5JdVXPA13S8WUZfb_bAbwaCBk", 13 | "dp": "5uyy32AJkNFKchqeLsE6INMSp0RdSftbtfCfM86fZFQno5lA_qjOnO_avJPkTILDT4ZjqoKYxxJJOEXCffNCPPltGvbE5GrDXsUbP8k2-LgWNeoml7XFjIGEqcCFQoohQ1IK4DTDN6cmRh76C0e_Pbdh15D6TydJEIlsdGuu_kM", 14 | "dq": 
"aegFNYCEojFxeTzX6vIZL2RRSt8oJKK-Be__reu0EUzYMtr5-RdMhev6phFMph54LfXKRc9ZOg9MQ4cJ5klAeDKzKpyzTukkj6U20b2aa8LTvxpZec6YuTVSxxu2Ul71IGRQijTNvVIiXWLGddk409Ub6Q7JqkyQfvdwhpWnnUk", 15 | "qi": "P68-EwgcRy9ce_PZ75c909cU7dzCiaGcTX1psJiXmQAFBcG0msWfsyHGbllOZG27pKde78ORGJDYDNk1FqTwsogZyCP87EiBmOoqXWnMvKYfJ1DOx7x42LMAGwMD3bgQj9jgRACxFJG4n3NI6uFlFruyl_CLQzwW_rQFHshLK7Q" 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /auth_volumes/oathkeeper/oathkeeper.yml: -------------------------------------------------------------------------------- 1 | log: 2 | level: info 3 | format: json 4 | 5 | serve: 6 | proxy: 7 | cors: 8 | enabled: true 9 | allowed_origins: 10 | - "*" 11 | allowed_methods: 12 | - POST 13 | - GET 14 | - PUT 15 | - PATCH 16 | - DELETE 17 | allowed_headers: 18 | - Authorization 19 | - Content-Type 20 | - Accept 21 | exposed_headers: 22 | - Content-Type 23 | allow_credentials: true 24 | debug: true 25 | 26 | errors: 27 | fallback: 28 | - json 29 | 30 | handlers: 31 | redirect: 32 | enabled: true 33 | config: 34 | to: http://127.0.0.1:8080/login 35 | when: 36 | - 37 | error: 38 | - unauthorized 39 | - forbidden 40 | request: 41 | header: 42 | accept: 43 | - text/html 44 | json: 45 | enabled: true 46 | config: 47 | verbose: true 48 | 49 | access_rules: 50 | matching_strategy: glob 51 | repositories: 52 | - file:///etc/config/oathkeeper/access-rules.yml 53 | 54 | authenticators: 55 | anonymous: 56 | enabled: true 57 | config: 58 | subject: guest 59 | 60 | cookie_session: 61 | enabled: true 62 | config: 63 | check_session_url: http://kratos:4433/sessions/whoami 64 | preserve_path: true 65 | extra_from: "@this" 66 | subject_from: "identity.id" 67 | only: 68 | - ory_kratos_session 69 | 70 | # Note this is not a secure way to do authentication but 71 | # but we can use it for local development (i.e. 
Postman) 72 | # Refer to: https://www.ory.sh/docs/kratos/self-service/flows/user-login#login-for-api-clients-and-clients-without-browsers 73 | bearer_token: 74 | enabled: true 75 | config: 76 | check_session_url: http://kratos:4433/sessions/whoami 77 | preserve_path: true 78 | extra_from: "@this" 79 | subject_from: "identity.id" 80 | 81 | noop: 82 | enabled: true 83 | 84 | authorizers: 85 | allow: 86 | enabled: true 87 | 88 | mutators: 89 | noop: 90 | enabled: true 91 | 92 | id_token: 93 | enabled: true 94 | config: 95 | issuer_url: http://127.0.0.1:8080/ 96 | jwks_url: file:///etc/config/oathkeeper/id_token.jwks.json 97 | claims: | 98 | { 99 | {{ if .MatchContext.Header.Get "x-impersonate" }} 100 | "impersonate": {{ .MatchContext.Header.Get "x-impersonate" | toJson }}, 101 | {{ end }} 102 | "session": {{ .Extra | toJson }} 103 | } 104 | -------------------------------------------------------------------------------- /config.alloy: -------------------------------------------------------------------------------- 1 | otelcol.receiver.otlp "default" { 2 | grpc { } 3 | 4 | output { 5 | metrics = [ 6 | otelcol.processor.transform.add_resource_attributes_as_metric_attributes.input, 7 | ] 8 | traces = [ 9 | // This transforms the traces in metrics, we still have to send traces out 10 | otelcol.connector.spanmetrics.asgi_apm.input, 11 | // This also transforms the traces in metrics, we still have to send traces out 12 | otelcol.connector.host_info.default.input, 13 | // This sends the traces out 14 | otelcol.processor.batch.default.input, 15 | ] 16 | logs = [ 17 | otelcol.processor.batch.default.input, 18 | ] 19 | } 20 | } 21 | 22 | otelcol.connector.host_info "default" { 23 | // https://grafana.com/docs/alloy/latest/reference/components/otelcol.connector.host_info/ 24 | host_identifiers = ["host.name"] 25 | 26 | output { 27 | metrics = [otelcol.processor.batch.default.input] 28 | } 29 | } 30 | 31 | otelcol.connector.spanmetrics "asgi_apm" { 32 | dimension { 33 | name = 
"http.status_code" 34 | } 35 | 36 | dimension { 37 | name = "http.method" 38 | } 39 | 40 | dimension { 41 | name = "http.route" 42 | } 43 | 44 | histogram { 45 | explicit { 46 | buckets = ["2ms", "4ms", "6ms", "8ms", "10ms", "50ms", "100ms", "200ms", "400ms", "800ms", "1s", "1400ms", "2s", "5s", "10s", "15s"] 47 | } 48 | } 49 | 50 | output { 51 | metrics = [otelcol.processor.transform.add_resource_attributes_as_metric_attributes.input] 52 | } 53 | } 54 | 55 | otelcol.processor.transform "add_resource_attributes_as_metric_attributes" { 56 | error_mode = "ignore" 57 | 58 | metric_statements { 59 | context = "datapoint" 60 | statements = [ 61 | "set(attributes[\"deployment.environment\"], resource.attributes[\"deployment.environment\"])", 62 | "set(attributes[\"service.version\"], resource.attributes[\"service.version\"])", 63 | ] 64 | } 65 | 66 | output { 67 | metrics = [otelcol.processor.batch.default.input] 68 | } 69 | } 70 | 71 | otelcol.processor.batch "default" { 72 | output { 73 | // metrics = [otelcol.exporter.otlphttp.grafanacloud.input] 74 | // logs = [otelcol.exporter.otlphttp.grafanacloud.input] 75 | // traces = [otelcol.exporter.otlphttp.grafanacloud.input] 76 | 77 | metrics = [otelcol.exporter.debug.console.input] 78 | logs = [otelcol.exporter.debug.console.input] 79 | traces = [otelcol.exporter.otlp.jaeger.input] 80 | } 81 | } 82 | 83 | otelcol.exporter.otlp "jaeger" { 84 | client { 85 | endpoint = "jaeger:4317" 86 | 87 | tls { 88 | insecure = true 89 | } 90 | } 91 | } 92 | 93 | otelcol.exporter.debug "console" { 94 | verbosity = "Detailed" 95 | } 96 | 97 | 98 | // otelcol.auth.basic "grafanacloud" { 99 | // username = sys.env("GC_USERNAME") 100 | // password = sys.env("GC_PASSWORD") 101 | // } 102 | // 103 | // otelcol.exporter.otlphttp "grafanacloud" { 104 | // client { 105 | // endpoint = sys.env("GC_ENDPOINT") 106 | // auth = otelcol.auth.basic.grafanacloud.handler 107 | // } 108 | // } 109 | 
-------------------------------------------------------------------------------- /credentials.env.template: -------------------------------------------------------------------------------- 1 | GC_USERNAME: 2 | GC_PASSWORD: 3 | GC_ENDPOINT: 4 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | dev-http: &dev 3 | build: 4 | dockerfile: Dockerfile 5 | context: . 6 | target: dev 7 | env_file: docker.env 8 | environment: 9 | APP_NAME: "backend-fastapi" 10 | ports: 11 | - '8000:8000' 12 | working_dir: "/app/src" 13 | volumes: 14 | - '.:/app' 15 | depends_on: 16 | redis: 17 | condition: service_healthy 18 | otel-collector: 19 | condition: service_started 20 | mariadb: 21 | condition: service_healthy 22 | command: 23 | - python 24 | - ./http_app/dev_server.py 25 | 26 | dev-socketio: 27 | <<: *dev 28 | environment: 29 | APP_NAME: "backend-socketio" 30 | ports: 31 | - '8001:8001' 32 | command: 33 | - python 34 | - ./socketio_app/dev_server.py 35 | 36 | ######################### 37 | #### Helper services #### 38 | ######################### 39 | 40 | jaeger: 41 | image: jaegertracing/all-in-one:latest 42 | ports: 43 | - "6831:6831/udp" # UDP port for Jaeger agent 44 | - "16686:16686" # Web UI 45 | - "14268:14268" # HTTP port for spans 46 | 47 | otel-collector: 48 | image: grafana/alloy:latest 49 | # You can add the Grafana Cloud credentials in this file 50 | # and push observability directly to the provider. 51 | # Refer to the config.alloy file to see what credentials we need. 
52 | env_file: 53 | - path: ./credentials.env 54 | required: false 55 | depends_on: 56 | - jaeger 57 | ports: 58 | - "12345:12345" 59 | - "4317:4317" 60 | volumes: 61 | - ./config.alloy:/etc/alloy/config.alloy 62 | command: 63 | - run 64 | - --server.http.listen-addr=0.0.0.0:12345 65 | - --stability.level=experimental 66 | # - --stability.level=public-preview 67 | - /etc/alloy/config.alloy 68 | 69 | redis: 70 | image: redis 71 | healthcheck: 72 | test: ["CMD", "redis-cli", "ping"] 73 | interval: 10s 74 | timeout: 5s 75 | retries: 5 76 | 77 | mariadb: 78 | image: mariadb:11.7 79 | environment: 80 | MYSQL_ROOT_PASSWORD: "stanis" 81 | MYSQL_DATABASE: "backend" 82 | MYSQL_USER: "corinna" 83 | MYSQL_PASSWORD: "gioieiiere" 84 | volumes: 85 | - ./volumes/mariadb:/var/lib/mysql 86 | ports: 87 | - "3306:3306" 88 | healthcheck: 89 | test: [ "CMD", "healthcheck.sh", "--su-mysql", "--connect", "--innodb_initialized" ] 90 | interval: 10s 91 | timeout: 5s 92 | retries: 5 93 | start_period: 30s 94 | 95 | dramatiq-worker: 96 | <<: *dev 97 | environment: 98 | APP_NAME: "backend-dramatiq-worker" 99 | ports: [] 100 | command: 101 | - dramatiq 102 | - --watch 103 | - . 
104 | - -p 105 | - "1" 106 | - -t 107 | - "1" 108 | - dramatiq_worker 109 | 110 | ################################# 111 | #### Authentication services #### 112 | ################################# 113 | 114 | kratos-migrate: 115 | image: oryd/kratos:v1.3.1 116 | environment: 117 | DSN: "sqlite:///etc/config/kratos/db.sqlite?_fk=true&mode=rwc" 118 | volumes: 119 | - ./auth_volumes/kratos:/etc/config/kratos 120 | command: -c /etc/config/kratos/kratos.yml migrate sql -e --yes 121 | restart: on-failure 122 | 123 | kratos: 124 | depends_on: 125 | - kratos-migrate 126 | image: oryd/kratos:v1.3.1 127 | # It's not needed to expose these, leaving for documentation 128 | # ports: 129 | # - '4433:4433' # public API 130 | # - '4434:4434' # admin API 131 | restart: unless-stopped 132 | environment: 133 | DSN: "sqlite:///etc/config/kratos/db.sqlite?_fk=true&mode=rwc" 134 | LOG_LEVEL: "trace" 135 | volumes: 136 | - ./auth_volumes/kratos:/etc/config/kratos 137 | command: serve -c /etc/config/kratos/kratos.yml --dev --watch-courier 138 | 139 | auth-ui: 140 | image: oryd/kratos-selfservice-ui-node:v1.3.1 141 | environment: 142 | PORT: 3000 143 | # Internal access URL for the BFF instance 144 | KRATOS_PUBLIC_URL: "http://kratos:4433/" 145 | # External access URL for the browser 146 | KRATOS_BROWSER_URL: "http://127.0.0.1:8080/.ory/kratos/public" 147 | JWKS_URL: "http://oathkeeper:4456/.well-known/jwks.json" 148 | SECURITY_MODE: "jwks" 149 | COOKIE_SECRET: "changeme" 150 | CSRF_COOKIE_NAME: "ory_csrf_ui" 151 | CSRF_COOKIE_SECRET: "changeme" 152 | restart: on-failure 153 | 154 | # mailslurper: 155 | # image: oryd/mailslurper:latest-smtps 156 | # ports: 157 | # - '4436:4436' 158 | # - '4437:4437' 159 | 160 | oathkeeper: 161 | image: oryd/oathkeeper:v0.40.9 162 | depends_on: 163 | - kratos 164 | - auth-ui 165 | - dev-http 166 | ports: 167 | # Public traffic port 168 | - "8080:4455" 169 | # Private traffic port, this is not usually exposed 170 | # among other things it provides the JWKS 
url 171 | # - "4456:4456" 172 | command: 173 | serve proxy -c "/etc/config/oathkeeper/oathkeeper.yml" 174 | restart: on-failure 175 | volumes: 176 | - ./auth_volumes/oathkeeper:/etc/config/oathkeeper 177 | 178 | ########################## 179 | #### One-off commands #### 180 | ########################## 181 | test: 182 | <<: *dev 183 | environment: 184 | APP_NAME: "backend-test" 185 | ports: [] 186 | command: 187 | - "make" 188 | - "test" 189 | 190 | migrate: 191 | <<: *dev 192 | environment: 193 | APP_NAME: "backend-migrations" 194 | ports: [] 195 | command: 196 | - "alembic" 197 | - "upgrade" 198 | - "heads" 199 | 200 | autogenerate-migration: 201 | <<: *dev 202 | environment: 203 | APP_NAME: "backend-migration-generator" 204 | ports: [] 205 | command: 206 | - "alembic" 207 | - "revision" 208 | - "--autogenerate" 209 | - "-m" 210 | - "Description message" 211 | -------------------------------------------------------------------------------- /docker.env: -------------------------------------------------------------------------------- 1 | ENVIRONMENT: "local" 2 | AUTH__JWKS_URL: "http://oathkeeper:4456/.well-known/jwks.json" 3 | #DRAMATIQ__REDIS_URL: "redis://redis:6379/0" 4 | OTEL_EXPORTER_OTLP_ENDPOINT: "http://otel-collector:4317" 5 | OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST: ".*" 6 | OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE: ".*" 7 | CORS_ORIGINS: ["*"] 8 | GC_USERNAME: FAKE_USERNAME 9 | GC_PASSWORD: FAKE_PASS 10 | GC_ENDPOINT: FAKE_ENDPOINT 11 | SQLALCHEMY_CONFIG__default__engine_url: mysql+asyncmy://corinna:gioieiiere@mariadb/backend?charset=utf8mb4 12 | SQLALCHEMY_CONFIG__default__async_engine: true 13 | -------------------------------------------------------------------------------- /docs/.pages: -------------------------------------------------------------------------------- 1 | nav: 2 | - Home: index.md 3 | - api-documentation.md 4 | - architecture.md 5 | - zero_trust.md 6 | - inversion-of-control.md 7 | - dockerfile.md 8 
| - src packages: packages 9 | - ... 10 | - ADR: adr 11 | -------------------------------------------------------------------------------- /docs/adr/.markdownlint: -------------------------------------------------------------------------------- 1 | # source: https://github.com/adr/madr/blob/3.0.0/template/.markdownlint.yml 2 | default: true 3 | 4 | # Allow arbitrary line length 5 | # 6 | # Reason: We apply the one-sentence-per-line rule. A sentence may get longer than 80 characters, especially if links are contained. 7 | # 8 | # Details: https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md#md013---line-length 9 | MD013: false 10 | 11 | # Allow duplicate headings 12 | # 13 | # Reasons: 14 | # 15 | # - The chosen option is considerably often used as title of the ADR (e.g., ADR-0015). Thus, that title repeats. 16 | # - We use "Examples" multiple times (e.g., ADR-0010). 17 | # - Markdown lint should support the user and not annoy them. 18 | # 19 | # Details: https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md#md024---multiple-headings-with-the-same-content 20 | MD024: false -------------------------------------------------------------------------------- /docs/adr/.pages: -------------------------------------------------------------------------------- 1 | nav: 2 | - Summary: summary.md 3 | - ... | regex=^\d{4}- 4 | -------------------------------------------------------------------------------- /docs/adr/0001-record-architecture-decisions.md: -------------------------------------------------------------------------------- 1 | --- 2 | # source: https://github.com/adr/madr/blob/3.0.0/template/adr-template.md 3 | # These are optional elements. Feel free to remove any of them. 
4 | status: accepted 5 | date: 2024-02-03 6 | # status: {proposed | rejected | accepted | deprecated | … | superseded by [ADR-0005](0005-example.md)} 7 | # date: {YYYY-MM-DD when the decision was last updated} 8 | # deciders: {list everyone involved in the decision} 9 | # consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} 10 | # informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} 11 | --- 12 | # Use Markdown Any Decision Records V3 13 | 14 | ## Context and Problem Statement 15 | 16 | We want to record any decisions made in this project independent whether decisions concern the architecture ("architectural decision record"), the code, or other fields. 17 | Which format and structure should these records follow? 18 | 19 | ## Considered Options 20 | 21 | * [MADR](https://adr.github.io/madr/) 3.0.0 – The Markdown Any Decision Records 22 | * [Michael Nygard's template](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions) – The first incarnation of the term "ADR" 23 | * Other templates listed at 24 | * Formless – No conventions for file format and structure 25 | 26 | ## Decision Outcome 27 | 28 | Chosen option: "MADR 3.0.0", because 29 | 30 | * Implicit assumptions should be made explicit. 31 | Design documentation is important to enable people understanding the decisions later on. 32 | See also [A rational design process: How and why to fake it](https://doi.org/10.1109/TSE.1986.6312940). 33 | * MADR allows for structured capturing of any decision. 34 | * The MADR format is lean and fits our development style. 35 | * The MADR structure is comprehensible and facilitates usage & maintenance. 36 | * The MADR project is vivid. 
37 | -------------------------------------------------------------------------------- /docs/adr/adr-template.md: -------------------------------------------------------------------------------- 1 | --- 2 | # source: https://github.com/adr/madr/blob/3.0.0/template/adr-template.md 3 | # These are optional elements. Feel free to remove any of them. 4 | status: {proposed | rejected | accepted | deprecated | … | superseded by [ADR-0005](0005-example.md)} 5 | date: {YYYY-MM-DD when the decision was last updated} 6 | deciders: {list everyone involved in the decision} 7 | consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} 8 | informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} 9 | --- 10 | # {short title of solved problem and solution} 11 | 12 | ## Context and Problem Statement 13 | 14 | {Describe the context and problem statement, e.g., in free form using two to three sentences or in the form of an illustrative story. 15 | You may want to articulate the problem in form of a question and add links to collaboration boards or issue management systems.} 16 | 17 | 18 | ## Decision Drivers 19 | 20 | * {decision driver 1, e.g., a force, facing concern, …} 21 | * {decision driver 2, e.g., a force, facing concern, …} 22 | * … 23 | 24 | ## Considered Options 25 | 26 | * {title of option 1} 27 | * {title of option 2} 28 | * {title of option 3} 29 | * … 30 | 31 | ## Decision Outcome 32 | 33 | Chosen option: "{title of option 1}", because 34 | {justification. e.g., only option, which meets k.o. criterion decision driver | which resolves force {force} | … | comes out best (see below)}. 
35 | 36 | 37 | ### Consequences 38 | 39 | * Good, because {positive consequence, e.g., improvement of one or more desired qualities, …} 40 | * Bad, because {negative consequence, e.g., compromising one or more desired qualities, …} 41 | * … 42 | 43 | 44 | ## Validation 45 | 46 | {describe how the implementation of/compliance with the ADR is validated. E.g., by a review or an ArchUnit test} 47 | 48 | 49 | ## Pros and Cons of the Options 50 | 51 | ### {title of option 1} 52 | 53 | 54 | {example | description | pointer to more information | …} 55 | 56 | * Good, because {argument a} 57 | * Good, because {argument b} 58 | 59 | * Neutral, because {argument c} 60 | * Bad, because {argument d} 61 | * … 62 | 63 | ### {title of other option} 64 | 65 | {example | description | pointer to more information | …} 66 | 67 | * Good, because {argument a} 68 | * Good, because {argument b} 69 | * Neutral, because {argument c} 70 | * Bad, because {argument d} 71 | * … 72 | 73 | 74 | ## More Information 75 | 76 | {You might want to provide additional evidence/confidence for the decision outcome here and/or 77 | document the team agreement on the decision and/or 78 | define when this decision when and how the decision should be realized and if/when it should be re-visited and/or 79 | how the decision is validated. 
Links to other decisions and resources might appear here as well.}
6 | 7 | In this way our components are loosely coupled and the application logic 8 | (the domains package) is completely independent of the chosen framework 9 | and the persistence layer. 10 | 11 | This is a high level list of the packages in this application template: 12 | 13 | * `migrations` (database migration manager) 14 | * `dramatiq_worker` (async tasks runner) 15 | * `common` (some common boilerplate initialisation shared by all applications ) 16 | * `http_app` (http presentation layer) 17 | * `gateways` (database connection manager, repository implementation, event emitter, etc.) 18 | * `domains` (services, repository interfaces) 19 | 20 | Each domain inside the `domains` packages has its own layers, depending on the complexity but 21 | it is usually composed by at least 2 layers: 22 | 23 | * Boundary layer (domain logic, DTO, data access interfaces): This layer is the only one that 24 | should be ever used directly by actors not belonging to the domain (i.e. HTTP routes, other domains) 25 | * Domain Logic (this can be multiple layers, depending on the complexity) 26 | * Entity layer (domain models): No one except the domain should ever use directly the domain models. 
27 | 28 | This is a high level representation of the nested layers in the application: 29 | 30 | ```mermaid 31 | flowchart TD 32 | subgraph "Framework & Drivers + Interface Adapters" 33 | migrations 34 | dramatiq_worker 35 | http_app 36 | gateways 37 | subgraph domains.books["Use Cases"] 38 | subgraph boundary["Domain Boundary (domains.books)"] 39 | BookRepositoryInterface 40 | Book 41 | BookService 42 | subgraph tasks["Domain logic"] 43 | BookTask 44 | subgraph entities["Books Entities"] 45 | direction LR 46 | BookModel 47 | BookCreatedV1 48 | end 49 | end 50 | end 51 | end 52 | end 53 | 54 | migrations ~~~ domains.books 55 | dramatiq_worker ~~~ domains.books 56 | http_app ~~~ domains.books 57 | gateways ~~~ domains.books 58 | 59 | 60 | BookCreatedV1 ~~~ BookModel 61 | Book ~~~ tasks 62 | BookService ~~~ tasks 63 | BookRepositoryInterface ~~~ tasks 64 | ``` 65 | 66 | ## Class dependency schema 67 | 68 | A more detailed view showing the class dependencies and the absence of cyclical dependencies. 
69 | 70 | ```mermaid 71 | flowchart TD 72 | dramatiq_worker 73 | http_app 74 | subgraph gateways 75 | SQLAlchemyRepository 76 | NullEventGateway 77 | end 78 | 79 | subgraph domains 80 | subgraph books 81 | subgraph domain_boundary 82 | BookService 83 | end 84 | subgraph domain_logic 85 | BookTask 86 | end 87 | subgraph dto 88 | Book 89 | end 90 | 91 | subgraph data_access_interfaces 92 | BookEventGatewayInterface 93 | BookRepositoryInterface 94 | end 95 | subgraph entities 96 | BookEvent 97 | BookModel 98 | end 99 | end 100 | end 101 | 102 | dramatiq_worker-->domain_boundary 103 | dramatiq_worker-->dto 104 | http_app-->domain_boundary 105 | http_app-->dto 106 | domain_boundary-->domain_logic 107 | domain_boundary-->dto 108 | domain_boundary-->data_access_interfaces 109 | domain_logic-->entities 110 | domain_logic-->dto 111 | domain_logic-->data_access_interfaces 112 | gateways-...->|Implement| data_access_interfaces 113 | data_access_interfaces-->entities 114 | ``` 115 | -------------------------------------------------------------------------------- /docs/dockerfile.md: -------------------------------------------------------------------------------- 1 | # Multistage Dockerfile 2 | 3 | Python docker image tend to become large after installing the application requirements 4 | (the slim base is ~150 MB uncompressed), therefore it's important to spend efforts 5 | to minimise the image size, even if it produces a slightly more complex multistage 6 | Dockerfile. 7 | 8 | The implemented Dockerfile makes sure the production image will keep to a minimal size ("only" 360MB): 9 | * 150MB base image 10 | * 210MB python installed dependencies 11 | 12 | If you look at the "dev" image is instead ~850MB, more than 400MB that would 13 | end up as a cost in traffic on each image pull. 14 | 15 | ```mermaid 16 | flowchart TD 17 | subgraph BASE 18 | base["Base 19 | ==== 20 | Contains system runtime 21 | libraries necessary to run 22 | all the applications 23 | (e.g. 
libmysql)"] 24 | 25 | base_app["Base app 26 | ======== 27 | Copies shared application logic 28 | independent from the used framework 29 | (domains, storage, etc.)"] 30 | 31 | base_builder["Base Builder 32 | ============ 33 | Contains system libraries 34 | necessary to build python 35 | dependencies 36 | (e.g. gcc, library headers)"] 37 | 38 | end 39 | 40 | dev["Dev 41 | === 42 | Fat image containing everything 43 | for local development"] 44 | 45 | base-->base_app 46 | base-->base_builder 47 | base----->dev 48 | 49 | subgraph HTTP 50 | http_builder["HTTP builder 51 | ============ 52 | Installs requirements for 53 | HTTP app in a virtualenv"] 54 | http_app["HTTP app 55 | ======== 56 | Copies HTTP app, 57 | shared logic 58 | and requirements 59 | from previous containers"] 60 | http_builder-->http_app 61 | end 62 | 63 | subgraph Dramatiq 64 | dramatiq_builder["Dramatiq builder 65 | ============ 66 | Installs requirements for 67 | Dramatiq worker in a virtualenv"] 68 | dramatiq_app["HTTP app 69 | ======== 70 | Copies Dramatiq worker app, 71 | shared logic 72 | and requirements 73 | from previous containers"] 74 | dramatiq_builder-->dramatiq_app 75 | end 76 | 77 | base_builder-->http_builder 78 | base_builder-->dramatiq_builder 79 | base_app-->http_app 80 | base_app-->dramatiq_app 81 | ``` 82 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --8<-- "./README.md" 2 | -------------------------------------------------------------------------------- /docs/packages/alembic.md: -------------------------------------------------------------------------------- 1 | # Alembic 2 | 3 | [Alembic](https://alembic.sqlalchemy.org/en/latest/) setup is super-easy but 4 | we implement some extra features on top of the default configuration: 5 | 6 | * Support for both sync and async SQLAlchemy engines at the same time 7 | * Support for fixtures management 8 
| * Grabs the database information from the `SQLAlchemyBindManager` configuration 9 | in the application, so we won't have duplicate configuration. 10 | * `alembic.ini` (not technically part of the python package) is setup to 11 | prepend migration files with the generation datetime for natural file ordering. 12 | -------------------------------------------------------------------------------- /docs/packages/bootstrap.md: -------------------------------------------------------------------------------- 1 | # Bootstrap 2 | 3 | The `bootstrap` package contains logic that is shared among the external layer 4 | (i.e. `http_app`, `dramatiq_worker`, etc.). 5 | 6 | It contains the following submodules and packages (and related responsibilities): 7 | 8 | * `bootstrap.bootstrap`: The application initialisation logic (database, logging, 9 | dramatiq tasks) necessary to run the domain logic. It uses `bootstrap.config` and 10 | `bootstrap.di_container` subpackages. It does not contain the specific HTTP 11 | framework initialisation (or other frameworks such as GRPC). 12 | * `bootstrap.config`: The application config models, based on `BaseSettings` 13 | and `BaseModel` from `pydantic` package to get the values from 14 | environment variables. 15 | * `bootstrap.di_container`: The dependency injection container configuration. 16 | * `bootstrap.storage`: The storage configuration (SQLAlchemy). This setup uses 17 | [Imperative Mapping](https://docs.sqlalchemy.org/en/20/orm/mapping_styles.html#imperative-mapping) 18 | so that our models remains simple classes. 19 | 20 | /// warning | Note about SQLAlchemy ORM Imperative Mapping 21 | 22 | Even if the code for models appears to remain simple classes, imperative mapping 23 | transforms them behind the scenes. However, the code in our application should not 24 | rely on such specific capabilities otherwise we would bind our code to SQLAlchemy. 25 | 26 | To handle database operations we use a repository class that is aware of SQLAlchemy. 
27 | In this way, should we need to change our storage implementation (e.g. switch to MongoDB), 28 | we'll only need to change the repository class, without having to change anything in 29 | our application logic. 30 | /// 31 | -------------------------------------------------------------------------------- /docs/packages/domains.md: -------------------------------------------------------------------------------- 1 | # Domains 2 | 3 | The `domains` package contains all the application domain logic separated by domains 4 | (this template provides a single domain: `books`). 5 | 6 | Each domain should be self-contained and not invoke logic from other domains directly. 7 | Same as we do for gateways, or 3rd party providers, other domains should be accessed 8 | through the use of **interfaces**. 9 | 10 | Using interfaces will: 11 | 12 | * Make sure domains do not depend on each other 13 | * Make easier to replace the concrete implementation with a HTTP adapter when the domain 14 | is extracted in a microservice 15 | 16 | ## Book domain structure 17 | 18 | The `domains.book` package provides an example implementation for a domain. It contains 19 | a list of public and protected modules. 20 | 21 | Public package and modules are used by the application to invoke the 22 | domain functionalities: 23 | 24 | * The main `domains.book` package provides the entrypoint for our application: 25 | the `BookService` class. We export it here from the `domains.book._service` 26 | module to hide protected entities that should not be accessed directly. 27 | * The `domains.book.interfaces` provides the `BookServiceInterface` protocol 28 | to be used for Inversion of Control (we don't currently use it in this 29 | application because Clean Architecture doesn't enforce Inversion of Control 30 | from the `http` application, and we don't have yet implemented other domains) 31 | * The `domains.book.dto` provides the data transfer objects required to invoke 32 | the `BookService` class. 
33 | * The `domains.book.events` provides the event data structures that the domain 34 | is able to emit. They can be used by other domains to implement event handlers. 35 | 36 | Protected package and modules are used by the implementation of the books domain 37 | and can be used to bootstrap the application: 38 | 39 | * The `domains.book._gateway_interfaces` contains the gateway protocols against 40 | which the domain logic is implemented. We use them to configure the dependency 41 | injection container. 42 | * The `domains.book._models` contains the domain models. We use them also 43 | to bootstrap the SQLAlchemy imperative mapping in `bootstrap.storage.SQLAlchemy` 44 | package. 45 | * The `domains.book._service` contains the `BookService` implementation. 46 | * The `domains.book._tasks` contains the implementation of dramatiq tasks 47 | for operations that can be queued without waiting for a result (e.g. 48 | send an email, invalidate cache). 49 | -------------------------------------------------------------------------------- /docs/packages/dramatiq_worker.md: -------------------------------------------------------------------------------- 1 | # Dramatiq worker 2 | 3 | The `dramatiq_worker` package is a small entrypoint to run Dramatiq workers. 4 | -------------------------------------------------------------------------------- /docs/packages/gateways.md: -------------------------------------------------------------------------------- 1 | # Gateways 2 | 3 | The `gateways` package contains the implementations of the drivers 4 | handling communication with external services (i.e. database repositories, 5 | event producers, HTTP clients). 6 | 7 | The `domains` package, has access to this package only using the 8 | [Inversion of Control](../inversion-of-control.md). 
9 | -------------------------------------------------------------------------------- /docs/packages/http_app.md: -------------------------------------------------------------------------------- 1 | # HTTP App 2 | 3 | The `http_app` package contains the implementation of [FastAPI](https://fastapi.tiangolo.com/) 4 | framework and the logic relevant to HTTP communication (routes, graphql schema, HTTP error handling) 5 | -------------------------------------------------------------------------------- /docs/zero_trust.md: -------------------------------------------------------------------------------- 1 | # Zero Trust architecture 2 | 3 | This repository implements [ORY Zero Trust architecture](https://www.ory.sh/docs/kratos/guides/zero-trust-iap-proxy-identity-access-proxy) 4 | using: 5 | 6 | * [ORY Kratos Identity Server](https://github.com/ory/kratos) as authentication and identity provider. 7 | * [ORY Oathkeeper](https://github.com/ory/oathkeeper) as reverse proxy to take care of authentication and access control. 8 | 9 | If you access the API docs at `/docs` you will notice that the `/hello/` endpoint 10 | is protected but the authentication infrastructure doesn't spin up when running 11 | `docker compuse up dev`. 12 | 13 | You can spin up all the authentication infrastructure by running `docker compose up oathkeeper`. 14 | You should be able to access the authentication UI at [http://127.0.0.1:8080](http://127.0.0.1:8080) and, 15 | after you will be authenticated, you will be able to access the protected `/hello` 16 | endpoint at [http://127.0.0.1:8080/hello](http://127.0.0.1:8080/hello) 17 | 18 | /// admonition | Cookie-based security 19 | type: warning 20 | 21 | The current setup is built around the example authentication UI provided by ORY, 22 | which uses the flows for browser-based application, with CSRF protection, and stores 23 | the session token using Cookies. 
24 | 25 | While this is not a bad approach, it is not suitable for Single Page Applications 26 | and API-based clients, because it is open to different vector attacks (CSRF among them). 27 | 28 | Reference: [https://www.ory.sh/docs/kratos/self-service/flows/user-login#login-for-api-clients-and-clients-without-browsers](https://www.ory.sh/docs/kratos/self-service/flows/user-login#login-for-api-clients-and-clients-without-browsers) 29 | 30 | 🚧 An authentication flow using [Oauth2](https://oauth.net/2/), based on [ORY Hydra](https://github.com/ory/hydra) 31 | and integrated with this setup, will be added in the future. It will provide provide secure flows for SPAs and other 32 | API based clients. 🚧 33 | /// 34 | 35 | This is a high level representation of the used components: 36 | 37 | ```mermaid 38 | graph TD 39 | subgraph hn[Host Network] 40 | B[Browser] 41 | B-->|Can access URLs via 127.0.0.1:8080|OKPHN 42 | B-->|Can access URLs via 127.0.0.1:8000|DEVHN 43 | OKPHN([Reverse Proxy exposed at :8080]) 44 | DEVHN([Dev Container exposed at :8000]) 45 | end 46 | subgraph dn["Internal Docker Network (intranet)"] 47 | OKPHN-->OO 48 | DEVHN-->DEV 49 | OO-->|Proxies URLss /.ory/kratos/public/* to|OK 50 | OO-->|"Proxies /auth/login, /auth/registration, /dashboard, ... 
to"|SA 51 | SA-->|Talks to|OK 52 | OO-->|Validates auth sessions using|OK 53 | OO-->|"Proxies /api/* requests (authenticated only)"|DEV 54 | OK[Ory Kratos] 55 | OO["Reverse Proxy (Ory Oathkeeper)"] 56 | SA["SecureApp (Ory Kratos SelfService UI Node Example)"] 57 | DEV[Dev Container] 58 | end 59 | ``` -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://squidfunk.github.io/mkdocs-material/schema.json 2 | 3 | site_name: Bootstrap python fastapi 4 | site_description: 'A template for a python web service written applying Clean Architecture concepts' 5 | site_author: 'Federico Busetti' 6 | docs_dir: docs/ 7 | repo_name: 'febus982/bootstrap-python-fastapi' 8 | repo_url: 'https://github.com/febus982/bootstrap-python-fastapi' 9 | 10 | plugins: 11 | - search 12 | - awesome-pages 13 | - macros: 14 | modules: 15 | - mkdocs_macros_adr_summary 16 | - mermaid2: 17 | arguments: 18 | theme: | 19 | ^(JSON.parse(window.localStorage.getItem('/.__palette')).index == 1) ? 
'dark' : 'neutral' 20 | 21 | # Do not use the nav section in this file but reference to the .pages files 22 | # in the docs/ directory and subdirectories (awesome-pages plugin) 23 | # https://github.com/lukasgeiter/mkdocs-awesome-pages-plugin 24 | #nav: 25 | # - Home: index.md 26 | 27 | theme: 28 | name: material 29 | features: 30 | - navigation.expand 31 | - navigation.top 32 | - content.code.copy 33 | 34 | palette: 35 | # Palette toggle for automatic mode 36 | - media: "(prefers-color-scheme)" 37 | toggle: 38 | icon: material/brightness-auto 39 | name: Switch to dark mode 40 | 41 | # Palette toggle for dark mode 42 | - scheme: slate 43 | media: "(prefers-color-scheme: dark)" 44 | primary: teal 45 | toggle: 46 | icon: material/brightness-4 47 | name: Switch to light mode 48 | 49 | # Palette toggle for light mode 50 | - scheme: default 51 | media: "(prefers-color-scheme: light)" 52 | primary: teal 53 | toggle: 54 | icon: material/brightness-7 55 | name: Switch to auto mode 56 | 57 | extra: 58 | social: 59 | - icon: fontawesome/brands/linkedin 60 | link: https://www.linkedin.com/in/federico-b-a0b78232 61 | 62 | markdown_extensions: 63 | - pymdownx.details 64 | - pymdownx.blocks.admonition 65 | - pymdownx.blocks.details 66 | - pymdownx.snippets 67 | - pymdownx.superfences: 68 | custom_fences: 69 | - name: mermaid 70 | class: mermaid 71 | format: !!python/name:mermaid2.fence_mermaid_custom 72 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | authors = [ 3 | {name = "Federico Busetti", email = "729029+febus982@users.noreply.github.com"}, 4 | ] 5 | requires-python = "<3.14,>=3.10" 6 | name = "bootstrap-fastapi-service" 7 | version = "0.1.0" 8 | description = "" 9 | readme = "README.md" 10 | dependencies = [ 11 | "aiosqlite>=0.18.0", 12 | "asgiref<4.0.0,>=3.7.2", 13 | "asyncmy>=0.2.10", 14 | 
"cloudevents-pydantic<1.0.0,>=0.0.3", 15 | "dependency-injector[pydantic]<5.0.0,>=4.41.0", 16 | "dramatiq[redis,watch]<2.0.0,>=1.17.1", 17 | "hiredis<4.0.0,>=3.1.0", # Recommended by dramatiq 18 | "httpx>=0.23.0", 19 | "opentelemetry-instrumentation-httpx", 20 | "opentelemetry-instrumentation-sqlalchemy", 21 | "opentelemetry-instrumentor-dramatiq", 22 | "opentelemetry-exporter-otlp", 23 | "opentelemetry-sdk", 24 | "orjson<4.0.0,>=3.10.12", 25 | "pydantic<3.0.0,>=2.2.1", 26 | "pydantic-asyncapi>=0.2.1", 27 | "pydantic-settings<3.0.0,>=2.0.3", 28 | "SQLAlchemy[asyncio,mypy]<3.0.0,>=2.0.0", 29 | "sqlalchemy-bind-manager", 30 | "structlog<25.3.1,>=25.3.0", 31 | ] 32 | 33 | [dependency-groups] 34 | http = [ 35 | "cryptography>=44.0.0", 36 | "fastapi>=0.99.0", 37 | "jinja2<4.0.0,>=3.1.2", 38 | # FastAPI instrumentation is based on the generic ASGI instrumentation, 39 | # but automatically creates span when routes are invoked. 40 | # If we decide to change framework, the generic ASGI instrumentation 41 | # will still name metrics with a generic naming. 
42 | "opentelemetry-instrumentation-fastapi", 43 | "pyjwt>=2.10.1", 44 | "strawberry-graphql[debug-server]>=0.204.0", 45 | "uvicorn[standard]<1.0.0,>=0.34.0", 46 | ] 47 | socketio = [ 48 | "python-socketio>=5.12.1", 49 | "starlette>=0.46.2", 50 | "uvicorn[standard]<1.0.0,>=0.34.0", 51 | ] 52 | migrations = [ 53 | "alembic<2.0.0,>=1.11.1", 54 | "factory-boy<4.0.0,>=3.3.3", 55 | ] 56 | dev = [ 57 | "asynctest", 58 | "coverage", 59 | "mkdocs", 60 | "mkdocs-awesome-pages-plugin", 61 | "mkdocs-macros-adr-summary", 62 | "mkdocs-material", 63 | "mkdocs-mermaid2-plugin", 64 | "mypy", 65 | "mypy-protobuf", 66 | "pytest", 67 | "pytest-cov", 68 | "pytest-factoryboy", 69 | "pytest-xdist", 70 | "ruff", 71 | "strawberry-graphql[debug-server,fastapi]", 72 | "pymdown-extensions", 73 | ] 74 | 75 | [build-system] 76 | requires = ["pdm-backend"] 77 | build-backend = "pdm.backend" 78 | 79 | [tool.pdm.build] 80 | includes = ["src/**/*.py"] 81 | 82 | ############################ 83 | ### Tools configuration ### 84 | ############################ 85 | [tool.coverage.run] 86 | branch = true 87 | source = ["src"] 88 | omit = [ 89 | "src/migrations/*", 90 | "src/common/config.py", 91 | "src/common/logs/*", 92 | "src/dramatiq_worker/__init__.py", 93 | "src/**/__main__.py", 94 | "src/**/dev_server.py", 95 | ] 96 | # It's not necessary to configure concurrency here 97 | # because pytest-cov takes care of that 98 | 99 | [tool.coverage.report] 100 | fail_under = 100 101 | exclude_also = [ 102 | "pragma: no cover", 103 | "pass", 104 | "\\.\\.\\.", 105 | ] 106 | 107 | [tool.mypy] 108 | files = ["src", "tests"] 109 | exclude = ["migrations"] 110 | # Pydantic plugin causes some issues: https://github.com/pydantic/pydantic-settings/issues/403 111 | #plugins = "pydantic.mypy,strawberry.ext.mypy_plugin" 112 | plugins = "strawberry.ext.mypy_plugin" 113 | python_version = "3.10" 114 | 115 | [[tool.mypy.overrides]] 116 | module = [ 117 | "opentelemetry_instrumentor_dramatiq.*" 118 | ] 119 | 
ignore_missing_imports = true 120 | 121 | [[tool.mypy.overrides]] 122 | module = [ 123 | "socketio.*" 124 | ] 125 | ignore_missing_imports = true 126 | 127 | [tool.pytest.ini_options] 128 | minversion = "6.0" 129 | addopts = "-n auto --cov-report=term-missing" 130 | testpaths = [ 131 | "tests", 132 | ] 133 | 134 | [tool.ruff] 135 | target-version = "py39" 136 | line-length = 120 137 | extend-exclude = [ 138 | "docs", 139 | ] 140 | 141 | [tool.ruff.lint] 142 | select = [ 143 | "E", # pycodestyle 144 | "W", # pycodestyle 145 | "F", # pyflakes 146 | "I", # isort 147 | "N", # pep8-naming 148 | "S", # flake8-bandit 149 | "RUF", # ruff-specific-rules 150 | ] 151 | # Ignoring rules problematic with formatter 152 | # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules 153 | ignore = [ 154 | "W191", 155 | "E111", 156 | "E114", 157 | "E117", 158 | "D206", 159 | "D300", 160 | "Q000", 161 | "Q001", 162 | "Q002", 163 | "Q003", 164 | "COM812", 165 | "COM819", 166 | "ISC001", 167 | "ISC002", 168 | ] 169 | 170 | [tool.ruff.lint.per-file-ignores] 171 | "__init__.py" = ["F401"] # Ignore unused imports on init files 172 | "__main__.py" = ["S104"] # Ignore 0.0.0.0 bindings for startup script 173 | "dev_server.py" = ["S104"] # Ignore 0.0.0.0 bindings for startup script 174 | "tests/**/*.py" = ["S101"] # Allow assert usage on tests 175 | "src/migrations/env.py" = ["E501"] # Allow long lines 176 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:recommended" 5 | ], 6 | "packageRules": [ 7 | { 8 | "groupName": "all non-major dependencies", 9 | "groupSlug": "all-minor-patch", 10 | "matchUpdateTypes": [ 11 | "minor", 12 | "patch" 13 | ], 14 | "matchPackageNames": [ 15 | "*", 16 | "!python" 17 | ] 18 | } 19 | ], 20 | "lockFileMaintenance": { 21 | 
"enabled": true 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/alembic.ini: -------------------------------------------------------------------------------- 1 | # Copy of alembic.ini used to run migrations inside the container 2 | 3 | [alembic] 4 | script_location = migrations 5 | prepend_sys_path = . 6 | file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d%%(minute).2d%%(second).2d-%%(rev)s_%%(slug)s 7 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 8 | 9 | [post_write_hooks] 10 | hooks = ruff 11 | ruff.type = exec 12 | ruff.executable = /venv/bin/ruff 13 | ruff.options = format REVISION_SCRIPT_FILENAME 14 | -------------------------------------------------------------------------------- /src/common/__init__.py: -------------------------------------------------------------------------------- 1 | from .bootstrap import application_init 2 | from .config import AppConfig 3 | -------------------------------------------------------------------------------- /src/common/bootstrap.py: -------------------------------------------------------------------------------- 1 | from dependency_injector.containers import Container as DI_Container 2 | from dependency_injector.providers import Object 3 | from pydantic import BaseModel, ConfigDict 4 | 5 | from .asyncapi import init_asyncapi_info 6 | from .config import AppConfig 7 | from .di_container import Container 8 | from .dramatiq import init_dramatiq 9 | from .logs import init_logger 10 | from .storage import init_storage 11 | from .telemetry import instrument_opentelemetry 12 | 13 | 14 | class InitReference(BaseModel): 15 | di_container: DI_Container 16 | 17 | model_config = ConfigDict(arbitrary_types_allowed=True) 18 | 19 | 20 | def application_init( 21 | app_config: AppConfig, 22 | external_di_container: Container | None = None, 23 | ) -> InitReference: 24 | container = external_di_container or Container( 25 | 
config=Object(app_config), 26 | ) 27 | init_logger(app_config) 28 | init_storage() 29 | init_dramatiq(app_config) 30 | init_asyncapi_info(app_config.APP_NAME) 31 | instrument_opentelemetry(app_config) 32 | 33 | return InitReference( 34 | di_container=container, 35 | ) 36 | -------------------------------------------------------------------------------- /src/common/config.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Literal, Optional 2 | 3 | from pydantic import BaseModel, Field 4 | from pydantic_settings import BaseSettings, SettingsConfigDict 5 | from sqlalchemy_bind_manager import SQLAlchemyConfig 6 | 7 | TYPE_ENVIRONMENT = Literal["local", "test", "staging", "production"] 8 | 9 | 10 | class DramatiqConfig(BaseModel): 11 | REDIS_URL: Optional[str] = None 12 | 13 | 14 | class AuthConfig(BaseModel): 15 | JWT_ALGORITHM: str = "RS256" 16 | JWKS_URL: Optional[str] = None 17 | 18 | 19 | class AppConfig(BaseSettings): 20 | model_config = SettingsConfigDict(env_nested_delimiter="__") 21 | 22 | APP_NAME: str = "bootstrap" 23 | CORS_ORIGINS: list[str] = Field(default_factory=list) 24 | CORS_METHODS: list[str] = ["*"] 25 | CORS_HEADERS: list[str] = ["*"] 26 | AUTH: AuthConfig = AuthConfig() 27 | DRAMATIQ: DramatiqConfig = DramatiqConfig() 28 | DEBUG: bool = False 29 | ENVIRONMENT: TYPE_ENVIRONMENT = "local" 30 | SQLALCHEMY_CONFIG: Dict[str, SQLAlchemyConfig] = dict( 31 | default=SQLAlchemyConfig( 32 | engine_url="mysql+asyncmy://corinna:gioieiiere@127.0.0.1/backend?charset=utf8mb4", 33 | async_engine=True, 34 | ), 35 | ) 36 | OTEL_EXPORTER_OTLP_ENDPOINT: Optional[str] = None 37 | OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: Optional[str] = None 38 | OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: Optional[str] = None 39 | OTEL_EXPORTER_OTLP_LOGS_ENDPOINT: Optional[str] = None 40 | -------------------------------------------------------------------------------- /src/common/di_container.py: 
-------------------------------------------------------------------------------- 1 | from dependency_injector.containers import DeclarativeContainer, WiringConfiguration 2 | from dependency_injector.providers import Dependency, Factory, Singleton 3 | from sqlalchemy_bind_manager import SQLAlchemyBindManager 4 | from sqlalchemy_bind_manager.repository import SQLAlchemyAsyncRepository 5 | 6 | from common.config import AppConfig 7 | from domains.books._gateway_interfaces import ( 8 | BookEventGatewayInterface, 9 | BookRepositoryInterface, 10 | ) 11 | from domains.books._models import BookModel 12 | from gateways.event import NullEventGateway 13 | 14 | 15 | class Container(DeclarativeContainer): 16 | """ 17 | Dependency injection container. 18 | 19 | Docs: https://python-dependency-injector.ets-labs.org/ 20 | """ 21 | 22 | wiring_config = WiringConfiguration( 23 | packages=[ 24 | "common", 25 | "domains", 26 | ] 27 | ) 28 | 29 | """ 30 | We could use the config provider but it would transform our nice typed 31 | configuration in a dictionary, therefore we return it as a raw object. 32 | """ 33 | config = Dependency(instance_of=AppConfig) 34 | 35 | """ 36 | Class mappings 37 | 38 | These are classes we want the container to manage the life cycle for 39 | (e.g. Singletons), we map them using their class name directly. 40 | """ 41 | SQLAlchemyBindManager = Singleton( 42 | SQLAlchemyBindManager, 43 | config=config.provided.SQLALCHEMY_CONFIG, 44 | ) 45 | 46 | """ 47 | Interface => Class mappings 48 | 49 | We use the interface class name as key so that we can trigger the injection 50 | using `class.__name__` and avoid using any hardcoded string or constant. 51 | 52 | e.g. 
53 | Mapping 54 | MyInterface = providers.Factory("http_app.storage.repositories.ConcreteClass") 55 | 56 | Usage 57 | @inject 58 | def function( 59 | service: MyInterface = Provide[MyInterface.__name__], 60 | ) 61 | """ 62 | 63 | BookRepositoryInterface: Factory[BookRepositoryInterface] = Factory( 64 | SQLAlchemyAsyncRepository, 65 | bind=SQLAlchemyBindManager.provided.get_bind.call(), 66 | model_class=BookModel, 67 | ) 68 | BookEventGatewayInterface: Factory[BookEventGatewayInterface] = Factory(NullEventGateway) 69 | -------------------------------------------------------------------------------- /src/common/dramatiq.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import orjson 4 | from dramatiq import set_broker, set_encoder 5 | from dramatiq.broker import Broker 6 | from dramatiq.brokers.redis import RedisBroker 7 | from dramatiq.brokers.stub import StubBroker 8 | from dramatiq.encoder import DecodeError, Encoder, MessageData 9 | from dramatiq.middleware import AsyncIO 10 | 11 | from .config import AppConfig 12 | 13 | 14 | class ORJSONEncoder(Encoder): 15 | """Encodes messages as JSON. 
orjson is much faster than json.""" 16 | 17 | def encode(self, data: MessageData) -> bytes: 18 | return orjson.dumps(data) 19 | 20 | def decode(self, data: bytes) -> MessageData: 21 | try: 22 | return orjson.loads(data) 23 | except orjson.JSONDecodeError as e: 24 | raise DecodeError("failed to decode message %r" % (data,), data, e) from None 25 | 26 | 27 | def init_dramatiq(config: AppConfig): 28 | broker: Broker 29 | 30 | if config.DRAMATIQ.REDIS_URL is not None: 31 | broker = RedisBroker(url=config.DRAMATIQ.REDIS_URL) 32 | else: 33 | broker = StubBroker() 34 | # broker.emit_after("process_boot") 35 | if config.ENVIRONMENT not in ["test", "local"]: 36 | logging.critical( 37 | "Running a non-test/non-local environment without Redis URL set", 38 | extra={"ENVIRONMENT": config.ENVIRONMENT}, 39 | ) 40 | broker.add_middleware(AsyncIO()) 41 | set_broker(broker) 42 | set_encoder(ORJSONEncoder()) 43 | -------------------------------------------------------------------------------- /src/common/logs/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import List 3 | 4 | import structlog 5 | from opentelemetry.sdk._logs import LoggingHandler 6 | from structlog.typing import Processor 7 | 8 | from ..config import AppConfig 9 | from .processors import ( 10 | add_logging_open_telemetry_spans, 11 | drop_color_message_key, 12 | extract_from_record, 13 | ) 14 | 15 | 16 | def init_logger(config: AppConfig) -> None: 17 | """ 18 | Function to initialize logging configuration using `structlog` and Python's standard 19 | logging module. It supports dynamic log level adjustment, shared processors for 20 | structlog and standard loggers, and tailored configurations for different environments 21 | (local, test, or production). Ensures consistent formatting across application logs 22 | and integrates handlers for OpenTelemetry logs if present. 
23 | 24 | Args: 25 | config (AppConfig): Configuration object containing application-wide settings 26 | such as DEBUG flag and environment status. 27 | 28 | Raises: 29 | None 30 | 31 | Returns: 32 | None 33 | 34 | """ 35 | # Strongly inspired by https://gist.github.com/nymous/f138c7f06062b7c43c060bf03759c29e 36 | 37 | # These processors will be used by both structlog and stdlib logger 38 | shared_processors: List[Processor] = [ 39 | structlog.contextvars.merge_contextvars, 40 | structlog.stdlib.add_logger_name, 41 | structlog.stdlib.add_log_level, 42 | structlog.stdlib.PositionalArgumentsFormatter(), 43 | structlog.stdlib.ExtraAdder(), 44 | drop_color_message_key, 45 | add_logging_open_telemetry_spans, 46 | structlog.processors.StackInfoRenderer(), 47 | ] 48 | 49 | # stdlib_processors are executed before the shared ones, so processors 50 | # accessing processor metadata such as `_extract_from_record` must 51 | # run here, before `remove_processors_meta` 52 | stdlib_processors: List[Processor] = [ 53 | extract_from_record, 54 | structlog.stdlib.ProcessorFormatter.remove_processors_meta, 55 | ] 56 | 57 | log_level = logging.DEBUG if config.DEBUG else logging.INFO 58 | if config.ENVIRONMENT in ["local", "test"]: 59 | shared_processors.append(structlog.processors.TimeStamper(fmt="%d-%m-%Y %H:%M:%S", utc=True)) 60 | stdlib_processors.append(structlog.dev.ConsoleRenderer()) 61 | else: 62 | shared_processors.append(structlog.processors.TimeStamper(fmt="iso", utc=True)) 63 | shared_processors.append(structlog.processors.dict_tracebacks) 64 | stdlib_processors.append(structlog.processors.JSONRenderer()) 65 | 66 | """ 67 | Even if we set the loglevel using the stdlib setLevel later, 68 | using make_filtering_bound_logger will filter events before 69 | in the chain, producing a performance improvement 70 | """ 71 | structlog.configure( 72 | wrapper_class=structlog.make_filtering_bound_logger(log_level), 73 | processors=[ 74 | *shared_processors, 75 | # This prepares the log 
events to be handled by stdlib 76 | structlog.stdlib.ProcessorFormatter.wrap_for_formatter, 77 | ], 78 | logger_factory=structlog.stdlib.LoggerFactory(), 79 | cache_logger_on_first_use=True, 80 | ) 81 | 82 | # Create a handler for stdlib logger 83 | stdlib_handler = logging.StreamHandler() 84 | stdlib_handler.setFormatter( 85 | structlog.stdlib.ProcessorFormatter( 86 | foreign_pre_chain=shared_processors, 87 | processors=stdlib_processors, 88 | ) 89 | ) 90 | 91 | # Get root logger 92 | stdlib_logger = logging.getLogger() 93 | 94 | # In case there's a OTEL handler we keep it but remove all the others, 95 | # in case this function is called multiple times. 96 | # NOTE all the processors are not applied to OTEL logs! 97 | for handler in stdlib_logger.handlers: 98 | if not isinstance(handler, LoggingHandler): 99 | stdlib_logger.removeHandler(handler) 100 | 101 | # Use structlog to format logs coming from stdlib logger 102 | stdlib_logger.addHandler(stdlib_handler) 103 | stdlib_logger.setLevel(log_level) 104 | 105 | for _log in [ 106 | "dramatiq", 107 | "uvicorn", 108 | "uvicorn.error", 109 | "uvicorn.access", 110 | ]: 111 | # Clear the log handlers for uvicorn loggers, and enable propagation 112 | # so the messages are caught by our root logger and formatted correctly 113 | # by structlog. Initial messages from reloader startup are not caught. 114 | logging.getLogger(_log).handlers.clear() 115 | logging.getLogger(_log).propagate = True 116 | -------------------------------------------------------------------------------- /src/common/logs/processors.py: -------------------------------------------------------------------------------- 1 | from opentelemetry import trace 2 | from structlog.typing import EventDict 3 | 4 | 5 | def extract_from_record(_, __, event_dict: EventDict) -> EventDict: 6 | """ 7 | Extract thread and process names and add them to the event dict. 
8 | """ 9 | record = event_dict["_record"] 10 | event_dict["thread_name"] = record.threadName 11 | event_dict["process_name"] = record.processName 12 | return event_dict 13 | 14 | 15 | def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict: 16 | """ 17 | Uvicorn logs the message a second time in the extra `color_message`, but we don't 18 | need it. This processor drops the key from the event dict if it exists. 19 | """ 20 | event_dict.pop("color_message", None) 21 | return event_dict 22 | 23 | 24 | def add_logging_open_telemetry_spans(_, __, event_dict: EventDict) -> EventDict: 25 | span = trace.get_current_span() 26 | if not span.is_recording(): 27 | event_dict["span"] = None 28 | return event_dict 29 | 30 | ctx = span.get_span_context() 31 | parent = getattr(span, "parent", None) 32 | 33 | event_dict["span"] = { 34 | "span_id": hex(ctx.span_id), 35 | "trace_id": hex(ctx.trace_id), 36 | "parent_span_id": None if not parent else hex(parent.span_id), 37 | } 38 | 39 | return event_dict 40 | -------------------------------------------------------------------------------- /src/common/storage/SQLAlchemy/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Dict 2 | 3 | from dependency_injector.wiring import Provide, inject 4 | from sqlalchemy_bind_manager import SQLAlchemyBindManager 5 | 6 | from . 
import default_bind_tables 7 | 8 | TABLE_INIT_REGISTRY: Dict[str, Callable] = { 9 | "default": default_bind_tables.init_tables, 10 | } 11 | 12 | 13 | def init_sqlalchemy(): 14 | init_tables() 15 | 16 | 17 | @inject 18 | def init_tables( 19 | sqlalchemy_manager: SQLAlchemyBindManager = Provide[SQLAlchemyBindManager.__name__], 20 | ): 21 | for name, bind in sqlalchemy_manager.get_binds().items(): 22 | init_function = TABLE_INIT_REGISTRY.get(name) 23 | if init_function: 24 | init_function(bind.registry_mapper) 25 | -------------------------------------------------------------------------------- /src/common/storage/SQLAlchemy/default_bind_tables.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, Integer, String, Table 2 | from sqlalchemy.orm import registry 3 | 4 | from domains.books._models import BookModel 5 | 6 | 7 | def init_tables(registry_mapper: registry): 8 | books = Table( 9 | "books", 10 | registry_mapper.metadata, 11 | Column("book_id", Integer, primary_key=True), 12 | Column("title", String(50)), 13 | Column("author_name", String(50)), 14 | ) 15 | registry_mapper.map_imperatively(BookModel, books) 16 | -------------------------------------------------------------------------------- /src/common/storage/__init__.py: -------------------------------------------------------------------------------- 1 | from .SQLAlchemy import init_sqlalchemy 2 | 3 | 4 | def init_storage(): 5 | init_sqlalchemy() 6 | -------------------------------------------------------------------------------- /src/common/utils.py: -------------------------------------------------------------------------------- 1 | def apply_decorator_to_methods(decorator, protected_methods: bool = False, private_methods: bool = False): 2 | """ 3 | Class decorator to apply a given function or coroutine decorator 4 | to all functions and coroutines within a class. 
5 | """ 6 | 7 | def class_decorator(cls): 8 | for attr_name, attr_value in cls.__dict__.items(): 9 | # Check if the attribute is a callable (method or coroutine) 10 | if not callable(attr_value): 11 | continue 12 | 13 | if attr_name.startswith(f"_{cls.__name__}__"): 14 | if not private_methods: 15 | continue 16 | 17 | elif attr_name.startswith("_") and not protected_methods: 18 | continue 19 | 20 | # Replace the original callable with the decorated version 21 | setattr(cls, attr_name, decorator(attr_value)) 22 | return cls 23 | 24 | return class_decorator 25 | -------------------------------------------------------------------------------- /src/domains/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/src/domains/__init__.py -------------------------------------------------------------------------------- /src/domains/books/__init__.py: -------------------------------------------------------------------------------- 1 | from ._service import BookService 2 | -------------------------------------------------------------------------------- /src/domains/books/_gateway_interfaces.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterable, Mapping 2 | from typing import Any, List, Literal, Protocol, Tuple, Union 3 | 4 | from cloudevents_pydantic.events import CloudEvent 5 | 6 | from domains.books._models import BookModel 7 | 8 | 9 | class BookRepositoryInterface(Protocol): 10 | async def save(self, book: BookModel) -> BookModel: ... 11 | 12 | async def find( 13 | self, 14 | search_params: Union[None, Mapping[str, Any]] = None, 15 | order_by: Union[None, Iterable[Union[str, Tuple[str, Literal["asc", "desc"]]]]] = None, 16 | ) -> List[BookModel]: ... 
17 | 18 | 19 | class BookEventGatewayInterface(Protocol): 20 | async def emit(self, event: CloudEvent) -> None: ... 21 | -------------------------------------------------------------------------------- /src/domains/books/_models.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Union 3 | 4 | 5 | @dataclass 6 | class BookModel: 7 | title: str 8 | author_name: str 9 | book_id: Union[int, None] = None 10 | -------------------------------------------------------------------------------- /src/domains/books/_service.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from collections.abc import Iterable 3 | 4 | from anyio import to_thread 5 | from dependency_injector.wiring import Provide, inject 6 | 7 | from common.telemetry import trace_function 8 | from common.utils import apply_decorator_to_methods 9 | 10 | from ._gateway_interfaces import BookEventGatewayInterface, BookRepositoryInterface 11 | from ._models import BookModel 12 | from ._tasks import book_cpu_intensive_task 13 | from .dto import Book, BookData 14 | from .events import BookCreatedV1, BookCreatedV1Data 15 | 16 | 17 | @apply_decorator_to_methods(trace_function()) 18 | class BookService: 19 | _book_repository: BookRepositoryInterface 20 | _event_gateway: BookEventGatewayInterface 21 | 22 | @inject 23 | def __init__( 24 | self, 25 | book_repository: BookRepositoryInterface = Provide[BookRepositoryInterface.__name__], 26 | event_gateway: BookEventGatewayInterface = Provide[BookEventGatewayInterface.__name__], 27 | ) -> None: 28 | super().__init__() 29 | self._book_repository = book_repository 30 | self._event_gateway = event_gateway 31 | 32 | async def create_book(self, book: BookData) -> Book: 33 | # Example of CPU intensive task ran in a different thread 34 | # Using processes could be better, but it would bring technical complexity 35 | # 
https://anyio.readthedocs.io/en/3.x/subprocesses.html#running-functions-in-worker-processes 36 | book_data_altered: dict = await to_thread.run_sync(self._some_cpu_intensive_blocking_task, book.model_dump()) 37 | 38 | book_model = BookModel(**book_data_altered) 39 | book = Book.model_validate(await self._book_repository.save(book_model), from_attributes=True) 40 | 41 | # Example of CPU intensive task ran in a dramatiq task. We should not rely on 42 | # dramatiq if we need to wait the operation result. 43 | # The worker could be terminated (e.g. during deployments) and this function 44 | # would time out or raise an error. 45 | book_cpu_intensive_task.send(book_id=str(book.book_id)) 46 | 47 | await self._event_gateway.emit( 48 | BookCreatedV1.event_factory(data=BookCreatedV1Data.model_validate(book_model, from_attributes=True)) 49 | ) 50 | return book 51 | 52 | async def list_books(self) -> Iterable[Book]: 53 | books = await self._book_repository.find() 54 | return [Book.model_validate(x, from_attributes=True) for x in books] 55 | 56 | async def book_created_event_handler( 57 | self, 58 | book_id: int, 59 | ) -> None: # pragma: no cover 60 | # This is just an example placeholder, there's nothing to test. 61 | logging.info( 62 | "Processed book created event", 63 | extra={ 64 | "book_id": book_id, 65 | }, 66 | ) 67 | 68 | def _some_cpu_intensive_blocking_task(self, book: dict) -> dict: 69 | # This is just an example placeholder, 70 | # there's nothing to test. 71 | return book # pragma: no cover 72 | -------------------------------------------------------------------------------- /src/domains/books/_tasks.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tasks defined in this module are considered intensive operations 3 | that happen as part of one of the BookService methods, 4 | therefore we shouldn't invoke again the book service directly 5 | from here. 6 | 7 | Tasks that invoke the BookService could exist (e.g.
an event 8 | worker), there are 2 options to implement them: 9 | - Create a different module, that would behave similar to HTTP 10 | routes, and invoke the service from there. 11 | - Invoke the service using inversion of control. 12 | 13 | IMPORTANT: It's dangerous to have nested task when they depend 14 | on each other's result. If you find yourself in this scenario 15 | it is probably better to redesign your application. 16 | """ 17 | 18 | import logging 19 | 20 | import dramatiq 21 | 22 | 23 | @dramatiq.actor(max_retries=3) 24 | def book_cpu_intensive_task(book_id: str, **kwargs) -> str: 25 | logging.info("Book CPU intensive executed", extra={"book_id": book_id}) 26 | return book_id 27 | -------------------------------------------------------------------------------- /src/domains/books/dto.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class BookData(BaseModel): 7 | title: str 8 | author_name: str 9 | 10 | 11 | class Book(BookData): 12 | book_id: Union[int, None] = None 13 | -------------------------------------------------------------------------------- /src/domains/books/events.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Literal 2 | 3 | import pydantic 4 | from cloudevents_pydantic.events import CloudEvent 5 | from cloudevents_pydantic.events.fields import metadata 6 | from cloudevents_pydantic.events.fields.types import URI, URIReference 7 | from pydantic import ConfigDict, Field 8 | 9 | 10 | class BookCreatedV1Data(pydantic.BaseModel): 11 | book_id: int 12 | title: str 13 | author_name: str 14 | 15 | 16 | def _dataschema_url(value: str) -> str: 17 | return f"https://this_service/dataschemas/{value}" 18 | 19 | 20 | class BookCreatedV1(CloudEvent): 21 | source: Annotated[ 22 | URIReference, 23 | Field(default="/book_service", validate_default=True), 24 | 
metadata.FieldSource, 25 | ] 26 | type: Annotated[Literal["book.created.v1"], Field(default="book.created.v1"), metadata.FieldType] 27 | dataschema: Annotated[ 28 | URI, 29 | Field(default=_dataschema_url("book.created.v1"), validate_default=True), 30 | metadata.FieldDataSchema, 31 | ] 32 | 33 | data: BookCreatedV1Data 34 | 35 | # The first example is used to generate the OpenAPI documentation! 36 | # Examples are good! Add examples! 37 | model_config = ConfigDict( 38 | json_schema_extra={ 39 | "examples": [ 40 | { 41 | "source": "this.service.url.here", 42 | "type": "book.created.v1", 43 | "dataschema": "/dataschemas/book.created.v1", 44 | "datacontenttype": "application/json", 45 | "subject": "123", 46 | "data": {"book_id": 0, "title": "string", "author_name": "string"}, 47 | "id": "A234-1234-1234", 48 | "specversion": "1.0", 49 | "time": "2018-04-05T17:31:00Z", 50 | } 51 | ] 52 | } 53 | ) 54 | 55 | 56 | class BookUpdatedV1(CloudEvent): 57 | source: Annotated[ 58 | URIReference, 59 | Field(default="/book_service", validate_default=True), 60 | metadata.FieldSource, 61 | ] 62 | type: Annotated[Literal["book.updated.v1"], Field(default="book.updated.v1"), metadata.FieldType] 63 | dataschema: Annotated[ 64 | URI, 65 | Field(default=_dataschema_url("book.updated.v1"), validate_default=True), 66 | metadata.FieldDataSchema, 67 | ] 68 | 69 | # This is just an example, too lazy to use a different data class 70 | data: BookCreatedV1Data 71 | 72 | # The first example is used to generate the OpenAPI documentation! 73 | # Examples are good! Add examples! 
74 | model_config = ConfigDict( 75 | json_schema_extra={ 76 | "examples": [ 77 | { 78 | "source": "this.service.url.here", 79 | "type": "book.updated.v1", 80 | "dataschema": "/dataschemas/book.updated.v1", 81 | "datacontenttype": "application/json", 82 | "subject": "123", 83 | "data": {"book_id": 0, "title": "string", "author_name": "string"}, 84 | "id": "A234-1234-1234", 85 | "specversion": "1.0", 86 | "time": "2018-04-05T17:31:00Z", 87 | } 88 | ] 89 | } 90 | ) 91 | -------------------------------------------------------------------------------- /src/domains/books/interfaces.py: -------------------------------------------------------------------------------- 1 | from typing import Iterable, Protocol, runtime_checkable 2 | 3 | from .dto import Book, BookData 4 | 5 | 6 | @runtime_checkable 7 | class BookServiceInterface(Protocol): 8 | async def create_book(self, book: BookData) -> Book: ... 9 | 10 | async def list_books(self) -> Iterable[Book]: ... 11 | 12 | async def book_created_event_handler(self, book_id: int) -> None: ... 
13 | -------------------------------------------------------------------------------- /src/dramatiq_worker/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is a tiny layer that takes care of initialising the shared 3 | application layers (storage, logs) when running standalone workers 4 | without having to initialise the HTTP framework (or other ones) 5 | """ 6 | 7 | from common.bootstrap import AppConfig, application_init 8 | from common.telemetry import instrument_third_party 9 | 10 | # These instrumentors patch and wrap libraries, we want 11 | # to execute them ASAP 12 | instrument_third_party() 13 | application_init(AppConfig()) 14 | -------------------------------------------------------------------------------- /src/gateways/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/src/gateways/__init__.py -------------------------------------------------------------------------------- /src/gateways/event.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from cloudevents_pydantic.events import CloudEvent 4 | 5 | 6 | class NullEventGateway: 7 | async def emit(self, event: CloudEvent) -> None: # pragma: no cover # No need to test this 8 | logging.info("Event emitted", extra={"cloudevent": event.model_dump()}) 9 | -------------------------------------------------------------------------------- /src/http_app/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from fastapi import FastAPI, Request 4 | from fastapi.middleware.cors import CORSMiddleware 5 | from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor 6 | from starlette.responses import JSONResponse 7 | 8 | from common import AppConfig, application_init 9 | 
import logging

from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from starlette.responses import JSONResponse

from common import AppConfig, application_init
from common.di_container import Container
from common.telemetry import instrument_third_party
from http_app import context
from http_app.routes import init_routes

# These instrumentors patch and wrap libraries, we want
# to execute them ASAP
instrument_third_party()


def create_app(
    test_config: AppConfig | None = None,
    test_di_container: Container | None = None,
) -> FastAPI:
    """Application factory building a fully configured FastAPI instance.

    ``test_config`` / ``test_di_container`` allow tests to inject their
    own configuration and dependency container.
    """
    app_config = test_config or AppConfig()

    # The config is only known at runtime, so a provider declared for the
    # FastAPI dependency-injection system (`Depends`) could not see it:
    # `Depends` is evaluated before this factory runs. A context variable
    # achieves the same purpose.
    context.app_config.set(app_config)

    ref = application_init(app_config, test_di_container)
    ref.di_container.wire(packages=["http_app"])

    app = FastAPI(
        debug=app_config.DEBUG,
        title=app_config.APP_NAME,
    )
    init_exception_handlers(app)

    if app_config.CORS_ORIGINS:
        app.add_middleware(
            CORSMiddleware,
            allow_origins=app_config.CORS_ORIGINS,
            allow_credentials=True,
            allow_methods=app_config.CORS_METHODS,
            allow_headers=app_config.CORS_HEADERS,
        )

    init_routes(app)
    FastAPIInstrumentor.instrument_app(app)

    return app


def init_exception_handlers(app: FastAPI) -> None:
    """Install the catch-all handler for unhandled exceptions.

    Specific exception handlers should instead be registered with the
    @app.exception_handler decorator:
    https://fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers
    """

    @app.middleware("http")
    async def add_exception_middleware(request: Request, call_next):
        try:
            return await call_next(request)
        except Exception as e:
            logging.exception(e)
            return JSONResponse({"error": "Internal server error"}, status_code=500)

Hello world

7 |

Your JWT token payload:

8 |
{{ token_payload | tojson(4) }}
from fastapi import FastAPI

from http_app.routes import (
    api,
    asyncapi,
    events,
    graphql,
    hello,
    ping,
    user_registered_hook,
)


def init_routes(app: FastAPI) -> None:
    """Register every HTTP router on the application instance.

    Routers declare their own prefixes, except GraphQL which is mounted
    under an explicit ``/graphql`` prefix here.
    """
    app.include_router(api.router)
    app.include_router(asyncapi.router)
    app.include_router(ping.router)
    app.include_router(hello.router)
    app.include_router(events.router)
    app.include_router(user_registered_hook.router)
    app.include_router(graphql.router, prefix="/graphql")
from typing import Iterable

from fastapi import APIRouter, status
from pydantic import BaseModel, ConfigDict

from domains.books import BookService, dto

router_v1 = APIRouter(prefix="/books/v1")
router_v2 = APIRouter(prefix="/books/v2")


class CreateBookResponse(BaseModel):
    book: dto.Book
    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "title": "The Hitchhiker's Guide to the Galaxy",
                "author_name": "Douglas Adams",
                "book_id": 123,
            }
        }
    )


class ListBooksResponse(BaseModel):
    books: Iterable[dto.Book]
    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "books": [
                    {
                        "title": "The Hitchhiker's Guide to the Galaxy",
                        "author_name": "Douglas Adams",
                        "book_id": 123,
                    },
                    {
                        "title": "Clean Architecture: A Craftsman's Guide to Software Structure and Design",
                        "author_name": "Robert C. 'Uncle Bob' Martin",
                        "book_id": 321,
                    },
                ]
            }
        }
    )


class CreateBookRequest(BaseModel):
    title: str
    author_name: str

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "title": "The Hitchhiker's Guide to the Galaxy",
                "author_name": "Douglas Adams",
            }
        }
    )


"""
The views defined here have the functionalities of two components:

- Controller: transforms data coming from the HTTP Request into
  the data model required to use the application logic

- Presenter: transforms the data coming from the application logic
  into the format needed for the proper HTTP Response
"""


async def _create_book_response(data: CreateBookRequest) -> CreateBookResponse:
    # Shared controller/presenter logic: the v1 and v2 create endpoints
    # were byte-identical, so the common path lives here (DRY).
    book_service = BookService()
    created_book = await book_service.create_book(book=dto.BookData.model_validate(data, from_attributes=True))
    return CreateBookResponse(book=created_book)


@router_v1.get("/", status_code=status.HTTP_200_OK)
async def list_books() -> ListBooksResponse:
    """Return every book known to the domain service."""
    book_service = BookService()
    books = await book_service.list_books()
    return ListBooksResponse(books=books)


@router_v1.post("/", status_code=status.HTTP_201_CREATED)
async def create_book(
    data: CreateBookRequest,
) -> CreateBookResponse:
    """Create a book (API v1)."""
    return await _create_book_response(data)


@router_v2.post("/", status_code=status.HTTP_201_CREATED)
async def create_book_v2(
    data: CreateBookRequest,
    some_optional_query_param: bool = False,
) -> CreateBookResponse:
    """Create a book (API v2 — demonstrates adding an optional query param)."""
    return await _create_book_response(data)
pydantic_asyncapi as pa 5 | from fastapi import APIRouter 6 | from fastapi.params import Depends 7 | from starlette.responses import HTMLResponse 8 | 9 | from common import AppConfig 10 | from common.asyncapi import get_schema 11 | from http_app.dependencies import get_app_config 12 | 13 | router = APIRouter(prefix="/asyncapi") 14 | 15 | 16 | @router.get( 17 | "/asyncapi.json", 18 | response_model_exclude_unset=True, 19 | include_in_schema=False, 20 | ) 21 | def asyncapi_raw() -> pa.v3.AsyncAPI: 22 | return get_schema() 23 | 24 | 25 | ASYNCAPI_COMPONENT_VERSION = "latest" 26 | 27 | ASYNCAPI_JS_DEFAULT_URL = ( 28 | f"https://unpkg.com/@asyncapi/react-component@{ASYNCAPI_COMPONENT_VERSION}/browser/standalone/index.js" 29 | ) 30 | NORMALIZE_CSS_DEFAULT_URL = "https://cdn.jsdelivr.net/npm/modern-normalize/modern-normalize.min.css" 31 | ASYNCAPI_CSS_DEFAULT_URL = ( 32 | f"https://unpkg.com/@asyncapi/react-component@{ASYNCAPI_COMPONENT_VERSION}/styles/default.min.css" 33 | ) 34 | 35 | 36 | # https://github.com/asyncapi/asyncapi-react/blob/v2.5.0/docs/usage/standalone-bundle.md 37 | @router.get("") 38 | async def get_asyncapi_html( 39 | app_config: Annotated[AppConfig, Depends(get_app_config)], 40 | sidebar: bool = True, 41 | info: bool = True, 42 | servers: bool = True, 43 | operations: bool = True, 44 | messages: bool = True, 45 | schemas: bool = True, 46 | errors: bool = True, 47 | expand_message_examples: bool = False, 48 | ) -> HTMLResponse: 49 | """Generate HTML for displaying an AsyncAPI document.""" 50 | config = { 51 | "schema": { 52 | "url": "/asyncapi/asyncapi.json", 53 | }, 54 | "config": { 55 | "show": { 56 | "sidebar": sidebar, 57 | "info": info, 58 | "servers": servers, 59 | "operations": operations, 60 | "messages": messages, 61 | "schemas": schemas, 62 | "errors": errors, 63 | }, 64 | "expand": { 65 | "messageExamples": expand_message_examples, 66 | }, 67 | "sidebar": { 68 | "showServers": "byDefault", 69 | "showOperations": "byDefault", 70 | }, 71 | }, 
72 | } 73 | 74 | return HTMLResponse( 75 | """ 76 | 77 | 78 | 79 | """ 80 | f""" 81 | {app_config.APP_NAME} AsyncAPI 82 | """ 83 | """ 84 | 85 | 86 | 87 | 88 | """ 89 | f""" 90 | 91 | 92 | """ 93 | """ 94 | 95 | 96 | 97 | 98 |
99 | """ 100 | f""" 101 | 102 | 112 | 113 | 114 | """ 115 | ) 116 | -------------------------------------------------------------------------------- /src/http_app/routes/auth.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Optional 2 | 3 | import jwt 4 | from fastapi import Depends, HTTPException, status 5 | from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer, SecurityScopes 6 | 7 | from common import AppConfig 8 | from http_app.dependencies import get_app_config 9 | 10 | 11 | class MissingAuthorizationServerException(HTTPException): 12 | def __init__(self, **kwargs): 13 | super().__init__( 14 | status.HTTP_500_INTERNAL_SERVER_ERROR, 15 | detail="Authorization server not available", 16 | ) 17 | 18 | 19 | class UnauthorizedException(HTTPException): 20 | def __init__(self, detail: str, **kwargs): 21 | super().__init__(status.HTTP_403_FORBIDDEN, detail=detail) 22 | 23 | 24 | class UnauthenticatedException(HTTPException): 25 | def __init__(self): 26 | super().__init__(status_code=status.HTTP_401_UNAUTHORIZED, detail="Requires authentication") 27 | 28 | 29 | def _jwks_client(config: Annotated[AppConfig, Depends(get_app_config)]) -> jwt.PyJWKClient: 30 | if not config.AUTH.JWKS_URL: 31 | raise MissingAuthorizationServerException() 32 | return jwt.PyJWKClient(config.AUTH.JWKS_URL) 33 | 34 | 35 | async def decode_jwt( 36 | security_scopes: SecurityScopes, 37 | config: AppConfig = Depends(get_app_config), 38 | jwks_client: jwt.PyJWKClient = Depends(_jwks_client), 39 | token: Optional[HTTPAuthorizationCredentials] = Depends(HTTPBearer()), 40 | ): 41 | if token is None: 42 | raise UnauthenticatedException() 43 | 44 | try: 45 | signing_key = jwks_client.get_signing_key_from_jwt(token.credentials).key 46 | except jwt.exceptions.PyJWKClientError as error: 47 | raise UnauthorizedException(str(error)) 48 | except jwt.exceptions.DecodeError as error: 49 | raise UnauthorizedException(str(error)) 
50 | 51 | try: 52 | # TODO: Review decode setup and verifications 53 | # https://pyjwt.readthedocs.io/en/stable/api.html#jwt.decode 54 | payload = jwt.decode( 55 | jwt=token.credentials, 56 | key=signing_key, 57 | algorithms=[config.AUTH.JWT_ALGORITHM], 58 | ) 59 | except Exception as error: 60 | raise UnauthorizedException(str(error)) 61 | 62 | return payload 63 | -------------------------------------------------------------------------------- /src/http_app/routes/events.py: -------------------------------------------------------------------------------- 1 | from typing import ( 2 | Annotated, 3 | Dict, 4 | List, 5 | Literal, 6 | Type, 7 | Union, 8 | get_args, 9 | get_origin, 10 | ) 11 | 12 | from fastapi import APIRouter, Body, Header, HTTPException 13 | from fastapi.openapi.models import Example 14 | from pydantic import Field 15 | 16 | from domains.books import BookService 17 | from domains.books.events import BookCreatedV1, BookUpdatedV1 18 | 19 | router = APIRouter(prefix="/events") 20 | 21 | """ 22 | In a real application these events would be the events the app RECEIVES and 23 | not the ones our application SENDS. This is only an example to illustrate 24 | how to handle different CloudEvent classes in FastAPI""" 25 | _EVENTS_UNION_TYPE = Annotated[Union[BookCreatedV1, BookUpdatedV1], Field(discriminator="type")] 26 | 27 | 28 | def _parse_event_registry() -> Dict[str, Type[_EVENTS_UNION_TYPE]]: 29 | annotation, field = get_args(_EVENTS_UNION_TYPE) 30 | discriminator = field.discriminator 31 | return { 32 | get_args(m.model_fields[discriminator].annotation)[0]: m 33 | for m in get_args(annotation) 34 | if get_origin(m.model_fields[discriminator].annotation) is Literal 35 | } 36 | 37 | 38 | _EVENT_REGISTRY = _parse_event_registry() 39 | 40 | 41 | def _event_schema_examples(mode: Literal["single", "batch"]) -> dict[str, Example]: 42 | missing_example_message = ( 43 | "No example has been added to this event but you can" 44 | " still explore the event schema. 
@router.get(
    "/dataschemas/{event}",
    description="Returns the schema for a supported event",
    responses={404: {"model": str}},
)
async def event_schema(event: str):
    """Return the serialization JSON schema for one supported event type."""
    event_model = _EVENT_REGISTRY.get(event)
    # Guard clause: unknown event types are a 404, known ones fall through.
    if event_model is None:
        raise HTTPException(status_code=404, detail="Schema not found")
    return event_model.model_json_schema(mode="serialization")
@router.post(
    "/batch",
    status_code=204,
    description="""
    Entrypoint for CloudEvent batch processing.
    The list of supported CloudEvents and their schema can be retrieved
    from the /events/dataschemas endpoint.
    """,
)
async def submit_event_batch(
    event_batch: Annotated[
        List[_EVENTS_UNION_TYPE],
        Body(
            media_type="application/cloudevents-batch+json; charset=UTF-8",
            openapi_examples=_event_schema_examples(mode="batch"),
        ),
    ],
    content_type: Annotated[Literal["application/cloudevents-batch+json; charset=UTF-8"], Header()],
) -> None:
    """Dispatch each CloudEvent in the batch to the domain event handler."""
    # Instantiate the service once, not once per event in the loop.
    service = BookService()
    for event in event_batch:
        await service.book_created_event_handler(event.data.book_id)
from domains.books import BookService, dto


async def list_books():
    """GraphQL resolver: fetch all books and map them to DTO objects."""
    domain_books = await BookService().list_books()
    return [dto.Book.model_validate(book, from_attributes=True) for book in domain_books]
import logging

from fastapi import APIRouter, status
from fastapi.responses import JSONResponse, Response
from pydantic import BaseModel

router = APIRouter(prefix="/user_registered")


class UserRegisteredWebhook(BaseModel):
    # Identity payload delivered by the registration webhook.
    user_id: str
    email: str


# Structured rejection payload understood by the identity frontend.
# Hoisted to module scope: it is constant, so there is no reason to
# rebuild the nested dict on every (mostly successful) request.
_REGISTRATION_ERROR_MESSAGE = {
    "messages": [
        {
            "instance_ptr": "#/traits/email",
            "messages": [
                {
                    "id": 123,  # Error id to be evaluated in frontend
                    "text": "You are not allowed to register.",
                    "type": "error",
                    "context": {  # Additional context we can send to the Frontend
                        "value": "short value",
                        "any": "additional information",
                    },
                }
            ],
        }
    ]
}


@router.post("/")
async def user_registered(user: UserRegisteredWebhook):  # pragma: no cover
    """
    Handles the user registration webhook.

    Logs the event details, evaluates the email validity, and returns an
    appropriate HTTP response.

    Args:
        user (UserRegisteredWebhook): The webhook payload received when a
            user registers, containing user details such as email.

    Returns:
        Response: 403 Forbidden with a structured error message if the
        user email is invalid, otherwise 204 No Content to confirm
        successful processing.
    """
    logging.info("User registered", extra={"user": user.model_dump()})

    if user.email == "invalid@test.com":
        return JSONResponse(
            _REGISTRATION_ERROR_MESSAGE,
            status.HTTP_403_FORBIDDEN,
        )
    else:
        return Response(status_code=status.HTTP_204_NO_CONTENT)
"""
`fixtures` is a dictionary following the format:

"BIND_NAME": "LIST_OF_FACTORIES"
"""

from factory import Factory

from domains.books._models import BookModel


class BookFactory(Factory):
    # factory_boy factory producing BookModel rows for seed data.
    class Meta:
        model = BookModel


# Seed rows applied to the "default" bind when fixtures are loaded.
fixtures = {
    "default": [
        BookFactory(
            title="The Shining",
            author_name="Stephen King",
        ),
    ],
}
33 | 34 | % for db_name in re.split(r',\s*', db_names): 35 | def upgrade_${db_name}() -> None: 36 | ${context.get(f"{db_name}_upgrades", "pass")} 37 | 38 | 39 | def downgrade_${db_name}() -> None: 40 | ${context.get(f"{db_name}_downgrades", "pass")} 41 | % endfor 42 | -------------------------------------------------------------------------------- /src/migrations/versions/2025-01-26-212326-52b1246eda46_initialize_fixture_tables.py: -------------------------------------------------------------------------------- 1 | """Initialize fixture tables 2 | 3 | Revision ID: 52b1246eda46 4 | Revises: 5 | Create Date: 2025-01-26 21:23:26.321986 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "52b1246eda46" 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(engine_name: str) -> None: 20 | globals()[f"upgrade_{engine_name}"]() 21 | 22 | 23 | def downgrade(engine_name: str) -> None: 24 | globals()[f"downgrade_{engine_name}"]() 25 | 26 | 27 | def upgrade_default() -> None: 28 | op.create_table( 29 | "alembic_fixtures", 30 | sa.Column("bind", sa.String(length=255), nullable=False), 31 | sa.Column("module_name", sa.String(length=255), nullable=False), 32 | sa.Column("signature", sa.String(length=255), nullable=False), 33 | sa.Column("alembic_head_revisions", sa.String(length=255), nullable=False), 34 | sa.Column("processed_at", sa.DateTime(timezone=True), nullable=False), 35 | sa.PrimaryKeyConstraint("bind", "module_name"), 36 | ) 37 | 38 | 39 | def downgrade_default() -> None: 40 | op.drop_table("alembic_fixtures") 41 | -------------------------------------------------------------------------------- /src/migrations/versions/2025-01-26-212826-bd73bd8a2ac4_create_books_table.py: -------------------------------------------------------------------------------- 1 | """Create books table 2 | 3 | Revision ID: bd73bd8a2ac4 4 | Revises: 52b1246eda46 5 
from typing import Union

import socketio
from starlette.routing import Mount, Route, Router

from common import AppConfig, application_init
from common.di_container import Container
from common.telemetry import instrument_third_party
from socketio_app.namespaces.chat import ChatNamespace
from socketio_app.web_routes import docs

# These instrumentors patch and wrap libraries, we want
# to execute them ASAP
instrument_third_party()


def create_app(
    test_config: Union[AppConfig, None] = None,
    test_di_container: Union[Container, None] = None,
) -> Router:
    """Application factory for the Socket.IO ASGI app.

    Initialises the shared application layers, registers the chat
    namespace and mounts two plain Starlette routes serving the
    AsyncAPI documentation.
    """
    _config = test_config or AppConfig()
    ref = application_init(_config, test_di_container)
    ref.di_container.wire(packages=["socketio_app"])

    # SocketIO App
    sio = socketio.AsyncServer(async_mode="asgi")
    # Namespaces are the equivalent of Routes.
    sio.register_namespace(ChatNamespace("/chat"))

    # Render /docs endpoint using starlette, and all the rest handled with Socket.io
    routes = [
        Route("/docs/asyncapi.json", docs.asyncapi_json, methods=["GET"]),
        Route("/docs", docs.get_asyncapi_html, methods=["GET"]),
        Mount("", app=socketio.ASGIApp(sio), name="socketio"),
    ]

    # No need for whole starlette, we're rendering a simple couple of endpoints
    # https://www.starlette.io/routing/#working-with-router-instances
    app = Router(routes=routes)

    return app
import logging

import socketio

from common.telemetry import trace_function


class ChatNamespace(socketio.AsyncNamespace):
    """Socket.IO namespace serving the /chat channel."""

    def on_connect(self, sid, environ):
        # No per-connection setup is required.
        pass

    def on_disconnect(self, sid, reason):
        # No per-connection teardown is required.
        pass

    @trace_function()
    async def on_echo_message(self, sid, data):
        # Note: this log line is only used to verify opentelemetry instrumentation works
        logging.info("received message")
        await self.emit("echo_response", data)
"https://cdn.jsdelivr.net/npm/modern-normalize/modern-normalize.min.css" 28 | ASYNCAPI_CSS_DEFAULT_URL = ( 29 | f"https://unpkg.com/@asyncapi/react-component@{ASYNCAPI_COMPONENT_VERSION}/styles/default.min.css" 30 | ) 31 | 32 | 33 | # https://github.com/asyncapi/asyncapi-react/blob/v2.5.0/docs/usage/standalone-bundle.md 34 | async def get_asyncapi_html( 35 | request: Request, 36 | ) -> HTMLResponse: 37 | app_config = AppConfig() 38 | """Generate HTML for displaying an AsyncAPI document.""" 39 | config = { 40 | "schema": { 41 | "url": "/docs/asyncapi.json", 42 | }, 43 | "config": { 44 | "show": { 45 | "sidebar": request.query_params.get("sidebar", "true") == "true", 46 | "info": request.query_params.get("info", "true") == "true", 47 | "servers": request.query_params.get("servers", "true") == "true", 48 | "operations": request.query_params.get("operations", "true") == "true", 49 | "messages": request.query_params.get("messages", "true") == "true", 50 | "schemas": request.query_params.get("schemas", "true") == "true", 51 | "errors": request.query_params.get("errors", "true") == "true", 52 | }, 53 | "expand": { 54 | "messageExamples": request.query_params.get("expand_message_examples") == "true", 55 | }, 56 | "sidebar": { 57 | "showServers": "byDefault", 58 | "showOperations": "byDefault", 59 | }, 60 | }, 61 | } 62 | 63 | return HTMLResponse( 64 | """ 65 | 66 | 67 | 68 | """ 69 | f""" 70 | {app_config.APP_NAME} AsyncAPI 71 | """ 72 | """ 73 | 74 | 75 | 76 | 77 | """ 78 | f""" 79 | 80 | 81 | """ 82 | """ 83 | 84 | 85 | 86 | 87 |
88 | """ 89 | f""" 90 | 91 | 101 | 102 | 103 | """ 104 | ) 105 | -------------------------------------------------------------------------------- /test-cross-domain-imports.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | shopt -s extglob 3 | 4 | # TODO: Review this script for new directory structure 5 | export CROSSIMPORTFOUND=0 6 | 7 | cd http_app/domains 8 | 9 | 10 | for dir in $(ls -d !(__pycache__)/ ) 11 | do 12 | echo "DIR=$dir" 13 | regex="app\.domains(?!\.${dir::-1}\.)" 14 | echo "$regex" 15 | for file in $(find ${dir} -type f -name '*.py') 16 | do 17 | grep -nH -P "${regex}" $file 18 | if [ $? -eq 0 ] 19 | then 20 | CROSSIMPORTFOUND=1 21 | fi 22 | done 23 | done 24 | 25 | if [ $CROSSIMPORTFOUND -eq 1 ] 26 | then 27 | echo "Nested domain module or wider 'domains' import detected." 28 | fi 29 | 30 | exit $CROSSIMPORTFOUND 31 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/__init__.py -------------------------------------------------------------------------------- /tests/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/common/__init__.py -------------------------------------------------------------------------------- /tests/common/test_asyncapi.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pydantic import BaseModel 3 | 4 | from common.asyncapi import ( 5 | get_schema, 6 | init_asyncapi_info, 7 | register_channel, 8 | register_channel_operation, 9 | register_server, 10 | ) 11 | 12 | 13 | # Test fixtures 14 | 
@pytest.fixture 15 | def reset_asyncapi_state(): 16 | """Reset all global state between tests""" 17 | from common.asyncapi import _channels, _components_schemas, _operations, _servers 18 | 19 | _servers.clear() 20 | _channels.clear() 21 | _operations.clear() 22 | _components_schemas.clear() 23 | yield 24 | _servers.clear() 25 | _channels.clear() 26 | _operations.clear() 27 | _components_schemas.clear() 28 | 29 | 30 | # Test message models 31 | class SomeTestMessage(BaseModel): 32 | content: str 33 | timestamp: int 34 | 35 | 36 | class AnotherTestMessage(BaseModel): 37 | status: bool 38 | code: int 39 | nested: SomeTestMessage 40 | 41 | 42 | # Test cases 43 | def test_init_asyncapi_info(): 44 | """Test initialization of AsyncAPI info""" 45 | title = "Test API" 46 | version = "2.0.0" 47 | 48 | init_asyncapi_info(title=title, version=version) 49 | schema = get_schema() 50 | 51 | assert schema.info.title == title 52 | assert schema.info.version == version 53 | 54 | 55 | def test_register_server(reset_asyncapi_state): 56 | """Test server registration""" 57 | server_id = "test-server" 58 | host = "localhost" 59 | protocol = "ws" 60 | pathname = "/ws" 61 | 62 | register_server(id=server_id, host=host, protocol=protocol, pathname=pathname) 63 | 64 | schema = get_schema() 65 | assert server_id in schema.servers 66 | assert schema.servers[server_id].host == host 67 | assert schema.servers[server_id].protocol == protocol 68 | assert schema.servers[server_id].pathname == pathname 69 | 70 | 71 | def test_register_channel(reset_asyncapi_state): 72 | """Test channel registration""" 73 | channel_id = "test-channel" 74 | address = "test/topic" 75 | description = "Test channel" 76 | title = "Test Channel" 77 | 78 | register_channel(address=address, id=channel_id, description=description, title=title) 79 | 80 | schema = get_schema() 81 | assert channel_id in schema.channels 82 | assert schema.channels[channel_id].address == address 83 | assert schema.channels[channel_id].description 
== description 84 | assert schema.channels[channel_id].title == title 85 | 86 | 87 | def test_register_channel_with_server(reset_asyncapi_state): 88 | """Test channel registration with server reference""" 89 | server_id = "test-server" 90 | channel_id = "test-channel" 91 | 92 | register_server(id=server_id, host="localhost", protocol="ws") 93 | register_channel(address="test/topic", id=channel_id, server_id=server_id) 94 | 95 | schema = get_schema() 96 | assert len(schema.channels[channel_id].servers) == 1 97 | assert schema.channels[channel_id].servers[0].ref == f"#/servers/{server_id}" 98 | 99 | 100 | def test_register_channel_operation(reset_asyncapi_state): 101 | """Test channel operation registration""" 102 | channel_id = "test-channel" 103 | operation_type = "receive" 104 | 105 | register_channel(address="test/topic", id=channel_id) 106 | register_channel_operation( 107 | channel_id=channel_id, 108 | operation_type=operation_type, 109 | messages=[SomeTestMessage], 110 | operation_name="test-operation", 111 | ) 112 | 113 | schema = get_schema() 114 | assert "test-operation" in schema.operations 115 | assert schema.operations["test-operation"].action == operation_type 116 | assert schema.operations["test-operation"].channel.ref == f"#/channels/{channel_id}" 117 | assert SomeTestMessage.__name__ in schema.components.schemas 118 | 119 | 120 | def test_register_channel_operation_invalid_channel(reset_asyncapi_state): 121 | """Test channel operation registration with invalid channel""" 122 | with pytest.raises(ValueError, match="Channel non-existent does not exist"): 123 | register_channel_operation(channel_id="non-existent", operation_type="receive", messages=[SomeTestMessage]) 124 | 125 | 126 | def test_multiple_messages_registration(reset_asyncapi_state): 127 | """Test registration of multiple messages for an operation""" 128 | channel_id = "test-channel" 129 | 130 | register_channel(address="test/topic", id=channel_id) 131 | register_channel_operation( 132 | 
channel_id=channel_id, operation_type="send", messages=[SomeTestMessage, AnotherTestMessage] 133 | ) 134 | 135 | schema = get_schema() 136 | assert SomeTestMessage.__name__ in schema.components.schemas 137 | assert AnotherTestMessage.__name__ in schema.components.schemas 138 | assert SomeTestMessage.__name__ in schema.channels[channel_id].messages 139 | assert AnotherTestMessage.__name__ in schema.channels[channel_id].messages 140 | -------------------------------------------------------------------------------- /tests/common/test_dramatiq.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from unittest.mock import MagicMock, patch 3 | 4 | import orjson 5 | import pytest 6 | from dramatiq import get_broker, get_encoder 7 | from dramatiq.brokers.stub import StubBroker 8 | from dramatiq.encoder import DecodeError 9 | 10 | from common import AppConfig 11 | from common.config import DramatiqConfig 12 | from common.dramatiq import ORJSONEncoder, init_dramatiq 13 | 14 | 15 | @patch("common.dramatiq.orjson.dumps", return_value=b"serialized") 16 | @patch("common.dramatiq.orjson.loads", return_value="deserialized") 17 | def test_orjson_encoder( 18 | mocked_loads: MagicMock, 19 | mocked_dumps: MagicMock, 20 | ): 21 | encoder = ORJSONEncoder() 22 | 23 | serialized = encoder.encode({}) 24 | assert serialized == b"serialized" 25 | mocked_dumps.assert_called_once_with({}) 26 | deserialized = encoder.decode(serialized) 27 | assert deserialized == "deserialized" 28 | mocked_loads.assert_called_once_with(b"serialized") 29 | 30 | 31 | @patch( 32 | "common.dramatiq.orjson.loads", 33 | side_effect=orjson.JSONDecodeError("msg", "doc", 123), 34 | ) 35 | def test_orjson_encoder_fails( 36 | mocked_loads: MagicMock, 37 | ): 38 | encoder = ORJSONEncoder() 39 | 40 | with pytest.raises(DecodeError): 41 | encoder.decode(b"serialized") 42 | 43 | 44 | def test_init_dramatiq_with_test_env(): 45 | """Test if the StubBroker is set in the 
'test' environment.""" 46 | config = AppConfig(ENVIRONMENT="test", DRAMATIQ=DramatiqConfig()) # Mock config 47 | init_dramatiq(config) 48 | assert isinstance(get_broker(), StubBroker) 49 | assert isinstance(get_encoder(), ORJSONEncoder) 50 | 51 | 52 | def test_init_dramatiq_with_redis(): 53 | """Test if the RedisBroker is set with a valid Redis URL.""" 54 | redis_url = "redis://localhost:6379/0" 55 | config = AppConfig(ENVIRONMENT="production", DRAMATIQ=DramatiqConfig(REDIS_URL=redis_url)) # Mock config 56 | with patch("common.dramatiq.RedisBroker") as mock_redis_broker: 57 | init_dramatiq(config) 58 | mock_redis_broker.assert_called_once_with(url=redis_url) 59 | assert get_broker() == mock_redis_broker.return_value 60 | assert isinstance(get_encoder(), ORJSONEncoder) 61 | 62 | 63 | def test_init_dramatiq_without_redis_url(caplog): 64 | """Test if an exception is raised when in non-test environment without Redis URL.""" 65 | config = AppConfig(ENVIRONMENT="production", DRAMATIQ=DramatiqConfig(REDIS_URL=None)) # Mock config 66 | with caplog.at_level(logging.CRITICAL): 67 | init_dramatiq(config) 68 | 69 | assert "Running a non-test/non-local environment without Redis URL set" in caplog.text 70 | -------------------------------------------------------------------------------- /tests/common/test_utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from common.utils import apply_decorator_to_methods 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "apply_to_protected_methods", 10 | [ 11 | pytest.param(True, id="protected_methods"), 12 | pytest.param(False, id="no_protected_methods"), 13 | ], 14 | ) 15 | @pytest.mark.parametrize( 16 | "apply_to_private_methods", 17 | [ 18 | pytest.param(True, id="private_methods"), 19 | pytest.param(False, id="no_private_methods"), 20 | ], 21 | ) 22 | async def test_class_decorator( 23 | apply_to_protected_methods: bool, 24 | apply_to_private_methods: bool, 
25 | ): 26 | def add_ten_decorator(func): 27 | def wrapper(*args, **kwargs): 28 | result = func(*args, **kwargs) 29 | return result + 10 30 | 31 | async def async_wrapper(*args, **kwargs): 32 | result = await func(*args, **kwargs) 33 | return result + 10 34 | 35 | return wrapper if not asyncio.iscoroutinefunction(func) else async_wrapper 36 | 37 | @apply_decorator_to_methods( 38 | decorator=add_ten_decorator, 39 | protected_methods=apply_to_protected_methods, 40 | private_methods=apply_to_private_methods, 41 | ) 42 | class MyClass: 43 | def get_public(self): 44 | return 10 45 | 46 | def _get_protected(self): 47 | return 10 48 | 49 | def __get_private(self): 50 | return 10 51 | 52 | async def get_apublic(self): 53 | return 10 54 | 55 | async def _get_aprotected(self): 56 | return 10 57 | 58 | async def __get_aprivate(self): 59 | return 10 60 | 61 | c = MyClass() 62 | assert c.get_public() == 20 63 | assert c._get_protected() == 20 if apply_to_protected_methods else 10 64 | assert c._MyClass__get_private() == 20 if apply_to_private_methods else 10 # type: ignore 65 | assert await c.get_apublic() == 20 66 | assert await c._get_aprotected() == 20 if apply_to_protected_methods else 10 67 | assert await c._MyClass__get_aprivate() == 20 if apply_to_private_methods else 10 # type: ignore 68 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from common import AppConfig 4 | 5 | 6 | @pytest.fixture(autouse=True) 7 | def anyio_backend(): 8 | """ 9 | For now, we don't have reason to test anything but asyncio 10 | https://anyio.readthedocs.io/en/stable/testing.html 11 | """ 12 | return "asyncio" 13 | 14 | 15 | @pytest.fixture(scope="session") 16 | def test_config() -> AppConfig: 17 | return AppConfig(SQLALCHEMY_CONFIG={}, ENVIRONMENT="test", CORS_ORIGINS=["*"]) 18 | 
-------------------------------------------------------------------------------- /tests/domains/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/domains/__init__.py -------------------------------------------------------------------------------- /tests/domains/books/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/domains/books/__init__.py -------------------------------------------------------------------------------- /tests/domains/books/conftest.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import AsyncMock, MagicMock 2 | 3 | import pytest 4 | 5 | from domains.books._gateway_interfaces import BookRepositoryInterface 6 | 7 | 8 | @pytest.fixture 9 | def book_repository() -> MagicMock: 10 | def _save_book(x): 11 | x.book_id = 123 12 | return x 13 | 14 | repo = MagicMock(spec=BookRepositoryInterface) 15 | repo.save = AsyncMock(side_effect=_save_book) 16 | 17 | return repo 18 | -------------------------------------------------------------------------------- /tests/domains/books/test_book_service.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import AsyncMock, MagicMock, patch 2 | 3 | from domains.books import _service, dto 4 | from domains.books._gateway_interfaces import BookEventGatewayInterface 5 | from domains.books._models import BookModel 6 | from domains.books._tasks import book_cpu_intensive_task 7 | 8 | 9 | async def test_create_book(book_repository): 10 | event_gateway = MagicMock(spec=BookEventGatewayInterface) 11 | book_service = _service.BookService( 12 | book_repository=book_repository, 13 | 
event_gateway=event_gateway, 14 | ) 15 | book = dto.Book( 16 | title="test", 17 | author_name="other", 18 | ) 19 | mocked_task_return = MagicMock 20 | mocked_task_return.get = MagicMock(return_value=book_cpu_intensive_task(book)) 21 | with patch.object(book_cpu_intensive_task, "send", return_value=mocked_task_return): 22 | returned_book = await book_service.create_book(book) 23 | assert book.title == returned_book.title 24 | assert book.author_name == returned_book.author_name 25 | assert returned_book.book_id is not None 26 | event_gateway.emit.assert_called_once() 27 | book_repository.save.assert_called_once() 28 | 29 | 30 | async def test_list_books(book_repository): 31 | event_gateway = MagicMock(spec=BookEventGatewayInterface) 32 | book_service = _service.BookService( 33 | book_repository=book_repository, 34 | event_gateway=event_gateway, 35 | ) 36 | book = BookModel( 37 | book_id=2, 38 | title="test", 39 | author_name="other", 40 | ) 41 | 42 | book_repository.find = AsyncMock(return_value=[book]) 43 | 44 | returned_books = await book_service.list_books() 45 | assert [dto.Book.model_validate(book, from_attributes=True)] == returned_books 46 | book_repository.find.assert_called_once() 47 | -------------------------------------------------------------------------------- /tests/domains/books/test_book_tasks.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | from domains.books._service import BookService 4 | from domains.books._tasks import book_cpu_intensive_task 5 | 6 | 7 | @patch.object(BookService, "book_created_event_handler", return_value=None) 8 | def test_book_created_task(mocked_task_handler: MagicMock): 9 | assert book_cpu_intensive_task("some_string") == "some_string" 10 | -------------------------------------------------------------------------------- /tests/http_app/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/http_app/__init__.py -------------------------------------------------------------------------------- /tests/http_app/conftest.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterator 2 | from unittest.mock import patch 3 | 4 | import pytest 5 | from dependency_injector.providers import Object 6 | from fastapi import FastAPI 7 | 8 | from common.di_container import Container 9 | from http_app import create_app 10 | 11 | 12 | @pytest.fixture(scope="session") 13 | def test_di_container(test_config) -> Container: 14 | return Container( 15 | config=Object(test_config), 16 | ) 17 | 18 | 19 | @pytest.fixture(scope="session") 20 | def testapp(test_config, test_di_container) -> Iterator[FastAPI]: 21 | # We don't need the storage to test the HTTP app 22 | with patch("common.bootstrap.init_storage", return_value=None): 23 | app = create_app(test_config=test_config, test_di_container=test_di_container) 24 | yield app 25 | -------------------------------------------------------------------------------- /tests/http_app/routes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/http_app/routes/__init__.py -------------------------------------------------------------------------------- /tests/http_app/routes/books/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/http_app/routes/books/__init__.py -------------------------------------------------------------------------------- /tests/http_app/routes/books/conftest.py: 
-------------------------------------------------------------------------------- 1 | from collections.abc import Iterator 2 | from secrets import randbelow 3 | from unittest.mock import AsyncMock, MagicMock, patch 4 | 5 | import pytest 6 | from fastapi import FastAPI 7 | 8 | from domains.books import BookService, dto 9 | from http_app import create_app 10 | 11 | 12 | @pytest.fixture 13 | def book_service() -> Iterator[MagicMock]: 14 | svc = MagicMock(autospec=BookService) 15 | svc.create_book = AsyncMock(side_effect=lambda book: dto.Book(book_id=randbelow(1000), **book.model_dump())) 16 | svc.list_books = AsyncMock( 17 | return_value=[ 18 | dto.Book( 19 | book_id=123, 20 | title="The Shining", 21 | author_name="Stephen King", 22 | ) 23 | ] 24 | ) 25 | 26 | with patch("domains.books.BookService.__new__", return_value=svc): 27 | yield svc 28 | 29 | 30 | @pytest.fixture(scope="function") 31 | def testapp(test_config, book_service) -> Iterator[FastAPI]: 32 | # We don't need the storage to test the HTTP app 33 | with patch("common.bootstrap.init_storage", return_value=None): 34 | yield create_app(test_config=test_config) 35 | -------------------------------------------------------------------------------- /tests/http_app/routes/books/graphql/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/http_app/routes/books/graphql/__init__.py -------------------------------------------------------------------------------- /tests/http_app/routes/books/graphql/test_query_books.py: -------------------------------------------------------------------------------- 1 | from fastapi import status 2 | from fastapi.testclient import TestClient 3 | 4 | 5 | async def test_create_book(testapp): 6 | query = "{books{authorName, title, bookId}}" 7 | ac = TestClient(app=testapp, base_url="http://test") 8 | response = ac.post( 9 | "/graphql", 
10 | json=dict(query=query), 11 | ) 12 | assert response.status_code == status.HTTP_200_OK 13 | assert response.json() == { 14 | "data": {"books": [{"authorName": "Stephen King", "bookId": 123, "title": "The Shining"}]} 15 | } 16 | 17 | """ 18 | Check new_book_data is a subset of response.json()["book"] 19 | (response.json()["book"] contains also the generated primary key) 20 | """ 21 | -------------------------------------------------------------------------------- /tests/http_app/routes/books/test_create_book.py: -------------------------------------------------------------------------------- 1 | from fastapi import status 2 | from fastapi.testclient import TestClient 3 | 4 | 5 | async def test_create_book(testapp): 6 | new_book_data = dict( 7 | title="test", 8 | author_name="another", 9 | ) 10 | ac = TestClient(app=testapp, base_url="http://test") 11 | response = ac.post( 12 | "/api/books/v1/", 13 | json=new_book_data, 14 | ) 15 | assert response.status_code == status.HTTP_201_CREATED 16 | """ 17 | Check new_book_data is a subset of response.json()["book"] 18 | (response.json()["book"] contains also the generated primary key) 19 | """ 20 | assert new_book_data.items() <= response.json()["book"].items() 21 | 22 | 23 | async def test_create_book_v2(testapp): 24 | new_book_data = dict( 25 | title="test", 26 | author_name="another", 27 | ) 28 | ac = TestClient(app=testapp, base_url="http://test") 29 | response = ac.post( 30 | "/api/books/v2/", 31 | json=new_book_data, 32 | ) 33 | assert response.status_code == status.HTTP_201_CREATED 34 | """ 35 | Check new_book_data is a subset of response.json()["book"] 36 | (response.json()["book"] contains also the generated primary key) 37 | """ 38 | assert new_book_data.items() <= response.json()["book"].items() 39 | -------------------------------------------------------------------------------- /tests/http_app/routes/books/test_list_books.py: -------------------------------------------------------------------------------- 1 
| from fastapi import status 2 | from fastapi.testclient import TestClient 3 | 4 | 5 | async def test_list_books(testapp): 6 | ac = TestClient(app=testapp, base_url="http://test") 7 | response = ac.get("/api/books/v1/") 8 | assert response.status_code == status.HTTP_200_OK 9 | body = response.json() 10 | assert "books" in body 11 | assert len(body["books"]) == 1 12 | assert body["books"][0]["title"] == "The Shining" 13 | assert body["books"][0]["author_name"] == "Stephen King" 14 | -------------------------------------------------------------------------------- /tests/http_app/routes/test_asyncapi.py: -------------------------------------------------------------------------------- 1 | import json 2 | from unittest.mock import MagicMock, patch 3 | 4 | import pytest 5 | from fastapi import FastAPI, status 6 | from fastapi.testclient import TestClient 7 | from pydantic_asyncapi.v3 import AsyncAPI, Info 8 | 9 | fake_schema = AsyncAPI( 10 | asyncapi="3.0.0", 11 | info=Info( 12 | title="Some fake schema", 13 | version="1.2.3", 14 | ), 15 | ) 16 | 17 | 18 | @patch("http_app.routes.asyncapi.get_schema", return_value=fake_schema) 19 | async def test_asyncapi_json_is_whatever_returned_by_schema( 20 | mock_schema: MagicMock, 21 | testapp: FastAPI, 22 | ): 23 | ac = TestClient(app=testapp, base_url="http://test") 24 | response = ac.get( 25 | "/asyncapi/asyncapi.json", 26 | ) 27 | 28 | assert response.status_code == status.HTTP_200_OK 29 | assert response.text == fake_schema.model_dump_json(exclude_unset=True) 30 | 31 | 32 | @pytest.mark.parametrize("sidebar", (True, False)) 33 | @pytest.mark.parametrize("info", (True, False)) 34 | @pytest.mark.parametrize("servers", (True, False)) 35 | @pytest.mark.parametrize("operations", (True, False)) 36 | @pytest.mark.parametrize("messages", (True, False)) 37 | @pytest.mark.parametrize("schema", (True, False)) 38 | @pytest.mark.parametrize("errors", (True, False)) 39 | @pytest.mark.parametrize("expand_message_examples", (True, False)) 40 
| async def test_ws_docs_renders_config_based_on_params( 41 | sidebar: bool, 42 | info: bool, 43 | servers: bool, 44 | operations: bool, 45 | messages: bool, 46 | schema: bool, 47 | errors: bool, 48 | expand_message_examples: bool, 49 | testapp: FastAPI, 50 | ): 51 | config = json.dumps( 52 | { 53 | "schema": { 54 | "url": "/asyncapi/asyncapi.json", 55 | }, 56 | "config": { 57 | "show": { 58 | "sidebar": sidebar, 59 | "info": info, 60 | "servers": servers, 61 | "operations": operations, 62 | "messages": messages, 63 | "schemas": schema, 64 | "errors": errors, 65 | }, 66 | "expand": { 67 | "messageExamples": expand_message_examples, 68 | }, 69 | "sidebar": { 70 | "showServers": "byDefault", 71 | "showOperations": "byDefault", 72 | }, 73 | }, 74 | } 75 | ) 76 | 77 | ac = TestClient(app=testapp, base_url="http://test") 78 | response = ac.get( 79 | "/asyncapi", 80 | params={ 81 | "sidebar": sidebar, 82 | "info": info, 83 | "servers": servers, 84 | "operations": operations, 85 | "messages": messages, 86 | "schemas": schema, 87 | "errors": errors, 88 | "expand_message_examples": expand_message_examples, 89 | }, 90 | ) 91 | assert response.status_code == status.HTTP_200_OK 92 | assert config in response.text 93 | -------------------------------------------------------------------------------- /tests/http_app/routes/test_auth.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | import pytest 4 | from fastapi.security import HTTPAuthorizationCredentials, SecurityScopes 5 | from jwt import PyJWK, PyJWKClient 6 | from jwt.exceptions import DecodeError, PyJWKClientError 7 | 8 | from common import AppConfig 9 | from common.config import AuthConfig 10 | from http_app.routes.auth import ( 11 | MissingAuthorizationServerException, 12 | UnauthenticatedException, 13 | UnauthorizedException, 14 | _jwks_client, 15 | decode_jwt, 16 | ) 17 | 18 | 19 | def test_jwks_client_raises_without_jwks_url(): 20 
| with pytest.raises(MissingAuthorizationServerException): 21 | _jwks_client(config=AppConfig(AUTH=AuthConfig(JWKS_URL=None))) 22 | 23 | 24 | def test_jwks_client_returns_a_client_with_jwks_url(): 25 | result = _jwks_client(config=AppConfig(AUTH=AuthConfig(JWKS_URL="http://test.com"))) 26 | assert isinstance(result, PyJWKClient) 27 | 28 | 29 | async def test_decode_jwt_raises_without_token(): 30 | with pytest.raises(UnauthenticatedException): 31 | await decode_jwt( 32 | security_scopes=SecurityScopes(), 33 | config=AppConfig(), 34 | jwks_client=MagicMock(), 35 | token=None, 36 | ) 37 | 38 | 39 | @pytest.mark.parametrize("exception", (PyJWKClientError, DecodeError)) 40 | async def test_decode_jwt_raises_if_jwks_client_fails(exception): 41 | mock_jwks_client = MagicMock(spec=PyJWKClient) 42 | mock_jwks_client.get_signing_key_from_jwt = MagicMock(side_effect=exception) 43 | with pytest.raises(UnauthorizedException): 44 | await decode_jwt( 45 | security_scopes=SecurityScopes(), 46 | config=AppConfig(), 47 | jwks_client=mock_jwks_client, 48 | token=HTTPAuthorizationCredentials(scheme="bearer", credentials="some_token"), 49 | ) 50 | 51 | 52 | async def test_decode_jwt_raises_if_decode_fails(): 53 | returned_key = MagicMock(spec=PyJWK) 54 | returned_key.key = "some_key" 55 | mock_jwks_client = MagicMock(spec=PyJWKClient) 56 | mock_jwks_client.get_signing_key_from_jwt = MagicMock(return_value=returned_key) 57 | 58 | with pytest.raises(UnauthorizedException): 59 | await decode_jwt( 60 | security_scopes=SecurityScopes(), 61 | config=AppConfig(), 62 | jwks_client=mock_jwks_client, 63 | token=HTTPAuthorizationCredentials( 64 | # The token cannot be decrypted and will trigger the exception 65 | scheme="bearer", 66 | credentials="some_token", 67 | ), 68 | ) 69 | 70 | 71 | async def test_decode_jwt_returns_the_decoded_jwt_payload(): 72 | returned_key = MagicMock(spec=PyJWK) 73 | returned_key.key = "some_key" 74 | mock_jwks_client = MagicMock(spec=PyJWKClient) 75 | 
mock_jwks_client.get_signing_key_from_jwt = MagicMock(return_value=returned_key) 76 | 77 | with patch("jwt.decode", return_value={"decoded": "token"}): 78 | result = await decode_jwt( 79 | security_scopes=SecurityScopes(), 80 | config=AppConfig(), 81 | jwks_client=mock_jwks_client, 82 | token=HTTPAuthorizationCredentials( 83 | # The token cannot be decrypted and will trigger the exception 84 | scheme="bearer", 85 | credentials="some_token", 86 | ), 87 | ) 88 | 89 | assert result == {"decoded": "token"} 90 | -------------------------------------------------------------------------------- /tests/http_app/routes/test_events.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from unittest.mock import AsyncMock, MagicMock, patch 3 | 4 | import pytest 5 | from cloudevents_pydantic.events import CloudEvent 6 | from fastapi.testclient import TestClient 7 | 8 | from domains.books import BookService 9 | from domains.books.events import BookCreatedV1 10 | 11 | 12 | class FakeEvent(CloudEvent): 13 | type: str = "aa" 14 | # source: Annotated[URIReference, Field(default="https://example.com")] 15 | 16 | 17 | async def test_event_schema_returns_data_if_present_in_registry(testapp): 18 | with patch.dict("http_app.routes.events._EVENT_REGISTRY", {"test_event": FakeEvent}, clear=True): 19 | ac = TestClient(app=testapp, base_url="http://test") 20 | response = ac.get("/events/dataschemas/test_event") 21 | assert response.status_code == 200 22 | 23 | 24 | async def test_event_schema_returns_404_when_not_present_in_registry(testapp): 25 | ac = TestClient(app=testapp, base_url="http://test") 26 | response = ac.get("/events/dataschemas/inexisting") 27 | assert response.status_code == 404 28 | 29 | 30 | async def test_event_schema_list_returns_data_from_registry(testapp): 31 | with patch.dict("http_app.routes.events._EVENT_REGISTRY", {"test_event": FakeEvent}, clear=True): 32 | ac = TestClient(app=testapp, 
base_url="http://test") 33 | response = ac.get("/events/dataschemas") 34 | assert response.status_code == 200 35 | assert response.json() == ["test_event"] 36 | 37 | 38 | @pytest.mark.parametrize( 39 | ["batch"], 40 | ( 41 | pytest.param(True, id="batch"), 42 | pytest.param(False, id="single"), 43 | ), 44 | ) 45 | async def test_event_endpoints_returns_204(testapp, batch): 46 | url = "/events" if not batch else "/events/batch" 47 | content_type = ( 48 | "application/cloudevents+json; charset=UTF-8" 49 | if not batch 50 | else "application/cloudevents-batch+json; charset=UTF-8" 51 | ) 52 | 53 | fake_event = BookCreatedV1.event_factory( 54 | data={"book_id": 0, "title": "string", "author_name": "string"}, 55 | ) 56 | svc = MagicMock(autospec=BookService) 57 | svc.book_created_event_handler = AsyncMock(return_value=None) 58 | with patch("domains.books.BookService.__new__", return_value=svc): 59 | ac = TestClient(app=testapp, base_url="http://test") 60 | response = ac.post( 61 | url, 62 | headers={"content-type": content_type}, 63 | content=fake_event.model_dump_json() if not batch else f"[{fake_event.model_dump_json()}]", 64 | ) 65 | svc.book_created_event_handler.assert_called_once() 66 | assert response.status_code == 204 67 | 68 | 69 | @pytest.mark.parametrize( 70 | ["batch"], 71 | ( 72 | pytest.param(True, id="batch"), 73 | pytest.param(False, id="single"), 74 | ), 75 | ) 76 | async def test_malformed_event_returns_422(testapp, batch): 77 | url = "/events" if not batch else "/events/batch" 78 | content_type = ( 79 | "application/cloudevents+json; charset=UTF-8" 80 | if not batch 81 | else "application/cloudevents-batch+json; charset=UTF-8" 82 | ) 83 | 84 | class MalformedBookCreatedV1(BookCreatedV1): 85 | source: Any = None 86 | 87 | fake_event = MalformedBookCreatedV1.event_factory( 88 | data={"book_id": 0, "title": "string", "author_name": "string"}, 89 | ) 90 | fake_event.source = None 91 | ac = TestClient(app=testapp, base_url="http://test") 92 | response = 
ac.post( 93 | url, 94 | headers={"content-type": content_type}, 95 | content=fake_event.model_dump_json() if not batch else f"[{fake_event.model_dump_json()}]", 96 | ) 97 | assert response.status_code == 422 98 | 99 | 100 | @pytest.mark.parametrize( 101 | ["batch"], 102 | ( 103 | pytest.param(True, id="batch"), 104 | pytest.param(False, id="single"), 105 | ), 106 | ) 107 | async def test_wrong_content_type_returns_422(testapp, batch): 108 | url = "/events" if not batch else "/events/batch" 109 | 110 | fake_event = BookCreatedV1.event_factory( 111 | data={"book_id": 0, "title": "string", "author_name": "string"}, 112 | ) 113 | ac = TestClient(app=testapp, base_url="http://test") 114 | response = ac.post( 115 | url, 116 | headers={"content-type": "application/json"}, 117 | content=fake_event.model_dump_json() if not batch else f"[{fake_event.model_dump_json()}]", 118 | ) 119 | assert response.status_code == 422 120 | -------------------------------------------------------------------------------- /tests/http_app/routes/test_hello.py: -------------------------------------------------------------------------------- 1 | from fastapi import Depends, FastAPI, status 2 | from fastapi.security import HTTPBearer 3 | from fastapi.testclient import TestClient 4 | 5 | from http_app.routes.auth import decode_jwt 6 | 7 | 8 | async def _fake_decode_jwt( 9 | security_scopes=None, 10 | config=None, 11 | jwks_client=None, 12 | token=Depends(HTTPBearer()), 13 | ): 14 | return {"token": token.credentials} 15 | 16 | 17 | async def test_hello_renders_what_returned_by_decoder( 18 | testapp: FastAPI, 19 | ): 20 | testapp.dependency_overrides[decode_jwt] = _fake_decode_jwt 21 | ac = TestClient(app=testapp, base_url="http://test") 22 | response = ac.get( 23 | "/hello/", 24 | headers={"Authorization": "Bearer some_token"}, 25 | ) 26 | assert response.status_code == status.HTTP_200_OK 27 | assert '"token": "some_token"' in response.text 28 | 29 | 30 | async def 
test_hello_returns_403_without_token(testapp: FastAPI): 31 | testapp.dependency_overrides[decode_jwt] = _fake_decode_jwt 32 | ac = TestClient(app=testapp, base_url="http://test") 33 | response = ac.get("/hello/") 34 | assert response.status_code == status.HTTP_403_FORBIDDEN 35 | -------------------------------------------------------------------------------- /tests/http_app/routes/test_ping.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | 3 | 4 | async def test_root(testapp): 5 | ac = TestClient(app=testapp, base_url="http://test") 6 | response = ac.get("/ping") 7 | assert response.status_code == 200 8 | assert response.json() == {"ping": "pong!"} 9 | -------------------------------------------------------------------------------- /tests/http_app/test_dependencies.py: -------------------------------------------------------------------------------- 1 | from common import AppConfig 2 | from http_app import context 3 | from http_app.dependencies import get_app_config 4 | 5 | 6 | def test_app_config_return_context_variable(): 7 | config = AppConfig(APP_NAME="SomeOtherAppName") 8 | context.app_config.set(config) 9 | assert get_app_config() is config 10 | -------------------------------------------------------------------------------- /tests/http_app/test_exception_handlers.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | from fastapi import FastAPI 4 | from fastapi.testclient import TestClient 5 | 6 | 7 | @patch("logging.exception") 8 | async def test_exception_is_logged_handler_returns_500( 9 | mocked_logging_exception: MagicMock, 10 | testapp: FastAPI, 11 | ): 12 | my_exc = Exception("Some random exception") 13 | 14 | @testapp.get("/ppp") 15 | async def fake_endpoint(): 16 | raise my_exc 17 | 18 | ac = TestClient(app=testapp, base_url="http://test") 19 | response = ac.get("/ppp") 20 | 21 | 
assert response.status_code == 500 22 | assert response.json() == {"error": "Internal server error"} 23 | mocked_logging_exception.assert_called_once_with(my_exc) 24 | -------------------------------------------------------------------------------- /tests/http_app/test_factory.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | from common.config import AppConfig 4 | from http_app import create_app 5 | 6 | 7 | def test_with_default_config() -> None: 8 | """Test create_app without passing test config.""" 9 | with patch("common.bootstrap.init_storage", return_value=None): 10 | app = create_app() 11 | assert app.debug is False 12 | 13 | 14 | def test_with_debug_config() -> None: 15 | # We don't need the storage to test the HTTP app 16 | with patch("common.bootstrap.init_storage", return_value=None): 17 | app = create_app( 18 | test_config=AppConfig( 19 | SQLALCHEMY_CONFIG={}, 20 | ENVIRONMENT="test", 21 | DEBUG=True, 22 | ) 23 | ) 24 | 25 | assert app.debug is True 26 | -------------------------------------------------------------------------------- /tests/socketio_app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/socketio_app/__init__.py -------------------------------------------------------------------------------- /tests/socketio_app/conftest.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterator 2 | from unittest.mock import patch 3 | 4 | import pytest 5 | from dependency_injector.providers import Object 6 | from starlette.routing import Router 7 | 8 | from common.di_container import Container 9 | from socketio_app import create_app 10 | 11 | 12 | @pytest.fixture(scope="session") 13 | def test_di_container(test_config) -> Container: 14 | return Container( 15 
| config=Object(test_config), 16 | ) 17 | 18 | 19 | @pytest.fixture(scope="session") 20 | def testapp(test_config, test_di_container) -> Iterator[Router]: 21 | # We don't need the storage to test the HTTP app 22 | with patch("common.bootstrap.init_storage", return_value=None): 23 | app = create_app(test_config=test_config, test_di_container=test_di_container) 24 | yield app 25 | -------------------------------------------------------------------------------- /tests/socketio_app/namespaces/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/socketio_app/namespaces/__init__.py -------------------------------------------------------------------------------- /tests/socketio_app/namespaces/test_chat.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import AsyncMock, patch 2 | 3 | import pytest 4 | 5 | from socketio_app import ChatNamespace 6 | 7 | 8 | @pytest.fixture 9 | def chat_namespace(): 10 | return ChatNamespace("/chat") 11 | 12 | 13 | async def test_on_connect(chat_namespace): 14 | sid = "test_session_id" 15 | environ = {} 16 | 17 | # Test that connect doesn't raise any exceptions 18 | chat_namespace.on_connect(sid, environ) 19 | 20 | 21 | async def test_on_disconnect(chat_namespace): 22 | sid = "test_session_id" 23 | reason = "test_reason" 24 | 25 | # Test that disconnect doesn't raise any exceptions 26 | chat_namespace.on_disconnect(sid, reason) 27 | 28 | 29 | async def test_on_echo_message(chat_namespace): 30 | sid = "test_session_id" 31 | test_data = {"message": "Hello, World!"} 32 | 33 | # Mock the emit method 34 | chat_namespace.emit = AsyncMock() 35 | 36 | # Mock the logging 37 | with patch("logging.info") as mock_log: 38 | await chat_namespace.on_echo_message(sid, test_data) 39 | 40 | # Verify logging was called 41 | 
mock_log.assert_called_once_with("received message") 42 | 43 | # Verify emit was called with correct arguments 44 | chat_namespace.emit.assert_called_once_with("echo_response", test_data) 45 | -------------------------------------------------------------------------------- /tests/socketio_app/test_app_factory.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | import socketio 4 | from starlette.routing import Mount, Router 5 | 6 | from common import AppConfig 7 | from socketio_app import create_app 8 | 9 | 10 | def test_create_app_returns_router(): 11 | """Test that create_app returns a Router instance""" 12 | app = create_app() 13 | assert isinstance(app, Router) 14 | 15 | 16 | def test_create_app_with_custom_config(): 17 | """Test that create_app accepts custom config""" 18 | test_config = AppConfig(DEBUG=True) 19 | with patch("common.bootstrap.init_storage", return_value=None): 20 | app = create_app(test_config=test_config) 21 | 22 | assert isinstance(app, Router) 23 | 24 | 25 | def test_create_app_routes(): 26 | """Test that create_app creates all expected routes""" 27 | with patch("common.bootstrap.init_storage", return_value=None): 28 | app = create_app() 29 | 30 | # Check that we have exactly 3 routes (docs JSON, docs HTML, and socketio mount) 31 | assert len(app.routes) == 3 32 | 33 | # Check routes paths and methods 34 | routes = [(route.path, getattr(route, "methods", None)) for route in app.routes] 35 | assert ("/docs/asyncapi.json", {"GET", "HEAD"}) in routes 36 | assert ("/docs", {"GET", "HEAD"}) in routes 37 | 38 | # Check that one route is a Mount instance for socketio 39 | mount_routes = [route for route in app.routes if isinstance(route, Mount)] 40 | assert len(mount_routes) == 1 41 | assert mount_routes[0].name == "socketio" 42 | assert isinstance(mount_routes[0].app, socketio.ASGIApp) 43 | 44 | 45 | def test_create_app_socketio_namespace(): 46 | """Test that socketio 
server has the chat namespace registered""" 47 | with patch("common.bootstrap.init_storage", return_value=None): 48 | app = create_app() 49 | 50 | # Find the socketio mount 51 | socketio_mount = next(route for route in app.routes if isinstance(route, Mount)) 52 | sio_app = socketio_mount.app 53 | 54 | # Check that the chat namespace is registered 55 | assert "/chat" in sio_app.engineio_server.namespace_handlers 56 | -------------------------------------------------------------------------------- /tests/socketio_app/web_routes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/socketio_app/web_routes/__init__.py -------------------------------------------------------------------------------- /tests/socketio_app/web_routes/test_docs.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock, patch 2 | 3 | import pytest 4 | from pydantic import BaseModel 5 | from starlette.requests import Request 6 | 7 | from socketio_app.web_routes.docs import ( 8 | ASYNCAPI_CSS_DEFAULT_URL, 9 | ASYNCAPI_JS_DEFAULT_URL, 10 | NORMALIZE_CSS_DEFAULT_URL, 11 | PydanticResponse, 12 | asyncapi_json, 13 | get_asyncapi_html, 14 | ) 15 | 16 | 17 | # Test model 18 | class TestModel(BaseModel): 19 | name: str 20 | value: int 21 | 22 | 23 | # Fixtures 24 | @pytest.fixture 25 | def test_model(): 26 | return TestModel(name="test", value=42) 27 | 28 | 29 | @pytest.fixture 30 | def mock_request(): 31 | return Mock(spec=Request) 32 | 33 | 34 | @pytest.fixture 35 | def mock_app_config(): 36 | with patch("socketio_app.web_routes.docs.AppConfig") as mock: 37 | mock.return_value.APP_NAME = "Test App" 38 | yield mock 39 | 40 | 41 | # Tests for PydanticResponse 42 | def test_pydantic_response_render(test_model): 43 | response = PydanticResponse(test_model) 44 | expected = 
b'{"name":"test","value":42}' 45 | assert response.render(test_model) == expected 46 | 47 | 48 | # Tests for asyncapi_json endpoint 49 | async def test_asyncapi_json(mock_request, test_model): 50 | with patch("socketio_app.web_routes.docs.get_schema") as mock_get_schema: 51 | mock_get_schema.return_value = test_model 52 | response = await asyncapi_json(mock_request) 53 | assert isinstance(response, PydanticResponse) 54 | assert response.body == b'{"name":"test","value":42}' 55 | 56 | 57 | # Tests for get_asyncapi_html endpoint 58 | async def test_get_asyncapi_html_default_params(mock_request, mock_app_config): 59 | mock_request.query_params = {} 60 | response = await get_asyncapi_html(mock_request) 61 | 62 | assert response.status_code == 200 63 | assert response.headers["content-type"] == "text/html; charset=utf-8" 64 | 65 | content = response.body.decode() 66 | assert "Test App AsyncAPI" in content 67 | assert ASYNCAPI_JS_DEFAULT_URL in content 68 | assert NORMALIZE_CSS_DEFAULT_URL in content 69 | assert ASYNCAPI_CSS_DEFAULT_URL in content 70 | assert '"sidebar": true' in content 71 | assert '"info": true' in content 72 | 73 | 74 | async def test_get_asyncapi_html_custom_params(mock_request, mock_app_config): 75 | mock_request.query_params = { 76 | "sidebar": "false", 77 | "info": "false", 78 | "servers": "false", 79 | "operations": "false", 80 | "messages": "false", 81 | "schemas": "false", 82 | "errors": "false", 83 | "expand_message_examples": "true", 84 | } 85 | 86 | response = await get_asyncapi_html(mock_request) 87 | content = response.body.decode() 88 | 89 | assert '"sidebar": false' in content 90 | assert '"info": false' in content 91 | assert '"servers": false' in content 92 | assert '"operations": false' in content 93 | assert '"messages": false' in content 94 | assert '"schemas": false' in content 95 | assert '"errors": false' in content 96 | assert '"messageExamples": true' in content 97 | 
-------------------------------------------------------------------------------- /tests/storage/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/storage/__init__.py -------------------------------------------------------------------------------- /tests/storage/conftest.py: -------------------------------------------------------------------------------- 1 | from collections.abc import AsyncIterator 2 | 3 | import pytest 4 | from sqlalchemy.orm import clear_mappers 5 | from sqlalchemy_bind_manager import SQLAlchemyBindManager, SQLAlchemyConfig 6 | 7 | from common.storage.SQLAlchemy import init_tables 8 | 9 | 10 | @pytest.fixture(scope="function") 11 | async def test_sa_manager() -> AsyncIterator[SQLAlchemyBindManager]: 12 | clear_mappers() 13 | 14 | db_config = SQLAlchemyConfig( 15 | engine_url="sqlite+aiosqlite://", 16 | engine_options=dict(connect_args={"check_same_thread": False}), 17 | async_engine=True, 18 | ) 19 | sa_manager = SQLAlchemyBindManager(config=db_config) 20 | init_tables(sqlalchemy_manager=sa_manager) 21 | for k, v in sa_manager.get_binds().items(): 22 | async with v.engine.begin() as conn: # type: ignore 23 | await conn.run_sync(v.registry_mapper.metadata.create_all) 24 | 25 | yield sa_manager 26 | clear_mappers() 27 | -------------------------------------------------------------------------------- /tests/storage/tables/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/febus982/bootstrap-python-fastapi/ff0d9b1a7bd09d2e50784fbd11ae82594b74aaab/tests/storage/tables/__init__.py -------------------------------------------------------------------------------- /tests/storage/tables/test_book_table.py: -------------------------------------------------------------------------------- 1 | from 
sqlalchemy_bind_manager._repository import SQLAlchemyAsyncRepository 2 | 3 | from domains.books._models import BookModel 4 | 5 | 6 | # This test is to ensure the book table is initialised correctly 7 | async def test_book_table_works(test_sa_manager): 8 | repo = SQLAlchemyAsyncRepository(bind=test_sa_manager.get_bind(), model_class=BookModel) 9 | async with repo._get_session() as session: 10 | book = await session.get(BookModel, 1) 11 | assert book is None 12 | 13 | await repo.save( 14 | BookModel( 15 | title="pippo", 16 | author_name="pluto", 17 | book_id=1, 18 | ) 19 | ) 20 | 21 | async with repo._get_session() as session: 22 | book = await session.get(BookModel, 1) 23 | assert book is not None 24 | assert book.book_id == 1 25 | -------------------------------------------------------------------------------- /tests/storage/test_sqlalchemy_init.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest.mock import Mock, patch 3 | from uuid import uuid4 4 | 5 | from sqlalchemy_bind_manager import SQLAlchemyBindManager, SQLAlchemyConfig 6 | 7 | from common.storage import init_storage 8 | from common.storage.SQLAlchemy import TABLE_INIT_REGISTRY, init_tables 9 | 10 | 11 | def test_init_tables_calls_only_supported_bind_initialisation(): 12 | db1_path = f"./{uuid4()}.db" 13 | db2_path = f"./{uuid4()}.db" 14 | 15 | sa_manager = SQLAlchemyBindManager( 16 | config={ 17 | "default": SQLAlchemyConfig( 18 | engine_url=f"sqlite+aiosqlite:///{db1_path}", 19 | engine_options=dict(connect_args={"check_same_thread": False}), 20 | session_options=dict(expire_on_commit=False), 21 | async_engine=True, 22 | ), 23 | "not_existing": SQLAlchemyConfig( 24 | engine_url=f"sqlite+aiosqlite:///{db2_path}", 25 | engine_options=dict(connect_args={"check_same_thread": False}), 26 | session_options=dict(expire_on_commit=False), 27 | async_engine=True, 28 | ), 29 | } 30 | ) 31 | 32 | mock_db1_table_init = Mock(return_value=None) 33 | 
mock_db2_table_init = Mock(return_value=None) 34 | 35 | with patch.dict( 36 | TABLE_INIT_REGISTRY, 37 | { 38 | "default": mock_db1_table_init, 39 | "other": mock_db2_table_init, 40 | }, 41 | ): 42 | init_tables(sqlalchemy_manager=sa_manager) 43 | 44 | mock_db1_table_init.assert_called_once() 45 | mock_db2_table_init.assert_not_called() 46 | 47 | try: 48 | os.unlink(db1_path) 49 | except FileNotFoundError: 50 | pass 51 | 52 | try: 53 | os.unlink(db2_path) 54 | except FileNotFoundError: 55 | pass 56 | 57 | 58 | def test_init_storage_calls_sqlalchemy_init_tables(): 59 | with patch("common.storage.SQLAlchemy.init_tables", return_value=None) as mocked_init_tables: 60 | init_storage() 61 | 62 | mocked_init_tables.assert_called_once() 63 | --------------------------------------------------------------------------------