├── .dockerignore
├── .github
├── FUNDING.yml
└── workflows
│ ├── release.yml
│ └── test.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── fastapi_template
├── __init__.py
├── __main__.py
├── cli.py
├── input_model.py
├── template
│ ├── cookiecutter.json
│ ├── hooks
│ │ ├── __init__.py
│ │ ├── post_gen_project.py
│ │ └── pre_gen_project.py
│ └── {{cookiecutter.project_name}}
│ │ ├── .dockerignore
│ │ ├── .editorconfig
│ │ ├── .env
│ │ ├── .github
│ │ └── workflows
│ │ │ └── tests.yml
│ │ ├── .gitignore
│ │ ├── .gitlab-ci.yml
│ │ ├── .pre-commit-config.yaml
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── alembic.ini
│ │ ├── conditional_files.json
│ │ ├── deploy
│ │ ├── docker-compose.dev.yml
│ │ ├── docker-compose.otlp.yml
│ │ ├── kube
│ │ │ ├── app.yml
│ │ │ ├── db.yml
│ │ │ ├── namespace.yml
│ │ │ ├── rabbit.yml
│ │ │ └── redis.yml
│ │ └── otel-collector-config.yml
│ │ ├── docker-compose.yml
│ │ ├── pyproject.toml
│ │ ├── replaceable_files.json
│ │ ├── tests
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── test_dummy.py
│ │ ├── test_echo.py
│ │ ├── test_kafka.py
│ │ ├── test_rabbit.py
│ │ ├── test_redis.py
│ │ └── test_{{cookiecutter.project_name}}.py
│ │ └── {{cookiecutter.project_name}}
│ │ ├── __init__.py
│ │ ├── __main__.py
│ │ ├── db_beanie
│ │ ├── dao
│ │ │ ├── __init__.py
│ │ │ └── dummy_dao.py
│ │ └── models
│ │ │ ├── __init__.py
│ │ │ └── dummy_model.py
│ │ ├── db_ormar
│ │ ├── base.py
│ │ ├── dao
│ │ │ ├── __init__.py
│ │ │ └── dummy_dao.py
│ │ ├── migrations
│ │ │ ├── __init__.py
│ │ │ ├── env.py
│ │ │ ├── script.py.mako
│ │ │ └── versions
│ │ │ │ ├── 2021-08-16-16-53_819cbf6e030b.py
│ │ │ │ ├── 2021-08-16-16-55_2b7380507a71.py
│ │ │ │ └── __init__.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ └── dummy_model.py
│ │ └── utils.py
│ │ ├── db_piccolo
│ │ ├── app_conf.py
│ │ ├── dao
│ │ │ ├── __init__.py
│ │ │ └── dummy_dao.py
│ │ ├── migrations
│ │ │ ├── 2022-04-16T17-38-51-672827.py
│ │ │ └── __init__.py
│ │ └── models
│ │ │ ├── __init__.py
│ │ │ └── dummy_model.py
│ │ ├── db_psycopg
│ │ ├── dao
│ │ │ └── dummy_dao.py
│ │ ├── dependencies.py
│ │ └── models
│ │ │ └── dummy_model.py
│ │ ├── db_sa
│ │ ├── base.py
│ │ ├── dao
│ │ │ ├── __init__.py
│ │ │ └── dummy_dao.py
│ │ ├── dependencies.py
│ │ ├── meta.py
│ │ ├── migrations
│ │ │ ├── __init__.py
│ │ │ ├── env.py
│ │ │ ├── script.py.mako
│ │ │ └── versions
│ │ │ │ ├── 2021-08-16-16-53_819cbf6e030b.py
│ │ │ │ ├── 2021-08-16-16-55_2b7380507a71.py
│ │ │ │ └── __init__.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ ├── dummy_model.py
│ │ │ └── users.py
│ │ └── utils.py
│ │ ├── db_tortoise
│ │ ├── config.py
│ │ ├── dao
│ │ │ ├── __init__.py
│ │ │ └── dummy_dao.py
│ │ ├── migrations
│ │ │ └── models
│ │ │ │ ├── 0_20210928165300_init_mysql.sql
│ │ │ │ ├── 0_20210928165300_init_pg.sql
│ │ │ │ ├── 0_20210928165300_init_sqlite.sql
│ │ │ │ ├── 1_20210928165300_init_dummy_mysql.sql
│ │ │ │ ├── 1_20210928165300_init_dummy_pg.sql
│ │ │ │ └── 1_20210928165300_init_dummy_sqlite.sql
│ │ └── models
│ │ │ ├── __init__.py
│ │ │ └── dummy_model.py
│ │ ├── gunicorn_runner.py
│ │ ├── log.py
│ │ ├── piccolo_conf.py
│ │ ├── services
│ │ ├── __init__.py
│ │ ├── kafka
│ │ │ ├── __init__.py
│ │ │ ├── dependencies.py
│ │ │ └── lifespan.py
│ │ ├── rabbit
│ │ │ ├── __init__.py
│ │ │ ├── dependencies.py
│ │ │ └── lifespan.py
│ │ └── redis
│ │ │ ├── __init__.py
│ │ │ ├── dependency.py
│ │ │ └── lifespan.py
│ │ ├── settings.py
│ │ ├── static
│ │ └── docs
│ │ │ ├── redoc.standalone.js
│ │ │ ├── swagger-ui-bundle.js
│ │ │ └── swagger-ui.css
│ │ ├── tkq.py
│ │ └── web
│ │ ├── __init__.py
│ │ ├── api
│ │ ├── __init__.py
│ │ ├── docs
│ │ │ ├── __init__.py
│ │ │ └── views.py
│ │ ├── dummy
│ │ │ ├── __init__.py
│ │ │ ├── schema.py
│ │ │ └── views.py
│ │ ├── echo
│ │ │ ├── __init__.py
│ │ │ ├── schema.py
│ │ │ └── views.py
│ │ ├── kafka
│ │ │ ├── __init__.py
│ │ │ ├── schema.py
│ │ │ └── views.py
│ │ ├── monitoring
│ │ │ ├── __init__.py
│ │ │ └── views.py
│ │ ├── rabbit
│ │ │ ├── __init__.py
│ │ │ ├── schema.py
│ │ │ └── views.py
│ │ ├── redis
│ │ │ ├── __init__.py
│ │ │ ├── schema.py
│ │ │ └── views.py
│ │ ├── router.py
│ │ └── users
│ │ │ ├── __init__.py
│ │ │ └── views.py
│ │ ├── application.py
│ │ ├── gql
│ │ ├── __init__.py
│ │ ├── context.py
│ │ ├── dummy
│ │ │ ├── __init__.py
│ │ │ ├── mutation.py
│ │ │ ├── query.py
│ │ │ └── schema.py
│ │ ├── echo
│ │ │ ├── __init__.py
│ │ │ ├── mutation.py
│ │ │ └── query.py
│ │ ├── kafka
│ │ │ ├── __init__.py
│ │ │ ├── mutation.py
│ │ │ └── schema.py
│ │ ├── rabbit
│ │ │ ├── __init__.py
│ │ │ ├── mutation.py
│ │ │ └── schema.py
│ │ ├── redis
│ │ │ ├── __init__.py
│ │ │ ├── mutation.py
│ │ │ ├── query.py
│ │ │ └── schema.py
│ │ └── router.py
│ │ └── lifespan.py
└── tests
│ ├── conftest.py
│ ├── test_generator.py
│ └── utils.py
├── images
└── logo.png
├── poetry.lock
├── pyproject.toml
└── scripts
└── version_bumper.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .vscode
3 |
4 | *.sqlite3
5 |
6 | # Byte-compiled / optimized / DLL files
7 | __pycache__/
8 | *.py[cod]
9 | *$py.class
10 |
11 | # C extensions
12 | *.so
13 |
14 | # Distribution / packaging
15 | .Python
16 | build/
17 | develop-eggs/
18 | dist/
19 | downloads/
20 | eggs/
21 | .eggs/
22 | lib/
23 | lib64/
24 | parts/
25 | sdist/
26 | var/
27 | wheels/
28 | share/python-wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 | MANIFEST
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 | cover/
58 |
59 | # Translations
60 | *.mo
61 | *.pot
62 |
63 | # Django stuff:
64 | *.log
65 | local_settings.py
66 | db.sqlite3
67 | db.sqlite3-journal
68 |
69 | # Flask stuff:
70 | instance/
71 | .webassets-cache
72 |
73 | # Scrapy stuff:
74 | .scrapy
75 |
76 | # Sphinx documentation
77 | docs/_build/
78 |
79 | # PyBuilder
80 | .pybuilder/
81 | target/
82 |
83 | # Jupyter Notebook
84 | .ipynb_checkpoints
85 |
86 | # IPython
87 | profile_default/
88 | ipython_config.py
89 |
90 | # pyenv
91 | # For a library or package, you might want to ignore these files since the code is
92 | # intended to run in multiple environments; otherwise, check them in:
93 | # .python-version
94 |
95 | # pipenv
96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
99 | # install all needed dependencies.
100 | #Pipfile.lock
101 |
102 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
103 | __pypackages__/
104 |
105 | # Celery stuff
106 | celerybeat-schedule
107 | celerybeat.pid
108 |
109 | # SageMath parsed files
110 | *.sage.py
111 |
112 | # Environments
113 | .venv
114 | env/
115 | venv/
116 | ENV/
117 | env.bak/
118 | venv.bak/
119 |
120 | # Spyder project settings
121 | .spyderproject
122 | .spyproject
123 |
124 | # Rope project settings
125 | .ropeproject
126 |
127 | # mkdocs documentation
128 | /site
129 |
130 | # mypy
131 | .mypy_cache/
132 | .dmypy.json
133 | dmypy.json
134 |
135 | # Pyre type checker
136 | .pyre/
137 |
138 | # pytype static type analyzer
139 | .pytype/
140 |
141 | # Cython debug symbols
142 | cython_debug/
143 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | open_collective: fastapi-template
2 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release python package
2 |
3 | on:
4 | release:
5 | types:
6 | - released
7 |
8 | jobs:
9 | pypi:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v2
13 | - name: Install poetry
14 | run: pipx install poetry
15 | - name: Set up Python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: "3.11"
19 | - name: Install deps
20 | run: poetry install
21 | - name: Set version
22 | run: poetry version "${{ github.ref_name }}"
23 | - name: Release package
24 | env:
25 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
26 | run: poetry publish --build
27 | docker:
28 | runs-on: ubuntu-latest
29 | permissions:
30 | packages: write
31 | contents: read
32 | steps:
33 | - name: Checkout
34 | uses: actions/checkout@v4
35 | - name: Set up Docker
36 | uses: docker/setup-qemu-action@v3
37 | - name: Set up Docker Buildx
38 | uses: docker/setup-buildx-action@v3
39 | - name: Login to GitHub Container Registry
40 | uses: docker/login-action@v2
41 | with:
42 | registry: ghcr.io
43 | username: ${{ github.actor }}
44 | password: ${{ secrets.GITHUB_TOKEN }}
45 | - name: Build and push
46 | uses: docker/build-push-action@v2
47 | with:
48 | context: .
49 | file: ./Dockerfile
50 | platforms: linux/amd64
51 | push: true
52 | tags: ghcr.io/s3rius/fastapi_template:latest,ghcr.io/s3rius/fastapi_template:${{ github.ref_name }}
53 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Testing fastapi-template
2 |
3 | on:
4 | pull_request:
5 |
6 | jobs:
7 | pre_job:
8 | # continue-on-error: true # Uncomment once integration is finished
9 | runs-on: ubuntu-latest
10 | # Map a step output to a job output
11 | outputs:
12 | should_skip: ${{ steps.skip_check.outputs.should_skip }}
13 | steps:
14 | - id: skip_check
15 | uses: fkirc/skip-duplicate-actions@master
16 | with:
17 | # All of these options are optional, so you can remove them if you are happy with the defaults
18 | concurrent_skipping: 'same_content'
19 | skip_after_successful_duplicate: 'true'
20 | paths_ignore: '["**/README.md"]'
21 | pytest:
22 | needs: pre_job
23 | if: ${{ needs.pre_job.outputs.should_skip != 'true' }}
24 | runs-on: ubuntu-latest
25 | steps:
26 | - uses: actions/checkout@v4
27 | - name: Set up Python
28 | uses: actions/setup-python@v2
29 | with:
30 | python-version: '3.12'
31 | - name: Install deps
32 | run: |
33 | pip install -U pip
34 | pip install poetry==1.8.2
35 | poetry install
36 | env:
37 | POETRY_VIRTUALENVS_CREATE: "False"
38 | - name: Setup GIT
39 | run: |
40 | git config --global user.name "fastapi_template"
41 | git config --global user.email "fastapi_template@pytest.python"
42 | - name: Run tests
43 | run: pytest -vv -n auto
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .vscode
3 |
4 | *.sqlite3
5 |
6 | # Byte-compiled / optimized / DLL files
7 | __pycache__/
8 | *.py[cod]
9 | *$py.class
10 |
11 | # C extensions
12 | *.so
13 |
14 | # Distribution / packaging
15 | .Python
16 | build/
17 | develop-eggs/
18 | dist/
19 | downloads/
20 | eggs/
21 | .eggs/
22 | lib/
23 | lib64/
24 | parts/
25 | sdist/
26 | var/
27 | wheels/
28 | share/python-wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 | MANIFEST
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 | cover/
58 |
59 | # Translations
60 | *.mo
61 | *.pot
62 |
63 | # Django stuff:
64 | *.log
65 | local_settings.py
66 | db.sqlite3
67 | db.sqlite3-journal
68 |
69 | # Flask stuff:
70 | instance/
71 | .webassets-cache
72 |
73 | # Scrapy stuff:
74 | .scrapy
75 |
76 | # Sphinx documentation
77 | docs/_build/
78 |
79 | # PyBuilder
80 | .pybuilder/
81 | target/
82 |
83 | # Jupyter Notebook
84 | .ipynb_checkpoints
85 |
86 | # IPython
87 | profile_default/
88 | ipython_config.py
89 |
90 | # pyenv
91 | # For a library or package, you might want to ignore these files since the code is
92 | # intended to run in multiple environments; otherwise, check them in:
93 | # .python-version
94 |
95 | # pipenv
96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
99 | # install all needed dependencies.
100 | #Pipfile.lock
101 |
102 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
103 | __pypackages__/
104 |
105 | # Celery stuff
106 | celerybeat-schedule
107 | celerybeat.pid
108 |
109 | # SageMath parsed files
110 | *.sage.py
111 |
112 | # Environments
113 | .venv
114 | env/
115 | venv/
116 | ENV/
117 | env.bak/
118 | venv.bak/
119 |
120 | # Spyder project settings
121 | .spyderproject
122 | .spyproject
123 |
124 | # Rope project settings
125 | .ropeproject
126 |
127 | # mkdocs documentation
128 | /site
129 |
130 | # mypy
131 | .mypy_cache/
132 | .dmypy.json
133 | dmypy.json
134 |
135 | # Pyre type checker
136 | .pyre/
137 |
138 | # pytype static type analyzer
139 | .pytype/
140 |
141 | # Cython debug symbols
142 | cython_debug/
143 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11.4-alpine
2 |
3 | RUN apk add --no-cache \
4 | curl \
5 | # For building dependencies. \
6 | gcc \
7 | musl-dev \
8 | git \
9 | g++ \
10 | libffi-dev \
11 | # For psycopg \
12 | postgresql-dev \
13 | # For mysql deps \
14 | mariadb-dev \
15 | # For UI \
16 | ncurses \
17 | bash
18 |
19 | RUN adduser --disabled-password fastapi_template
20 | RUN mkdir /projects /src
21 | RUN chown -R fastapi_template:fastapi_template /projects /src
22 | USER fastapi_template
23 |
24 | WORKDIR /src
25 |
26 | ENV PATH ${PATH}:/home/fastapi_template/.local/bin
27 |
28 | RUN pip install poetry==1.5.1
29 |
30 | COPY . /src/
31 | RUN pip install .
32 |
33 | USER root
34 | RUN rm -rfv /src
35 | RUN apk del curl
36 | USER fastapi_template
37 |
38 | RUN git config --global user.name "Fastapi Template"
39 | RUN git config --global user.email "fastapi-template@no-reply.com"
40 |
41 | VOLUME /projects
42 | WORKDIR /projects
43 |
44 | ENTRYPOINT ["/home/fastapi_template/.local/bin/fastapi_template"]
45 |
46 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Pavel Kirilin
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |  [](https://pypi.org/project/fastapi-template/)
2 | [](https://pypi.org/project/fastapi-template/)
3 |
4 |

5 |
Flexible general-purpose template for FastAPI.
6 |
7 |
8 | ## Usage
9 |
10 | ⚠️ [Git](https://git-scm.com/downloads), [Python](https://www.python.org/) and [Poetry](https://python-poetry.org/) must be installed and accessible ⚠️
11 |
12 | Poetry version must be greater than or equal to 1.1.8. Otherwise, it won't be able to install SQLAlchemy.
13 |
14 |
15 |

16 |
Templator in action
17 |
18 |
19 | You can install it directly from PyPI with pip.
20 | ```bash
21 | python3 -m pip install fastapi_template
22 | python3 -m fastapi_template
23 | # or fastapi_template
24 | # Answer all the questions
25 | # 🍪 Enjoy your new project 🍪
26 | cd new_project
27 | docker-compose up --build
28 | ```
29 |
30 | If you want to install it from source, try this:
31 | ```shell
32 | python3 -m pip install poetry
33 | python3 -m pip install .
34 | python3 -m fastapi_template
35 | ```
36 |
37 | Also, you can use it with docker.
38 | ```bash
39 | docker run --rm -it -v "$(pwd):/projects" ghcr.io/s3rius/fastapi_template
40 | ```
41 |
42 | ## Features
43 |
44 | One of the coolest features is that this project is extremely configurable.
45 | You can choose between different databases and ORMs, or
46 | even generate a project without a database at all!
47 | Currently SQLAlchemy 2.0, TortoiseORM, Piccolo, Ormar and Beanie are supported.
48 |
49 | This project can run as TUI or CLI and has excellent code documentation.
50 |
51 | Generator features:
52 | - Pydantic V2 (where possible; some libs don't support it yet);
53 | - You can choose between GraphQL and REST API;
54 | - Uvicorn and gunicorn;
55 | - Different databases support;
56 | - Different ORMs support;
57 | - Optional migrations for each ORM except raw drivers;
58 | - Optional redis support;
59 | - Optional rabbitmq support;
60 | - Different CI/CD configurations;
61 | - Optional Kubernetes config generation;
62 | - Optional demo routers and models (this helps you see how the project is structured);
63 | - Pre-commit integration;
64 | - Generated tests with almost 90% coverage;
65 | - Tests for the generator itself;
66 | - Optional Prometheus integration;
67 | - Optional Sentry integration;
68 | - Optional Loguru logger;
69 | - Optional OpenTelemetry integration;
70 | - Optional Taskiq integration.
71 |
72 |
73 | This project can handle arguments passed through the command line.
74 |
75 | ```shell
76 | $ python -m fastapi_template --help
77 |
78 | Usage: fastapi_template [OPTIONS]
79 |
80 | Options:
81 | -n, --name TEXT Name of your awesome project
82 | -V, --version Prints current version
83 | --force Overwrite directory if it exists
84 | --quiet Do not ask for features during generation
85 | --api-type [rest|graphql] Select API type for your application
86 | --db [none|sqlite|mysql|postgresql|mongodb]
87 | Select a database for your app
88 | --orm [none|ormar|sqlalchemy|tortoise|psycopg|piccolo|beanie]
89 | Choose Object–Relational Mapper lib
90 | --ci [none|gitlab_ci|github] Select a CI for your app
91 | --redis Add redis support
92 | --add_users Add fastapi-users support
93 | --rabbit Add RabbitMQ support
94 | --taskiq Add Taskiq support
95 | --migrations Add Migrations
96 | --kube Add kubernetes configs
97 | --dummy Add dummy model
98 | --routers Add example routers
99 | --swagger Add self hosted swagger
100 | --prometheus Add prometheus compatible metrics
101 | --sentry Add sentry integration
102 | --loguru Add loguru logger
103 | --opentelemetry Add opentelemetry integration
104 | --traefik Adds traefik labels to docker container
105 | --kafka Add Kafka support
106 | --gunicorn Add gunicorn server
107 | --cookie-auth Add authentication via cookie support
108 | --jwt-auth Add JWT auth support
109 | --help Show this message and exit.
110 | ```
111 |
--------------------------------------------------------------------------------
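As a quick, non-authoritative illustration of the CLI mode documented in the README above: the flag names come straight from the `--help` output shown there, but the concrete values and project name are made up, and the exact defaults applied under `--quiet` are not shown in this dump.

```bash
# Hypothetical non-interactive run; flag names are taken from the --help output above.
python3 -m fastapi_template --name my_project --quiet \
    --db postgresql --orm sqlalchemy --ci github --redis
cd my_project && docker-compose up --build
```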
/fastapi_template/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3rius/FastAPI-template/c236a2faf8ae6fe3e0d0c4a2425e03a248cf988d/fastapi_template/__init__.py
--------------------------------------------------------------------------------
/fastapi_template/__main__.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | from cookiecutter.exceptions import (FailedHookException,
4 | OutputDirExistsException)
5 | from cookiecutter.main import cookiecutter
6 | from termcolor import cprint
7 |
8 | from fastapi_template.cli import run_command
9 | from fastapi_template.input_model import BuilderContext
10 |
11 | script_dir = Path(__file__).parent
12 |
13 |
14 | def generate_project(context: BuilderContext) -> None:
15 | """
16 | Generate actual project with given context.
17 |
18 | :param context: builder_context
19 | """
20 | try:
21 | cookiecutter(
22 | template=f"{script_dir}/template",
23 | extra_context=context.dict(),
24 | default_config=BuilderContext().dict(),
25 | no_input=True,
26 | overwrite_if_exists=context.force,
27 | )
28 | except (FailedHookException, OutputDirExistsException) as exc:
29 | if isinstance(exc, OutputDirExistsException):
30 | cprint("Directory with such name already exists!", "red")
31 | return
32 | cprint(
33 | "Project successfully generated. You can read information about usage in README.md"
34 | )
35 |
36 |
37 | def main() -> None:
38 | """Starting point."""
39 | run_command(generate_project)
40 |
41 |
42 | if __name__ == "__main__":
43 | main()
44 |
--------------------------------------------------------------------------------
/fastapi_template/input_model.py:
--------------------------------------------------------------------------------
1 | import abc
2 | from collections import UserDict
3 | from typing import Any, Callable, List, Optional
4 |
5 | import click
6 | from prompt_toolkit.shortcuts import checkboxlist_dialog, radiolist_dialog
7 | from pydantic import BaseModel
8 |
9 | try:
10 | from simple_term_menu import TerminalMenu
11 | except Exception:
12 | TerminalMenu = None
13 |
14 | class BuilderContext(UserDict):
15 | """Options for project generation."""
16 |
17 | def __init__(self, **kwargs: Any) -> None:
18 | self.__dict__["data"] = kwargs
19 |
20 | def __getattr__(self, name: str) -> Any:
21 | try:
22 | return self.__dict__["data"][name]
23 | except KeyError:
24 | cls_name = self.__class__.__name__
25 | raise AttributeError(f"'{cls_name}' object has no attribute '{name}'")
26 |
27 | def __setattr__(self, name: str, value: Any) -> None:
28 | self[name] = value
29 |
30 | def dict(self) -> dict[str, Any]:
31 | return self.__dict__["data"]
32 |
33 | class Database(BaseModel):
34 | name: str
35 | image: Optional[str] = None
36 | driver: Optional[str] = None
37 | async_driver: Optional[str] = None
38 | port: Optional[int] = None
39 | driver_short: Optional[str] = None
40 |
41 |
42 | class MenuEntry(BaseModel):
43 | code: str
44 | cli_name: Optional[str] = None
45 | user_view: str
46 | description: str
47 | is_hidden: Optional[Callable[["BuilderContext"], bool]] = None
48 | additional_info: Any = None
49 |
50 | @property
51 | def generated_name(self) -> str:
52 | """
53 | Property to generate parameter name.
54 |
55 | It checks if cli_name is present,
56 | otherwise, code is used.
57 |
58 | :return: string to use in CLI.
59 | """
60 | if self.cli_name:
61 | return self.cli_name
62 | return self.code
63 |
64 | SKIP_ENTRY = MenuEntry(
65 | code="skip",
66 | user_view="skip",
67 | description="skip",
68 | )
69 |
70 |
71 | class BaseMenuModel(BaseModel, abc.ABC):
72 | title: str
73 | entries: List[MenuEntry]
74 | description: str = ""
75 |
76 | def _preview(self, current_value: str):
77 |
78 | for entry in self.entries:
79 | if entry.user_view == current_value:
80 | return entry.description
81 | return "Unknown value"
82 |
83 | @abc.abstractmethod
84 | def get_cli_options(self) -> List[click.Option]:
85 | pass
86 |
87 | @abc.abstractmethod
88 | def ask(self, context: "BuilderContext") -> Optional["BuilderContext"]:
89 | pass
90 |
91 | @abc.abstractmethod
92 | def need_ask(self, context: "BuilderContext") -> bool:
93 | pass
94 |
95 | def after_ask(self, context: "BuilderContext") -> "BuilderContext":
96 | """Function run after the menu finished work."""
97 | return context
98 |
99 |
100 | class SingularMenuModel(BaseMenuModel):
101 | code: str
102 | cli_name: Optional[str] = None
103 | description: str
104 | before_ask_fun: Optional[Callable[["BuilderContext"], Optional[MenuEntry]]] = None
105 | after_ask_fun: Optional[
106 | Callable[["BuilderContext", "SingularMenuModel"], "BuilderContext"]
107 | ] = None
108 | parser: Optional[Callable[[str], Any]] = None
109 |
110 | def get_cli_options(self) -> List[click.Option]:
111 | cli_name = self.code
112 | if self.cli_name is not None:
113 | cli_name = self.cli_name
114 | choices = [entry.generated_name for entry in self.entries]
115 | return [
116 | click.Option(
117 | param_decls=[f"--{cli_name}", self.code],
118 | type=click.Choice(choices, case_sensitive=False),
119 | default=None,
120 | help=self.description,
121 | )
122 | ]
123 |
124 | def need_ask(self, context: "BuilderContext") -> bool:
125 | if getattr(context, self.code, None) is None:
126 | return True
127 | return False
128 |
129 | def ask(self, context: "BuilderContext") -> Optional["BuilderContext"]:
130 | chosen_entry = None
131 | if self.before_ask_fun is not None:
132 | chosen_entry = self.before_ask_fun(context)
133 |
134 | ctx_value = context.dict().get(self.code)
135 | if ctx_value:
136 | for entry in self.entries:
137 | if entry.code == ctx_value:
138 | chosen_entry = entry
139 |
140 | if not chosen_entry:
141 | available_entries = []
142 | for entry in self.entries:
143 | if entry.is_hidden is None:
144 | available_entries.append(entry)
145 | elif not entry.is_hidden(context):
146 | available_entries.append(entry)
147 | if TerminalMenu is not None:
148 | menu = TerminalMenu(
149 | title=self.title,
150 | menu_entries=[entry.user_view for entry in available_entries],
151 | multi_select=False,
152 | preview_title="Description",
153 | preview_command=self._preview,
154 | preview_size=0.5,
155 | )
156 | idx = menu.show()
157 | if idx is None:
158 | return None
159 |
160 | chosen_entry = available_entries[idx]
161 | else:
162 | chosen_entry = (
163 | radiolist_dialog(
164 | title=self.title,
165 | text=self.description,
166 | values=[
167 | (entry, entry.user_view) for entry in available_entries
168 | ],
169 | ).run()
170 | or SKIP_ENTRY
171 | )
172 |
173 | if chosen_entry == SKIP_ENTRY:
174 | return
175 |
176 | setattr(context, self.code, chosen_entry.code)
177 |
178 | return context
179 |
180 | def after_ask(self, context: "BuilderContext") -> "BuilderContext":
181 | if self.after_ask_fun:
182 | return self.after_ask_fun(context, self)
183 | return super().after_ask(context)
184 |
185 |
186 | class MultiselectMenuModel(BaseMenuModel):
187 | before_ask: Optional[Callable[["BuilderContext"], Optional[List[MenuEntry]]]]
188 |
189 | def get_cli_options(self) -> List[click.Option]:
190 | options = []
191 | for entry in self.entries:
192 | options.append(
193 | click.Option(
194 | param_decls=[f"--{entry.generated_name}", entry.code],
195 | is_flag=True,
196 | help=entry.user_view,
197 | default=None,
198 | )
199 | )
200 | return options
201 |
202 | def need_ask(self, context: "BuilderContext") -> bool:
203 | for entry in self.entries:
204 | if getattr(context, entry.code, None) is None:
205 | return True
206 | return False
207 |
208 | def ask(self, context: "BuilderContext") -> Optional["BuilderContext"]:
209 | chosen_entries = None
210 | if self.before_ask is not None:
211 | chosen_entries = self.before_ask(context)
212 |
213 | if chosen_entries is None:
214 | unknown_entries = []
215 | for entry in self.entries:
216 | if not context.dict().get(entry.code):
217 | unknown_entries.append(entry)
218 |
219 | visible_entries = []
220 | for entry in unknown_entries:
221 | if entry.is_hidden is None:
222 | visible_entries.append(entry)
223 | elif not entry.is_hidden(context):
224 | visible_entries.append(entry)
225 |
226 | if TerminalMenu is not None:
227 | menu = TerminalMenu(
228 | title=self.title,
229 | menu_entries=[entry.user_view for entry in visible_entries],
230 | multi_select=True,
231 | preview_title="Description",
232 | preview_command=self._preview,
233 | )
234 |
235 | idxs = menu.show()
236 |
237 | if idxs is None:
238 | return None
239 |
240 | chosen_entries = []
241 | for idx in idxs:
242 | chosen_entries.append(visible_entries[idx])
243 | else:
244 | chosen_entries = checkboxlist_dialog(
245 | title=self.title,
246 | text=self.description,
247 | values=[(entry, entry.user_view) for entry in visible_entries],
248 | ).run() or [SKIP_ENTRY]
249 |
250 | if chosen_entries == [SKIP_ENTRY]:
251 | return context
252 |
253 | for entry in chosen_entries:
254 | setattr(context, entry.code, True)
255 |
256 | return context
257 |
258 |
259 |
--------------------------------------------------------------------------------
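A side note on `BuilderContext` above: because `__getattr__` and `__setattr__` both go through the underlying `data` mapping, attribute access and item access are interchangeable. A minimal sketch of that behaviour (not part of the repository):

```python
from fastapi_template.input_model import BuilderContext

# Keyword arguments become the underlying `data` mapping.
ctx = BuilderContext(project_name="my_project", force=False)
ctx.orm = "sqlalchemy"        # __setattr__ routes to ctx["orm"]
assert ctx.orm == ctx["orm"]  # __getattr__ reads the same mapping
print(ctx.dict())             # {'project_name': 'my_project', 'force': False, 'orm': 'sqlalchemy'}
```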
/fastapi_template/template/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_name": {
3 | "type": "string"
4 | },
5 | "api_type": {
6 | "type": "dict"
7 | },
8 | "db_info": {
9 | "type": "dict"
10 | },
11 | "enable_redis": {
12 | "type": "bool"
13 | },
14 | "enable_rmq": {
15 | "type": "bool"
16 | },
17 | "ci_type": {
18 | "type": "string"
19 | },
20 | "enable_migrations": {
21 | "type": "bool"
22 | },
23 | "enable_taskiq": {
24 | "type": "bool"
25 | },
26 | "enable_kube": {
27 | "type": "bool"
28 | },
29 | "kube_name": {
30 | "type": "string"
31 | },
32 | "enable_routers": {
33 | "type": "bool"
34 | },
35 | "enable_kafka": {
36 | "type": "bool"
37 | },
38 | "enable_loguru": {
39 | "type": "bool"
40 | },
41 | "traefik_labels": {
42 | "type": "bool"
43 | },
44 | "add_dummy": {
45 | "type": "bool"
46 | },
47 | "orm": {
48 | "type": "str"
49 | },
50 | "self_hosted_swagger": {
51 | "type": "bool"
52 | },
53 | "prometheus_enabled": {
54 | "type": "bool"
55 | },
56 | "sentry_enabled": {
57 | "type": "bool"
58 | },
59 | "otlp_enabled": {
60 | "type": "bool"
61 | },
62 | "gunicorn": {
63 | "type": "bool"
64 | },
65 | "add_users": {
66 | "type": "bool"
67 | },
68 | "cookie_auth": {
69 | "type": "bool"
70 | },
71 | "jwt_auth": {
72 | "type": "bool"
73 | },
74 | "_extensions": [
75 | "cookiecutter.extensions.RandomStringExtension"
76 | ],
77 | "_copy_without_render": [
78 | "*.js",
79 | "*.css"
80 | ]
81 | }
82 |
--------------------------------------------------------------------------------
/fastapi_template/template/hooks/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3rius/FastAPI-template/c236a2faf8ae6fe3e0d0c4a2425e03a248cf988d/fastapi_template/template/hooks/__init__.py
--------------------------------------------------------------------------------
/fastapi_template/template/hooks/post_gen_project.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import json
3 | import os
4 | import shutil
5 | import subprocess
6 |
7 | from termcolor import cprint, colored
8 | from pathlib import Path
9 |
10 | CONDITIONAL_MANIFEST = "conditional_files.json"
11 | REPLACE_MANIFEST = "replaceable_files.json"
12 |
13 |
14 | def delete_resource(resource):
15 | if os.path.isfile(resource):
16 | os.remove(resource)
17 | elif os.path.isdir(resource):
18 | shutil.rmtree(resource)
19 |
20 |
21 | def delete_resources_for_disabled_features():
22 | with open(CONDITIONAL_MANIFEST) as manifest_file:
23 | manifest = json.load(manifest_file)
24 | for feature_name, feature in manifest.items():
25 | if feature["enabled"].lower() != "true":
26 | text = "{} resources for disabled feature {}...".format(
27 | colored("Removing", color="red"),
28 | colored(feature_name, color="magenta", attrs=["underline"]),
29 | )
30 | print(text)
31 | for resource in feature["resources"]:
32 | delete_resource(resource)
33 | delete_resource(CONDITIONAL_MANIFEST)
34 | cprint("cleanup complete!", color="green")
35 |
36 |
37 | def replace_resources():
38 | print(
39 | "⭐ Placing {} nicely in your {} ⭐".format(
40 | colored("resources", color="green"), colored("new project", color="blue")
41 | )
42 | )
43 | with open(REPLACE_MANIFEST) as replace_manifest:
44 | manifest = json.load(replace_manifest)
45 | for target, replaces in manifest.items():
46 | target_path = Path(target)
47 | delete_resource(target_path)
48 | for src_file in map(Path, replaces):
49 | if src_file.exists():
50 | shutil.move(src_file, target_path)
51 | delete_resource(REPLACE_MANIFEST)
52 | print(
53 | "Resources are happy to be where {}.".format(
54 | colored("they are needed the most", color="green", attrs=["underline"])
55 | )
56 | )
57 |
58 |
59 | def init_repo():
60 | subprocess.run(["git", "init"], stdout=subprocess.PIPE)
61 | cprint("Git repository initialized.", "green")
62 | subprocess.run(["git", "add", "."], stdout=subprocess.PIPE)
63 | cprint("Added files to index.", "green")
64 | subprocess.run(["poetry", "install", "-n"])
65 | subprocess.run(["poetry", "run", "pre-commit", "install"])
66 | cprint("pre-commit installed.", "green")
67 | subprocess.run(["poetry", "run", "pre-commit", "run", "-a"])
68 | subprocess.run(["git", "add", "."], stdout=subprocess.PIPE)
69 | subprocess.run(["git", "commit", "-m", "Initial commit"], stdout=subprocess.PIPE)
70 |
71 | if __name__ == "__main__":
72 | delete_resources_for_disabled_features()
73 | replace_resources()
74 | init_repo()
75 |
--------------------------------------------------------------------------------
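For context on the hook above: `delete_resources_for_disabled_features` expects `conditional_files.json` to map each feature name to an `enabled` string and a list of `resources` to delete. The real manifest lives in the generated project and is not reproduced in this dump; a hypothetical entry, written as a Python literal for readability, would look roughly like this:

```python
# Hypothetical shape of conditional_files.json, inferred from the hook's access pattern;
# the feature name and resource paths below are illustrative only.
manifest = {
    "redis": {
        "enabled": "False",        # the hook deletes resources unless this equals "true"/"True"
        "resources": [             # files or directories removed when the feature is disabled
            "my_project/services/redis",
            "my_project/web/api/redis",
        ],
    },
}
```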
/fastapi_template/template/hooks/pre_gen_project.py:
--------------------------------------------------------------------------------
1 | """
2 | Pre generation hooks
3 | """
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.dockerignore:
--------------------------------------------------------------------------------
1 | ### Python template
2 |
3 | deploy/
4 | .idea/
5 | .vscode/
6 | .git/
7 | # Byte-compiled / optimized / DLL files
8 | __pycache__/
9 | *.py[cod]
10 | *$py.class
11 |
12 | # C extensions
13 | *.so
14 |
15 | # Distribution / packaging
16 | .Python
17 | build/
18 | develop-eggs/
19 | dist/
20 | downloads/
21 | eggs/
22 | .eggs/
23 | lib/
24 | lib64/
25 | parts/
26 | sdist/
27 | var/
28 | wheels/
29 | share/python-wheels/
30 | *.egg-info/
31 | .installed.cfg
32 | *.egg
33 | MANIFEST
34 |
35 | # PyInstaller
36 | # Usually these files are written by a python script from a template
37 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
38 | *.manifest
39 | *.spec
40 |
41 | # Installer logs
42 | pip-log.txt
43 | pip-delete-this-directory.txt
44 |
45 | # Unit test / coverage reports
46 | htmlcov/
47 | .tox/
48 | .nox/
49 | .coverage
50 | .coverage.*
51 | .cache
52 | nosetests.xml
53 | coverage.xml
54 | *.cover
55 | *.py,cover
56 | .hypothesis/
57 | .pytest_cache/
58 | cover/
59 |
60 | # Translations
61 | *.mo
62 | *.pot
63 |
64 | # Django stuff:
65 | *.log
66 | local_settings.py
67 | db.sqlite3
68 | db.sqlite3-journal
69 |
70 | # Flask stuff:
71 | instance/
72 | .webassets-cache
73 |
74 | # Scrapy stuff:
75 | .scrapy
76 |
77 | # Sphinx documentation
78 | docs/_build/
79 |
80 | # PyBuilder
81 | .pybuilder/
82 | target/
83 |
84 | # Jupyter Notebook
85 | .ipynb_checkpoints
86 |
87 | # IPython
88 | profile_default/
89 | ipython_config.py
90 |
91 | # pyenv
92 | # For a library or package, you might want to ignore these files since the code is
93 | # intended to run in multiple environments; otherwise, check them in:
94 | # .python-version
95 |
96 | # pipenv
97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
100 | # install all needed dependencies.
101 | #Pipfile.lock
102 |
103 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
104 | __pypackages__/
105 |
106 | # Celery stuff
107 | celerybeat-schedule
108 | celerybeat.pid
109 |
110 | # SageMath parsed files
111 | *.sage.py
112 |
113 | # Environments
114 | .env
115 | .venv
116 | env/
117 | venv/
118 | ENV/
119 | env.bak/
120 | venv.bak/
121 |
122 | # Spyder project settings
123 | .spyderproject
124 | .spyproject
125 |
126 | # Rope project settings
127 | .ropeproject
128 |
129 | # mkdocs documentation
130 | /site
131 |
132 | # mypy
133 | .mypy_cache/
134 | .dmypy.json
135 | dmypy.json
136 |
137 | # Pyre type checker
138 | .pyre/
139 |
140 | # pytype static type analyzer
141 | .pytype/
142 |
143 | # Cython debug symbols
144 | cython_debug/
145 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | tab_width = 4
5 | end_of_line = lf
6 | max_line_length = 88
7 | ij_visual_guides = 88
8 | insert_final_newline = true
9 | trim_trailing_whitespace = true
10 |
11 | [*.{js,py,html}]
12 | charset = utf-8
13 |
14 | [*.md]
15 | trim_trailing_whitespace = false
16 |
17 | [*.{yml,yaml}]
18 | indent_style = space
19 | indent_size = 2
20 |
21 | [Makefile]
22 | indent_style = tab
23 |
24 | [.flake8]
25 | indent_style = space
26 | indent_size = 2
27 |
28 | [*.py]
29 | indent_style = space
30 | indent_size = 4
31 | ij_python_from_import_parentheses_force_if_multiline = true
32 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.env:
--------------------------------------------------------------------------------
1 | {{cookiecutter.project_name | upper}}_RELOAD=True
2 | {%- if cookiecutter.db_info.name == "sqlite" %}
3 | {{cookiecutter.project_name | upper}}_DB_FILE=db.sqlite3
4 | {%- elif cookiecutter.db_info.name != 'none' %}
5 | {{cookiecutter.project_name | upper}}_DB_HOST=localhost
6 | {%- endif %}
7 | {%- if cookiecutter.add_users == "True" %}
8 | USERS_SECRET=""
9 | {%- endif %}
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Testing {{cookiecutter.project_name}}
2 |
3 | on: push
4 |
5 | jobs:
6 | lint:
7 | strategy:
8 | matrix:
9 | cmd:
10 | - black
11 | - ruff
12 | - mypy
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v4
16 | - name: Install poetry
17 | run: pipx install poetry
18 | - name: Set up Python
19 | uses: actions/setup-python@v5
20 | with:
21 | python-version: '3.11'
22 | cache: 'poetry'
23 | - name: Install deps
24 | run: poetry install
25 | - name: Run lint check
26 | run: poetry run pre-commit run -a {{ '${{' }} matrix.cmd {{ '}}' }}
27 | pytest:
28 | runs-on: ubuntu-latest
29 | steps:
30 | - uses: actions/checkout@v4
31 | - name: Create .env
32 | run: touch .env
33 | - name: Set up Python
34 | uses: actions/setup-python@v5
35 | with:
36 | python-version: '3.11'
37 | - name: Update docker-compose
38 | uses: KengoTODA/actions-setup-docker-compose@v1
39 | with:
40 | version: "2.28.0"
41 | - name: run tests
42 | run: docker-compose run --rm api pytest -vv
43 |
44 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.gitignore:
--------------------------------------------------------------------------------
1 | ### Python template
2 |
3 | .idea/
4 | .vscode/
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 |
10 | # C extensions
11 | *.so
12 |
13 | # Distribution / packaging
14 | .Python
15 | build/
16 | develop-eggs/
17 | dist/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | share/python-wheels/
28 | *.egg-info/
29 | .installed.cfg
30 | *.egg
31 | MANIFEST
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .nox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *.cover
53 | *.py,cover
54 | .hypothesis/
55 | .pytest_cache/
56 | cover/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | *.sqlite3
66 | *.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | .pybuilder/
80 | target/
81 |
82 | # Jupyter Notebook
83 | .ipynb_checkpoints
84 |
85 | # IPython
86 | profile_default/
87 | ipython_config.py
88 |
89 | # pyenv
90 | # For a library or package, you might want to ignore these files since the code is
91 | # intended to run in multiple environments; otherwise, check them in:
92 | # .python-version
93 |
94 | # pipenv
95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
98 | # install all needed dependencies.
99 | #Pipfile.lock
100 |
101 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
102 | __pypackages__/
103 |
104 | # Celery stuff
105 | celerybeat-schedule
106 | celerybeat.pid
107 |
108 | # SageMath parsed files
109 | *.sage.py
110 |
111 | # Environments
112 | .env
113 | .venv
114 | env/
115 | venv/
116 | ENV/
117 | env.bak/
118 | venv.bak/
119 |
120 | # Spyder project settings
121 | .spyderproject
122 | .spyproject
123 |
124 | # Rope project settings
125 | .ropeproject
126 |
127 | # mkdocs documentation
128 | /site
129 |
130 | # mypy
131 | .mypy_cache/
132 | .dmypy.json
133 | dmypy.json
134 |
135 | # Pyre type checker
136 | .pyre/
137 |
138 | # pytype static type analyzer
139 | .pytype/
140 |
141 | # Cython debug symbols
142 | cython_debug/
143 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - "test"
3 |
4 | .test-template:
5 | stage: test
6 | image: python:3.11.4-slim-bullseye
7 | tags:
8 | - kubernetes-runner
9 | - docker-runner
10 | except:
11 | - tags
12 | before_script:
13 | - apt update && apt install -y git
14 | - pip install poetry==1.8.2
15 | - poetry config virtualenvs.create false
16 | - poetry install
17 |
18 | black:
19 | extends:
20 | - .test-template
21 | script:
22 | - pre-commit run black -a
23 |
24 | ruff:
25 | extends:
26 | - .test-template
27 | script:
28 | - pre-commit run ruff -a
29 |
30 | mypy:
31 | extends:
32 | - .test-template
33 | script:
34 |     - pre-commit run mypy -a
35 |
36 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | # See https://pre-commit.com for more information
3 | # See https://pre-commit.com/hooks.html for more hooks
4 | repos:
5 | - repo: local
6 | hooks:
7 |
8 | - id: black
9 | name: Format with Black
10 | entry: poetry run black
11 | language: system
12 | types: [python]
13 |
14 | - id: ruff
15 | name: Check with Ruff
16 | entry: poetry run ruff
17 | language: system
18 | pass_filenames: false
19 | always_run: true
20 | args: ["check", "{{cookiecutter.project_name}}", "tests", "--fix"]
21 |
22 | - id: mypy
23 | name: Validate types with MyPy
24 | entry: poetry run mypy
25 | language: system
26 | types: [python]
27 | pass_filenames: false
28 | args:
29 | - "{{cookiecutter.project_name}}"
30 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11.4-slim-bullseye AS prod
2 |
3 | {%- if cookiecutter.db_info.name == "mysql" %}
4 | RUN apt-get update && apt-get install -y \
5 | default-libmysqlclient-dev \
6 | gcc \
7 | pkg-config \
8 | && rm -rf /var/lib/apt/lists/*
9 | {%- endif %}
10 |
11 |
12 | {%- if cookiecutter.db_info.name == "postgresql" %}
13 | RUN apt-get update && apt-get install -y \
14 | gcc \
15 | && rm -rf /var/lib/apt/lists/*
16 | {%- endif %}
17 |
18 |
19 | RUN pip install poetry==1.8.2
20 |
21 | # Configuring poetry
22 | RUN poetry config virtualenvs.create false
23 | RUN poetry config cache-dir /tmp/poetry_cache
24 |
25 | # Copying requirements of a project
26 | COPY pyproject.toml poetry.lock /app/src/
27 | WORKDIR /app/src
28 |
29 | # Installing requirements
30 | RUN --mount=type=cache,target=/tmp/poetry_cache poetry install --only main
31 |
32 | {%- if cookiecutter.db_info.name == "mysql" or cookiecutter.db_info.name == "postgresql" %}
33 | # Removing gcc
34 | RUN apt-get purge -y \
35 | gcc \
36 | && rm -rf /var/lib/apt/lists/*
37 | {%- endif %}
38 |
39 | # Copying actual application
40 | COPY . /app/src/
41 | RUN --mount=type=cache,target=/tmp/poetry_cache poetry install --only main
42 |
43 | CMD ["/usr/local/bin/python", "-m", "{{cookiecutter.project_name}}"]
44 |
45 | FROM prod AS dev
46 |
47 | RUN --mount=type=cache,target=/tmp/poetry_cache poetry install
48 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/README.md:
--------------------------------------------------------------------------------
1 | # {{cookiecutter.project_name}}
2 |
3 | This project was generated using fastapi_template.
4 |
5 | ## Poetry
6 |
7 | This project uses poetry. It's a modern dependency management
8 | tool.
9 |
10 | To run the project use this set of commands:
11 |
12 | ```bash
13 | poetry install
14 | poetry run python -m {{cookiecutter.project_name}}
15 | ```
16 |
17 | This will start the server on the configured host.
18 |
19 | You can find swagger documentation at `/api/docs`.
20 |
21 | You can read more about poetry here: https://python-poetry.org/
22 |
23 | ## Docker
24 |
25 | You can start the project with docker using this command:
26 |
27 | ```bash
28 | docker-compose up --build
29 | ```
30 |
31 | If you want to develop in docker with autoreload and exposed ports, add `-f deploy/docker-compose.dev.yml` to your docker command.
32 | Like this:
33 |
34 | ```bash
35 | docker-compose -f docker-compose.yml -f deploy/docker-compose.dev.yml --project-directory . up --build
36 | ```
37 |
38 | This command exposes the web application on port 8000, mounts the current directory, and enables autoreload.
39 |
40 | But you have to rebuild the image every time you modify `poetry.lock` or `pyproject.toml`, using this command:
41 |
42 | ```bash
43 | docker-compose build
44 | ```
45 |
46 | ## Project structure
47 |
48 | ```bash
49 | $ tree "{{cookiecutter.project_name}}"
50 | {{cookiecutter.project_name}}
51 | ├── conftest.py # Fixtures for all tests.
52 | {%- if cookiecutter.db_info.name != "none" %}
53 | ├── db # module contains db configurations
54 | │ ├── dao # Data Access Objects. Contains different classes to interact with database.
55 | │ └── models # Package contains different models for ORMs.
56 | {%- endif %}
57 | ├── __main__.py # Startup script. Starts uvicorn.
58 | ├── services # Package for different external services such as rabbit or redis etc.
59 | ├── settings.py # Main configuration settings for project.
60 | ├── static # Static content.
61 | ├── tests # Tests for project.
62 | └── web # Package contains web server. Handlers, startup config.
63 | ├── api # Package with all handlers.
64 | │ └── router.py # Main router.
65 | ├── application.py # FastAPI application configuration.
66 | └── lifespan.py # Contains actions to perform on startup and shutdown.
67 | ```
68 |
69 | ## Configuration
70 |
71 | This application can be configured with environment variables.
72 |
73 | You can create `.env` file in the root directory and place all
74 | environment variables here.
75 |
76 | All environment variables should start with the "{{cookiecutter.project_name | upper}}_" prefix.
77 |
78 | For example, if you see a variable named `random_parameter` in
79 | "{{cookiecutter.project_name}}/settings.py", you should provide the "{{cookiecutter.project_name | upper}}_RANDOM_PARAMETER"
80 | variable to configure the value. This behaviour can be changed by overriding the `env_prefix` property
81 | in `{{cookiecutter.project_name}}.settings.Settings.Config`.
82 |
83 | An example of .env file:
84 | ```bash
85 | {{cookiecutter.project_name | upper}}_RELOAD="True"
86 | {{cookiecutter.project_name | upper}}_PORT="8000"
87 | {{cookiecutter.project_name | upper}}_ENVIRONMENT="dev"
88 | ```
89 |
90 | You can read more about BaseSettings class here: https://pydantic-docs.helpmanual.io/usage/settings/
91 |
92 | {%- if cookiecutter.otlp_enabled == "True" %}
93 | ## OpenTelemetry
94 |
95 | If you want to start your project with OpenTelemetry collector
96 | you can add `-f ./deploy/docker-compose.otlp.yml` to your docker command.
97 |
98 | Like this:
99 |
100 | ```bash
101 | docker-compose -f docker-compose.yml -f deploy/docker-compose.otlp.yml --project-directory . up
102 | ```
103 |
104 | This command will start the OpenTelemetry collector and Jaeger.
105 | After sending requests, you can see traces in Jaeger's UI
106 | at http://localhost:16686/.
107 |
108 | This docker configuration is not supposed to be used in production.
109 | It's only for demo purposes.
110 |
111 | You can read more about OpenTelemetry here: https://opentelemetry.io/
112 | {%- endif %}
113 |
114 | ## Pre-commit
115 |
116 | To install pre-commit simply run inside the shell:
117 | ```bash
118 | pre-commit install
119 | ```
120 |
121 | pre-commit is very useful for checking your code before publishing it.
122 | It's configured using the .pre-commit-config.yaml file.
123 |
124 | By default it runs:
125 | * black (formats your code);
126 | * mypy (validates types);
127 | * ruff (spots possible bugs);
128 |
129 |
130 | You can read more about pre-commit here: https://pre-commit.com/
131 |
132 |
133 | {%- if cookiecutter.enable_kube == 'True' %}
134 |
135 | ## Kubernetes
136 | To run your app in Kubernetes,
137 | just run:
138 | ```bash
139 | kubectl apply -f deploy/kube
140 | ```
141 |
142 | It will create needed components.
143 |
144 | If you haven't pushed to a docker registry yet, you can build the image locally.
145 |
146 | ```bash
147 | docker-compose build
148 | docker save --output {{cookiecutter.project_name}}.tar {{cookiecutter.project_name}}:latest
149 | ```
150 |
151 | {%- endif %}
152 | {%- if cookiecutter.enable_migrations == 'True' %}
153 |
154 | ## Migrations
155 |
156 | If you want to migrate your database, you should run the following commands:
157 | ```bash
158 | {%- if cookiecutter.orm in ['sqlalchemy', 'ormar'] %}
159 | # To run all migrations until the migration with revision_id.
160 | alembic upgrade ""
161 |
162 | # To perform all pending migrations.
163 | alembic upgrade "head"
164 | {%- elif cookiecutter.orm == 'tortoise' %}
165 | # Upgrade database to the last migration.
166 | aerich upgrade
167 |
168 | {%- elif cookiecutter.orm == 'piccolo' %}
169 | # You have to set a PICCOLO_CONF variable
170 | export PICCOLO_CONF="{{cookiecutter.project_name}}.piccolo_conf"
171 | # Now you can easily run migrations using
172 | piccolo migrations forwards all
173 | {%- endif %}
174 | ```
175 |
176 | ### Reverting migrations
177 |
178 | If you want to revert migrations, you should run:
179 | ```bash
180 | {%- if cookiecutter.orm in ['sqlalchemy', 'ormar'] %}
181 | # revert all migrations up to: revision_id.
182 | alembic downgrade
183 |
184 | # Revert everything.
185 | alembic downgrade base
186 | {%- elif cookiecutter.orm == 'tortoise' %}
187 | aerich downgrade
188 | {%- endif %}
189 | ```
190 |
191 | ### Migration generation
192 |
193 | To generate migrations you should run:
194 | ```bash
195 | {%- if cookiecutter.orm in ['sqlalchemy', 'ormar'] %}
196 | # For automatic change detection.
197 | alembic revision --autogenerate
198 |
199 | # For empty file generation.
200 | alembic revision
201 | {%- elif cookiecutter.orm == 'tortoise' %}
202 | aerich migrate
203 | {%- endif %}
204 | ```
205 | {%- endif %}
206 |
207 |
208 | ## Running tests
209 |
210 | If you want to run it in docker, simply run:
211 |
212 | ```bash
213 | docker-compose run --build --rm api pytest -vv .
214 | docker-compose down
215 | ```
216 |
217 | To run tests on your local machine:
218 |
219 | {%- if cookiecutter.db_info.name != "none" %}
220 | {%- if cookiecutter.db_info.name != "sqlite" %}
221 | 1. You need to start a database.
222 |
223 | I prefer doing it with docker:
224 | ```
225 | {%- if cookiecutter.db_info.name == "postgresql" %}
226 | docker run -p "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" -e "POSTGRES_PASSWORD={{cookiecutter.project_name}}" -e "POSTGRES_USER={{cookiecutter.project_name}}" -e "POSTGRES_DB={{cookiecutter.project_name}}" {{cookiecutter.db_info.image}}
227 | {%- endif %}
228 | {%- if cookiecutter.db_info.name == "mysql" %}
229 | docker run -p "{{cookiecutter.db_info.port}}:{{cookiecutter.db_info.port}}" -e "MYSQL_PASSWORD={{cookiecutter.project_name}}" -e "MYSQL_USER={{cookiecutter.project_name}}" -e "MYSQL_DATABASE={{cookiecutter.project_name}}" -e ALLOW_EMPTY_PASSWORD=yes {{cookiecutter.db_info.image}}
230 | {%- endif %}
231 | ```
232 | {%- endif %}
233 | {%- endif %}
234 |
235 |
236 | 2. Run pytest:
237 | ```bash
238 | pytest -vv .
239 | ```
240 |
--------------------------------------------------------------------------------
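To make the env-prefix behaviour described in the generated README concrete, here is a minimal sketch of a pydantic `BaseSettings` class with an overridden `env_prefix`. This is not the template's actual `settings.py` (which is not reproduced in this dump), and it assumes pydantic v1-style `BaseSettings` as in the linked docs; with pydantic v2 the same class comes from the `pydantic-settings` package.

```python
from pydantic import BaseSettings


class Settings(BaseSettings):
    """Minimal illustration of prefixed environment variables."""

    host: str = "127.0.0.1"
    port: int = 8000
    reload: bool = False

    class Config:
        env_file = ".env"
        env_prefix = "MY_PROJECT_"  # e.g. MY_PROJECT_PORT=8080 overrides `port`


settings = Settings()
```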
/fastapi_template/template/{{cookiecutter.project_name}}/alembic.ini:
--------------------------------------------------------------------------------
1 | [alembic]
2 | script_location = {{cookiecutter.project_name}}/db/migrations
3 | file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d-%%(minute).2d_%%(rev)s
4 | prepend_sys_path = .
5 | output_encoding = utf-8
6 | # truncate_slug_length = 40
7 |
8 |
9 | [post_write_hooks]
10 | hooks = black,ruff
11 |
12 | black.type = console_scripts
13 | black.entrypoint = black
14 |
15 | ruff.type = exec
16 | ruff.executable = ruff
17 | ruff.options = check --fix REVISION_SCRIPT_FILENAME --ignore N999
18 |
19 | # Logging configuration
20 | [loggers]
21 | keys = root,sqlalchemy,alembic
22 |
23 | [handlers]
24 | keys = console
25 |
26 | [formatters]
27 | keys = generic
28 |
29 | [logger_root]
30 | level = WARN
31 | handlers = console
32 | qualname =
33 |
34 | [logger_sqlalchemy]
35 | level = WARN
36 | handlers =
37 | qualname = sqlalchemy.engine
38 |
39 | [logger_alembic]
40 | level = INFO
41 | handlers =
42 | qualname = alembic
43 |
44 | [handler_console]
45 | class = StreamHandler
46 | args = (sys.stderr,)
47 | level = NOTSET
48 | formatter = generic
49 |
50 | [formatter_generic]
51 | format = %(levelname)-5.5s [%(name)s] %(message)s
52 | datefmt = %H:%M:%S
53 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/docker-compose.dev.yml:
--------------------------------------------------------------------------------
1 | services:
2 | api:
3 | ports:
4 | # Exposes application port.
5 | - "8000:8000"
6 | build:
7 | context: .
8 | volumes:
9 | # Adds current directory as volume.
10 | - .:/app/src/
11 | environment:
12 | # Enables autoreload.
13 | {{cookiecutter.project_name | upper}}_RELOAD: "True"
14 |
15 | {%- if cookiecutter.enable_taskiq == "True" %}
16 |
17 | taskiq-worker:
18 | volumes:
19 | # Adds current directory as volume.
20 | - .:/app/src/
21 | command:
22 | - taskiq
23 | - worker
24 | - {{cookiecutter.project_name}}.tkq:broker
25 | - --reload
26 | {%- endif %}
27 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/docker-compose.otlp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | api:
3 | environment:
4 | # Adds opentelemetry endpoint.
5 | {{cookiecutter.project_name | upper}}_OPENTELEMETRY_ENDPOINT: "http://otel-collector:4317"
6 |
7 | otel-collector:
8 | image: otel/opentelemetry-collector-contrib:0.53.0
9 | volumes:
10 | # Adds config for opentelemetry.
11 | - ./deploy/otel-collector-config.yml:/config.yml
12 | command: --config config.yml
13 | ports:
14 | # Collector's endpoint
15 | - "4317:4317"
16 |
17 | jaeger:
18 | image: jaegertracing/all-in-one:1.35
19 | hostname: jaeger
20 | ports:
21 | # Jaeger UI
22 | - 16686:16686
23 |
24 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/kube/app.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | namespace: {{cookiecutter.kube_name}}
6 | name: {{cookiecutter.kube_name}}-app
7 | spec:
8 | replicas: 2
9 | selector:
10 | matchLabels:
11 | app: {{cookiecutter.kube_name}}-app
12 | template:
13 | metadata:
14 | labels:
15 | app: {{cookiecutter.kube_name}}-app
16 | spec:
17 | containers:
18 | - name: app
19 | image: {{cookiecutter.project_name}}:latest
20 | readinessProbe:
21 | httpGet:
22 | path: /api/health
23 | port: api-port
24 | initialDelaySeconds: 5
25 | periodSeconds: 10
26 | {%- if cookiecutter.db_info.name == "sqlite" %}
27 | command: ["/bin/sh"]
28 | args:
29 | - -c
30 | - >-
31 | {%- if cookiecutter.enable_migrations == "True" %}
32 | {%- if cookiecutter.orm in ['sqlalchemy', 'ormar'] %}
33 | alembic upgrade head &&
34 | {%- elif cookiecutter.orm == 'tortoise' %}
35 | aerich upgrade &&
36 | {%- endif %}
37 | {%- endif %}
38 | python -m {{cookiecutter.project_name }}
39 | {%- endif %}
40 | env:
41 | - name: {{cookiecutter.project_name | upper }}_HOST
42 | value: "0.0.0.0"
43 | - name: {{cookiecutter.project_name | upper }}_WORKERS_COUNT
44 | value: "10"
45 | {%- if cookiecutter.db_info.name not in ["none", "sqlite"] %}
46 | - name: {{cookiecutter.project_name | upper }}_DB_HOST
47 | value: "{{cookiecutter.kube_name}}-db-service"
48 | {%- endif %}
49 | {%- if cookiecutter.enable_redis == 'True' %}
50 | - name: {{cookiecutter.project_name | upper }}_REDIS_HOST
51 | value: "{{cookiecutter.kube_name}}-redis-service"
52 | {%- endif %}
53 | resources:
54 | limits:
55 | memory: "200Mi"
56 | cpu: "100m"
57 | ports:
58 | - containerPort: 8000
59 | name: api-port
60 | ---
61 | apiVersion: v1
62 | kind: Service
63 | metadata:
64 | namespace: {{cookiecutter.kube_name}}
65 | name: {{cookiecutter.kube_name}}-app-service
66 | spec:
67 | selector:
68 | app: {{cookiecutter.kube_name}}-app
69 | ports:
70 | - protocol: TCP
71 | port: 80
72 | targetPort: api-port
73 | name: api-port
74 |
75 | ---
76 | apiVersion: networking.k8s.io/v1
77 | kind: Ingress
78 | metadata:
79 | name: {{cookiecutter.kube_name}}-app
80 | namespace: {{cookiecutter.kube_name}}
81 | labels:
82 | name: {{cookiecutter.kube_name}}-app
83 | spec:
84 | rules:
85 | - host: {{ cookiecutter.kube_name }}.local
86 | http:
87 | paths:
88 | - pathType: Prefix
89 | path: "/"
90 | backend:
91 | service:
92 | name: {{cookiecutter.kube_name}}-app-service
93 | port:
94 | name: api-port
95 |
96 | ---
97 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/kube/db.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | namespace: {{cookiecutter.kube_name}}
6 | name: {{cookiecutter.kube_name}}-db
7 | spec:
8 | selector:
9 | matchLabels:
10 | app: {{cookiecutter.kube_name}}-db
11 | template:
12 | metadata:
13 | labels:
14 | app: {{cookiecutter.kube_name}}-db
15 | spec:
16 | containers:
17 | - name: database
18 | image: {{cookiecutter.db_info.image}}
19 | resources:
20 | limits:
21 | memory: "300Mi"
22 | cpu: "200m"
23 | env:
24 | {%- if cookiecutter.db_info.name == 'postgresql' %}
25 | - name: POSTGRES_PASSWORD
26 | value: "{{cookiecutter.project_name}}"
27 | - name: POSTGRES_USER
28 | value: "{{cookiecutter.project_name}}"
29 | - name: POSTGRES_DB
30 | value: "{{cookiecutter.project_name}}"
31 | {%- elif cookiecutter.db_info.name == 'mysql' %}
32 | - name: MYSQL_PASSWORD
33 | value: "{{cookiecutter.project_name}}"
34 | - name: MYSQL_USER
35 | value: "{{cookiecutter.project_name}}"
36 | - name: MYSQL_DATABASE
37 | value: "{{cookiecutter.project_name}}"
38 | - name: ALLOW_EMPTY_PASSWORD
39 | value: "yes"
40 | {%- endif %}
41 | ports:
42 | - containerPort: {{cookiecutter.db_info.port}}
43 | ---
44 | apiVersion: v1
45 | kind: Service
46 | metadata:
47 | namespace: {{cookiecutter.kube_name}}
48 | name: "{{cookiecutter.kube_name}}-db-service"
49 | spec:
50 | selector:
51 | app: {{cookiecutter.kube_name}}-db
52 | ports:
53 | - port: {{cookiecutter.db_info.port}}
54 | targetPort: {{cookiecutter.db_info.port}}
55 | ---
56 | {%- if cookiecutter.enable_migrations == "True" %}
57 | apiVersion: batch/v1
58 | kind: Job
59 | metadata:
60 | namespace: {{cookiecutter.kube_name}}
61 | name: {{cookiecutter.kube_name}}-migrator
62 | spec:
63 | ttlSecondsAfterFinished: 100
64 | template:
65 | spec:
66 | containers:
67 | - name: migrator
68 | image: {{cookiecutter.project_name}}:latest
69 | command:
70 | {%- if cookiecutter.orm in ['sqlalchemy', 'ormar'] %}
71 | - "alembic"
72 | - "upgrade"
73 | - "head"
74 | {%- elif cookiecutter.orm == 'tortoise' %}
75 | - "aerich"
76 | - "upgrade"
77 | {%- elif cookiecutter.orm == 'piccolo' %}
78 | - "piccolo"
79 | - "migrations"
80 | - "forwards"
81 | - "all"
82 | {%- endif %}
83 | resources:
84 | limits:
85 | memory: "200Mi"
86 | cpu: "250m"
87 | env:
88 | - name: "{{cookiecutter.project_name | upper }}_DB_HOST"
89 | value: "{{cookiecutter.kube_name}}-db-service"
90 | {%- if cookiecutter.orm == 'piccolo' %}
91 | - name: "PICCOLO_CONF"
92 | value: "{{cookiecutter.project_name}}.piccolo_conf"
93 | {%- endif %}
94 | initContainers:
95 | - name: wait-for-db
96 | image: toschneck/wait-for-it:latest
97 | command: ["./wait-for-it.sh", "-t", "60", "{{cookiecutter.kube_name}}-db-service:{{cookiecutter.db_info.port}}"]
98 | restartPolicy: Never
99 |
100 | ---
101 | {%- endif %}
102 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/kube/namespace.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: v1
3 | kind: Namespace
4 | metadata:
5 | name: "{{cookiecutter.kube_name}}"
6 |
7 | ---
8 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/kube/rabbit.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | namespace: "{{cookiecutter.kube_name}}"
6 | name: "{{cookiecutter.kube_name}}-rmq"
7 | spec:
8 | selector:
9 | matchLabels:
10 | app: "{{cookiecutter.kube_name}}-rmq"
11 | template:
12 | metadata:
13 | labels:
14 | app: "{{cookiecutter.kube_name}}-rmq"
15 | spec:
16 | containers:
17 | - name: rabbit
18 | image: rabbitmq:3.9.16-alpine
19 | startupProbe:
20 | exec:
21 | command: ["rabbitmq-diagnostics", "check_running", "-q"]
22 | failureThreshold: 30
23 | periodSeconds: 5
24 | timeoutSeconds: 10
25 | env:
26 | - name: RABBITMQ_DEFAULT_USER
27 | value: "guest"
28 | - name: RABBITMQ_DEFAULT_PASS
29 | value: "guest"
30 | - name: RABBITMQ_DEFAULT_VHOST
31 | value: "/"
32 | resources:
33 | limits:
34 | memory: "200Mi"
35 | cpu: "250m"
36 | ports:
37 | - containerPort: 5672
38 | name: amqp
39 | ---
40 | apiVersion: v1
41 | kind: Service
42 | metadata:
43 | namespace: "{{cookiecutter.kube_name}}"
44 | name: "{{cookiecutter.kube_name}}-rmq-service"
45 | spec:
46 | selector:
47 | app: "{{cookiecutter.kube_name}}-rmq"
48 | ports:
49 | - port: 5672
50 | targetPort: amqp
51 |
52 | ---
53 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/kube/redis.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | namespace: "{{cookiecutter.kube_name}}"
6 | name: "{{cookiecutter.kube_name}}-redis"
7 | spec:
8 | selector:
9 | matchLabels:
10 | app: "{{cookiecutter.kube_name}}-redis"
11 | template:
12 | metadata:
13 | labels:
14 | app: "{{cookiecutter.kube_name}}-redis"
15 | spec:
16 | containers:
17 | - name: redis
18 | image: bitnami/redis:6.2.5
19 | startupProbe:
20 | exec:
21 | command: ["redis-cli", "ping"]
22 | failureThreshold: 30
23 | periodSeconds: 5
24 | env:
25 | - name: ALLOW_EMPTY_PASSWORD
26 | value: "yes"
27 | resources:
28 | limits:
29 | memory: "50Mi"
30 | cpu: "50m"
31 | ports:
32 | - containerPort: 6379
33 | ---
34 | apiVersion: v1
35 | kind: Service
36 | metadata:
37 | namespace: "{{cookiecutter.kube_name}}"
38 | name: "{{cookiecutter.kube_name}}-redis-service"
39 | spec:
40 | selector:
41 | app: "{{cookiecutter.kube_name}}-redis"
42 | ports:
43 | - port: 6379
44 | targetPort: 6379
45 |
46 | ---
47 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/deploy/otel-collector-config.yml:
--------------------------------------------------------------------------------
1 | # Receives all data via the OpenTelemetry protocol.
2 | receivers:
3 | otlp:
4 | protocols:
5 | grpc:
6 | http:
7 |
8 | # Batch all spans.
9 | processors:
10 | batch:
11 |
12 | exporters:
13 | # Exports spans to log.
14 | logging:
15 | logLevel: info
16 |
17 | # Exports spans to jaeger.
18 | jaeger:
19 | endpoint: "jaeger:14250"
20 | tls:
21 | insecure: true
22 |
23 | extensions:
24 | health_check:
25 | pprof:
26 |
27 | service:
28 | extensions: [health_check, pprof]
29 | pipelines:
30 | traces:
31 | receivers: [otlp]
32 | processors: [batch]
33 | exporters: [logging, jaeger]
34 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/replaceable_files.json:
--------------------------------------------------------------------------------
1 | {
2 | "{{cookiecutter.project_name}}/db": [
3 | "{{cookiecutter.project_name}}/db_sa",
4 | "{{cookiecutter.project_name}}/db_ormar",
5 | "{{cookiecutter.project_name}}/db_tortoise",
6 | "{{cookiecutter.project_name}}/db_psycopg",
7 | "{{cookiecutter.project_name}}/db_piccolo",
8 | "{{cookiecutter.project_name}}/db_beanie"
9 | ]
10 | }
11 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Tests for {{cookiecutter.project_name}}."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/test_dummy.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from typing import Any
3 |
4 | import pytest
5 | from fastapi import FastAPI
6 | from httpx import AsyncClient
7 |
8 | {%- if cookiecutter.orm == 'sqlalchemy' %}
9 | from sqlalchemy.ext.asyncio import AsyncSession
10 |
11 | {%- elif cookiecutter.orm == 'psycopg' %}
12 | from psycopg.connection_async import AsyncConnection
13 | from psycopg_pool import AsyncConnectionPool
14 |
15 | {%- endif %}
16 | from starlette import status
17 | from {{cookiecutter.project_name}}.db.dao.dummy_dao import DummyDAO
18 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
19 |
20 |
21 | @pytest.mark.anyio
22 | async def test_creation(
23 | fastapi_app: FastAPI,
24 | client: AsyncClient,
25 | {%- if cookiecutter.orm == "sqlalchemy" %}
26 | dbsession: AsyncSession,
27 | {%- elif cookiecutter.orm == "psycopg" %}
28 | dbpool: AsyncConnectionPool[Any],
29 | {%- endif %}
30 | ) -> None:
31 | """Tests dummy instance creation."""
32 | {%- if cookiecutter.api_type == 'rest' %}
33 | url = fastapi_app.url_path_for('create_dummy_model')
34 | {%- elif cookiecutter.api_type == 'graphql' %}
35 | url = fastapi_app.url_path_for('handle_http_post')
36 | {%- endif %}
37 | test_name = uuid.uuid4().hex
38 | {%- if cookiecutter.api_type == 'rest' %}
39 | response = await client.put(url, json={
40 | "name": test_name
41 | })
42 | {%- elif cookiecutter.api_type == 'graphql' %}
43 | response = await client.post(
44 | url,
45 | json={
46 | "query": "mutation($name: String!){createDummyModel(name: $name)}",
47 | "variables": {"name": test_name},
48 | },
49 | )
50 | {%- endif %}
51 | assert response.status_code == status.HTTP_200_OK
52 | {%- if cookiecutter.orm == "sqlalchemy" %}
53 | dao = DummyDAO(dbsession)
54 | {%- elif cookiecutter.orm == "psycopg" %}
55 | dao = DummyDAO(dbpool)
56 | {%- else %}
57 | dao = DummyDAO()
58 | {%- endif %}
59 |
60 | instances = await dao.filter(name=test_name)
61 | assert instances[0].name == test_name
62 |
63 | {%- if cookiecutter.orm == "beanie" %}
64 | # Clean up the object we just inserted
65 | await dao.delete_dummy_model_by_name(name=test_name)
66 | {%- endif %}
67 |
68 | @pytest.mark.anyio
69 | async def test_getting(
70 | fastapi_app: FastAPI,
71 | client: AsyncClient,
72 | {%- if cookiecutter.orm == "sqlalchemy" %}
73 | dbsession: AsyncSession,
74 | {%- elif cookiecutter.orm == "psycopg" %}
75 | dbpool: AsyncConnectionPool[Any],
76 | {%- endif %}
77 | ) -> None:
78 | """Tests dummy instance retrieval."""
79 | {%- if cookiecutter.orm == "sqlalchemy" %}
80 | dao = DummyDAO(dbsession)
81 | {%- elif cookiecutter.orm == "psycopg" %}
82 | dao = DummyDAO(dbpool)
83 | {%- else %}
84 | dao = DummyDAO()
85 | {%- endif %}
86 | test_name = uuid.uuid4().hex
87 |
88 | assert not await dao.filter()
89 |
90 | await dao.create_dummy_model(name=test_name)
91 |
92 | {%- if cookiecutter.api_type == 'rest' %}
93 | url = fastapi_app.url_path_for('get_dummy_models')
94 | {%- elif cookiecutter.api_type == 'graphql' %}
95 | url = fastapi_app.url_path_for('handle_http_post')
96 | {%- endif %}
97 |
98 | {%- if cookiecutter.api_type == 'rest' %}
99 | response = await client.get(url)
100 | dummies = response.json()
101 | {%- elif cookiecutter.api_type == 'graphql' %}
102 | response = await client.post(
103 | url,
104 |         json={"query": "query{dummies:getDummyModels{id name}}"},
105 |     )
106 |     dummies = response.json()["data"]["dummies"]
107 | {%- endif %}
108 |
109 | assert response.status_code == status.HTTP_200_OK
110 | assert len(dummies) == 1
111 | assert dummies[0]['name'] == test_name
112 |
113 | {%- if cookiecutter.orm == "beanie" %}
114 | # Clean up the object we just inserted
115 | await dao.delete_dummy_model_by_name(name=test_name)
116 | {%- endif %}
117 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/test_echo.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | import pytest
4 | from fastapi import FastAPI
5 | from httpx import AsyncClient
6 | from starlette import status
7 |
8 |
9 | @pytest.mark.anyio
10 | async def test_echo(fastapi_app: FastAPI, client: AsyncClient) -> None:
11 | """
12 |     Tests that the echo route works.
13 |
14 | :param fastapi_app: current application.
15 | :param client: client for the app.
16 | """
17 | {%- if cookiecutter.api_type == 'rest' %}
18 | url = fastapi_app.url_path_for('send_echo_message')
19 | {%- elif cookiecutter.api_type == 'graphql' %}
20 | url = fastapi_app.url_path_for('handle_http_post')
21 | {%- endif %}
22 | message = uuid.uuid4().hex
23 | {%- if cookiecutter.api_type == 'rest' %}
24 | response = await client.post(url, json={
25 | "message": message
26 | })
27 | assert response.status_code == status.HTTP_200_OK
28 | assert response.json()['message'] == message
29 | {%- elif cookiecutter.api_type == 'graphql' %}
30 | response = await client.post(
31 | url,
32 | json={
33 | "query": "query($message: String!){echo(message: $message)}",
34 | "variables": {
35 | "message": message,
36 | },
37 | },
38 | )
39 | assert response.status_code == status.HTTP_200_OK
40 | assert response.json()['data']['echo'] == message
41 | {%- endif %}
42 |
43 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/test_kafka.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import uuid
3 |
4 | import pytest
5 | from aiokafka import AIOKafkaConsumer
6 | from fastapi import FastAPI
7 | from httpx import AsyncClient
8 | from starlette import status
9 | from {{cookiecutter.project_name}}.settings import settings
10 |
11 |
12 | @pytest.mark.anyio
13 | async def test_message_publishing(
14 | fastapi_app: FastAPI,
15 | client: AsyncClient,
16 | ) -> None:
17 | """
18 |     Tests that messages are published correctly.
19 | 
20 |     It sends a message to Kafka, reads it back and
21 |     validates that the received message has the same
22 |     value.
23 |
24 | :param fastapi_app: current application.
25 | :param client: httpx client.
26 | """
27 | topic_name = uuid.uuid4().hex
28 | message = uuid.uuid4().hex
29 | consumer = AIOKafkaConsumer(
30 | topic_name,
31 | bootstrap_servers=settings.kafka_bootstrap_servers,
32 | )
33 | await consumer.start()
34 |
35 | {%- if cookiecutter.api_type == 'rest' %}
36 | url = fastapi_app.url_path_for("send_kafka_message")
37 | response = await client.post(
38 | url,
39 | json={
40 | "topic": topic_name,
41 | "message": message,
42 | },
43 | )
44 | {%- elif cookiecutter.api_type == 'graphql' %}
45 | url = fastapi_app.url_path_for('handle_http_post')
46 | response = await client.post(
47 | url,
48 | json={
49 | "query": "mutation($message:KafkaMessageDTO!)"
50 | "{sendKafkaMessage(message:$message)}",
51 | "variables": {
52 | "message": {
53 | "topic": topic_name,
54 | "message": message,
55 | },
56 | },
57 | },
58 | )
59 | {%- endif %}
60 |
61 |
62 | assert response.status_code == status.HTTP_200_OK
63 |
64 | msg = await asyncio.wait_for(consumer.getone(), timeout=1)
65 | assert msg.value == message.encode()
66 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/test_rabbit.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | import pytest
4 | from aio_pika import Channel
5 | from aio_pika.abc import AbstractQueue
6 | from aio_pika.exceptions import QueueEmpty
7 | from aio_pika.pool import Pool
8 | from fastapi import FastAPI
9 | from httpx import AsyncClient
10 |
11 |
12 | @pytest.mark.anyio
13 | async def test_message_publishing(
14 | fastapi_app: FastAPI,
15 | client: AsyncClient,
16 | test_queue: AbstractQueue,
17 | test_exchange_name: str,
18 | test_routing_key: str,
19 | ) -> None:
20 | """
21 |     Tests that a message is published correctly.
22 | 
23 |     It sends a message to RabbitMQ and reads it
24 |     from the bound queue.
25 | """
26 | message_text = uuid.uuid4().hex
27 | {%- if cookiecutter.api_type == 'rest' %}
28 | url = fastapi_app.url_path_for("send_rabbit_message")
29 | await client.post(
30 | url,
31 | json={
32 | "exchange_name": test_exchange_name,
33 | "routing_key": test_routing_key,
34 | "message": message_text,
35 | },
36 | )
37 | {%- elif cookiecutter.api_type == 'graphql' %}
38 | url = fastapi_app.url_path_for('handle_http_post')
39 | await client.post(
40 | url,
41 | json={
42 | "query": "mutation($message:RabbitMessageDTO!)"
43 | "{sendRabbitMessage(message:$message)}",
44 | "variables": {
45 | "message": {
46 | "exchangeName": test_exchange_name,
47 | "routingKey": test_routing_key,
48 | "message": message_text,
49 | },
50 | },
51 | },
52 | )
53 | {%- endif %}
54 | message = await test_queue.get(timeout=1)
55 | assert message is not None
56 | await message.ack()
57 | assert message.body.decode("utf-8") == message_text
58 |
59 |
60 | @pytest.mark.anyio
61 | async def test_message_wrong_exchange(
62 | fastapi_app: FastAPI,
63 | client: AsyncClient,
64 | test_queue: AbstractQueue,
65 | test_exchange_name: str,
66 | test_routing_key: str,
67 | test_rmq_pool: Pool[Channel],
68 | ) -> None:
69 | """
70 |     Tests that a message can be published to an undeclared exchange.
71 | 
72 |     It sends a message to a random exchange,
73 |     tries to get the message from the bound queue
74 |     and checks that a new exchange was created.
75 | """
76 | random_exchange = uuid.uuid4().hex
77 | assert random_exchange != test_exchange_name
78 | message_text = uuid.uuid4().hex
79 | {%- if cookiecutter.api_type == 'rest' %}
80 | url = fastapi_app.url_path_for("send_rabbit_message")
81 | await client.post(
82 | url,
83 | json={
84 | "exchange_name": random_exchange,
85 | "routing_key": test_routing_key,
86 | "message": message_text,
87 | },
88 | )
89 | {%- elif cookiecutter.api_type == 'graphql' %}
90 | url = fastapi_app.url_path_for('handle_http_post')
91 | await client.post(
92 | url,
93 | json={
94 | "query": "mutation($message:RabbitMessageDTO!)"
95 | "{sendRabbitMessage(message:$message)}",
96 | "variables": {
97 | "message": {
98 | "exchangeName": random_exchange,
99 | "routingKey": test_routing_key,
100 | "message": message_text,
101 | },
102 | },
103 | },
104 | )
105 | {%- endif %}
106 | with pytest.raises(QueueEmpty):
107 | await test_queue.get(timeout=1)
108 |
109 | async with test_rmq_pool.acquire() as conn:
110 | exchange = await conn.get_exchange(random_exchange, ensure=True)
111 | await exchange.delete(if_unused=False)
112 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/test_redis.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | import fakeredis
4 | import pytest
5 | from fastapi import FastAPI
6 | from httpx import AsyncClient
7 | from redis.asyncio import ConnectionPool, Redis
8 | from starlette import status
9 |
10 |
11 | @pytest.mark.anyio
12 | async def test_setting_value(
13 | fastapi_app: FastAPI,
14 | fake_redis_pool: ConnectionPool,
15 | client: AsyncClient,
16 | ) -> None:
17 | """
18 |     Tests that you can set a value in Redis.
19 |
20 | :param fastapi_app: current application fixture.
21 | :param fake_redis_pool: fake redis pool.
22 | :param client: client fixture.
23 | """
24 | {%- if cookiecutter.api_type == 'rest' %}
25 | url = fastapi_app.url_path_for('set_redis_value')
26 | {%- elif cookiecutter.api_type == 'graphql' %}
27 | url = fastapi_app.url_path_for('handle_http_post')
28 | {%- endif %}
29 |
30 | test_key = uuid.uuid4().hex
31 | test_val = uuid.uuid4().hex
32 | {%- if cookiecutter.api_type == 'rest' %}
33 | response = await client.put(url, json={
34 | "key": test_key,
35 | "value": test_val
36 | })
37 | {%- elif cookiecutter.api_type == 'graphql' %}
38 | query = """
39 | mutation ($key: String!, $val: String!) {
40 | setRedisValue(data: { key: $key, value: $val }) {
41 | key
42 | value
43 | }
44 | }
45 | """
46 | response = await client.post(
47 | url,
48 | json={
49 | "query": query,
50 | "variables": {"key": test_key, "val": test_val},
51 | },
52 | )
53 | {%- endif %}
54 |
55 | assert response.status_code == status.HTTP_200_OK
56 | async with Redis(connection_pool=fake_redis_pool) as redis:
57 | actual_value = await redis.get(test_key)
58 | assert actual_value.decode() == test_val
59 |
60 |
61 | @pytest.mark.anyio
62 | async def test_getting_value(
63 | fastapi_app: FastAPI,
64 | fake_redis_pool: ConnectionPool,
65 | client: AsyncClient,
66 | ) -> None:
67 | """
68 |     Tests that you can get a value from Redis by key.
69 |
70 | :param fastapi_app: current application fixture.
71 | :param fake_redis_pool: fake redis pool.
72 | :param client: client fixture.
73 | """
74 | test_key = uuid.uuid4().hex
75 | test_val = uuid.uuid4().hex
76 | async with Redis(connection_pool=fake_redis_pool) as redis:
77 | await redis.set(test_key, test_val)
78 |
79 | {%- if cookiecutter.api_type == 'rest' %}
80 | url = fastapi_app.url_path_for('get_redis_value')
81 | {%- elif cookiecutter.api_type == 'graphql' %}
82 | url = fastapi_app.url_path_for('handle_http_post')
83 | {%- endif %}
84 |
85 | {%- if cookiecutter.api_type == 'rest' %}
86 | response = await client.get(url, params={"key": test_key})
87 |
88 | assert response.status_code == status.HTTP_200_OK
89 | assert response.json()['key'] == test_key
90 | assert response.json()['value'] == test_val
91 | {%- elif cookiecutter.api_type == 'graphql' %}
92 | response = await client.post(
93 | url,
94 | json={
95 | "query": "query($key:String!){redis:getRedisValue(key:$key){key value}}",
96 | "variables": {
97 | "key": test_key,
98 | },
99 | },
100 | )
101 |
102 | assert response.status_code == status.HTTP_200_OK
103 | assert response.json()["data"]["redis"]["key"] == test_key
104 | assert response.json()["data"]["redis"]["value"] == test_val
105 | {%- endif %}
106 |
107 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/tests/test_{{cookiecutter.project_name}}.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from fastapi import FastAPI
3 | from httpx import AsyncClient
4 | from starlette import status
5 |
6 |
7 | @pytest.mark.anyio
8 | async def test_health(client: AsyncClient, fastapi_app: FastAPI) -> None:
9 | """
10 | Checks the health endpoint.
11 |
12 | :param client: client for the app.
13 | :param fastapi_app: current FastAPI application.
14 | """
15 | url = fastapi_app.url_path_for('health_check')
16 | response = await client.get(url)
17 | assert response.status_code == status.HTTP_200_OK
18 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} package."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/__main__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | from pathlib import Path
4 |
5 | import uvicorn
6 |
7 | {%- if cookiecutter.gunicorn == "True" %}
8 | from {{cookiecutter.project_name}}.gunicorn_runner import GunicornApplication
9 | {%- endif %}
10 | from {{cookiecutter.project_name}}.settings import settings
11 |
12 | {%- if cookiecutter.prometheus_enabled == "True" %}
13 | def set_multiproc_dir() -> None:
14 | """
15 |     Sets the multiproc_dir env variable.
16 | 
17 |     This function cleans up the multiprocess directory
18 |     and recreates it. These actions are required by prometheus-client
19 |     to share metrics between processes.
20 | 
21 |     After cleanup, it sets two variables:
22 |     uppercase and lowercase, because different
23 |     versions of the prometheus-client library
24 |     depend on different environment variables,
25 |     so I've decided to export all needed variables
26 |     to avoid undefined behaviour.
27 | """
28 | shutil.rmtree(settings.prometheus_dir, ignore_errors=True)
29 | Path(settings.prometheus_dir).mkdir(parents=True)
30 | os.environ["prometheus_multiproc_dir"] = str( # noqa: SIM112
31 | settings.prometheus_dir.expanduser().absolute(),
32 | )
33 | os.environ["PROMETHEUS_MULTIPROC_DIR"] = str(
34 | settings.prometheus_dir.expanduser().absolute(),
35 | )
36 | {%- endif %}
37 |
38 |
39 | def main() -> None:
40 | """Entrypoint of the application."""
41 | {%- if cookiecutter.prometheus_enabled == "True" %}
42 | set_multiproc_dir()
43 | {%- endif %}
44 | {%- if cookiecutter.orm == "piccolo" %}
45 | os.environ['PICCOLO_CONF'] = "{{cookiecutter.project_name}}.piccolo_conf"
46 | {%- endif %}
47 | {%- if cookiecutter.gunicorn == "True" %}
48 | if settings.reload:
49 | uvicorn.run(
50 | "{{cookiecutter.project_name}}.web.application:get_app",
51 | workers=settings.workers_count,
52 | host=settings.host,
53 | port=settings.port,
54 | reload=settings.reload,
55 | log_level=settings.log_level.value.lower(),
56 | factory=True,
57 | )
58 | else:
59 | # We choose gunicorn only if reload
60 | # option is not used, because reload
61 | # feature doesn't work with gunicorn workers.
62 | GunicornApplication(
63 | "{{cookiecutter.project_name}}.web.application:get_app",
64 | host=settings.host,
65 | port=settings.port,
66 | workers=settings.workers_count,
67 | factory=True,
68 | accesslog="-",
69 | loglevel=settings.log_level.value.lower(),
70 | access_log_format='%r "-" %s "-" %Tf', # noqa: WPS323
71 | ).run()
72 | {%- else %}
73 | uvicorn.run(
74 | "{{cookiecutter.project_name}}.web.application:get_app",
75 | workers=settings.workers_count,
76 | host=settings.host,
77 | port=settings.port,
78 | reload=settings.reload,
79 | log_level=settings.log_level.value.lower(),
80 | factory=True,
81 | )
82 | {%- endif %}
83 |
84 | if __name__ == "__main__":
85 | main()
86 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_beanie/dao/__init__.py:
--------------------------------------------------------------------------------
1 | """DAO classes."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_beanie/dao/dummy_dao.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
4 |
5 |
6 | class DummyDAO:
7 | """Class for accessing dummy table."""
8 |
9 | async def create_dummy_model(self, name: str) -> None:
10 | """
11 |         Add a single dummy document to the database.
12 |
13 | :param name: name of a dummy.
14 | """
15 | await DummyModel.insert_one(DummyModel(name=name))
16 |
17 | async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
18 | """
19 | Get all dummy models with limit/offset pagination.
20 |
21 | :param limit: limit of dummies.
22 | :param offset: offset of dummies.
23 | :return: stream of dummies.
24 | """
25 | return await DummyModel.find_all(skip=offset, limit=limit).to_list()
26 |
27 | async def filter(
28 | self,
29 | name: Optional[str] = None
30 | ) -> List[DummyModel]:
31 | """
32 | Get specific dummy model.
33 |
34 | :param name: name of dummy instance.
35 | :return: dummy models.
36 | """
37 | if name is None:
38 | return []
39 | return await DummyModel.find(DummyModel.name == name).to_list()
40 |
41 | async def delete_dummy_model_by_name(
42 | self,
43 | name: str,
44 | ) -> Optional[DummyModel]:
45 | """
46 | Delete a dummy model by name.
47 |
48 | :param name: name of dummy instance.
49 | :return: option of a dummy model.
50 | """
51 | res = await DummyModel.find_one(DummyModel.name == name)
52 | if res is None:
53 | return res
54 | await res.delete()
55 | return res
56 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_beanie/models/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} models."""
2 |
3 | {%- if cookiecutter.add_dummy == "True" %}
4 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
5 | {%- endif %}
6 |
7 | from beanie import Document
8 | from typing import Type, Sequence
9 |
10 | def load_all_models() -> Sequence[Type[Document]]:
11 | """Load all models from this folder.""" # noqa: DAR201
12 | return [
13 | {%- if cookiecutter.add_dummy == "True" %}
14 | DummyModel,
15 | {%- endif %}
16 | ]
17 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_beanie/models/dummy_model.py:
--------------------------------------------------------------------------------
1 | from beanie import Document
2 |
3 | class DummyModel(Document):
4 | """Model for demo purpose."""
5 | name: str
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/base.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 | from databases import Database
3 | from ormar import OrmarConfig
4 | from {{cookiecutter.project_name}}.settings import settings
5 |
6 | meta = sa.MetaData()
7 | database = Database(str(settings.db_url))
8 |
9 | ormar_config = OrmarConfig(metadata=meta, database=database)
10 |
11 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/dao/__init__.py:
--------------------------------------------------------------------------------
1 | """DAO classes."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/dao/dummy_dao.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
4 |
5 |
6 | class DummyDAO:
7 | """Class for accessing dummy table."""
8 |
9 | async def create_dummy_model(self, name: str) -> None:
10 | """
11 |         Add a single dummy to the database.
12 |
13 | :param name: name of a dummy.
14 | """
15 | await DummyModel.objects.create(name=name)
16 |
17 | async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
18 | """
19 | Get all dummy models with limit/offset pagination.
20 |
21 | :param limit: limit of dummies.
22 | :param offset: offset of dummies.
23 | :return: stream of dummies.
24 | """
25 | return await DummyModel.objects.limit(limit).offset(offset).all()
26 |
27 | async def filter(
28 | self,
29 | name: Optional[str] = None,
30 | ) -> List[DummyModel]:
31 | """
32 | Get specific dummy model.
33 |
34 | :param name: name of dummy instance.
35 | :return: dummy models.
36 | """
37 | query = DummyModel.objects
38 | if name:
39 | query = query.filter(DummyModel.name == name)
40 | return await query.all()
41 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/__init__.py:
--------------------------------------------------------------------------------
1 | """Alembic migrations."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from alembic import context
4 | from sqlalchemy.engine import Connection, create_engine
5 | from {{cookiecutter.project_name}}.db.base import meta
6 | from {{cookiecutter.project_name}}.db.models import load_all_models
7 | from {{cookiecutter.project_name}}.settings import settings
8 |
9 | # this is the Alembic Config object, which provides
10 | # access to the values within the .ini file in use.
11 | config = context.config
12 |
13 |
14 | load_all_models()
15 | # Interpret the config file for Python logging.
16 | # This line sets up loggers basically.
17 | if config.config_file_name is not None:
18 | fileConfig(config.config_file_name)
19 |
20 | # add your model's MetaData object here
21 | # for 'autogenerate' support
22 | # from myapp import mymodel
23 | # target_metadata = mymodel.Base.metadata
24 | target_metadata = meta
25 |
26 | # other values from the config, defined by the needs of env.py,
27 | # can be acquired:
28 | # my_important_option = config.get_main_option("my_important_option")
29 | # ... etc.
30 |
31 |
32 | def run_migrations_offline() -> None:
33 | """Run migrations in 'offline' mode.
34 |
35 | This configures the context with just a URL
36 | and not an Engine, though an Engine is acceptable
37 | here as well. By skipping the Engine creation
38 | we don't even need a DBAPI to be available.
39 |
40 | Calls to context.execute() here emit the given string to the
41 | script output.
42 |
43 | """
44 | context.configure(
45 | url=str(settings.db_url),
46 | target_metadata=target_metadata,
47 | literal_binds=True,
48 | dialect_opts={"paramstyle": "named"},
49 | )
50 |
51 | with context.begin_transaction():
52 | context.run_migrations()
53 |
54 |
55 | def do_run_migrations(connection: Connection) -> None:
56 | """
57 | Run actual sync migrations.
58 |
59 | :param connection: connection to the database.
60 | """
61 | context.configure(connection=connection, target_metadata=target_metadata)
62 |
63 | with context.begin_transaction():
64 | context.run_migrations()
65 |
66 |
67 | def run_migrations_online() -> None:
68 | """
69 | Run migrations in 'online' mode.
70 |
71 | In this scenario we need to create an Engine
72 | and associate a connection with the context.
73 | """
74 | connectable = create_engine(str(settings.db_url))
75 |
76 | with connectable.connect() as connection:
77 | do_run_migrations(connection)
78 |
79 |
80 | if context.is_offline_mode():
81 | run_migrations_offline()
82 | else:
83 | run_migrations_online()
84 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}${"." if not message.endswith(".") else ""}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | """Run the migration."""
21 | ${upgrades if upgrades else ""}
22 |
23 |
24 | def downgrade() -> None:
25 | """Undo the migration."""
26 | ${downgrades if downgrades else ""}
27 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/versions/2021-08-16-16-53_819cbf6e030b.py:
--------------------------------------------------------------------------------
1 | """Initial migration.
2 |
3 | Revision ID: 819cbf6e030b
4 | Revises:
5 | Create Date: 2021-08-16 16:53:05.484024
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = "819cbf6e030b"
11 | down_revision = None
12 | branch_labels = None
13 | depends_on = None
14 |
15 |
16 | def upgrade() -> None:
17 | """Run the upgrade migrations."""
18 |
19 |
20 | def downgrade() -> None:
21 | """Run the downgrade migrations."""
22 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/versions/2021-08-16-16-55_2b7380507a71.py:
--------------------------------------------------------------------------------
1 | """Created Dummy Model.
2 |
3 | Revision ID: 2b7380507a71
4 | Revises: 819cbf6e030b
5 | Create Date: 2021-08-16 16:55:25.157309
6 |
7 | """
8 | import sqlalchemy as sa
9 | from alembic import op
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = "2b7380507a71"
13 | down_revision = "819cbf6e030b"
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade() -> None:
19 | """Run the upgrade migrations."""
20 | op.create_table(
21 | "dummy_model",
22 | sa.Column("id", sa.Integer(), nullable=False),
23 | sa.Column("name", sa.String(length=200), nullable=False),
24 | sa.PrimaryKeyConstraint("id"),
25 | )
26 | # ### end Alembic commands ###
27 |
28 |
29 | def downgrade() -> None:
30 | """Run the downgrade migrations."""
31 | op.drop_table("dummy_model")
32 | # ### end Alembic commands ###
33 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/versions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3rius/FastAPI-template/c236a2faf8ae6fe3e0d0c4a2425e03a248cf988d/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/migrations/versions/__init__.py
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/models/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} models."""
2 | import pkgutil
3 | from pathlib import Path
4 |
5 |
6 | def load_all_models() -> None:
7 | """Load all models from this folder."""
8 | package_dir = Path(__file__).resolve().parent
9 | modules = pkgutil.walk_packages(
10 | path=[str(package_dir)],
11 | prefix="{{cookiecutter.project_name}}.db.models.",
12 | )
13 | for module in modules:
14 | __import__(module.name) # noqa: WPS421
15 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/models/dummy_model.py:
--------------------------------------------------------------------------------
1 | import ormar
2 | from {{cookiecutter.project_name}}.db.base import ormar_config
3 |
4 |
5 | class DummyModel(ormar.Model):
6 | """Model for demo purpose."""
7 | ormar_config = ormar_config.copy(tablename="dummy_model")
8 |
9 | id: int = ormar.Integer(primary_key=True)
10 | name: str = ormar.String(max_length=200) # noqa: WPS432
11 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_ormar/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from pathlib import Path
4 | from sqlalchemy import text
5 | from sqlalchemy.engine import URL, create_engine, make_url
6 | from {{cookiecutter.project_name}}.settings import settings
7 |
8 | {% if cookiecutter.db_info.name == "postgresql" -%}
9 | def create_database() -> None:
10 | """Create a database."""
11 | db_url = make_url(str(settings.db_url.with_path('/postgres')))
12 | engine = create_engine(db_url, isolation_level="AUTOCOMMIT")
13 |
14 | with engine.connect() as conn:
15 | database_existance = conn.execute(
16 | text(
17 | f"SELECT 1 FROM pg_database WHERE datname='{settings.db_base}'", # noqa: E501, S608
18 | )
19 | )
20 | database_exists = database_existance.scalar() == 1
21 |
22 | if database_exists:
23 | drop_database()
24 |
25 | with engine.connect() as conn: # noqa: WPS440
26 | conn.execute(
27 | text(
28 | f'CREATE DATABASE "{settings.db_base}" ENCODING "utf8" TEMPLATE template1', # noqa: E501
29 | )
30 | )
31 |
32 | def drop_database() -> None:
33 | """Drop current database."""
34 | db_url = make_url(str(settings.db_url.with_path('/postgres')))
35 | engine = create_engine(db_url, isolation_level="AUTOCOMMIT")
36 | with engine.connect() as conn:
37 | disc_users = (
38 | "SELECT pg_terminate_backend(pg_stat_activity.pid) " # noqa: S608
39 | "FROM pg_stat_activity "
40 | f"WHERE pg_stat_activity.datname = '{settings.db_base}' "
41 | "AND pid <> pg_backend_pid();"
42 | )
43 | conn.execute(text(disc_users))
44 | conn.execute(text(f'DROP DATABASE "{settings.db_base}"'))
45 |
46 |
47 | {%- endif %}
48 | {%- if cookiecutter.db_info.name == "mysql" %}
49 | def create_database() -> None:
50 | """Create a database."""
51 | engine = create_engine(str(settings.db_url.with_path("/mysql")))
52 |
53 | with engine.connect() as conn:
54 | database_existance = conn.execute(
55 | text(
56 | "SELECT 1 FROM INFORMATION_SCHEMA.SCHEMATA" # noqa: S608
57 | f" WHERE SCHEMA_NAME='{settings.db_base}';",
58 | )
59 | )
60 | database_exists = database_existance.scalar() == 1
61 |
62 | if database_exists:
63 | drop_database()
64 |
65 | with engine.connect() as conn: # noqa: WPS440
66 | conn.execute(
67 | text(
68 | f'CREATE DATABASE {settings.db_base};'
69 | )
70 | )
71 |
72 | def drop_database() -> None:
73 | """Drop current database."""
74 | engine = create_engine(str(settings.db_url.with_path("/mysql")))
75 | with engine.connect() as conn:
76 | conn.execute(text(f'DROP DATABASE {settings.db_base};'))
77 | {%- endif %}
78 | {%- if cookiecutter.db_info.name == "sqlite" %}
79 | def create_database() -> None:
80 | """Create a database."""
81 | drop_database()
82 | Path(settings.db_file).touch()
83 |
84 | def drop_database() -> None:
85 | """Drop current database."""
86 | Path(settings.db_file).unlink(missing_ok=True)
87 |
88 | {%- endif %}
89 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/app_conf.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from piccolo.conf.apps import AppConfig, table_finder
4 | from pathlib import Path
5 |
6 | CURRENT_DIRECTORY = Path(__file__).parent
7 |
8 |
9 | APP_CONFIG = AppConfig(
10 | app_name='{{cookiecutter.project_name}}_db',
11 | migrations_folder_path=str(CURRENT_DIRECTORY / 'migrations'),
12 | table_classes=table_finder(modules=[
13 | {%- if cookiecutter.add_dummy == "True" %}
14 | "{{cookiecutter.project_name}}.db.models.dummy_model"
15 | {%- endif %}
16 | ]),
17 | migration_dependencies=[],
18 | commands=[]
19 | )
20 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/dao/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} DAOs."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/dao/dummy_dao.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
4 |
5 |
6 | class DummyDAO:
7 | """Class for accessing dummy table."""
8 |
9 | async def create_dummy_model(self, name: str) -> None:
10 | """
11 |         Add a single dummy to the database.
12 |
13 | :param name: name of a dummy.
14 | """
15 | await DummyModel.insert(DummyModel(name=name))
16 |
17 | async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
18 | """
19 | Get all dummy models with limit/offset pagination.
20 |
21 | :param limit: limit of dummies.
22 | :param offset: offset of dummies.
23 | :return: stream of dummies.
24 | """
25 | return await DummyModel.objects().limit(limit).offset(offset)
26 |
27 | async def filter(
28 | self,
29 | name: Optional[str] = None,
30 | ) -> List[DummyModel]:
31 | """
32 | Get specific dummy model.
33 |
34 | :param name: name of dummy instance.
35 | :return: dummy models.
36 | """
37 | query = DummyModel.objects()
38 | if name:
39 | query = query.where(DummyModel.name == name)
40 | return await query
41 |
42 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/migrations/2022-04-16T17-38-51-672827.py:
--------------------------------------------------------------------------------
1 | from piccolo.apps.migrations.auto.migration_manager import MigrationManager
2 | from piccolo.columns.column_types import Varchar
3 | from piccolo.columns.indexes import IndexMethod
4 |
5 | ID = "2022-04-16T17:38:51:672827"
6 | VERSION = "0.74.0"
7 | DESCRIPTION = "Creates dummy model"
8 |
9 |
10 | async def forwards() -> MigrationManager:
11 | manager = MigrationManager(
12 | migration_id=ID, app_name="ptest_db", description=DESCRIPTION
13 | )
14 |
15 | manager.add_table("DummyModel", tablename="dummy_model")
16 |
17 | manager.add_column(
18 | table_class_name="DummyModel",
19 | tablename="dummy_model",
20 | column_name="name",
21 | db_column_name="name",
22 | column_class_name="Varchar",
23 | column_class=Varchar,
24 | params={
25 | "length": 200,
26 | "default": "",
27 | "null": False,
28 | "primary_key": False,
29 | "unique": False,
30 | "index": False,
31 | "index_method": IndexMethod.btree,
32 | "choices": None,
33 | "db_column_name": None,
34 | "secret": False,
35 | },
36 | )
37 |
38 | return manager
39 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3rius/FastAPI-template/c236a2faf8ae6fe3e0d0c4a2425e03a248cf988d/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/migrations/__init__.py
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/models/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} models."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_piccolo/models/dummy_model.py:
--------------------------------------------------------------------------------
1 | from piccolo.columns import Varchar
2 | from piccolo.table import Table
3 |
4 |
5 | class DummyModel(Table):
6 | """Model for demo purpose."""
7 |
8 | name = Varchar(length=200) # noqa: WPS432
9 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_psycopg/dao/dummy_dao.py:
--------------------------------------------------------------------------------
1 | """DAO for dummy models, implemented with psycopg."""
2 | from typing import Any, List, Optional
3 |
4 | from fastapi import Depends
5 | from psycopg.rows import class_row
6 | from psycopg_pool import AsyncConnectionPool
7 | from {{cookiecutter.project_name}}.db.dependencies import get_db_pool
8 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
9 |
10 |
11 | class DummyDAO:
12 | """Class for accessing dummy table."""
13 |
14 | def __init__(
15 | self,
16 | db_pool: AsyncConnectionPool[Any] = Depends(get_db_pool),
17 | ) -> None:
18 | self.db_pool = db_pool
19 |
20 |
21 | async def create_dummy_model(self, name: str) -> None:
22 | """
23 | Creates new dummy in a database.
24 |
25 | :param name: name of a dummy.
26 | """
27 | async with self.db_pool.connection() as connection, connection.cursor(binary=True) as cur:
28 | await cur.execute(
29 | "INSERT INTO dummy (name) VALUES (%(name)s);",
30 | params={
31 | "name": name,
32 | }
33 | )
34 |
35 | async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
36 | """
37 | Get all dummy models with limit/offset pagination.
38 |
39 | :param limit: limit of dummies.
40 | :param offset: offset of dummies.
41 | :return: stream of dummies.
42 | """
43 | async with self.db_pool.connection() as connection, connection.cursor(
44 | binary=True,
45 | row_factory=class_row(DummyModel)
46 | ) as cur:
47 | res = await cur.execute(
48 | "SELECT id, name FROM dummy LIMIT %(limit)s OFFSET %(offset)s;",
49 | params={
50 | "limit": limit,
51 | "offset": offset,
52 | }
53 | )
54 | return await res.fetchall()
55 |
56 | async def filter(
57 | self,
58 | name: Optional[str] = None,
59 | ) -> List[DummyModel]:
60 | """
61 | Get specific dummy model.
62 |
63 | :param name: name of dummy instance.
64 | :return: dummy models.
65 | """
66 | async with self.db_pool.connection() as connection, connection.cursor(
67 | binary=True,
68 | row_factory=class_row(DummyModel)
69 | ) as cur:
70 | if name is not None:
71 | res = await cur.execute(
72 | "SELECT id, name FROM dummy WHERE name=%(name)s;",
73 | params={
74 | "name": name,
75 | }
76 | )
77 | else:
78 | res = await cur.execute("SELECT id, name FROM dummy;")
79 | return await res.fetchall()
80 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_psycopg/dependencies.py:
--------------------------------------------------------------------------------
1 | from psycopg_pool import AsyncConnectionPool
2 | from typing import Any
3 | from starlette.requests import Request
4 |
5 | {%- if cookiecutter.enable_taskiq == "True" %}
6 | from taskiq import TaskiqDepends
7 |
8 | {%- endif %}
9 |
10 | async def get_db_pool(request: Request {%- if cookiecutter.enable_taskiq == "True" %} = TaskiqDepends(){%- endif %}) -> AsyncConnectionPool[Any]:
11 | """
12 | Return database connections pool.
13 |
14 | :param request: current request.
15 | :returns: database connections pool.
16 | """
17 | return request.app.state.db_pool
18 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_psycopg/models/dummy_model.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 |
4 | class DummyModel(BaseModel):
5 | """Dummy model for database."""
6 |
7 | id: int
8 | name: str
9 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/base.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.orm import DeclarativeBase
2 | from {{cookiecutter.project_name}}.db.meta import meta
3 |
4 |
5 | class Base(DeclarativeBase):
6 | """Base for all models."""
7 |
8 | metadata = meta
9 |
10 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/dao/__init__.py:
--------------------------------------------------------------------------------
1 | """DAO classes."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/dao/dummy_dao.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from fastapi import Depends
4 | from sqlalchemy import select
5 | from sqlalchemy.ext.asyncio import AsyncSession
6 | from {{cookiecutter.project_name}}.db.dependencies import get_db_session
7 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
8 |
9 |
10 | class DummyDAO:
11 | """Class for accessing dummy table."""
12 |
13 | def __init__(self, session: AsyncSession = Depends(get_db_session)) -> None:
14 | self.session = session
15 |
16 | async def create_dummy_model(self, name: str) -> None:
17 | """
18 | Add single dummy to session.
19 |
20 | :param name: name of a dummy.
21 | """
22 | self.session.add(DummyModel(name=name))
23 |
24 | async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
25 | """
26 | Get all dummy models with limit/offset pagination.
27 |
28 | :param limit: limit of dummies.
29 | :param offset: offset of dummies.
30 |         :return: list of dummy models.
31 | """
32 | raw_dummies = await self.session.execute(
33 | select(DummyModel).limit(limit).offset(offset),
34 | )
35 |
36 | return list(raw_dummies.scalars().fetchall())
37 |
38 | async def filter(
39 | self,
40 | name: Optional[str] = None
41 | ) -> List[DummyModel]:
42 | """
43 | Get specific dummy model.
44 |
45 | :param name: name of dummy instance.
46 | :return: dummy models.
47 | """
48 | query = select(DummyModel)
49 | if name:
50 | query = query.where(DummyModel.name == name)
51 | rows = await self.session.execute(query)
52 | return list(rows.scalars().fetchall())
53 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/dependencies.py:
--------------------------------------------------------------------------------
1 | from typing import AsyncGenerator
2 |
3 | from sqlalchemy.ext.asyncio import AsyncSession
4 | from starlette.requests import Request
5 |
6 | {%- if cookiecutter.enable_taskiq == "True" %}
7 | from taskiq import TaskiqDepends
8 |
9 | {%- endif %}
10 |
11 |
12 | async def get_db_session(request: Request {%- if cookiecutter.enable_taskiq == "True" %} = TaskiqDepends(){%- endif %}) -> AsyncGenerator[AsyncSession, None]:
13 | """
14 | Create and get database session.
15 |
16 | :param request: current request.
17 | :yield: database session.
18 | """
19 | session: AsyncSession = request.app.state.db_session_factory()
20 |
21 | try: # noqa: WPS501
22 | yield session
23 | finally:
24 | await session.commit()
25 | await session.close()
26 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/meta.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | meta = sa.MetaData()
4 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/__init__.py:
--------------------------------------------------------------------------------
1 | """Alembic migrations."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/env.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from logging.config import fileConfig
3 |
4 | from alembic import context
5 | from sqlalchemy.ext.asyncio.engine import create_async_engine
6 | from sqlalchemy.future import Connection
7 | from {{cookiecutter.project_name}}.db.meta import meta
8 | from {{cookiecutter.project_name}}.db.models import load_all_models
9 | from {{cookiecutter.project_name}}.settings import settings
10 |
11 | # this is the Alembic Config object, which provides
12 | # access to the values within the .ini file in use.
13 | config = context.config
14 |
15 |
16 | load_all_models()
17 | # Interpret the config file for Python logging.
18 | # This line sets up loggers basically.
19 | if config.config_file_name is not None:
20 | fileConfig(config.config_file_name)
21 |
22 | # add your model's MetaData object here
23 | # for 'autogenerate' support
24 | # from myapp import mymodel
25 | # target_metadata = mymodel.Base.metadata
26 | target_metadata = meta
27 |
28 | # other values from the config, defined by the needs of env.py,
29 | # can be acquired:
30 | # my_important_option = config.get_main_option("my_important_option")
31 | # ... etc.
32 |
33 |
34 | async def run_migrations_offline() -> None:
35 | """Run migrations in 'offline' mode.
36 |
37 | This configures the context with just a URL
38 | and not an Engine, though an Engine is acceptable
39 | here as well. By skipping the Engine creation
40 | we don't even need a DBAPI to be available.
41 |
42 | Calls to context.execute() here emit the given string to the
43 | script output.
44 |
45 | """
46 | context.configure(
47 | url=str(settings.db_url),
48 | target_metadata=target_metadata,
49 | literal_binds=True,
50 | dialect_opts={"paramstyle": "named"},
51 | )
52 |
53 | with context.begin_transaction():
54 | context.run_migrations()
55 |
56 |
57 | def do_run_migrations(connection: Connection) -> None:
58 | """
59 | Run actual sync migrations.
60 |
61 | :param connection: connection to the database.
62 | """
63 | context.configure(connection=connection, target_metadata=target_metadata)
64 |
65 | with context.begin_transaction():
66 | context.run_migrations()
67 |
68 |
69 | async def run_migrations_online() -> None:
70 | """
71 | Run migrations in 'online' mode.
72 |
73 | In this scenario we need to create an Engine
74 | and associate a connection with the context.
75 | """
76 | connectable = create_async_engine(str(settings.db_url))
77 |
78 | async with connectable.connect() as connection:
79 | await connection.run_sync(do_run_migrations)
80 |
81 | loop = asyncio.get_event_loop()
82 | if context.is_offline_mode():
83 | task = run_migrations_offline()
84 | else:
85 | task = run_migrations_online()
86 |
87 | loop.run_until_complete(task)
88 |
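For context, migrations driven by this env.py are normally applied with the standard Alembic CLI (`alembic upgrade head`) from the generated project's root, where the template's alembic.ini lives; a programmatic equivalent is sketched below.

# Equivalent of running `alembic upgrade head` from the project root.
from alembic.config import main as alembic_main

alembic_main(["upgrade", "head"])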
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}${"." if not message.endswith(".") else ""}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | """Run the migration."""
21 | ${upgrades if upgrades else ""}
22 |
23 |
24 | def downgrade() -> None:
25 | """Undo the migration."""
26 | ${downgrades if downgrades else ""}
27 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/versions/2021-08-16-16-53_819cbf6e030b.py:
--------------------------------------------------------------------------------
1 | """Initial migration.
2 |
3 | Revision ID: 819cbf6e030b
4 | Revises:
5 | Create Date: 2021-08-16 16:53:05.484024
6 |
7 | """
8 |
9 |
10 | # revision identifiers, used by Alembic.
11 | revision = "819cbf6e030b"
12 | down_revision = None
13 | branch_labels = None
14 | depends_on = None
15 |
16 |
17 | def upgrade() -> None:
18 | """Run the upgrade migrations."""
19 | pass
20 |
21 |
22 | def downgrade() -> None:
23 | """Run the downgrade migrations."""
24 | pass
25 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/versions/2021-08-16-16-55_2b7380507a71.py:
--------------------------------------------------------------------------------
1 | """Created Dummy Model.
2 |
3 | Revision ID: 2b7380507a71
4 | Revises: 819cbf6e030b
5 | Create Date: 2021-08-16 16:55:25.157309
6 |
7 | """
8 | import sqlalchemy as sa
9 | from alembic import op
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = "2b7380507a71"
13 | down_revision = "819cbf6e030b"
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade() -> None:
19 | """Run the upgrade migrations."""
20 | op.create_table(
21 | "dummy_model",
22 | sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
23 | sa.Column("name", sa.String(length=200), nullable=True),
24 | sa.PrimaryKeyConstraint("id"),
25 | )
26 | # ### end Alembic commands ###
27 |
28 |
29 | def downgrade() -> None:
30 | """Run the downgrade migrations."""
31 | op.drop_table("dummy_model")
32 | # ### end Alembic commands ###
33 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/versions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3rius/FastAPI-template/c236a2faf8ae6fe3e0d0c4a2425e03a248cf988d/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/migrations/versions/__init__.py
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/models/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} models."""
2 | import pkgutil
3 | from pathlib import Path
4 |
5 |
6 | def load_all_models() -> None:
7 | """Load all models from this folder."""
8 | package_dir = Path(__file__).resolve().parent
9 | modules = pkgutil.walk_packages(
10 | path=[str(package_dir)],
11 | prefix="{{cookiecutter.project_name}}.db.models.",
12 | )
13 | for module in modules:
14 | __import__(module.name) # noqa: WPS421
15 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/models/dummy_model.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.orm import Mapped, mapped_column
2 | from sqlalchemy.sql.sqltypes import String
3 | from {{cookiecutter.project_name}}.db.base import Base
4 |
5 |
6 | class DummyModel(Base):
7 | """Model for demo purpose."""
8 |
9 | __tablename__ = "dummy_model"
10 |
11 | id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
12 | name: Mapped[str] = mapped_column(String(length=200)) # noqa: WPS432
13 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/models/users.py:
--------------------------------------------------------------------------------
1 | # type: ignore
2 | import uuid
3 |
4 | from fastapi import Depends
5 | from fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin, schemas
6 | from fastapi_users.authentication import (
7 | AuthenticationBackend,
8 | BearerTransport,
9 |
10 | CookieTransport,
11 | JWTStrategy,
12 | )
13 | from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase
14 | from sqlalchemy.ext.asyncio import AsyncSession
15 |
16 | from {{cookiecutter.project_name}}.db.base import Base
17 | from {{cookiecutter.project_name}}.db.dependencies import get_db_session
18 | from {{cookiecutter.project_name}}.settings import settings
19 |
20 |
21 | class User(SQLAlchemyBaseUserTableUUID, Base):
22 | """Represents a user entity."""
23 |
24 |
25 | class UserRead(schemas.BaseUser[uuid.UUID]):
26 | """Represents a read command for a user."""
27 |
28 |
29 | class UserCreate(schemas.BaseUserCreate):
30 | """Represents a create command for a user."""
31 |
32 |
33 | class UserUpdate(schemas.BaseUserUpdate):
34 | """Represents an update command for a user."""
35 |
36 |
37 | class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
38 | """Manages a user session and its tokens."""
39 | reset_password_token_secret = settings.users_secret
40 | verification_token_secret = settings.users_secret
41 |
42 |
43 | async def get_user_db(session: AsyncSession = Depends(get_db_session)) -> SQLAlchemyUserDatabase:
44 | """
45 | Yield a SQLAlchemyUserDatabase instance.
46 |
47 | :param session: asynchronous SQLAlchemy session.
48 | :yields: instance of SQLAlchemyUserDatabase.
49 | """
50 | yield SQLAlchemyUserDatabase(session, User)
51 |
52 |
53 | async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)) -> UserManager:
54 | """
55 | Yield a UserManager instance.
56 |
57 | :param user_db: SQLAlchemy user db instance
58 | :yields: an instance of UserManager.
59 | """
60 | yield UserManager(user_db)
61 |
62 |
63 | def get_jwt_strategy() -> JWTStrategy:
64 | """
65 | Return a JWTStrategy in order to instantiate it dynamically.
66 |
67 | :returns: instance of JWTStrategy with provided settings.
68 | """
69 | return JWTStrategy(secret=settings.users_secret, lifetime_seconds=None)
70 |
71 |
72 | {%- if cookiecutter.jwt_auth == "True" %}
73 | bearer_transport = BearerTransport(tokenUrl="auth/jwt/login")
74 | auth_jwt = AuthenticationBackend(
75 | name="jwt",
76 | transport=bearer_transport,
77 | get_strategy=get_jwt_strategy,
78 | )
79 | {%- endif %}
80 |
81 | {%- if cookiecutter.cookie_auth == "True" %}
82 | cookie_transport = CookieTransport()
83 | auth_cookie = AuthenticationBackend(
84 | name="cookie", transport=cookie_transport, get_strategy=get_jwt_strategy
85 | )
86 | {%- endif %}
87 |
88 | backends = [
89 | {%- if cookiecutter.cookie_auth == "True" %}
90 | auth_cookie,
91 | {%- endif %}
92 | {%- if cookiecutter.jwt_auth == "True" %}
93 | auth_jwt,
94 | {%- endif %}
95 | ]
96 |
97 | api_users = FastAPIUsers[User, uuid.UUID](get_user_manager, backends)
98 |
99 | current_active_user = api_users.current_user(active=True)
100 |
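A sketch of how the objects exported here are typically consumed elsewhere in the generated project; the router and path below are illustrative, and the import path assumes the db_sa folder is renamed to db during generation, as in the other modules above.

from fastapi import APIRouter, Depends

from {{cookiecutter.project_name}}.db.models.users import User, current_active_user

router = APIRouter()


@router.get("/me")
async def read_me(user: User = Depends(current_active_user)) -> str:
    # Reachable only with valid credentials for one of the configured backends.
    return str(user.id)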
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_sa/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from sqlalchemy import text
4 | from sqlalchemy.engine import URL, make_url
5 | from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
6 | from sqlalchemy.orm import sessionmaker
7 | from pathlib import Path
8 | from {{cookiecutter.project_name}}.settings import settings
9 |
10 | {% if cookiecutter.db_info.name == "postgresql" -%}
11 | async def create_database() -> None:
12 | """Create a database."""
13 | db_url = make_url(str(settings.db_url.with_path('/postgres')))
14 | engine = create_async_engine(db_url, isolation_level="AUTOCOMMIT")
15 |
16 | async with engine.connect() as conn:
17 | database_existance = await conn.execute(
18 | text(
19 | f"SELECT 1 FROM pg_database WHERE datname='{settings.db_base}'", # noqa: E501, S608
20 | )
21 | )
22 | database_exists = database_existance.scalar() == 1
23 |
24 | if database_exists:
25 | await drop_database()
26 |
27 | async with engine.connect() as conn: # noqa: WPS440
28 | await conn.execute(
29 | text(
30 | f'CREATE DATABASE "{settings.db_base}" ENCODING "utf8" TEMPLATE template1', # noqa: E501
31 | )
32 | )
33 |
34 | async def drop_database() -> None:
35 | """Drop current database."""
36 | db_url = make_url(str(settings.db_url.with_path('/postgres')))
37 | engine = create_async_engine(db_url, isolation_level="AUTOCOMMIT")
38 | async with engine.connect() as conn:
39 | disc_users = (
40 | "SELECT pg_terminate_backend(pg_stat_activity.pid) " # noqa: S608
41 | "FROM pg_stat_activity "
42 | f"WHERE pg_stat_activity.datname = '{settings.db_base}' "
43 | "AND pid <> pg_backend_pid();"
44 | )
45 | await conn.execute(text(disc_users))
46 | await conn.execute(text(f'DROP DATABASE "{settings.db_base}"'))
47 |
48 |
49 | {%- endif %}
50 | {%- if cookiecutter.db_info.name == "mysql" %}
51 | async def create_database() -> None:
52 | """Create a database."""
53 | engine = create_async_engine(str(settings.db_url.with_path("/mysql")))
54 |
55 | async with engine.connect() as conn:
56 | database_existance = await conn.execute(
57 | text(
58 | "SELECT 1 FROM INFORMATION_SCHEMA.SCHEMATA" # noqa: S608
59 | f" WHERE SCHEMA_NAME='{settings.db_base}';",
60 | )
61 | )
62 | database_exists = database_existance.scalar() == 1
63 |
64 | if database_exists:
65 | await drop_database()
66 |
67 | async with engine.connect() as conn: # noqa: WPS440
68 | await conn.execute(
69 | text(
70 | f'CREATE DATABASE {settings.db_base};'
71 | )
72 | )
73 |
74 | async def drop_database() -> None:
75 | """Drop current database."""
76 | engine = create_async_engine(str(settings.db_url.with_path("/mysql")))
77 | async with engine.connect() as conn:
78 | await conn.execute(text(f'DROP DATABASE {settings.db_base};'))
79 | {%- endif %}
80 | {%- if cookiecutter.db_info.name == "sqlite" %}
81 | async def create_database() -> None:
82 | """Create a database."""
83 |
84 | async def drop_database() -> None:
85 | """Drop current database."""
86 | if settings.db_file.exists():
87 | Path(settings.db_file).unlink()
88 |
89 | {%- endif %}
90 |
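A sketch of how these helpers are typically used around a test session; the fixture-like function below is illustrative and not part of this file.

from {{cookiecutter.project_name}}.db.utils import create_database, drop_database


async def prepare_test_database() -> None:
    # Recreate the database before running tests and clean it up afterwards.
    await create_database()
    try:
        ...  # apply migrations and run tests here
    finally:
        await drop_database()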
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/config.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from {{cookiecutter.project_name}}.settings import settings
4 |
5 | MODELS_MODULES: List[str] = [{%- if cookiecutter.add_dummy == 'True' %}"{{cookiecutter.project_name}}.db.models.dummy_model"{%- endif %}] # noqa: WPS407
6 |
7 | TORTOISE_CONFIG = { # noqa: WPS407
8 | "connections": {
9 | "default": str(settings.db_url),
10 | },
11 | "apps": {
12 | "models": {
13 | "models": {%- if cookiecutter.enable_migrations == "True" %} [*MODELS_MODULES, "aerich.models"] {%- else %} MODELS_MODULES {%- endif %} ,
14 | "default_connection": "default",
15 | },
16 | },
17 | }
18 |
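A sketch of how TORTOISE_CONFIG is typically wired into the FastAPI application (the actual wiring lives in the generated project's web application module, which is not shown in this section); register_tortoise comes from tortoise-orm's FastAPI contrib package.

from fastapi import FastAPI
from tortoise.contrib.fastapi import register_tortoise

from {{cookiecutter.project_name}}.db.config import TORTOISE_CONFIG

app = FastAPI()
register_tortoise(app, config=TORTOISE_CONFIG, add_exception_handlers=True)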
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/dao/__init__.py:
--------------------------------------------------------------------------------
1 | """DAO classes."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/dao/dummy_dao.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
4 |
5 |
6 | class DummyDAO:
7 | """Class for accessing dummy table."""
8 |
9 | async def create_dummy_model(self, name: str) -> None:
10 | """
11 | Add single dummy to session.
12 |
13 | :param name: name of a dummy.
14 | """
15 | await DummyModel.create(name=name)
16 |
17 | async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
18 | """
19 | Get all dummy models with limit/offset pagination.
20 |
21 | :param limit: limit of dummies.
22 | :param offset: offset of dummies.
23 | :return: stream of dummies.
24 | """
25 | return (
26 | await DummyModel.all().offset(offset).limit(limit)
27 | )
28 |
29 | async def filter(self, name: Optional[str] = None) -> List[DummyModel]:
30 | """
31 | Get specific dummy model.
32 |
33 | :param name: name of dummy instance.
34 | :return: dummy models.
35 | """
36 | query = DummyModel.all()
37 | if name:
38 | query = query.filter(name=name)
39 | return await query
40 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_mysql.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
2 | CREATE TABLE IF NOT EXISTS `aerich` (
3 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
4 | `version` VARCHAR(255) NOT NULL,
5 | `app` VARCHAR(20) NOT NULL,
6 | `content` JSON NOT NULL
7 | ) CHARACTER SET utf8mb4;
8 | -- downgrade --
9 | DROP TABLE `aerich`;
10 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_pg.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
2 | CREATE TABLE IF NOT EXISTS "aerich" (
3 | "id" SERIAL NOT NULL PRIMARY KEY,
4 | "version" VARCHAR(255) NOT NULL,
5 | "app" VARCHAR(20) NOT NULL,
6 | "content" JSONB NOT NULL
7 | );
8 | -- downgrade --
9 | DROP TABLE "aerich";
10 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/migrations/models/0_20210928165300_init_sqlite.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
2 | CREATE TABLE IF NOT EXISTS "aerich" (
3 | "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
4 | "version" VARCHAR(255) NOT NULL,
5 | "app" VARCHAR(20) NOT NULL,
6 | "content" JSON NOT NULL
7 | );
8 | -- downgrade --
9 | DROP TABLE "aerich";
10 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_mysql.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
2 | CREATE TABLE IF NOT EXISTS `dummymodel` (
3 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
4 | `name` VARCHAR(200) NOT NULL
5 | ) CHARACTER SET utf8mb4 COMMENT='Model for demo purpose.';
6 | -- downgrade --
7 | DROP TABLE `dummymodel`;
8 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_pg.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
2 | CREATE TABLE IF NOT EXISTS "dummymodel" (
3 | "id" SERIAL NOT NULL PRIMARY KEY,
4 | "name" VARCHAR(200) NOT NULL
5 | );
6 | COMMENT ON TABLE "dummymodel" IS 'Model for demo purpose.';
7 | -- downgrade --
8 | DROP TABLE "dummymodel";
9 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/migrations/models/1_20210928165300_init_dummy_sqlite.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
2 | CREATE TABLE IF NOT EXISTS "dummymodel" (
3 | "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
4 | "name" VARCHAR(200) NOT NULL
5 | ) /* Model for demo purpose. */;
6 | -- downgrade --
7 | DROP TABLE "dummymodel";
8 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/models/__init__.py:
--------------------------------------------------------------------------------
1 | """Models for {{cookiecutter.project_name}}."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/db_tortoise/models/dummy_model.py:
--------------------------------------------------------------------------------
1 | from tortoise import fields, models
2 |
3 |
4 | class DummyModel(models.Model):
5 | """Model for demo purpose."""
6 |
7 | id = fields.IntField(pk=True)
8 | name = fields.CharField(max_length=200) # noqa: WPS432
9 |
10 | def __str__(self) -> str:
11 | return self.name
12 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/gunicorn_runner.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from gunicorn.app.base import BaseApplication
4 | from gunicorn.util import import_app
5 | from uvicorn.workers import UvicornWorker as BaseUvicornWorker
6 |
7 | try:
8 | import uvloop # noqa: WPS433 (Found nested import)
9 | except ImportError:
10 | uvloop = None # type: ignore # noqa: WPS440 (variables overlap)
11 |
12 |
13 |
14 | class UvicornWorker(BaseUvicornWorker):
15 | """
16 | Configuration for uvicorn workers.
17 |
18 |     This class subclasses UvicornWorker and defines
19 |     some parameters class-wide, because it's impossible
20 |     to pass these parameters through gunicorn.
21 | """
22 |
23 |     CONFIG_KWARGS: dict[str, Any] = {  # type: ignore  # noqa: RUF012
24 | "loop": "uvloop" if uvloop is not None else "asyncio",
25 | "http": "httptools",
26 | "lifespan": "on",
27 | "factory": True,
28 | "proxy_headers": False,
29 | }
30 |
31 |
32 | class GunicornApplication(BaseApplication):
33 | """
34 | Custom gunicorn application.
35 |
36 |     This class is used to start gunicorn
37 | with custom uvicorn workers.
38 | """
39 |
40 | def __init__( # noqa: WPS211 (Too many args)
41 | self,
42 | app: str,
43 | host: str,
44 | port: int,
45 | workers: int,
46 | **kwargs: Any,
47 | ) -> None:
48 | self.options = {
49 | "bind": f"{host}:{port}",
50 | "workers": workers,
51 | "worker_class": "{{cookiecutter.project_name}}.gunicorn_runner.UvicornWorker",
52 | **kwargs
53 | }
54 | self.app = app
55 | super().__init__()
56 |
57 | def load_config(self) -> None:
58 | """
59 | Load config for web server.
60 |
61 |         This function is used to set parameters for the gunicorn
62 |         main process. It only sets parameters that
63 |         gunicorn can handle. If you pass an unknown
64 |         parameter to it, it crashes with an error.
65 | """
66 | for key, value in self.options.items():
67 | if key in self.cfg.settings and value is not None:
68 | self.cfg.set(key.lower(), value)
69 |
70 | def load(self) -> str:
71 | """
72 | Load actual application.
73 |
74 |         Gunicorn loads the application based on this
75 |         function's return value. We return the Python path to
76 |         the app's factory.
77 |
78 | :returns: python path to app factory.
79 | """
80 | return import_app(self.app)
81 |
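A sketch of how GunicornApplication is meant to be launched (the generated project's __main__.py, not shown in this section, does something along these lines); the host, port, and worker count below are illustrative.

from {{cookiecutter.project_name}}.gunicorn_runner import GunicornApplication

GunicornApplication(
    "{{cookiecutter.project_name}}.web.application:get_app",  # dotted path to the app factory
    host="0.0.0.0",
    port=8000,
    workers=4,
).run()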
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/log.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 | from typing import Any, Union
4 |
5 | from loguru import logger
6 | from {{cookiecutter.project_name}}.settings import settings
7 |
8 | {%- if cookiecutter.otlp_enabled == "True" %}
9 | from opentelemetry.trace import INVALID_SPAN, INVALID_SPAN_CONTEXT, get_current_span
10 |
11 | {%- endif %}
12 |
13 |
14 | class InterceptHandler(logging.Handler):
15 | """
16 | Default handler from examples in loguru documentation.
17 |
18 | This handler intercepts all log requests and
19 | passes them to loguru.
20 |
21 | For more info see:
22 | https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
23 | """
24 |
25 | def emit(self, record: logging.LogRecord) -> None: # pragma: no cover
26 | """
27 | Propagates logs to loguru.
28 |
29 | :param record: record to log.
30 | """
31 | try:
32 | level: Union[str, int] = logger.level(record.levelname).name
33 | except ValueError:
34 | level = record.levelno
35 |
36 |         # Find the caller from which the logged message originated
37 | frame, depth = logging.currentframe(), 2
38 | while frame.f_code.co_filename == logging.__file__:
39 | frame = frame.f_back # type: ignore
40 | depth += 1
41 |
42 | logger.opt(depth=depth, exception=record.exc_info).log(
43 | level,
44 | record.getMessage(),
45 | )
46 |
47 | {%- if cookiecutter.otlp_enabled == "True" %}
48 |
49 | def record_formatter(record: dict[str, Any]) -> str: # pragma: no cover
50 | """
51 | Formats the record.
52 |
53 |     This function formats the message
54 |     by adding extra trace information to the record.
55 |
56 | :param record: record information.
57 | :return: format string.
58 | """
59 | log_format = (
60 | "{time:YYYY-MM-DD HH:mm:ss.SSS} "
61 | "| {level: <8} "
62 | "| trace_id={extra[trace_id]} "
63 | "| span_id={extra[span_id]} "
64 | "| {name}:{function}:{line} "
65 | "- {message}\n"
66 | )
67 |
68 | span = get_current_span()
69 | record["extra"]["span_id"] = 0
70 | record["extra"]["trace_id"] = 0
71 | if span != INVALID_SPAN:
72 | span_context = span.get_span_context()
73 | if span_context != INVALID_SPAN_CONTEXT:
74 | record["extra"]["span_id"] = format(span_context.span_id, "016x")
75 | record["extra"]["trace_id"] = format(span_context.trace_id, "032x")
76 |
77 | if record["exception"]:
78 | log_format = f"{log_format}{{'{{'}}exception{{'}}'}}"
79 |
80 | return log_format
81 |
82 | {%- endif %}
83 |
84 | def configure_logging() -> None: # pragma: no cover
85 | """Configures logging."""
86 | intercept_handler = InterceptHandler()
87 |
88 | logging.basicConfig(handlers=[intercept_handler], level=logging.NOTSET)
89 |
90 | for logger_name in logging.root.manager.loggerDict:
91 | if logger_name.startswith("uvicorn."):
92 | logging.getLogger(logger_name).handlers = []
93 | {%- if cookiecutter.enable_taskiq == "True" %}
94 | if logger_name.startswith("taskiq."):
95 | logging.getLogger(logger_name).root.handlers = [intercept_handler]
96 | {%- endif %}
97 |
98 | # change handler for default uvicorn logger
99 | logging.getLogger("uvicorn").handlers = [intercept_handler]
100 | logging.getLogger("uvicorn.access").handlers = [intercept_handler]
101 |
102 | # set logs output, level and format
103 | logger.remove()
104 | logger.add(
105 | sys.stdout,
106 | level=settings.log_level.value,
107 | {%- if cookiecutter.otlp_enabled == "True" %}
108 | format=record_formatter, # type: ignore
109 | {%- endif %}
110 | )
111 |
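A minimal sketch of the interception in action: once configure_logging() has run, records emitted through the standard logging module are routed through InterceptHandler and formatted by loguru.

import logging

from {{cookiecutter.project_name}}.log import configure_logging

configure_logging()
# Handled by InterceptHandler and re-emitted through loguru.
logging.getLogger("uvicorn").info("server starting")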
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/piccolo_conf.py:
--------------------------------------------------------------------------------
1 | from piccolo.conf.apps import AppRegistry
2 | from {{cookiecutter.project_name}}.settings import settings
3 |
4 | {%- if cookiecutter.db_info.name == "postgresql" %}
5 | from piccolo.engine.postgres import PostgresEngine
6 |
7 | DB = PostgresEngine(
8 | config={
9 | "database": settings.db_base,
10 | "user": settings.db_user,
11 | "password": settings.db_pass,
12 | "host": settings.db_host,
13 | "port": settings.db_port,
14 | }
15 | )
16 |
17 |
18 |
19 | {%- elif cookiecutter.db_info.name == "sqlite" %}
20 | from piccolo.engine.sqlite import SQLiteEngine
21 |
22 | DB = SQLiteEngine(path=str(settings.db_file))
23 | {%- endif %}
24 |
25 |
26 | APP_REGISTRY = AppRegistry(
27 | apps=["{{cookiecutter.project_name}}.db.app_conf"]
28 | )
29 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/__init__.py:
--------------------------------------------------------------------------------
1 | """Services for {{cookiecutter.project_name}}."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/kafka/__init__.py:
--------------------------------------------------------------------------------
1 | """Kafka service."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/kafka/dependencies.py:
--------------------------------------------------------------------------------
1 | from aiokafka import AIOKafkaProducer
2 | from fastapi import Request
3 |
4 | {%- if cookiecutter.enable_taskiq == "True" %}
5 | from taskiq import TaskiqDepends
6 |
7 | {%- endif %}
8 |
9 |
10 | def get_kafka_producer(request: Request {%- if cookiecutter.enable_taskiq == "True" %} = TaskiqDepends(){%- endif %}) -> AIOKafkaProducer: # pragma: no cover
11 | """
12 | Returns kafka producer.
13 |
14 | :param request: current request.
15 | :return: kafka producer from the state.
16 | """
17 | return request.app.state.kafka_producer
18 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/kafka/lifespan.py:
--------------------------------------------------------------------------------
1 | from aiokafka import AIOKafkaProducer
2 | from fastapi import FastAPI
3 | from {{cookiecutter.project_name}}.settings import settings
4 |
5 |
6 | async def init_kafka(app: FastAPI) -> None: # pragma: no cover
7 | """
8 | Initialize kafka producer.
9 |
10 |     This function creates the producer
11 |     and makes the initial connection to
12 |     the kafka cluster. After that you
13 |     can use the producer stored in the app state.
14 |
15 |     We don't need to use pools here,
16 |     because aiokafka has an implicit pool
17 |     inside the producer.
18 |
19 | :param app: current application.
20 | """
21 | app.state.kafka_producer = AIOKafkaProducer(
22 | bootstrap_servers=settings.kafka_bootstrap_servers,
23 | )
24 | await app.state.kafka_producer.start()
25 |
26 |
27 | async def shutdown_kafka(app: FastAPI) -> None: # pragma: no cover
28 | """
29 | Shutdown kafka client.
30 |
31 | This function closes all connections
32 | and sends all pending data to kafka.
33 |
34 | :param app: current application.
35 | """
36 | await app.state.kafka_producer.stop()
37 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/rabbit/__init__.py:
--------------------------------------------------------------------------------
1 | """RabbitMQ service."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/rabbit/dependencies.py:
--------------------------------------------------------------------------------
1 | from aio_pika import Channel
2 | from aio_pika.pool import Pool
3 | from fastapi import Request
4 |
5 | {%- if cookiecutter.enable_taskiq == "True" %}
6 | from taskiq import TaskiqDepends
7 |
8 | {%- endif %}
9 |
10 |
11 | def get_rmq_channel_pool(request: Request {%- if cookiecutter.enable_taskiq == "True" %} = TaskiqDepends(){%- endif %}) -> Pool[Channel]: # pragma: no cover
12 | """
13 | Get channel pool from the state.
14 |
15 | :param request: current request.
16 | :return: channel pool.
17 | """
18 | return request.app.state.rmq_channel_pool
19 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/rabbit/lifespan.py:
--------------------------------------------------------------------------------
1 | import aio_pika
2 | from aio_pika.abc import AbstractChannel, AbstractRobustConnection
3 | from aio_pika.pool import Pool
4 | from fastapi import FastAPI
5 | from {{cookiecutter.project_name}}.settings import settings
6 |
7 |
8 | def init_rabbit(app: FastAPI) -> None: # pragma: no cover
9 | """
10 | Initialize rabbitmq pools.
11 |
12 | :param app: current FastAPI application.
13 | """
14 |
15 | async def get_connection() -> AbstractRobustConnection: # noqa: WPS430
16 | """
17 |         Creates a connection to RabbitMQ using the URL from settings.
18 |
19 | :return: async connection to RabbitMQ.
20 | """
21 | return await aio_pika.connect_robust(str(settings.rabbit_url))
22 |
23 | # This pool is used to open connections.
24 | connection_pool: Pool[AbstractRobustConnection] = Pool(
25 | get_connection,
26 | max_size=settings.rabbit_pool_size,
27 | )
28 |
29 | async def get_channel() -> AbstractChannel: # noqa: WPS430
30 | """
31 | Open channel on connection.
32 |
33 | Channels are used to actually communicate with rabbitmq.
34 |
35 | :return: connected channel.
36 | """
37 | async with connection_pool.acquire() as connection:
38 | return await connection.channel()
39 |
40 | # This pool is used to open channels.
41 | channel_pool: Pool[aio_pika.Channel] = Pool(
42 | get_channel,
43 | max_size=settings.rabbit_channel_pool_size,
44 | )
45 |
46 | app.state.rmq_pool = connection_pool
47 | app.state.rmq_channel_pool = channel_pool
48 |
49 |
50 | async def shutdown_rabbit(app: FastAPI) -> None: # pragma: no cover
51 | """
52 |     Close all connections and pools.
53 |
54 | :param app: current application.
55 | """
56 | await app.state.rmq_channel_pool.close()
57 | await app.state.rmq_pool.close()
58 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/redis/__init__.py:
--------------------------------------------------------------------------------
1 | """Redis service."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/redis/dependency.py:
--------------------------------------------------------------------------------
1 | from typing import AsyncGenerator
2 |
3 | from redis.asyncio import Redis
4 | from starlette.requests import Request
5 |
6 | {%- if cookiecutter.enable_taskiq == "True" %}
7 | from taskiq import TaskiqDepends
8 |
9 | {%- endif %}
10 |
11 |
12 | async def get_redis_pool(request: Request {%- if cookiecutter.enable_taskiq == "True" %} = TaskiqDepends(){%- endif %}) -> AsyncGenerator[Redis, None]: # pragma: no cover
13 | """
14 | Returns connection pool.
15 |
16 | You can use it like this:
17 |
18 | >>> from redis.asyncio import ConnectionPool, Redis
19 | >>>
20 | >>> async def handler(redis_pool: ConnectionPool = Depends(get_redis_pool)):
21 | >>> async with Redis(connection_pool=redis_pool) as redis:
22 | >>> await redis.get('key')
23 |
24 |     A pool is used here, so a connection isn't acquired and held for the whole handler.
25 |
26 | :param request: current request.
27 | :returns: redis connection pool.
28 | """
29 | return request.app.state.redis_pool
30 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/services/redis/lifespan.py:
--------------------------------------------------------------------------------
1 | from fastapi import FastAPI
2 | from redis.asyncio import ConnectionPool
3 | from {{cookiecutter.project_name}}.settings import settings
4 |
5 |
6 | def init_redis(app: FastAPI) -> None: # pragma: no cover
7 | """
8 | Creates connection pool for redis.
9 |
10 | :param app: current fastapi application.
11 | """
12 | app.state.redis_pool = ConnectionPool.from_url(
13 | str(settings.redis_url),
14 | )
15 |
16 |
17 | async def shutdown_redis(app: FastAPI) -> None: # pragma: no cover
18 | """
19 | Closes redis connection pool.
20 |
21 | :param app: current FastAPI app.
22 | """
23 | await app.state.redis_pool.disconnect()
24 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 | import enum
3 | from pathlib import Path
4 | from tempfile import gettempdir
5 | from typing import List, Optional
6 |
7 | from pydantic_settings import BaseSettings, SettingsConfigDict
8 |
9 | from yarl import URL
10 |
11 | TEMP_DIR = Path(gettempdir())
12 |
13 | class LogLevel(str, enum.Enum): # noqa: WPS600
14 | """Possible log levels."""
15 |
16 | NOTSET = "NOTSET"
17 | DEBUG = "DEBUG"
18 | INFO = "INFO"
19 | WARNING = "WARNING"
20 | ERROR = "ERROR"
21 | FATAL = "FATAL"
22 |
23 |
24 | class Settings(BaseSettings):
25 | """
26 | Application settings.
27 |
28 | These parameters can be configured
29 | with environment variables.
30 | """
31 |
32 | host: str = "127.0.0.1"
33 | port: int = 8000
34 | # quantity of workers for uvicorn
35 | workers_count: int = 1
36 | # Enable uvicorn reloading
37 | reload: bool = False
38 |
39 | # Current environment
40 | environment: str = "dev"
41 |
42 | log_level: LogLevel = LogLevel.INFO
43 |
44 | {%- if cookiecutter.add_users == "True" %}
45 | {%- if cookiecutter.orm == "sqlalchemy" %}
46 | users_secret: str = os.getenv("USERS_SECRET", "")
47 | {%- endif %}
48 | {%- endif %}
49 | {% if cookiecutter.db_info.name != "none" -%}
50 |
51 | # Variables for the database
52 | {%- if cookiecutter.db_info.name == "sqlite" %}
53 | db_file: Path = TEMP_DIR / "db.sqlite3"
54 | {%- else %}
55 | db_host: str = "localhost"
56 | db_port: int = {{cookiecutter.db_info.port}}
57 | db_user: str = "{{cookiecutter.project_name}}"
58 | db_pass: str = "{{cookiecutter.project_name}}"
59 | {%- if cookiecutter.db_info.name != "sqlite" %}
60 | db_base: str = "admin"
61 | {%- else %}
62 | db_base: str = "{{cookiecutter.project_name}}"
63 | {%- endif %}
64 | {%- endif %}
65 | db_echo: bool = False
66 |
67 | {%- endif %}
68 |
69 |
70 | {%- if cookiecutter.enable_redis == "True" %}
71 |
72 | # Variables for Redis
73 | redis_host: str = "{{cookiecutter.project_name}}-redis"
74 | redis_port: int = 6379
75 | redis_user: Optional[str] = None
76 | redis_pass: Optional[str] = None
77 | redis_base: Optional[int] = None
78 |
79 | {%- endif %}
80 |
81 |
82 | {%- if cookiecutter.enable_rmq == "True" %}
83 |
84 | # Variables for RabbitMQ
85 | rabbit_host: str = "{{cookiecutter.project_name}}-rmq"
86 | rabbit_port: int = 5672
87 | rabbit_user: str = "guest"
88 | rabbit_pass: str = "guest"
89 | rabbit_vhost: str = "/"
90 |
91 | rabbit_pool_size: int = 2
92 | rabbit_channel_pool_size: int = 10
93 |
94 | {%- endif %}
95 |
96 |
97 | {%- if cookiecutter.prometheus_enabled == "True" %}
98 |
99 | # This variable is used to define
100 | # multiproc_dir. It's required for [uvi|guni]corn projects.
101 | prometheus_dir: Path = TEMP_DIR / "prom"
102 |
103 | {%- endif %}
104 |
105 |
106 | {%- if cookiecutter.sentry_enabled == "True" %}
107 |
108 | # Sentry's configuration.
109 | sentry_dsn: Optional[str] = None
110 | sentry_sample_rate: float = 1.0
111 |
112 | {%- endif %}
113 |
114 |
115 | {%- if cookiecutter.otlp_enabled == "True" %}
116 |
117 | # Grpc endpoint for opentelemetry.
118 | # E.G. http://localhost:4317
119 | opentelemetry_endpoint: Optional[str] = None
120 |
121 | {%- endif %}
122 |
123 | {%- if cookiecutter.enable_kafka == "True" %}
124 |
125 | kafka_bootstrap_servers: List[str] = ["{{cookiecutter.project_name}}-kafka:9092"]
126 |
127 | {%- endif %}
128 |
129 | {%- if cookiecutter.db_info.name != "none" %}
130 |
131 |
132 | @property
133 | def db_url(self) -> URL:
134 | """
135 | Assemble database URL from settings.
136 |
137 | :return: database URL.
138 | """
139 | {%- if cookiecutter.db_info.name == "sqlite" %}
140 | return URL.build(
141 | {%- if cookiecutter.orm == "sqlalchemy" %}
142 | scheme="{{cookiecutter.db_info.async_driver}}",
143 | {%- elif cookiecutter.orm == "tortoise" %}
144 | scheme="{{cookiecutter.db_info.driver_short}}",
145 | {%- else %}
146 | scheme="{{cookiecutter.db_info.driver}}",
147 | {%- endif %}
148 | path=f"///{self.db_file}"
149 | )
150 | {%- else %}
151 | return URL.build(
152 | {%- if cookiecutter.orm == "sqlalchemy" %}
153 | scheme="{{cookiecutter.db_info.async_driver}}",
154 | {%- elif cookiecutter.orm == "tortoise" %}
155 | scheme="{{cookiecutter.db_info.driver_short}}",
156 | {%- else %}
157 | scheme="{{cookiecutter.db_info.driver}}",
158 | {%- endif %}
159 | host=self.db_host,
160 | port=self.db_port,
161 | user=self.db_user,
162 | password=self.db_pass,
163 | path=f"/{self.db_base}",
164 | )
165 | {%- endif %}
166 | {%- endif %}
167 |
168 | {%- if cookiecutter.enable_redis == "True" %}
169 | @property
170 | def redis_url(self) -> URL:
171 | """
172 | Assemble REDIS URL from settings.
173 |
174 | :return: redis URL.
175 | """
176 | path = ""
177 | if self.redis_base is not None:
178 | path = f"/{self.redis_base}"
179 | return URL.build(
180 | scheme="redis",
181 | host=self.redis_host,
182 | port=self.redis_port,
183 | user=self.redis_user,
184 | password=self.redis_pass,
185 | path=path,
186 | )
187 | {%- endif %}
188 |
189 | {%- if cookiecutter.enable_rmq == "True" %}
190 | @property
191 | def rabbit_url(self) -> URL:
192 | """
193 | Assemble RabbitMQ URL from settings.
194 |
195 | :return: rabbit URL.
196 | """
197 | return URL.build(
198 | scheme="amqp",
199 | host=self.rabbit_host,
200 | port=self.rabbit_port,
201 | user=self.rabbit_user,
202 | password=self.rabbit_pass,
203 | path=self.rabbit_vhost,
204 | )
205 | {%- endif %}
206 |
207 | model_config = SettingsConfigDict(
208 |         env_file=".env",
209 |         env_prefix="{{cookiecutter.project_name | upper }}_",
210 |         env_file_encoding="utf-8",
211 | )
212 |
213 |
214 |
215 | settings = Settings()
216 |
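Because env_prefix is the upper-cased project name, every field above can be overridden through the environment. A sketch, assuming a hypothetical project generated as my_app (the variable name and value below are made up):

import os

os.environ["MY_APP_PORT"] = "9000"

from my_app.settings import Settings

print(Settings().port)  # 9000 instead of the default 8000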
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/tkq.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | import taskiq_fastapi
3 | from taskiq import InMemoryBroker, ZeroMQBroker, AsyncBroker, AsyncResultBackend
4 | from {{cookiecutter.project_name}}.settings import settings
5 |
6 | {%- if cookiecutter.enable_redis == "True" %}
7 | from taskiq_redis import ListQueueBroker, RedisAsyncResultBackend
8 |
9 | {%- endif %}
10 |
11 | {%- if cookiecutter.enable_rmq == "True" %}
12 | from taskiq_aio_pika import AioPikaBroker
13 |
14 | {%- endif %}
15 |
16 | {%- if cookiecutter.enable_redis == "True" %}
17 | result_backend: AsyncResultBackend[Any] = RedisAsyncResultBackend(
18 | redis_url=str(settings.redis_url.with_path("/1")),
19 | )
20 | {%- endif %}
21 |
22 |
23 | {%- if cookiecutter.enable_rmq == "True" %}
24 | broker: AsyncBroker = AioPikaBroker(
25 | str(settings.rabbit_url),
26 | ){%- if cookiecutter.enable_redis == "True" %}.with_result_backend(result_backend){%- endif %}
27 | {%- elif cookiecutter.enable_redis == "True" %}
28 | broker: AsyncBroker = ListQueueBroker(
29 | str(settings.redis_url.with_path("/1")),
30 | ).with_result_backend(result_backend)
31 | {%- else %}
32 | broker: AsyncBroker = ZeroMQBroker()
33 | {%- endif %}
34 |
35 | if settings.environment.lower() == "pytest":
36 | broker = InMemoryBroker()
37 |
38 | taskiq_fastapi.init(
39 | broker,
40 | "{{cookiecutter.project_name}}.web.application:get_app",
41 | )
42 |
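A sketch of declaring and dispatching a task with the broker configured above; the task name and arguments are illustrative only.

from {{cookiecutter.project_name}}.tkq import broker


@broker.task
async def add(first: int, second: int) -> int:
    return first + second


async def enqueue() -> None:
    # Send the task to the broker and wait for the result backend to report back.
    task = await add.kiq(1, 2)
    result = await task.wait_result()
    print(result.return_value)  # 3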
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/__init__.py:
--------------------------------------------------------------------------------
1 | """WEB API for {{cookiecutter.project_name}}."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} API package."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/docs/__init__.py:
--------------------------------------------------------------------------------
1 | """Routes for swagger and redoc."""
2 | from {{cookiecutter.project_name}}.web.api.docs.views import router
3 |
4 | __all__ = ['router']
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/docs/views.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, Request
2 | from fastapi.openapi.docs import (get_redoc_html, get_swagger_ui_html,
3 | get_swagger_ui_oauth2_redirect_html)
4 | from fastapi.responses import HTMLResponse
5 |
6 | router = APIRouter()
7 |
8 | @router.get("/docs", include_in_schema=False)
9 | async def swagger_ui_html(request: Request) -> HTMLResponse:
10 | """
11 | Swagger UI.
12 |
13 | :param request: current request.
14 | :return: rendered swagger UI.
15 | """
16 | title = request.app.title
17 | return get_swagger_ui_html(
18 | openapi_url=request.app.openapi_url,
19 | title=f"{title} - Swagger UI",
20 | oauth2_redirect_url=str(request.url_for("swagger_ui_redirect")),
21 | swagger_js_url="/static/docs/swagger-ui-bundle.js",
22 | swagger_css_url="/static/docs/swagger-ui.css",
23 | )
24 |
25 |
26 | @router.get("/swagger-redirect", include_in_schema=False)
27 | async def swagger_ui_redirect() -> HTMLResponse:
28 | """
29 | Redirect to swagger.
30 |
31 | :return: redirect.
32 | """
33 | return get_swagger_ui_oauth2_redirect_html()
34 |
35 |
36 | @router.get("/redoc", include_in_schema=False)
37 | async def redoc_html(request: Request) -> HTMLResponse:
38 | """
39 | Redoc UI.
40 |
41 | :param request: current request.
42 | :return: rendered redoc UI.
43 | """
44 | title = request.app.title
45 | return get_redoc_html(
46 | openapi_url=request.app.openapi_url,
47 | title=f"{title} - ReDoc",
48 | redoc_js_url="/static/docs/redoc.standalone.js",
49 | )
50 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/dummy/__init__.py:
--------------------------------------------------------------------------------
1 | """Dummy model API."""
2 | from {{cookiecutter.project_name}}.web.api.dummy.views import router
3 |
4 | __all__ = ['router']
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/dummy/schema.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 | from pydantic import ConfigDict
4 | {%- if cookiecutter.db_info.name == "mongodb" %}
5 | from pydantic import field_validator
6 | from bson import ObjectId
7 | {%- endif %}
8 |
9 |
10 | class DummyModelDTO(BaseModel):
11 | """
12 | DTO for dummy models.
13 |
14 |     It is returned when accessing dummy models from the API.
15 | """
16 |
17 | {%- if cookiecutter.db_info.name != "mongodb" %}
18 | id: int
19 | {%- else %}
20 | id: str
21 | {%- endif %}
22 | name: str
23 |
24 | {%- if cookiecutter.db_info.name == "mongodb" %}
25 | @field_validator("id", mode="before")
26 | @classmethod
27 | def parse_object_id(cls, document_id: ObjectId) -> str:
28 | """
29 | Validator that converts `ObjectId` to json serializable `str`.
30 |
31 | :param document_id: Bson Id for this document.
32 | :return: The converted str.
33 | """
34 | return str(document_id)
35 | {%- endif %}
36 |
37 | model_config = ConfigDict(from_attributes=True)
38 |
39 | class DummyModelInputDTO(BaseModel):
40 | """DTO for creating new dummy model."""
41 |
42 | name: str
43 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/dummy/views.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from fastapi import APIRouter
4 | from fastapi.param_functions import Depends
5 | from {{cookiecutter.project_name}}.db.dao.dummy_dao import DummyDAO
6 | from {{cookiecutter.project_name}}.db.models.dummy_model import DummyModel
7 | from {{cookiecutter.project_name}}.web.api.dummy.schema import (DummyModelDTO,
8 | DummyModelInputDTO)
9 |
10 | router = APIRouter()
11 |
12 |
13 | @router.get("/", response_model=List[DummyModelDTO])
14 | async def get_dummy_models(
15 | limit: int = 10,
16 | offset: int = 0,
17 | dummy_dao: DummyDAO = Depends(),
18 | ) -> List[DummyModel]:
19 | """
20 | Retrieve all dummy objects from the database.
21 |
22 | :param limit: limit of dummy objects, defaults to 10.
23 | :param offset: offset of dummy objects, defaults to 0.
24 | :param dummy_dao: DAO for dummy models.
25 | :return: list of dummy objects from database.
26 | """
27 | return await dummy_dao.get_all_dummies(limit=limit, offset=offset)
28 |
29 |
30 | @router.put("/")
31 | async def create_dummy_model(
32 | new_dummy_object: DummyModelInputDTO,
33 | dummy_dao: DummyDAO = Depends(),
34 | ) -> None:
35 | """
36 | Creates dummy model in the database.
37 |
38 | :param new_dummy_object: new dummy model item.
39 | :param dummy_dao: DAO for dummy models.
40 | """
41 | await dummy_dao.create_dummy_model(name=new_dummy_object.name)
42 |
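A sketch of exercising these endpoints with httpx against a locally running instance; the base URL and route prefix below are illustrative, since the actual prefix is defined by the API router, which is not shown in this section.

import httpx

BASE = "http://localhost:8000/api/dummy"  # prefix is illustrative

httpx.put(f"{BASE}/", json={"name": "test"})
print(httpx.get(f"{BASE}/", params={"limit": 5, "offset": 0}).json())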
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/echo/__init__.py:
--------------------------------------------------------------------------------
1 | """Echo API."""
2 | from {{cookiecutter.project_name}}.web.api.echo.views import router
3 |
4 | __all__ = ['router']
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/echo/schema.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 |
4 | class Message(BaseModel):
5 | """Simple message model."""
6 |
7 | message: str
8 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/echo/views.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 | from {{cookiecutter.project_name}}.web.api.echo.schema import Message
3 |
4 | router = APIRouter()
5 |
6 |
7 | @router.post("/", response_model=Message)
8 | async def send_echo_message(
9 | incoming_message: Message,
10 | ) -> Message:
11 | """
12 | Sends echo back to user.
13 |
14 | :param incoming_message: incoming message.
15 | :returns: message same as the incoming.
16 | """
17 | return incoming_message
18 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/kafka/__init__.py:
--------------------------------------------------------------------------------
1 | """API to interact with kafka."""
2 | from {{cookiecutter.project_name}}.web.api.kafka.views import router
3 |
4 | __all__ = ["router"]
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/kafka/schema.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 |
4 | class KafkaMessage(BaseModel):
5 | """DTO for kafka messages."""
6 |
7 | topic: str
8 | message: str
9 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/kafka/views.py:
--------------------------------------------------------------------------------
1 | from aiokafka import AIOKafkaProducer
2 | from fastapi import APIRouter, Depends
3 | from {{cookiecutter.project_name}}.services.kafka.dependencies import get_kafka_producer
4 | from {{cookiecutter.project_name}}.web.api.kafka.schema import KafkaMessage
5 |
6 | router = APIRouter()
7 |
8 |
9 | @router.post("/")
10 | async def send_kafka_message(
11 | kafka_message: KafkaMessage,
12 | producer: AIOKafkaProducer = Depends(get_kafka_producer),
13 | ) -> None:
14 | """
15 | Sends message to kafka.
16 |
17 | :param producer: kafka's producer.
18 | :param kafka_message: message to publish.
19 | """
20 | await producer.send(
21 | topic=kafka_message.topic,
22 | value=kafka_message.message.encode(),
23 | )
24 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/monitoring/__init__.py:
--------------------------------------------------------------------------------
1 | """API for checking project status."""
2 | from {{cookiecutter.project_name}}.web.api.monitoring.views import router
3 |
4 | __all__ = ['router']
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/monitoring/views.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 |
3 | router = APIRouter()
4 |
5 | @router.get('/health')
6 | def health_check() -> None:
7 | """
8 | Checks the health of a project.
9 |
10 | It returns 200 if the project is healthy.
11 | """
12 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/rabbit/__init__.py:
--------------------------------------------------------------------------------
1 | """API to interact with RabbitMQ."""
2 | from {{cookiecutter.project_name}}.web.api.rabbit.views import router
3 |
4 | __all__ = ["router"]
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/rabbit/schema.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 |
4 | class RMQMessageDTO(BaseModel):
5 | """DTO for publishing message in RabbitMQ."""
6 |
7 | exchange_name: str
8 | routing_key: str
9 | message: str
10 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/rabbit/views.py:
--------------------------------------------------------------------------------
1 | from aio_pika import Channel, Message
2 | from aio_pika.pool import Pool
3 | from fastapi import APIRouter, Depends
4 | from {{cookiecutter.project_name}}.services.rabbit.dependencies import \
5 | get_rmq_channel_pool
6 | from {{cookiecutter.project_name}}.web.api.rabbit.schema import RMQMessageDTO
7 |
8 | router = APIRouter()
9 |
10 |
11 | @router.post("/")
12 | async def send_rabbit_message(
13 | message: RMQMessageDTO,
14 | pool: Pool[Channel] = Depends(get_rmq_channel_pool),
15 | ) -> None:
16 | """
17 | Publishes a message to a RabbitMQ exchange.
18 |
19 | :param message: message to publish to rabbitmq.
20 | :param pool: rabbitmq channel pool
21 | """
22 | async with pool.acquire() as conn:
23 | exchange = await conn.declare_exchange(
24 | name=message.exchange_name,
25 | auto_delete=True,
26 | )
27 | await exchange.publish(
28 | message=Message(
29 | body=message.message.encode("utf-8"),
30 | content_encoding="utf-8",
31 | content_type="text/plain",
32 | ),
33 | routing_key=message.routing_key,
34 | )
35 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/redis/__init__.py:
--------------------------------------------------------------------------------
1 | """Redis API."""
2 | from {{cookiecutter.project_name}}.web.api.redis.views import router
3 |
4 | __all__ = ['router']
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/redis/schema.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from pydantic import BaseModel
4 |
5 |
6 | class RedisValueDTO(BaseModel):
7 | """DTO for redis values."""
8 |
9 | key: str
10 | value: Optional[str] # noqa: WPS110
11 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/redis/views.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 | from fastapi.param_functions import Depends
3 | from redis.asyncio import ConnectionPool, Redis
4 | from {{cookiecutter.project_name}}.services.redis.dependency import get_redis_pool
5 | from {{cookiecutter.project_name}}.web.api.redis.schema import RedisValueDTO
6 |
7 | router = APIRouter()
8 |
9 |
10 | @router.get("/", response_model=RedisValueDTO)
11 | async def get_redis_value(
12 | key: str,
13 | redis_pool: ConnectionPool = Depends(get_redis_pool),
14 | ) -> RedisValueDTO:
15 | """
16 | Get value from redis.
17 |
18 | :param key: redis key to get data from.
19 | :param redis_pool: redis connection pool.
20 | :returns: information from redis.
21 | """
22 | async with Redis(connection_pool=redis_pool) as redis:
23 | redis_value = await redis.get(key)
24 | return RedisValueDTO(
25 | key=key,
26 | value=redis_value,
27 | )
28 |
29 |
30 | @router.put("/")
31 | async def set_redis_value(
32 | redis_value: RedisValueDTO,
33 | redis_pool: ConnectionPool = Depends(get_redis_pool),
34 | ) -> None:
35 | """
36 | Set value in redis.
37 |
38 | :param redis_value: new value data.
39 | :param redis_pool: redis connection pool.
40 | """
41 | if redis_value.value is not None:
42 | async with Redis(connection_pool=redis_pool) as redis:
43 | await redis.set(name=redis_value.key, value=redis_value.value)
44 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/router.py:
--------------------------------------------------------------------------------
1 | from fastapi.routing import APIRouter
2 |
3 | {%- if cookiecutter.add_users == 'True' %}
4 | from {{cookiecutter.project_name}}.web.api import users
5 | from {{cookiecutter.project_name}}.db.models.users import api_users
6 | {%- endif %}
7 | {%- if cookiecutter.enable_routers == "True" %}
8 | {%- if cookiecutter.api_type == 'rest' %}
9 | from {{cookiecutter.project_name}}.web.api import echo
10 |
11 | {%- if cookiecutter.add_dummy == 'True' %}
12 | from {{cookiecutter.project_name}}.web.api import dummy
13 |
14 | {%- endif %}
15 | {%- if cookiecutter.enable_redis == "True" %}
16 | from {{cookiecutter.project_name}}.web.api import redis
17 |
18 | {%- endif %}
19 | {%- if cookiecutter.enable_rmq == "True" %}
20 | from {{cookiecutter.project_name}}.web.api import rabbit
21 |
22 | {%- endif %}
23 | {%- if cookiecutter.enable_kafka == "True" %}
24 | from {{cookiecutter.project_name}}.web.api import kafka
25 |
26 | {%- endif %}
27 | {%- endif %}
28 | {%- endif %}
29 | {%- if cookiecutter.self_hosted_swagger == "True" %}
30 | from {{cookiecutter.project_name}}.web.api import docs
31 |
32 | {%- endif %}
33 | from {{cookiecutter.project_name}}.web.api import monitoring
34 |
35 | api_router = APIRouter()
36 | api_router.include_router(monitoring.router)
37 | {%- if cookiecutter.add_users == 'True' %}
38 | api_router.include_router(users.router)
39 | {%- endif %}
40 | {%- if cookiecutter.self_hosted_swagger == "True" %}
41 | api_router.include_router(docs.router)
42 | {%- endif %}
43 | {%- if cookiecutter.enable_routers == "True" %}
44 | {%- if cookiecutter.api_type == 'rest' %}
45 | api_router.include_router(echo.router, prefix="/echo", tags=["echo"])
46 | {%- if cookiecutter.add_dummy == 'True' %}
47 | api_router.include_router(dummy.router, prefix="/dummy", tags=["dummy"])
48 | {%- endif %}
49 | {%- if cookiecutter.enable_redis == "True" %}
50 | api_router.include_router(redis.router, prefix="/redis", tags=["redis"])
51 | {%- endif %}
52 | {%- if cookiecutter.enable_rmq == "True" %}
53 | api_router.include_router(rabbit.router, prefix="/rabbit", tags=["rabbit"])
54 | {%- endif %}
55 | {%- if cookiecutter.enable_kafka == "True" %}
56 | api_router.include_router(kafka.router, prefix="/kafka", tags=["kafka"])
57 | {%- endif %}
58 | {%- endif %}
59 | {%- endif %}
60 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/users/__init__.py:
--------------------------------------------------------------------------------
1 | """API for checking project status."""
2 | from {{cookiecutter.project_name}}.web.api.users.views import router
3 |
4 | __all__ = ["router"]
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/api/users/views.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 |
3 | from {{cookiecutter.project_name}}.db.models.users import (
4 | UserCreate, # type: ignore
5 | UserRead, # type: ignore
6 | UserUpdate, # type: ignore
7 | api_users, # type: ignore
8 | auth_jwt, # type: ignore
9 | auth_cookie, # type: ignore
10 | )
11 |
12 |
13 | router = APIRouter()
14 |
15 | router.include_router(
16 | api_users.get_register_router(UserRead, UserCreate),
17 | prefix="/auth",
18 | tags=["auth"],
19 | )
20 |
21 | router.include_router(
22 | api_users.get_reset_password_router(),
23 | prefix="/auth",
24 | tags=["auth"],
25 | )
26 |
27 | router.include_router(
28 | api_users.get_verify_router(UserRead),
29 | prefix="/auth",
30 | tags=["auth"],
31 | )
32 |
33 | router.include_router(
34 | api_users.get_users_router(UserRead, UserUpdate),
35 | prefix="/users",
36 | tags=["users"],
37 | )
38 | {%- if cookiecutter.jwt_auth == "True" %}
39 | router.include_router(
40 | api_users.get_auth_router(auth_jwt),
41 | prefix="/auth/jwt",
42 | tags=["auth"]
43 | )
44 | {%- endif %}
45 |
46 | {%- if cookiecutter.cookie_auth == "True" %}
47 | router.include_router(
48 | api_users.get_auth_router(auth_cookie),
49 | prefix="/auth/cookie",
50 | tags=["auth"]
51 | )
52 | {%- endif %}
53 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/application.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from fastapi import FastAPI
4 | from fastapi.responses import UJSONResponse
5 | from {{cookiecutter.project_name}}.settings import settings
6 | from {{cookiecutter.project_name}}.web.api.router import api_router
7 |
8 | {%- if cookiecutter.api_type == 'graphql' %}
9 | from {{cookiecutter.project_name}}.web.gql.router import gql_router
10 |
11 | {%- endif %}
12 | from importlib import metadata
13 |
14 | from {{cookiecutter.project_name}}.web.lifespan import lifespan_setup
15 |
16 | {%- if cookiecutter.orm == 'tortoise' %}
17 | from tortoise.contrib.fastapi import register_tortoise
18 | from {{cookiecutter.project_name}}.db.config import TORTOISE_CONFIG
19 |
20 | {%- endif %}
21 |
22 | {%- if cookiecutter.sentry_enabled == "True" %}
23 | import sentry_sdk
24 | from sentry_sdk.integrations.fastapi import FastApiIntegration
25 | from sentry_sdk.integrations.logging import LoggingIntegration
26 |
27 | {%- if cookiecutter.orm == "sqlalchemy" %}
28 | from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
29 |
30 | {%- endif %}
31 | {%- endif %}
32 |
33 | {%- if cookiecutter.enable_loguru == "True" %}
34 | from {{cookiecutter.project_name}}.log import configure_logging
35 |
36 | {%- endif %}
37 |
38 | {%- if cookiecutter.self_hosted_swagger == 'True' %}
39 | from pathlib import Path
40 |
41 | from fastapi.staticfiles import StaticFiles
42 |
43 | APP_ROOT = Path(__file__).parent.parent
44 | {%- endif %}
45 |
46 |
47 | def get_app() -> FastAPI:
48 | """
49 | Get FastAPI application.
50 |
51 | This is the main constructor of an application.
52 |
53 | :return: application.
54 | """
55 | {%- if cookiecutter.enable_loguru == "True" %}
56 | configure_logging()
57 | {%- endif %}
58 | {%- if cookiecutter.sentry_enabled == "True" %}
59 | if settings.sentry_dsn:
60 | # Enables sentry integration.
61 | sentry_sdk.init(
62 | dsn=settings.sentry_dsn,
63 | traces_sample_rate=settings.sentry_sample_rate,
64 | environment=settings.environment,
65 | integrations=[
66 | FastApiIntegration(transaction_style="endpoint"),
67 | LoggingIntegration(
68 | level=logging.getLevelName(
69 | settings.log_level.value,
70 | ),
71 | event_level=logging.ERROR,
72 | ),
73 | {%- if cookiecutter.orm == "sqlalchemy" %}
74 | SqlalchemyIntegration(),
75 | {%- endif %}
76 | ],
77 | )
78 | {%- endif %}
79 | app = FastAPI(
80 | title="{{cookiecutter.project_name}}",
81 | version=metadata.version("{{cookiecutter.project_name}}"),
82 | lifespan=lifespan_setup,
83 | {%- if cookiecutter.self_hosted_swagger == 'True' %}
84 | docs_url=None,
85 | redoc_url=None,
86 | {% else %}
87 | docs_url="/api/docs",
88 | redoc_url="/api/redoc",
89 | {%- endif %}
90 | openapi_url="/api/openapi.json",
91 | default_response_class=UJSONResponse,
92 | )
93 |
94 | # Main router for the API.
95 | app.include_router(router=api_router, prefix="/api")
96 | {%- if cookiecutter.api_type == 'graphql' %}
97 | # Graphql router
98 | app.include_router(router=gql_router, prefix="/graphql")
99 | {%- endif %}
100 |
101 | {%- if cookiecutter.self_hosted_swagger == 'True' %}
102 | # Adds static directory.
103 | # This directory is used to access swagger files.
104 | app.mount(
105 | "/static",
106 | StaticFiles(directory=APP_ROOT / "static"),
107 | name="static"
108 | )
109 | {% endif %}
110 |
111 | {%- if cookiecutter.orm == 'tortoise' %}
112 | # Configures tortoise orm.
113 | register_tortoise(
114 | app,
115 | config=TORTOISE_CONFIG,
116 | add_exception_handlers=True,
117 | {%- if cookiecutter.enable_migrations != "True" %}
118 | generate_schemas=True,
119 | {%- endif %}
120 | )
121 | {%- endif %}
122 |
123 | return app
124 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/__init__.py:
--------------------------------------------------------------------------------
1 | """{{cookiecutter.project_name}} API package."""
2 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/context.py:
--------------------------------------------------------------------------------
1 | from fastapi import Depends
2 | from strawberry.fastapi import BaseContext
3 |
4 | {%- if cookiecutter.enable_redis == "True" %}
5 | from redis.asyncio import ConnectionPool
6 | from {{cookiecutter.project_name}}.services.redis.dependency import get_redis_pool
7 |
8 | {%- endif %}
9 |
10 | {%- if cookiecutter.enable_rmq == "True" %}
11 | from aio_pika import Channel
12 | from aio_pika.pool import Pool
13 | from {{cookiecutter.project_name}}.services.rabbit.dependencies import \
14 | get_rmq_channel_pool
15 |
16 | {%- endif %}
17 |
18 | {%- if cookiecutter.enable_kafka == "True" %}
19 | from aiokafka import AIOKafkaProducer
20 | from {{cookiecutter.project_name}}.services.kafka.dependencies import get_kafka_producer
21 |
22 | {%- endif %}
23 |
24 |
25 | {%- if cookiecutter.orm == "sqlalchemy" %}
26 | from sqlalchemy.ext.asyncio import AsyncSession
27 | from {{cookiecutter.project_name}}.db.dependencies import get_db_session
28 |
29 | {%- elif cookiecutter.orm == "psycopg" %}
30 | from typing import Any
31 | from psycopg_pool import AsyncConnectionPool
32 | from {{cookiecutter.project_name}}.db.dependencies import get_db_pool
33 |
34 | {%- endif %}
35 |
36 |
37 | class Context(BaseContext):
38 | """Global graphql context."""
39 |
40 | def __init__(
41 | self,
42 | {%- if cookiecutter.enable_redis == "True" %}
43 | redis_pool: ConnectionPool = Depends(get_redis_pool),
44 | {%- endif %}
45 | {%- if cookiecutter.enable_rmq == "True" %}
46 | rabbit: Pool[Channel] = Depends(get_rmq_channel_pool),
47 | {%- endif %}
48 | {%- if cookiecutter.orm == "sqlalchemy" %}
49 | db_connection: AsyncSession = Depends(get_db_session),
50 | {%- elif cookiecutter.orm == "psycopg" %}
51 | db_pool: AsyncConnectionPool[Any] = Depends(get_db_pool),
52 | {%- endif %}
53 | {%- if cookiecutter.enable_kafka == "True" %}
54 | kafka_producer: AIOKafkaProducer = Depends(get_kafka_producer),
55 | {%- endif %}
56 | ) -> None:
57 | {%- if cookiecutter.enable_redis == "True" %}
58 | self.redis_pool = redis_pool
59 | {%- endif %}
60 | {%- if cookiecutter.enable_rmq == "True" %}
61 | self.rabbit = rabbit
62 | {%- endif %}
63 | {%- if cookiecutter.orm == "sqlalchemy" %}
64 | self.db_connection = db_connection
65 | {%- endif %}
66 | {%- if cookiecutter.orm == "psycopg" %}
67 | self.db_pool = db_pool
68 | {%- endif %}
69 | {%- if cookiecutter.enable_kafka == "True" %}
70 | self.kafka_producer = kafka_producer
71 | {%- endif %}
72 | pass # noqa: WPS420
73 |
74 |
75 | def get_context(context: Context = Depends(Context)) -> Context:
76 | """
77 | Get custom context.
78 |
79 | :param context: graphql context.
80 | :return: context
81 | """
82 | return context
83 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/dummy/__init__.py:
--------------------------------------------------------------------------------
1 | """Package for dummy model."""
2 |
3 | from {{cookiecutter.project_name}}.web.gql.dummy.mutation import Mutation
4 | from {{cookiecutter.project_name}}.web.gql.dummy.query import Query
5 |
6 | __all__ = ["Query", "Mutation"]
7 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/dummy/mutation.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 | from strawberry.types import Info
3 | from {{cookiecutter.project_name}}.db.dao.dummy_dao import DummyDAO
4 | from {{cookiecutter.project_name}}.web.gql.context import Context
5 |
6 |
7 | @strawberry.type
8 | class Mutation:
9 | """Mutations for dummies."""
10 |
11 | @strawberry.mutation(description="Create dummy object in a database")
12 | async def create_dummy_model(
13 | self,
14 | {%- if cookiecutter.orm in ["sqlalchemy", "psycopg"] %}
15 | info: Info[Context, None],
16 | {%- endif %}
17 | name: str,
18 | ) -> str:
19 | """
20 | Creates dummy model in a database.
21 |
22 | {% if cookiecutter.orm in ["sqlalchemy", "psycopg"] -%}
23 | :param info: connection info.
24 | {% endif -%}
25 | :param name: name of a dummy.
26 | :return: name of a dummy model.
27 | """
28 | {%- if cookiecutter.orm == "sqlalchemy" %}
29 | dao = DummyDAO(info.context.db_connection)
30 | {%- elif cookiecutter.orm == "psycopg" %}
31 | dao = DummyDAO(info.context.db_pool)
32 | {%- else %}
33 | dao = DummyDAO()
34 | {%- endif %}
35 | await dao.create_dummy_model(name=name)
36 | return name
37 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/dummy/query.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | import strawberry
4 | from strawberry.types import Info
5 | from {{cookiecutter.project_name}}.db.dao.dummy_dao import DummyDAO
6 | from {{cookiecutter.project_name}}.web.gql.context import Context
7 | from {{cookiecutter.project_name}}.web.gql.dummy.schema import DummyModelDTO
8 |
9 |
10 | @strawberry.type
11 | class Query:
12 | """Query to interact with dummies."""
13 |
14 | @strawberry.field(description="Get all dummies")
15 | async def get_dummy_models(
16 | self,
17 | {%- if cookiecutter.orm in ["sqlalchemy", "psycopg"] %}
18 | info: Info[Context, None],
19 | {%- endif %}
20 | limit: int = 15,
21 | offset: int = 0,
22 | ) -> List[DummyModelDTO]:
23 | """
24 | Retrieves all dummy objects from database.
25 |
26 | {% if cookiecutter.orm in ["sqlalchemy", "psycopg"] -%}
27 | :param info: connection info.
28 | {% endif -%}
29 | :param limit: limit of dummy objects, defaults to 15.
30 | :param offset: offset of dummy objects, defaults to 0.
31 | :return: list of dummy objects from database.
32 | """
33 | {%- if cookiecutter.orm == "sqlalchemy" %}
34 | dao = DummyDAO(info.context.db_connection)
35 | {%- elif cookiecutter.orm == "psycopg" %}
36 | dao = DummyDAO(info.context.db_pool)
37 | {%- else %}
38 | dao = DummyDAO()
39 | {%- endif %}
40 | return await dao.get_all_dummies(limit=limit, offset=offset) # type: ignore
41 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/dummy/schema.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 |
3 |
4 | @strawberry.type
5 | class DummyModelDTO:
6 | """
7 | DTO for dummy models.
8 |
9 | It is returned when accessing dummy models from the API.
10 | """
11 |
12 | {%- if cookiecutter.db_info.name != "mongodb" %}
13 | id: int
14 | {%- else %}
15 | id: str
16 | {%- endif %}
17 | name: str
18 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/echo/__init__.py:
--------------------------------------------------------------------------------
1 | """Echo API."""
2 | from {{cookiecutter.project_name}}.web.gql.echo.mutation import Mutation
3 | from {{cookiecutter.project_name}}.web.gql.echo.query import Query
4 |
5 | __all__ = ["Query", "Mutation"]
6 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/echo/mutation.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 |
3 |
4 | @strawberry.type
5 | class Mutation:
6 | """Echo mutation."""
7 |
8 | @strawberry.field(description="Echo mutation")
9 | def echo(self, message: str) -> str:
10 | """
11 | Sends echo message back to user.
12 |
13 | :param message: incoming message.
14 | :returns: same message as the incoming.
15 | """
16 | return message
17 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/echo/query.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 |
3 |
4 | @strawberry.type
5 | class Query:
6 | """Echo query."""
7 |
8 | @strawberry.field(description="Echo query")
9 | def echo(self, message: str) -> str:
10 | """
11 | Sends echo message back to user.
12 |
13 | :param message: incoming message.
14 | :returns: same message as the incoming.
15 | """
16 | return message
17 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/kafka/__init__.py:
--------------------------------------------------------------------------------
1 | """Package to interact with kafka."""
2 | from {{cookiecutter.project_name}}.web.gql.kafka.mutation import Mutation
3 |
4 | __all__ = ["Mutation"]
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/kafka/mutation.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 | from strawberry.types import Info
3 | from {{cookiecutter.project_name}}.web.gql.context import Context
4 | from {{cookiecutter.project_name}}.web.gql.kafka.schema import KafkaMessageDTO
5 |
6 |
7 | @strawberry.type
8 | class Mutation:
9 | """Mutation for rabbit package."""
10 |
11 | @strawberry.mutation(description="Send message to Kafka")
12 | async def send_kafka_message(
13 | self, message: KafkaMessageDTO,
14 | info: Info[Context, None],
15 | ) -> None:
16 | """
17 | Sends a message to Kafka.
18 |
19 | :param message: message to publish.
20 | :param info: current context.
21 | """
22 | await info.context.kafka_producer.send(
23 | topic=message.topic,
24 | value=message.message.encode(),
25 | )
26 |
27 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/kafka/schema.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 |
3 |
4 | @strawberry.input
5 | class KafkaMessageDTO:
6 | """Input type for kafka mutation."""
7 |
8 | topic: str
9 | message: str
10 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/rabbit/__init__.py:
--------------------------------------------------------------------------------
1 | """Package to interact with rabbitMQ."""
2 | from {{cookiecutter.project_name}}.web.gql.rabbit.mutation import Mutation
3 |
4 | __all__ = ["Mutation"]
5 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/rabbit/mutation.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 | from aio_pika import Message
3 | from strawberry.types import Info
4 | from {{cookiecutter.project_name}}.web.gql.context import Context
5 | from {{cookiecutter.project_name}}.web.gql.rabbit.schema import RabbitMessageDTO
6 |
7 |
8 | @strawberry.type
9 | class Mutation:
10 | """Mutation for rabbit package."""
11 |
12 | @strawberry.mutation(description="Send message to RabbitMQ")
13 | async def send_rabbit_message(
14 | self, message: RabbitMessageDTO, info: Info[Context, None]
15 | ) -> None:
16 | """
17 | Publishes a message to RabbitMQ.
18 |
19 | :param message: message to publish.
20 | :param info: current context.
21 | """
22 | async with info.context.rabbit.acquire() as conn:
23 | exchange = await conn.declare_exchange(
24 | name=message.exchange_name,
25 | auto_delete=True,
26 | )
27 | await exchange.publish(
28 | message=Message(
29 | body=message.message.encode("utf-8"),
30 | content_encoding="utf-8",
31 | content_type="text/plain",
32 | ),
33 | routing_key=message.routing_key,
34 | )
35 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/rabbit/schema.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 |
3 |
4 | @strawberry.input
5 | class RabbitMessageDTO:
6 | """Input type for rabbit mutation."""
7 |
8 | exchange_name: str
9 | routing_key: str
10 | message: str
11 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/redis/__init__.py:
--------------------------------------------------------------------------------
1 | """Redis API."""
2 | from {{cookiecutter.project_name}}.web.gql.redis.mutation import Mutation
3 | from {{cookiecutter.project_name}}.web.gql.redis.query import Query
4 |
5 | __all__ = ["Query", "Mutation"]
6 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/redis/mutation.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 | from redis.asyncio import Redis
3 | from strawberry.types import Info
4 | from {{cookiecutter.project_name}}.web.gql.context import Context
5 | from {{cookiecutter.project_name}}.web.gql.redis.schema import RedisDTO, RedisDTOInput
6 |
7 |
8 | @strawberry.type
9 | class Mutation:
10 | """Mutations for redis."""
11 |
12 | @strawberry.mutation(description="Set value in redis")
13 | async def set_redis_value(
14 | self,
15 | data: RedisDTOInput,
16 | info: Info[Context, None],
17 | ) -> RedisDTO:
18 | """
19 | Sets value in redis.
20 |
21 | :param data: key and value to insert.
22 | :param info: connection info.
23 | :return: key and value.
24 | """
25 | async with Redis(connection_pool=info.context.redis_pool) as redis:
26 | await redis.set(name=data.key, value=data.value)
27 | return RedisDTO(key=data.key, value=data.value) # type: ignore
28 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/redis/query.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 | from redis.asyncio import Redis
3 | from strawberry.types import Info
4 | from {{cookiecutter.project_name}}.web.gql.context import Context
5 | from {{cookiecutter.project_name}}.web.gql.redis.schema import RedisDTO
6 |
7 |
8 | @strawberry.type
9 | class Query:
10 | """Query to interact with redis."""
11 |
12 | @strawberry.field(description="Get value from redis")
13 | async def get_redis_value(self, key: str, info: Info[Context, None]) -> RedisDTO:
14 | """
15 | Gets value from redis.
16 |
17 | :param key: key to search for.
18 | :param info: resolver context.
19 | :return: information from redis.
20 | """
21 | async with Redis(connection_pool=info.context.redis_pool) as redis:
22 | val = await redis.get(name=key)
23 | if isinstance(val, bytes):
24 | val = val.decode("utf-8")
25 | return RedisDTO(key=key, value=val) # type: ignore
26 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/redis/schema.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | import strawberry
4 |
5 |
6 | @strawberry.type
7 | class RedisDTO:
8 | """Output type for redis queries."""
9 |
10 | key: str
11 | value: Optional[str]
12 |
13 |
14 | @strawberry.input
15 | class RedisDTOInput:
16 | """Input type for redis mutation."""
17 |
18 | key: str
19 | value: str
20 |
--------------------------------------------------------------------------------
/fastapi_template/template/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/web/gql/router.py:
--------------------------------------------------------------------------------
1 | import strawberry
2 | from strawberry.fastapi import GraphQLRouter
3 | from {{cookiecutter.project_name}}.web.gql.context import Context, get_context
4 |
5 | {%- if cookiecutter.enable_routers == "True" %}
6 | from {{cookiecutter.project_name}}.web.gql import echo
7 |
8 | {%- if cookiecutter.add_dummy == 'True' %}
9 | from {{cookiecutter.project_name}}.web.gql import dummy
10 |
11 | {%- endif %}
12 | {%- if cookiecutter.enable_redis == "True" %}
13 | from {{cookiecutter.project_name}}.web.gql import redis
14 |
15 | {%- endif %}
16 | {%- if cookiecutter.enable_rmq == "True" %}
17 | from {{cookiecutter.project_name}}.web.gql import rabbit
18 |
19 | {%- endif %}
20 | {%- if cookiecutter.enable_kafka == "True" %}
21 | from {{cookiecutter.project_name}}.web.gql import kafka
22 |
23 | {%- endif %}
24 |
25 | {%- endif %}
26 |
27 | @strawberry.type
28 | class Query( # noqa: WPS215
29 | {%- if cookiecutter.enable_routers == "True" %}
30 | echo.Query,
31 | {%- if cookiecutter.add_dummy == 'True' %}
32 | dummy.Query,
33 | {%- endif %}
34 | {%- if cookiecutter.enable_redis == "True" %}
35 | redis.Query,
36 | {%- endif %}
37 | {%- endif %}
38 | ):
39 | """Main query."""
40 |
41 |
42 | @strawberry.type
43 | class Mutation( # noqa: WPS215
44 | {%- if cookiecutter.enable_routers == "True" %}
45 | echo.Mutation,
46 | {%- if cookiecutter.add_dummy == 'True' %}
47 | dummy.Mutation,
48 | {%- endif %}
49 | {%- if cookiecutter.enable_redis == "True" %}
50 | redis.Mutation,
51 | {%- endif %}
52 | {%- if cookiecutter.enable_rmq == "True" %}
53 | rabbit.Mutation,
54 | {%- endif %}
55 | {%- if cookiecutter.enable_kafka == "True" %}
56 | kafka.Mutation,
57 | {%- endif %}
58 | {%- endif %}
59 | ):
60 | """Main mutation."""
61 |
62 |
63 | schema = strawberry.Schema(
64 | Query,
65 | Mutation,
66 | )
67 |
68 | gql_router: GraphQLRouter[Context, None] = GraphQLRouter(
69 | schema,
70 | graphiql=True,
71 | context_getter=get_context,
72 | )
73 |
--------------------------------------------------------------------------------
/fastapi_template/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import shutil
4 | import tempfile
5 | from pathlib import Path
6 | from typing import Generator
7 |
8 | import pytest
9 | from faker import Faker
10 | from fastapi_template.input_model import BuilderContext, Database
11 | from fastapi_template.tests.utils import run_docker_compose_command, model_dump_compat
12 |
13 |
14 | @pytest.fixture
15 | def project_name(worker_id: str) -> str:
16 | """
17 | Generate name for test project.
18 |
19 | :return: project name.
20 | """
21 | fake = Faker()
22 | raw_name = fake.name_female() + worker_id
23 | clear_name: str = (
24 | raw_name.lower().replace(" ", "_").replace("-", "_").replace(".", "_")
25 | )
26 | return re.sub("_+", "_", clear_name).strip("_")
27 |
28 |
29 | @pytest.fixture(scope="session", autouse=True)
30 | def generator_start_dir() -> Generator[str, None, None]:
31 | """
32 | Generate a temporary directory to work in.
33 |
34 | :yield: this fixture generates dir for all test projects.
35 | """
36 | old_cwd = os.getcwd()
37 | newpath = tempfile.mkdtemp()
38 | os.chdir(newpath)
39 | try:
40 | yield newpath
41 | finally:
42 | os.chdir(old_cwd)
43 | shutil.rmtree(newpath, ignore_errors=True)
44 |
45 |
46 | @pytest.fixture()
47 | def default_context(project_name: str) -> BuilderContext:
48 | """
49 | Default builder context without features.
50 |
51 | :param project_name: current project name.
52 | :return: context.
53 | """
54 | return BuilderContext(
55 | project_name=project_name,
56 | kube_name=project_name.replace("_", "-"),
57 | api_type="rest",
58 | ci_type="none",
59 | db="none",
60 | db_info=model_dump_compat(Database(name="none")),
61 | enable_redis=False,
62 | enable_taskiq=False,
63 | add_users=False,
64 | enable_migrations=False,
65 | enable_kube=False,
66 | enable_routers=True,
67 | add_dummy=False,
68 | self_hosted_swagger=False,
69 | enable_rmq=False,
70 | prometheus_enabled=False,
71 | otlp_enabled=False,
72 | sentry_enabled=False,
73 | force=True,
74 | )
75 |
76 |
77 | @pytest.fixture(autouse=True)
78 | def default_dir(generator_start_dir: str) -> Generator[None, None, None]:
79 | """
80 | Change back to generator_start_dir after each test.
81 |
82 | :param generator_start_dir: starting directory.
83 | """
84 | yield
85 | cwd = os.getcwd()
86 | if cwd != generator_start_dir:
87 | os.chdir(generator_start_dir)
88 |
89 |
90 | @pytest.fixture(autouse=True)
91 | def docker_module_shutdown(
92 | generator_start_dir: str, project_name: str
93 | ) -> Generator[None, None, None]:
94 | """
95 | Cleans up docker context.
96 |
97 | :param generator_start_dir: generator dir.
98 | :param project_name: name of the project.
99 | """
100 | yield
101 | cwd = os.getcwd()
102 | project_dir = Path(generator_start_dir) / project_name
103 | if not project_dir.exists():
104 | return
105 | os.chdir(project_dir)
106 | Path("poetry.lock").unlink(missing_ok=True)
107 | run_docker_compose_command("down -v")
108 | os.chdir(cwd)
109 |
--------------------------------------------------------------------------------
/fastapi_template/tests/test_generator.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | import pytest
4 |
5 | from fastapi_template.cli import db_menu
6 | from fastapi_template.input_model import BuilderContext
7 | from fastapi_template.tests.utils import model_dump_compat, run_default_check
8 |
9 |
10 | def init_context(
11 | context: BuilderContext,
12 | db: str,
13 | orm: Optional[str],
14 | api: Optional[str] = None,
15 | ) -> BuilderContext:
16 | db_info = None
17 | for entry in db_menu.entries:
18 | if entry.code == db:
19 | db_info = model_dump_compat(entry.additional_info)
20 |
21 | if db_info is None:
22 | raise ValueError(f"Unknown database: {db}")
23 |
24 | context.db = db
25 | context.db_info = db_info
26 | context.orm = orm
27 |
28 | if api is not None:
29 | context.api_type = api
30 |
31 | context.enable_migrations = db != "none"
32 | context.add_dummy = db != "none"
33 |
34 | return context
35 |
36 |
37 | def test_default_without_db(default_context: BuilderContext, worker_id: str):
38 | run_default_check(init_context(default_context, "none", None), worker_id)
39 |
40 |
41 | @pytest.mark.parametrize(
42 | "db",
43 | [
44 | "postgresql",
45 | "sqlite",
46 | "mysql",
47 | ],
48 | )
49 | @pytest.mark.parametrize(
50 | "orm",
51 | [
52 | "sqlalchemy",
53 | "tortoise",
54 | "ormar",
55 | "piccolo",
56 | ],
57 | )
58 | def test_default_with_db(default_context: BuilderContext, db: str, orm: str, worker_id: str):
59 | if orm == "piccolo" and db == "mysql":
60 | return
61 | run_default_check(init_context(default_context, db, orm), worker_id)
62 |
63 |
64 | @pytest.mark.parametrize(
65 | "db",
66 | [
67 | "mongodb",
68 | ],
69 | )
70 | @pytest.mark.parametrize(
71 | "orm",
72 | [
73 | "beanie",
74 | ],
75 | )
76 | def test_default_with_nosql_db(default_context: BuilderContext, db: str, orm: str, worker_id: str):
77 | run_default_check(init_context(default_context, db, orm), worker_id)
78 |
79 |
80 | @pytest.mark.parametrize("api", ["rest", "graphql"])
81 | @pytest.mark.parametrize(
82 | "orm",
83 | [
84 | "sqlalchemy",
85 | "tortoise",
86 | "ormar",
87 | "piccolo",
88 | ],
89 | )
90 | def test_default_for_apis(default_context: BuilderContext, orm: str, api: str, worker_id: str):
91 | run_default_check(init_context(default_context, "postgresql", orm, api), worker_id)
92 |
93 |
94 | @pytest.mark.parametrize("api", ["rest", "graphql"])
95 | @pytest.mark.parametrize(
96 | "orm",
97 | [
98 | "beanie",
99 | ]
100 | )
101 | def test_default_for_apis_with_nosql_db(default_context: BuilderContext, orm: str, api: str, worker_id: str):
102 | run_default_check(init_context(default_context, "mongodb", orm, api), worker_id)
103 |
104 |
105 | @pytest.mark.parametrize(
106 | "orm",
107 | [
108 | "psycopg",
109 | ],
110 | )
111 | def test_pg_drivers(default_context: BuilderContext, orm: str, worker_id: str):
112 | run_default_check(init_context(default_context, "postgresql", orm), worker_id)
113 |
114 |
115 | @pytest.mark.parametrize(
116 | "orm",
117 | [
118 | "sqlalchemy",
119 | "tortoise",
120 | "ormar",
121 | "psycopg",
122 | "piccolo",
123 | ],
124 | )
125 | def test_without_routers(default_context: BuilderContext, orm: str, worker_id: str):
126 | context = init_context(default_context, "postgresql", orm)
127 | context.enable_routers = False
128 | run_default_check(context, worker_id)
129 |
130 |
131 | def test_without_routers_with_nosql_db(default_context: BuilderContext, worker_id: str):
132 | context = init_context(default_context, "mongodb", "beanie")
133 | context.enable_routers = False
134 | run_default_check(context, worker_id)
135 |
136 |
137 | @pytest.mark.parametrize(
138 | "orm",
139 | [
140 | "sqlalchemy",
141 | "tortoise",
142 | "ormar",
143 | "piccolo",
144 | ],
145 | )
146 | def test_without_migrations(default_context: BuilderContext, orm: str, worker_id: str):
147 | context = init_context(default_context, "postgresql", orm)
148 | context.enable_migrations = False
149 | run_default_check(context, worker_id)
150 |
151 |
152 | def test_without_migrations_with_nosql_db(default_context: BuilderContext, worker_id: str):
153 | context = init_context(default_context, "mongodb", "beanie")
154 | context.enable_migrations = False
155 | run_default_check(context, worker_id)
156 |
157 |
158 | def test_with_selfhosted_swagger(default_context: BuilderContext, worker_id: str):
159 | default_context.self_hosted_swagger = True
160 | run_default_check(default_context, worker_id)
161 |
162 |
163 | @pytest.mark.parametrize(
164 | "orm",
165 | [
166 | "sqlalchemy",
167 | "tortoise",
168 | "ormar",
169 | "psycopg",
170 | "piccolo",
171 | ],
172 | )
173 | def test_without_dummy(default_context: BuilderContext, orm: str, worker_id: str):
174 | context = init_context(default_context, "postgresql", orm)
175 | context.add_dummy = False
176 | run_default_check(context, worker_id)
177 |
178 |
179 | def test_without_dummy_with_nosql_db(default_context: BuilderContext, worker_id: str):
180 | context = init_context(default_context, "mongodb", "beanie")
181 | context.add_dummy = False
182 | run_default_check(context, worker_id)
183 |
184 |
185 | @pytest.mark.parametrize(
186 | "api",
187 | [
188 | "rest",
189 | "graphql",
190 | ],
191 | )
192 | def test_redis(default_context: BuilderContext, api: str, worker_id: str):
193 | default_context.enable_redis = True
194 | default_context.enable_taskiq = True
195 | default_context.api_type = api
196 | run_default_check(default_context, worker_id)
197 |
198 |
199 | @pytest.mark.parametrize(
200 | "api",
201 | [
202 | "rest",
203 | "graphql",
204 | ],
205 | )
206 | def test_rmq(default_context: BuilderContext, api: str, worker_id: str):
207 | default_context.enable_rmq = True
208 | default_context.enable_taskiq = True
209 | default_context.api_type = api
210 | run_default_check(default_context, worker_id)
211 |
212 |
213 | def test_telemetry_pre_commit(default_context: BuilderContext, worker_id: str):
214 | default_context.enable_rmq = True
215 | default_context.enable_redis = True
216 | default_context.prometheus_enabled = True
217 | default_context.otlp_enabled = True
218 | default_context.sentry_enabled = True
219 | default_context.enable_loguru = True
220 | run_default_check(default_context, worker_id, without_pytest=True)
221 |
222 |
223 | def test_gunicorn(default_context: BuilderContext, worker_id: str):
224 | default_context.gunicorn = True
225 | run_default_check(default_context, worker_id, without_pytest=True)
226 |
227 |
228 | @pytest.mark.parametrize("api", ["rest", "graphql"])
229 | def test_kafka(default_context: BuilderContext, api: str, worker_id: str):
230 | default_context.enable_kafka = True
231 | default_context.api_type = api
232 | run_default_check(default_context, worker_id)
233 |
--------------------------------------------------------------------------------
/fastapi_template/tests/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 | import shlex
4 | import subprocess
5 | from typing import Any, Optional
6 |
7 | import yaml
8 | from fastapi_template.input_model import BuilderContext
9 | from fastapi_template.__main__ import generate_project
10 |
11 |
12 | def generate_project_and_chdir(context: BuilderContext):
13 | generate_project(context)
14 | os.chdir(context.project_name)
15 |
16 |
17 | def run_pre_commit() -> int:
18 | results = subprocess.run(["pre-commit", "run", "-a"])
19 | return results.returncode
20 |
21 |
22 | def run_docker_compose_command(
23 | command: Optional[str] = None,
24 | ) -> subprocess.CompletedProcess:
25 | docker_command = ["docker", "compose"]
26 | if command:
27 | docker_command.extend(shlex.split(command))
28 | else:
29 | docker_command.extend(["build"])
30 | return subprocess.run(docker_command)
31 |
32 |
33 | def run_default_check(context: BuilderContext, worker_id: str, without_pytest=False):
34 | generate_project_and_chdir(context)
35 | compose = Path("./docker-compose.yml")
36 | with compose.open("r") as compose_file:
37 | data = yaml.safe_load(compose_file)
38 | data["services"]["api"]["image"] = f"test_image:v{worker_id}"
39 | with compose.open("w") as compose_file:
40 | yaml.safe_dump(data, compose_file)
41 |
42 | assert run_pre_commit() == 0
43 |
44 | if without_pytest:
45 | return
46 |
47 | build = run_docker_compose_command("build")
48 | assert build.returncode == 0
49 | tests = run_docker_compose_command("run --rm api pytest -vv .")
50 | assert tests.returncode == 0
51 |
52 |
53 | def model_dump_compat(model: Any):
54 | if hasattr(model, "model_dump"):
55 | return model.model_dump()
56 | return model.dict()
57 |
--------------------------------------------------------------------------------
/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3rius/FastAPI-template/c236a2faf8ae6fe3e0d0c4a2425e03a248cf988d/images/logo.png
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "fastapi_template"
3 | version = "0.0.0"
4 | description = "Feature-rich robust FastAPI template"
5 | authors = ["Pavel Kirilin "]
6 | packages = [{ include = "fastapi_template" }]
7 | repository = "https://github.com/s3rius/FastAPI-template"
8 | homepage = "https://github.com/s3rius/FastAPI-template"
9 | readme = "README.md"
10 | keywords = ["FastAPI", "Cookiecutter", "Template"]
11 |
12 |
13 | [tool.poetry.dependencies]
14 | python = "^3.9"
15 | cookiecutter = "^1.7.3"
16 | pre-commit = "^2.14.0"
17 | termcolor = "^1.1.0"
18 | pydantic = ">=1.10"
19 | simple-term-menu = "^1.5.2"
20 | click = "^8.1.3"
21 | prompt-toolkit = "^3.0.36"
22 |
23 | [tool.poetry.group.dev.dependencies]
24 | pytest = "^7"
25 | pyyaml = "^6.0.1"
26 | pytest-env = "^1"
27 | Faker = "^8.14.0"
28 | pytest-xdist = {version = "^3", extras = ["psutil"]}
29 | requests = "^2.28.1"
30 | pytest-retry = "^1.6.3"
31 |
32 | [tool.pytest.ini_options]
33 | minversion = "6.0"
34 | markers = [
35 | "pg: tests for postgresql.",
36 | "mysql: tests for mysql.",
37 | "sqlite: tests for sqlite3.",
38 | ]
39 | env = [
40 | "POETRY_VIRTUALENVS_IN_PROJECT=True",
41 | "PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring",
42 | ]
43 | testpaths = ["fastapi_template/tests"]
44 | retries = 3
45 | retry_delay = 2
46 | cumulative_timing = true
47 |
48 | [tool.poetry.scripts]
49 | fastapi_template = "fastapi_template.__main__:main"
50 |
51 | [build-system]
52 | requires = ["poetry-core>=1.0.0"]
53 | build-backend = "poetry.core.masonry.api"
54 |
--------------------------------------------------------------------------------
/scripts/version_bumper.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | from pathlib import Path
3 | from typing import List, Optional, Tuple
4 | import re
5 | import requests
6 |
7 | RAW_VERSION_RE = re.compile(r'(?P<package>.*)\s*=\s*\"(?P<version>[\^\~\>\=\<\!]?[\d\.\-\w]+)\"')
8 | EXPANDED_VER_RE = re.compile(
9 | r'(?P<package>.*)\s*=\s*\{(.*)version\s*=\s*\"(?P<version>[\^\~\>\=\<\!]?[\d\.\-\w]+)\"(.*)\}'
10 | )
11 |
12 | def parse_args() -> argparse.Namespace:
13 | parser = argparse.ArgumentParser()
14 | parser.add_argument(
15 | "file",
16 | type=Path,
17 | )
18 | parser.add_argument(
19 | "--section",
20 | "-s",
21 | type=str,
22 | default="tool.poetry.dependencies",
23 | )
24 | return parser.parse_args()
25 |
26 | def get_dependencies(path: Path, section: str) -> List[Tuple[int, str]]:
27 | read_file = path.read_text()
28 | recording = False
29 | deps = []
30 | for index, line in enumerate(read_file.splitlines(keepends=False)):
31 | if line.startswith('[') and line.strip('[]') != section:
32 | recording = False
33 | continue
34 | if line == f"[{section}]":
35 | recording = True
36 | continue
37 | if line.startswith('python ='):
38 | continue
39 | if line.startswith('{%'):
40 | continue
41 | if recording:
42 | deps.append((index, line))
43 | return deps
44 |
45 | def get_new_version(package_name: str) -> Optional[str]:
46 | resp = requests.get(f'https://pypi.org/pypi/{package_name}/json')
47 | if not resp.ok:
48 | return None
49 | rjson = resp.json()
50 | return rjson['info']["version"]
51 |
52 |
53 | def bump_version(dependency: str) -> Optional[str]:
54 | exp_match = EXPANDED_VER_RE.match(dependency)
55 | raw_match = None
56 | if exp_match:
57 | package = exp_match.group("package").strip()
58 | version = exp_match.group("version").lstrip("^=!~<>")
59 | else:
60 | raw_match = RAW_VERSION_RE.match(dependency)
61 | if raw_match:
62 | package = raw_match.group("package").strip()
63 | version = raw_match.group("version").lstrip("^=!~<>")
64 | if exp_match is None and raw_match is None:
65 | return None
66 |
67 | print(f"Checking {package}")
68 | new_version = get_new_version(package)
69 | if new_version is not None and version != new_version:
70 | print(f"Found new version: {new_version}")
71 | return dependency.replace(version, new_version)
72 |
73 | return None
74 |
75 | def main():
76 | args = parse_args()
77 | deps = get_dependencies(args.file, args.section)
78 | lines = args.file.read_text().splitlines(keepends=False)
79 | for i, dep in deps:
80 | new_version = bump_version(dep)
81 | if new_version:
82 | lines[i] = new_version
83 | args.file.write_text("\n".join(lines))
84 |
85 |
86 |
87 | if __name__ == "__main__":
88 | main()
89 |
--------------------------------------------------------------------------------