├── .github └── FUNDING.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── cookiecutter.json └── {{cookiecutter.repo_name}} ├── .env ├── .github └── workflows │ ├── .gitkeep │ └── release.yml ├── .gitignore ├── .pre-commit-config.yaml ├── Dockerfile ├── Makefile ├── README.md ├── alembic.ini ├── alembic ├── README ├── env.py ├── script.py.mako └── versions │ └── .gitkeep ├── app ├── __init__.py ├── daos │ ├── __init__.py │ ├── base.py │ └── user.py ├── db.py ├── main.py ├── models │ ├── __init__.py │ ├── base.py │ └── user.py ├── routers │ ├── __init__.py │ ├── api_router.py │ └── user.py ├── schemas │ ├── __init__.py │ ├── token.py │ └── user.py ├── services │ ├── __init__.py │ ├── user.py │ └── utils.py └── settings.py ├── docker-compose.yaml ├── pyproject.toml ├── scripts ├── autogenerate.sh └── downgrade.sh ├── static └── welcome.txt └── tests └── __init__.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: Lolomgrofl 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # ---> Python 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 100 | # This is especially recommended for binary packages to ensure reproducibility, and is more 101 | # commonly ignored for libraries. 102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 103 | #poetry.lock 104 | 105 | # pdm 106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
107 | #pdm.lock 108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 109 | # in version control. 110 | # https://pdm.fming.dev/#use-with-ide 111 | .pdm.toml 112 | 113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | .python-version 153 | # Cython debug symbols 154 | cython_debug/ 155 | .vscode/ 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
161 | .idea/ 162 | *.lock 163 | setup.cfg 164 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3.10 3 | repos: 4 | - repo: https://github.com/astral-sh/ruff-pre-commit 5 | rev: v0.9.6 6 | hooks: 7 | - id: ruff 8 | args: [ --fix, --exit-non-zero-on-fix, --exclude, alembic] 9 | - repo: https://github.com/asottile/pyupgrade 10 | rev: v3.19.1 11 | hooks: 12 | - id: pyupgrade 13 | args: 14 | - --py3-plus 15 | - --keep-runtime-typing 16 | 17 | - repo: https://github.com/psf/black 18 | rev: 25.1.0 19 | hooks: 20 | - id: black 21 | args: [--safe] 22 | 23 | - repo: https://github.com/pre-commit/pre-commit-hooks 24 | rev: v5.0.0 25 | hooks: 26 | - id: check-added-large-files 27 | - id: check-toml 28 | - id: check-yaml 29 | args: 30 | - --unsafe 31 | - id: end-of-file-fixer 32 | - id: trailing-whitespace 33 | 34 | ci: 35 | autofix_commit_msg: 🎨 [pre-commit.ci] Auto format from pre-commit.com hooks 36 | autoupdate_commit_msg: ⬆ [pre-commit.ci] pre-commit autoupdate 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Luka Vuksanović 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FastAPI Genesis 🧬 - Project Template Generator 🚀 2 | Simple FastAPI project template with Docker, Alembic, PostgreSQL, Poetry and pre-commit to kickstart your new projects. 3 | 4 | ## How to use it 🤓 5 | 6 | Go to the directory where you want to create your project and run: 7 | 8 | ```bash 9 | pip install cookiecutter 10 | cookiecutter https://github.com/Lolomgrofl/fastapi-genesis.git 11 | ``` 12 | 13 | ## What's included in the template 🎉 14 | 15 | - Here is an explanation of the directory structure of the template: 16 | ``` 17 | ├── .github <- GitHub deployment workflow 18 | │ 19 | ├── alembic <- Alembic migrations 20 | │ 21 | ├── app <- Source code of the application (the main package) 22 | │ ├── daos <- Data Access Objects (DAOs) to interact with the database 23 | │ ├── models <- SQLAlchemy models (the database schema) 24 | │ ├── routers <- FastAPI routers (endpoints) 25 | │ ├── schemas <- Pydantic schemas for request and response models 26 | │ ├── services <- Business logic layer (services) 27 | │ ├── db.py <- Database initialization and session management code 28 | │ ├── main.py <- FastAPI application instance and startup code 29 | │ └── settings.py <- Settings management code (using pydantic settings) 30 | │ 31 | ├── scripts <- Scripts to perform various tasks like alembic migrations, 
etc. 32 | │ 33 | ├── static <- Static files like images, documents, etc. 34 | │ 35 | ├── tests <- Unit tests, one subdirectory per application module 36 | │ 37 | ├── .env <- Environment variables. Should not be committed to VCS 38 | │ 39 | ├── .gitignore <- Files and directories to be ignored by git 40 | │ 41 | ├── .pre-commit-config.yaml <- Configuration of pre-commit hooks (see https://pre-commit.com/) 42 | │ 43 | ├── alembic.ini <- Alembic configuration file 44 | │ 45 | ├── docker-compose.yml <- Docker compose configuration file 46 | │ 47 | ├── Dockerfile <- Dockerfile for building the image of the application 48 | │ 49 | ├── Makefile <- Makefile with useful commands for development and project setup 50 | │ 51 | ├── pyproject.toml <- Python dependencies for Poetry (see https://python-poetry.org/) 52 | │ 53 | ├── README.md <- File with useful information about the project and how to use it 54 | ``` 55 | 56 | ## Features 🧩 57 | 58 | - **Docker** and **docker-compose** for local development 59 | - **FastAPI** application with **uvicorn** server 60 | - **AsyncPG** for asynchronous access to PostgreSQL 61 | - **Pydantic** for data validation 62 | - **Poetry** for managing Python dependencies 63 | - **Alembic** for database migrations 64 | - **Pre-commit** hooks for code formatting and linting 65 | - **JWT** token authentication 66 | - **SQLAlchemy** models 67 | - **CORS** (Cross Origin Resource Sharing) 68 | - **GitOps** ( GitHub Workflows) 69 | 70 | ## User flow as an example of how to use the template 💡 71 | 72 | - It consists of the following steps: 73 | ``` 74 | - Register a new user 75 | - Login with the new user 76 | - Change password 77 | - Get all users 78 | - Delete user by id 79 | - Delete all users 80 | ``` 81 | - This following example will show you the pattern and good practices to follow in order to continue developing your application. 
82 | 83 | 84 | ## Cookiecutter parameters explained 🍪 85 | 86 | 87 | - `repo_name`: Name of the project repository (e.g. `my_project`) 88 | - `app_container_name`: Name of the Docker container for the FastAPI application server inside `docker-compose.yaml` file 89 | - `app_service_port`: Port on the host machine to which the FastAPI application will be exposed inside `docker-compose.yaml` file and `Dockerfile` 90 | - `db_container_name`: Name of the Docker container for the PostgreSQL database server inside `docker-compose.yaml` file 91 | - `db_service_port`: Port on the host machine to which the PostgreSQL database server will be exposed inside `docker-compose.yaml` file 92 | - `pgadmin_container_name`: Name of the Docker container for the pgAdmin web interface inside `docker-compose.yaml` file 93 | - `pgadmin_service_port`: Port on the host machine to which the pgAdmin web interface will be exposed inside `docker-compose.yaml` file 94 | - `network_name`: Name of the Docker network that will be created inside `docker-compose.yaml` file 95 | 96 | # GLHF 🚀 97 | 98 | ## License 99 | 100 | This project is licensed under the terms of the MIT license. 
101 | -------------------------------------------------------------------------------- /cookiecutter.json: -------------------------------------------------------------------------------- 1 | { 2 | "repo_name": "fastapi_genesis", 3 | "app_container_name": "fast_app", 4 | "app_service_port": "8000", 5 | "db_container_name": "fast_db", 6 | "db_service_port": "5432", 7 | "pgadmin_container_name": "fast_pgadmin", 8 | "pgadmin_service_port": "5050", 9 | "network_name": "fast_network" 10 | } 11 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.env: -------------------------------------------------------------------------------- 1 | # Postgres 2 | POSTGRES_USER="postgres" 3 | POSTGRES_PASSWORD="postgres" 4 | POSTGRES_HOST="db" 5 | POSTGRES_PORT=5432 6 | POSTGRES_DB="fastdb" 7 | # PGAdmin 8 | PGADMIN_EMAIL="fastaexample@gmail.com" 9 | PGADMIN_PASSWORD=123 10 | # Alembic + Sqlalchemy Session 11 | POSTGRES_URL = postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} 12 | 13 | # Feel free to add/remove/modify these settings depending on your needs 14 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.github/workflows/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/.github/workflows/.gitkeep -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # The logic for the release.yml action is not inside this file, but rather inside the .gitkeep file in the same directory. The .gitkeep file is used to keep the directory in version control even if it is empty. 
2 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.gitignore: -------------------------------------------------------------------------------- 1 | # ---> Python 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 100 | # This is especially recommended for binary packages to ensure reproducibility, and is more 101 | # commonly ignored for libraries. 102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 103 | #poetry.lock 104 | 105 | # pdm 106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 107 | #pdm.lock 108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 109 | # in version control. 110 | # https://pdm.fming.dev/#use-with-ide 111 | .pdm.toml 112 | 113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | .python-version 153 | # Cython debug symbols 154 | cython_debug/ 155 | .vscode/ 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global 
gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 161 | .idea/ 162 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3.10 3 | repos: 4 | - repo: https://github.com/astral-sh/ruff-pre-commit 5 | rev: v0.0.275 6 | hooks: 7 | - id: ruff 8 | args: [ --fix, --exit-non-zero-on-fix, --exclude, alembic] 9 | 10 | - repo: https://github.com/psf/black 11 | rev: 23.1.0 12 | hooks: 13 | - id: black 14 | args: [--safe, --exclude, alembic] 15 | 16 | - repo: https://github.com/pre-commit/pre-commit-hooks 17 | rev: v4.4.0 18 | hooks: 19 | - id: check-added-large-files 20 | - id: check-toml 21 | - id: check-yaml 22 | args: 23 | - --unsafe 24 | - id: end-of-file-fixer 25 | - id: trailing-whitespace 26 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-slim-bullseye as requirements-stage 2 | 3 | WORKDIR /tmp 4 | 5 | RUN pip install poetry 6 | 7 | COPY ./pyproject.toml ./poetry.lock* /tmp/ 8 | 9 | # we export pyproject.toml which is poetry-native to plain requirements.txt that we can install with pip 10 | # as it's the recommended approach thanks to which we do not have to install poetry in production-stage image 11 | RUN poetry export -f requirements.txt --output requirements.txt --without-hashes 12 | 13 | FROM python:3.10-slim-bullseye as production-stage 14 | 15 | # Install system dependencies 16 | RUN apt-get update && apt-get -y install libpq-dev gcc g++ curl procps net-tools tini 17 | 18 | # Set up the Python environment 19 | ENV PYTHONFAULTHANDLER=1 20 | ENV PYTHONUNBUFFERED=1 21 | 22 | # Set 
the working directory 23 | WORKDIR /app 24 | 25 | # Install the project dependencies 26 | COPY --from=requirements-stage /tmp/requirements.txt /app/requirements.txt 27 | RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt 28 | 29 | # Copy the rest of the project files 30 | COPY . /app/ 31 | 32 | # Expose the application port 33 | EXPOSE {{ cookiecutter.app_service_port }} 34 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help 2 | help: ## Show this help (usage: make help) 3 | @echo "Usage: make [target]" 4 | @echo "Targets:" 5 | @awk '/^[a-zA-Z0-9_-]+:.*?##/ { \ 6 | helpMessage = match($$0, /## (.*)/); \ 7 | if (helpMessage) { \ 8 | target = $$1; \ 9 | sub(/:/, "", target); \ 10 | printf " \033[36m%-20s\033[0m %s\n", target, substr($$0, RSTART + 3, RLENGTH); \ 11 | } \ 12 | }' $(MAKEFILE_LIST) 13 | 14 | 15 | .PHONY: build 16 | build: ## Build project with docker-compose 17 | docker-compose up --build 18 | 19 | .PHONY: up 20 | up: ## Run project with docker-compose 21 | docker-compose up 22 | 23 | .PHONY: down 24 | down: ## Stop project with docker-compose and remove containers and networks 25 | docker-compose down --remove-orphans | true 26 | 27 | 28 | .PHONY: autogenerate 29 | autogenerate: ## Generate migration file (usage: make autogenerate msg="migration message") 30 | docker-compose up -d | true 31 | docker-compose exec app alembic revision --autogenerate -m "$(msg)" 32 | 33 | .PHONY: downgrade 34 | downgrade: ## Downgrade by 1 revision 35 | docker-compose up -d | true 36 | docker-compose exec app alembic downgrade -1 37 | 38 | .PHONY: downgrade_to 39 | downgrade_to: ## Downgrade to the specific revision (usage: make downgrade_to revision="revision") 40 | docker-compose up -d | true 41 | docker-compose exec app alembic downgrade "$(revision)" 42 | 
-------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/README.md: -------------------------------------------------------------------------------- 1 | ## Pre-requisites 2 | 3 | Before getting started, please ensure that you have the following dependencies installed on your system: 4 | 5 | - Docker: [Installation guide](https://docs.docker.com/get-docker/) 6 | - Docker Compose: [Installation guide](https://docs.docker.com/compose/install/) 7 | - Poetry: [Installation guide](https://python-poetry.org/docs/#installation) 8 | 9 | ## Getting Started 10 | 11 | To set up and run the app, please follow these steps: 12 | 13 | 1. Move to the directory where `pyproject.toml` is located: 14 | 15 | ```shell 16 | cd {{ cookiecutter.repo_name }} 17 | ``` 18 | 2. Install the dependencies: 19 | 20 | ```shell 21 | poetry install 22 | ``` 23 | 24 | If you don't want to install the dev packages, 25 | you can use the following command instead: 26 | ```shell 27 | poetry install --without dev 28 | ``` 29 | 30 | 3. Activate the virtual environment: 31 | 32 | ```shell 33 | poetry shell 34 | ``` 35 | 36 | 4. All necessary commands to start with the project can be found in Makefile. 37 | To see all available commands, run the following command: 38 | 39 | ```shell 40 | make help 41 | ``` 42 | 43 | 5. Build and start the Docker containers: 44 | 45 | ```shell 46 | make build 47 | ``` 48 | 49 | 6. Open your browser and go to `http://localhost:{{ cookiecutter.app_service_port }}` to see the app running. 50 | 51 | 7. Since there is only one SQLAlchemy model, you can create a new migration file by running the following command: 52 | 53 | ```shell 54 | make autogenerate msg="user_init" 55 | ``` 56 | 57 | This will create a new migration file in `{{ cookiecutter.repo_name }}/alembic/versions/`. 
58 | 59 | Since the `app` service inside `docker-compose.yaml` will automatically run the `alembic upgrade head` command whenever it starts, in order to apply the new migration, you just need to stop the containers and start them again. CTRL+C to stop the containers and then run the following command to start them again: 60 | ```shell 61 | make up 62 | ``` 63 | 64 | 8. To check the documentation of the API, go to `http://localhost:{{ cookiecutter.app_service_port }}/docs`. 65 | 66 | 9. To check the database you can use hosted `pgAdmin` tool, just go to `http://localhost:{{ cookiecutter.pgadmin_service_port }}` and login with the credentials from `.env` file: 67 | - Email: `$PGADMIN_EMAIL` 68 | - Password: `$PGADMIN_PASSWORD` 69 | 70 | # ENJOY AND GOOD LUCK WITH YOUR PROJECT! 🧬 🚀 71 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = alembic 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 
19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to alembic/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
53 | 54 | # set to 'true' to search source files recursively 55 | # in each "version_locations" directory 56 | # new in Alembic version 1.10 57 | # recursive_version_locations = false 58 | 59 | # the output encoding used when revision files 60 | # are written from script.py.mako 61 | # output_encoding = utf-8 62 | 63 | 64 | [post_write_hooks] 65 | # post_write_hooks defines scripts or Python functions that are run 66 | # on newly generated revision scripts. See the documentation for further 67 | # detail and examples 68 | 69 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 70 | # hooks = black 71 | # black.type = console_scripts 72 | # black.entrypoint = black 73 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 74 | 75 | # Logging configuration 76 | [loggers] 77 | keys = root,sqlalchemy,alembic 78 | 79 | [handlers] 80 | keys = console 81 | 82 | [formatters] 83 | keys = generic 84 | 85 | [logger_root] 86 | level = WARN 87 | handlers = console 88 | qualname = 89 | 90 | [logger_sqlalchemy] 91 | level = WARN 92 | handlers = 93 | qualname = sqlalchemy.engine 94 | 95 | [logger_alembic] 96 | level = INFO 97 | handlers = 98 | qualname = alembic 99 | 100 | [handler_console] 101 | class = StreamHandler 102 | args = (sys.stderr,) 103 | level = NOTSET 104 | formatter = generic 105 | 106 | [formatter_generic] 107 | format = %(levelname)-5.5s [%(name)s] %(message)s 108 | datefmt = %H:%M:%S 109 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 
import asyncio

from alembic import context
from app.models.base import Base
from app.models.user import *  # noqa: F401,F403 -- import models so they register on Base.metadata
from app.settings import settings
from sqlalchemy.ext.asyncio import create_async_engine

# Metadata object that Alembic diffs against the live database schema
# when autogenerating revisions.
target_metadata = Base.metadata


def run_migrations(connection):
    """Configure the Alembic context on *connection* and run the migrations.

    Runs synchronously; invoked via ``AsyncConnection.run_sync`` below.
    """
    context.configure(
        connection=connection,
        compare_type=True,  # detect column type changes, not just add/drop
        dialect_opts={"paramstyle": "named"},
        target_metadata=target_metadata,
        include_schemas=True,
        version_table_schema=target_metadata.schema,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = create_async_engine(settings.POSTGRES_URL.unicode_string(), future=True)

    try:
        async with connectable.connect() as connection:
            await connection.run_sync(run_migrations)
    finally:
        # Dispose the engine so its connection pool is closed cleanly
        # before the event loop shuts down (matches Alembic's asyncio
        # env.py template; previously the engine was never disposed).
        await connectable.dispose()


asyncio.run(run_migrations_online())
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/alembic/versions/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/alembic/versions/.gitkeep -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.1.0" 2 | __author__ = "" 3 | 4 | # MAJOR.MINOR.PATCH 5 | # MAJOR version when you make incompatible API changes, 6 | # MINOR version when you add functionality in a backwards compatible manner, and 7 | # PATCH version when you make backwards compatible bug fixes. 8 | # Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format. 9 | # For more information, see https://semver.org/. 
10 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/daos/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/app/daos/__init__.py -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/daos/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from sqlalchemy.ext.asyncio import AsyncSession 4 | 5 | 6 | class BaseDao(ABC): 7 | def __init__(self, session: AsyncSession): 8 | self.session = session 9 | 10 | @abstractmethod 11 | async def create(self, request): 12 | pass 13 | 14 | @abstractmethod 15 | async def get_by_id(self, id): 16 | pass 17 | 18 | @abstractmethod 19 | async def get_all(self): 20 | pass 21 | 22 | @abstractmethod 23 | async def delete_all(self): 24 | pass 25 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/daos/user.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import delete, select 2 | from sqlalchemy.ext.asyncio import AsyncSession 3 | 4 | from app.daos.base import BaseDao 5 | from app.models.user import User 6 | 7 | 8 | class UserDao(BaseDao): 9 | def __init__(self, session: AsyncSession) -> None: 10 | super().__init__(session) 11 | 12 | async def create(self, user_data) -> User: 13 | _user = User(**user_data) 14 | self.session.add(_user) 15 | await self.session.commit() 16 | await self.session.refresh(_user) 17 | return _user 18 | 19 | async def get_by_id(self, user_id: int) -> User | None: 20 | statement = select(User).where(User.id == user_id) 21 | return await self.session.scalar(statement=statement) 22 | 23 | async def get_by_email(self, email) -> 
User | None: 24 | statement = select(User).where(User.email == email) 25 | return await self.session.scalar(statement=statement) 26 | 27 | async def get_all(self) -> list[User]: 28 | statement = select(User).order_by(User.id) 29 | result = await self.session.execute(statement=statement) 30 | return result.scalars().all() 31 | 32 | async def delete_all(self) -> None: 33 | await self.session.execute(delete(User)) 34 | await self.session.commit() 35 | 36 | async def delete_by_id(self, user_id: int) -> User | None: 37 | _user = await self.get_by_id(user_id=user_id) 38 | statement = delete(User).where(User.id == user_id) 39 | await self.session.execute(statement=statement) 40 | await self.session.commit() 41 | return _user 42 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/db.py: -------------------------------------------------------------------------------- 1 | from fastapi import Depends 2 | 3 | from collections.abc import AsyncGenerator 4 | from typing import Annotated 5 | 6 | from sqlalchemy.ext.asyncio import ( 7 | AsyncSession, 8 | async_sessionmaker, 9 | create_async_engine, 10 | ) 11 | 12 | from app.settings import settings 13 | 14 | postgres_url = settings.POSTGRES_URL.unicode_string() 15 | 16 | engine = create_async_engine(postgres_url, echo=True, future=True) 17 | AsyncSessionFactory = async_sessionmaker( 18 | autocommit=False, 19 | autoflush=False, 20 | expire_on_commit=False, 21 | bind=engine, 22 | class_=AsyncSession, 23 | ) 24 | 25 | 26 | async def get_session() -> AsyncGenerator: 27 | async with AsyncSessionFactory() as session: 28 | yield session 29 | 30 | 31 | SessionDep = Annotated[AsyncSession, Depends(get_session)] 32 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/main.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | from fastapi.middleware.cors 
import CORSMiddleware 3 | from fastapi.staticfiles import StaticFiles 4 | 5 | from app import __version__ 6 | from app.routers.api_router import api_router 7 | from app.settings import settings 8 | 9 | app = FastAPI(title=settings.PROJECT_NAME, version=__version__) 10 | 11 | app.add_middleware( 12 | CORSMiddleware, 13 | allow_origins=["*"], 14 | allow_credentials=True, 15 | allow_methods=["*"], 16 | allow_headers=["*"], 17 | ) 18 | 19 | app.include_router(api_router) 20 | app.mount("/static", StaticFiles(directory="static"), name="static") 21 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/app/models/__init__.py -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/models/base.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated 2 | 3 | from sqlalchemy.orm import DeclarativeBase, mapped_column 4 | 5 | 6 | class Base(DeclarativeBase): 7 | pass 8 | 9 | 10 | intpk = Annotated[int, mapped_column(primary_key=True, index=True, autoincrement=True)] 11 | str100 = Annotated[str, 100] 12 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/models/user.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import String 2 | from sqlalchemy.orm import Mapped, mapped_column 3 | 4 | from app.models.base import Base, intpk, str100 5 | 6 | 7 | class User(Base): 8 | __tablename__ = "user" 9 | 10 | id: Mapped[intpk] 11 | email: Mapped[str] = mapped_column(String(100), unique=True, index=True, nullable=False) 12 | password: Mapped[str] 13 | first_name: 
Mapped[str100 | None] 14 | last_name: Mapped[str100 | None] 15 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/routers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/app/routers/__init__.py -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/routers/api_router.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from app.routers import user 4 | from app.settings import settings 5 | 6 | api_router = APIRouter(prefix=settings.API_V1_STR) 7 | 8 | api_router.include_router(user.router) 9 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/routers/user.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends, status 2 | from fastapi.security import OAuth2PasswordRequestForm 3 | 4 | from app.db import SessionDep 5 | from app.schemas.token import Token 6 | from app.schemas.user import ChangePasswordIn, UserIn, UserOut 7 | from app.services.user import CurrentUserDep, UserService 8 | 9 | router = APIRouter(tags=["User"], prefix="/user") 10 | 11 | 12 | @router.post("/register", status_code=status.HTTP_201_CREATED) 13 | async def register_user( 14 | user_data: UserIn, 15 | session: SessionDep, 16 | ): 17 | return await UserService.register_user(user_data, session) 18 | 19 | 20 | @router.post("/token", status_code=status.HTTP_200_OK) 21 | async def token( 22 | session: SessionDep, 23 | form_data: OAuth2PasswordRequestForm = Depends(), 24 | ) -> Token: 25 | return await UserService.login(form_data, session) 26 | 27 | 28 | @router.get("/login", status_code=status.HTTP_200_OK) 
29 | async def login(current_user: CurrentUserDep) -> UserOut: 30 | return UserOut.model_validate(current_user) 31 | 32 | 33 | @router.get("/get_by_id/{user_id}", status_code=status.HTTP_200_OK) 34 | async def get_user_by_id( 35 | user_id: int, 36 | session: SessionDep, 37 | ) -> UserOut: 38 | return await UserService.get_user_by_id(user_id, session) 39 | 40 | 41 | @router.get("/get_all", status_code=status.HTTP_200_OK) 42 | async def get_all_users(session: SessionDep) -> list[UserOut]: 43 | return await UserService.get_all_users(session) 44 | 45 | 46 | @router.delete("/delete_by_id/{user_id}", status_code=status.HTTP_200_OK) 47 | async def delete_user_by_id( 48 | user_id: int, 49 | session: SessionDep, 50 | ): 51 | return await UserService.delete_user_by_id(user_id, session) 52 | 53 | 54 | @router.delete("/delete_all", status_code=status.HTTP_200_OK) 55 | async def delete_all_users(session: SessionDep): 56 | return await UserService.delete_all_users(session) 57 | 58 | 59 | @router.patch( 60 | "/change_password", 61 | status_code=status.HTTP_200_OK, 62 | summary="Change password for current user that is logged in", 63 | ) 64 | async def change_password( 65 | session: SessionDep, 66 | password_data: ChangePasswordIn, 67 | current_user: CurrentUserDep, 68 | ): 69 | return await UserService.change_password(password_data, current_user, session) 70 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/schemas/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/app/schemas/__init__.py -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/schemas/token.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | 
class Token(BaseModel): 5 | access_token: str 6 | token_type: str 7 | 8 | 9 | class TokenData(BaseModel): 10 | email: str | None = None 11 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/schemas/user.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, ConfigDict, EmailStr, field_validator 2 | 3 | 4 | class UserBase(BaseModel): 5 | email: EmailStr 6 | first_name: str | None 7 | last_name: str | None 8 | model_config = ConfigDict(from_attributes=True) 9 | 10 | 11 | class UserIn(UserBase): 12 | password: str 13 | 14 | 15 | class UserOut(UserBase): 16 | id: int 17 | 18 | 19 | class ChangePasswordIn(BaseModel): 20 | old_password: str 21 | new_password: str 22 | 23 | @field_validator("old_password") 24 | @classmethod 25 | def old_password_is_not_blank(cls, value): 26 | if not value: 27 | raise ValueError("Old password field can't be blank!!!") 28 | return value 29 | 30 | @field_validator("new_password") 31 | @classmethod 32 | def new_password_is_not_blank(cls, value): 33 | if not value: 34 | raise ValueError("New password field can't be blank!!!") 35 | return value 36 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/services/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/app/services/__init__.py -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/services/user.py: -------------------------------------------------------------------------------- 1 | from fastapi import Depends, HTTPException, status 2 | from fastapi.responses import JSONResponse 3 | from fastapi.security import OAuth2PasswordRequestForm 4 | 5 | from datetime import timedelta 
6 | from typing import Annotated 7 | 8 | from jose import JWTError, jwt 9 | from loguru import logger 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | 12 | from app.daos import user 13 | from app.db import get_session 14 | from app.models.user import User as UserModel 15 | from app.schemas.token import Token, TokenData 16 | from app.schemas.user import ChangePasswordIn, UserIn, UserOut 17 | from app.services.utils import UtilsService, oauth2_scheme 18 | from app.settings import settings 19 | 20 | 21 | class UserService: 22 | @staticmethod 23 | async def register_user(user_data: UserIn, session: AsyncSession): 24 | user_exist = await UserService.user_email_exists(session, user_data.email) 25 | 26 | if user_exist: 27 | raise HTTPException( 28 | status_code=status.HTTP_400_BAD_REQUEST, 29 | detail="User with the given email already exists!!!", 30 | ) 31 | 32 | user_data.password = UtilsService.get_password_hash(user_data.password) 33 | new_user = await user.UserDao(session).create(user_data.model_dump()) 34 | logger.info(f"New user created successfully: {new_user}!!!") 35 | return JSONResponse( 36 | content={"message": "User created successfully"}, 37 | status_code=status.HTTP_201_CREATED, 38 | ) 39 | 40 | @staticmethod 41 | async def authenticate_user(session: AsyncSession, email: str, password: str) -> UserModel | bool: 42 | _user = await user.UserDao(session).get_by_email(email) 43 | if not _user or not UtilsService.verify_password(password, _user.password): 44 | return False 45 | return _user 46 | 47 | @staticmethod 48 | async def user_email_exists(session: AsyncSession, email: str) -> UserModel | None: 49 | _user = await user.UserDao(session).get_by_email(email) 50 | return _user if _user else None 51 | 52 | @staticmethod 53 | async def login(form_data: OAuth2PasswordRequestForm, session: AsyncSession) -> Token: 54 | _user = await UserService.authenticate_user(session, form_data.username, form_data.password) 55 | if not _user: 56 | raise HTTPException( 
57 | status_code=status.HTTP_400_BAD_REQUEST, 58 | detail="Incorrect email or password", 59 | ) 60 | 61 | access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) 62 | access_token = UtilsService.create_access_token(data={"sub": _user.email}, expires_delta=access_token_expires) 63 | token_data = { 64 | "access_token": access_token, 65 | "token_type": "Bearer", 66 | } 67 | return Token(**token_data) 68 | 69 | @staticmethod 70 | async def get_current_user( 71 | session: AsyncSession = Depends(get_session), 72 | token: str = Depends(oauth2_scheme), 73 | ) -> UserModel: 74 | credentials_exception = HTTPException( 75 | status_code=status.HTTP_401_UNAUTHORIZED, 76 | detail="Could not validate credentials", 77 | headers={"WWW-Authenticate": "Bearer"}, 78 | ) 79 | try: 80 | payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM]) 81 | email: str = payload.get("sub") 82 | if not email: 83 | raise credentials_exception 84 | token_data = TokenData(email=email) 85 | except JWTError: 86 | raise credentials_exception 87 | _user = await user.UserDao(session).get_by_email(email=token_data.email) 88 | if not _user: 89 | raise credentials_exception 90 | return _user 91 | 92 | @staticmethod 93 | async def get_all_users(session: AsyncSession) -> list[UserOut]: 94 | all_users = await user.UserDao(session).get_all() 95 | return [UserOut.model_validate(_user) for _user in all_users] 96 | 97 | @staticmethod 98 | async def delete_all_users(session: AsyncSession): 99 | await user.UserDao(session).delete_all() 100 | return JSONResponse( 101 | content={"message": "All users deleted successfully!!!"}, 102 | status_code=status.HTTP_200_OK, 103 | ) 104 | 105 | @staticmethod 106 | async def change_password( 107 | password_data: ChangePasswordIn, 108 | current_user: UserModel, 109 | session: AsyncSession = Depends(get_session), 110 | ): 111 | if not UtilsService.verify_password(password_data.old_password, current_user.password): 112 | raise 
HTTPException( 113 | status_code=status.HTTP_400_BAD_REQUEST, 114 | detail="Incorrect old password!!!", 115 | ) 116 | current_user.password = UtilsService.get_password_hash(password_data.new_password) 117 | session.add(current_user) 118 | await session.commit() 119 | return JSONResponse( 120 | content={"message": "Password updated successfully!!!"}, 121 | status_code=status.HTTP_200_OK, 122 | ) 123 | 124 | @staticmethod 125 | async def get_user_by_id(user_id: int, session: AsyncSession) -> UserOut: 126 | _user = await user.UserDao(session).get_by_id(user_id) 127 | if not _user: 128 | raise HTTPException( 129 | status_code=status.HTTP_404_NOT_FOUND, 130 | detail="User with the given id does not exist!!!", 131 | ) 132 | return UserOut.model_validate(_user) 133 | 134 | @staticmethod 135 | async def delete_user_by_id(user_id: int, session: AsyncSession): 136 | _user = await user.UserDao(session).delete_by_id(user_id) 137 | if not _user: 138 | raise HTTPException( 139 | status_code=status.HTTP_404_NOT_FOUND, 140 | detail="User with the given id does not exist!!!", 141 | ) 142 | return JSONResponse( 143 | content={"message": "User deleted successfully!!!"}, 144 | status_code=status.HTTP_200_OK, 145 | ) 146 | 147 | 148 | CurrentUserDep = Annotated[UserModel, Depends(UserService.get_current_user)] 149 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/services/utils.py: -------------------------------------------------------------------------------- 1 | from fastapi.security import OAuth2PasswordBearer 2 | 3 | from datetime import datetime, timedelta 4 | 5 | from jose import jwt 6 | from passlib.context import CryptContext 7 | 8 | from app.settings import settings 9 | 10 | pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") 11 | 12 | oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/user/token") 13 | 14 | 15 | class UtilsService: 16 | @staticmethod 17 | def 
verify_password(plain_password, hashed_password) -> bool: 18 | return pwd_context.verify(plain_password, hashed_password) 19 | 20 | @staticmethod 21 | def get_password_hash(password) -> str: 22 | return pwd_context.hash(password) 23 | 24 | @staticmethod 25 | def create_access_token(data: dict, expires_delta: timedelta | None = None) -> str: 26 | to_encode = data.copy() 27 | if expires_delta: 28 | expire = datetime.utcnow() + expires_delta 29 | else: 30 | expire = datetime.utcnow() + timedelta(minutes=15) 31 | to_encode.update({"exp": expire}) 32 | encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM) 33 | return encoded_jwt 34 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/app/settings.py: -------------------------------------------------------------------------------- 1 | import secrets 2 | from functools import lru_cache 3 | 4 | from pydantic import PostgresDsn 5 | from pydantic_settings import BaseSettings, SettingsConfigDict 6 | 7 | 8 | class Settings(BaseSettings): 9 | model_config = SettingsConfigDict(env_file=".env", env_ignore_empty=True, extra="ignore") 10 | 11 | POSTGRES_USER: str 12 | POSTGRES_PASSWORD: str 13 | POSTGRES_HOST: str 14 | POSTGRES_PORT: int 15 | POSTGRES_DB: str 16 | POSTGRES_URL: PostgresDsn 17 | 18 | PGADMIN_EMAIL: str 19 | PGADMIN_PASSWORD: str 20 | 21 | SECRET_KEY: str = secrets.token_urlsafe(32) 22 | ALGORITHM: str = "HS256" 23 | ACCESS_TOKEN_EXPIRE_MINUTES: int = 3600 24 | DATE_FORMAT: str = "%Y-%m-%d" 25 | 26 | API_V1_STR: str = "/api/v1" 27 | PROJECT_NAME: str = "FastAPI Genesis API Docs" 28 | 29 | 30 | @lru_cache 31 | def get_settings(): 32 | return Settings() # type: ignore 33 | 34 | 35 | settings = get_settings() 36 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | 
version: "3.9" 2 | 3 | services: 4 | app: 5 | container_name: "{{ cookiecutter.app_container_name }}" 6 | # build: . for local dev you can use this option, for prod work you should take image from ECR 7 | image: your-ecr.amazonaws.com/some-repo:latest 8 | command: bash -c "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port {{ cookiecutter.app_service_port }} --reload" 9 | volumes: 10 | - .:/app 11 | ports: 12 | - "{{ cookiecutter.app_service_port }}:{{ cookiecutter.app_service_port }}" 13 | depends_on: 14 | - db 15 | restart: always 16 | networks: 17 | - "{{ cookiecutter.network_name }}" 18 | 19 | db: 20 | container_name: "{{ cookiecutter.db_container_name }}" 21 | image: postgres:latest 22 | restart: always 23 | volumes: 24 | - postgres_data:/var/lib/postgresql/data/ 25 | ports: 26 | - "{{ cookiecutter.db_service_port }}:5432" 27 | environment: 28 | - POSTGRES_USER=${POSTGRES_USER} 29 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 30 | - POSTGRES_DB=${POSTGRES_DB} 31 | env_file: 32 | - .env 33 | networks: 34 | - "{{ cookiecutter.network_name }}" 35 | 36 | pgadmin: 37 | container_name: "{{ cookiecutter.pgadmin_container_name }}" 38 | image: dpage/pgadmin4 39 | environment: 40 | - PGADMIN_DEFAULT_EMAIL=${PGADMIN_EMAIL} 41 | - PGADMIN_DEFAULT_PASSWORD=${PGADMIN_PASSWORD} 42 | ports: 43 | - "{{ cookiecutter.pgadmin_service_port }}:80" 44 | depends_on: 45 | - db 46 | restart: always 47 | env_file: 48 | - .env 49 | networks: 50 | - "{{ cookiecutter.network_name }}" 51 | 52 | volumes: 53 | postgres_data: 54 | 55 | 56 | networks: 57 | {{ cookiecutter.network_name }}: 58 | driver: bridge 59 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "{{cookiecutter.repo_name}}" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["User "] 6 | readme = "README.md" 7 | 8 | 
[tool.poetry.dependencies] 9 | python = "^3.10" 10 | alembic = "^1.11.1" 11 | loguru = "^0.7.0" 12 | fastapi = {extras = ["all"], version = "^0.100.0"} 13 | python-jose = {extras = ["cryptography"], version = "^3.3.0"} 14 | passlib = {extras = ["bcrypt"], version = "^1.7.4"} 15 | sqlalchemy = "^2.0.19" 16 | pydantic = "^2.0.3" 17 | asyncpg = "^0.28.0" 18 | 19 | [tool.poetry.group.dev.dependencies] 20 | isort = "^5.12.0" 21 | mypy = "^1.4.1" 22 | black = "^23.7.0" 23 | pytest = "^7.4.0" 24 | pre-commit = "^3.3.3" 25 | 26 | [tool.black] 27 | line-length = 120 28 | target-version = ['py310'] 29 | include = '\.pyi?$' 30 | extend-exclude = ''' 31 | /(venv 32 | | \.venv 33 | | __pycache__ 34 | | \.mypy_cache 35 | | \.pytest_cache 36 | | db\.sqlite3 37 | | alembic)/ 38 | ''' 39 | 40 | [tool.ruff] 41 | line-length = 120 42 | select = ["E", "F", "UP", "I"] 43 | ignore = ["E501"] 44 | fixable = ["A", "B", "C", "D", "E", "F", "I"] 45 | exclude = [ 46 | ".direnv", 47 | ".eggs", 48 | ".git", 49 | ".mypy_cache", 50 | ".pytype", 51 | ".ruff_cache", 52 | ".svn", 53 | ".tox", 54 | ".venv", 55 | "migrations", 56 | "__pypackages__", 57 | "buck-out", 58 | "build", 59 | "dist", 60 | "node_modules", 61 | "venv", 62 | "alembic" 63 | ] 64 | 65 | [tool.ruff.isort] 66 | section-order = ["future", "fastapi", "standard-library", "third-party", "first-party", "local-folder"] 67 | 68 | [tool.ruff.isort.sections] 69 | fastapi = ["fastapi"] 70 | 71 | [build-system] 72 | requires = ["poetry-core"] 73 | build-backend = "poetry.core.masonry.api" 74 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/scripts/autogenerate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker-compose exec app alembic revision --autogenerate -m "$1" 3 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/scripts/downgrade.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker-compose exec app alembic downgrade "-1" 3 | 4 | 5 | 6 | # alembic downgrade -1 7 | # This will run the downgrade() method of your latest revision and update the alembic_version table to indicate the revision you're now at. 8 | 9 | # If you need to go back multiple migrations, run 10 | # alembic history 11 | # to view a list of all the migrations in your project (from newest to oldest), 12 | # then copy and paste the identifier of the migration you want to go back to: 13 | # alembic downgrade 8ac14e223d1e 14 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/static/welcome.txt: -------------------------------------------------------------------------------- 1 | Use this folder to store all static files generated by your FastAPI application if needed. 2 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lolomgrofl/fastapi-genesis/6c284ba95a8bbe4feefe1328de9b623307a70097/{{cookiecutter.repo_name}}/tests/__init__.py --------------------------------------------------------------------------------