├── .github └── ISSUE_TEMPLATE │ ├── PULL_REQUEST_TEMPLATE.md │ ├── fastapi-boilerplate-feature-request.md │ └── fastapi-boilerplate-issue.md ├── .gitignore ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE.md ├── README.md ├── default.conf ├── docker-compose.yml ├── mypy.ini ├── pyproject.toml ├── src ├── __init__.py ├── alembic.ini ├── app │ ├── __init__.py │ ├── api │ │ ├── __init__.py │ │ ├── dependencies.py │ │ └── v1 │ │ │ ├── __init__.py │ │ │ ├── login.py │ │ │ ├── logout.py │ │ │ ├── posts.py │ │ │ ├── rate_limits.py │ │ │ ├── tasks.py │ │ │ ├── tiers.py │ │ │ └── users.py │ ├── core │ │ ├── __init__.py │ │ ├── config.py │ │ ├── db │ │ │ ├── __init__.py │ │ │ ├── crud_token_blacklist.py │ │ │ ├── database.py │ │ │ ├── models.py │ │ │ └── token_blacklist.py │ │ ├── exceptions │ │ │ ├── __init__.py │ │ │ ├── cache_exceptions.py │ │ │ └── http_exceptions.py │ │ ├── logger.py │ │ ├── schemas.py │ │ ├── security.py │ │ ├── setup.py │ │ ├── utils │ │ │ ├── __init__.py │ │ │ ├── cache.py │ │ │ ├── queue.py │ │ │ └── rate_limit.py │ │ └── worker │ │ │ ├── __init__.py │ │ │ ├── functions.py │ │ │ └── settings.py │ ├── crud │ │ ├── __init__.py │ │ ├── crud_posts.py │ │ ├── crud_rate_limit.py │ │ ├── crud_tier.py │ │ └── crud_users.py │ ├── main.py │ ├── middleware │ │ └── client_cache_middleware.py │ ├── models │ │ ├── __init__.py │ │ ├── post.py │ │ ├── rate_limit.py │ │ ├── tier.py │ │ └── user.py │ └── schemas │ │ ├── __init__.py │ │ ├── job.py │ │ ├── post.py │ │ ├── rate_limit.py │ │ ├── tier.py │ │ └── user.py ├── migrations │ ├── README │ ├── env.py │ ├── script.py.mako │ └── versions │ │ └── README.MD └── scripts │ ├── __init__.py │ ├── create_first_superuser.py │ └── create_first_tier.py └── tests ├── __init__.py ├── conftest.py ├── helpers ├── generators.py └── mocks.py └── test_user.py /.github/ISSUE_TEMPLATE/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | # Pull Request Template for FastAPI-boilerplate 2 | 3 | ## Description 4 | Please provide a clear and concise description of what your pull request is about. 5 | 6 | ## Changes 7 | Briefly list the changes you've made. If applicable, also link any relevant issues or pull requests. 8 | 9 | ## Tests 10 | Describe the tests you added or modified to cover your changes, if applicable. 11 | 12 | ## Checklist 13 | - [ ] I have read the [CONTRIBUTING](CONTRIBUTING.md) document. 14 | - [ ] My code follows the code style of this project. 15 | - [ ] I have added necessary documentation (if appropriate). 16 | - [ ] I have added tests that cover my changes (if applicable). 17 | - [ ] All new and existing tests passed. 18 | 19 | ## Additional Notes 20 | Include any additional information that you think is important for reviewers to know. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/fastapi-boilerplate-feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: FastAPI-boilerplate Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 
15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/fastapi-boilerplate-issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: FastAPI-boilerplate Issue 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug or question** 11 | A clear and concise description of what the bug or question is. 12 | 13 | **To Reproduce** 14 | Please provide a self-contained, minimal, and reproducible example of your use case 15 | ```python 16 | # Your code here 17 | ``` 18 | 19 | **Description** 20 | Describe the problem, question, or error you are facing. Include both the expected output for your input and the actual output you're observing. 21 | 22 | **Screenshots** 23 | If applicable, add screenshots to help explain your problem. 24 | 25 | **Additional context** 26 | Add any other context about the problem here. 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | poetry.lock 112 | src/poetry.lock 113 | 114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
162 | #.idea/ 163 | 164 | # Macos 165 | .DS_Store 166 | 167 | .ruff_cache 168 | 169 | .idea -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3.11 3 | 4 | repos: 5 | - repo: https://github.com/pre-commit/pre-commit-hooks 6 | rev: v4.1.0 7 | hooks: 8 | - id: end-of-file-fixer 9 | - id: trailing-whitespace 10 | - id: check-yaml 11 | - id: check-docstring-first 12 | - id: check-executables-have-shebangs 13 | - id: check-case-conflict 14 | - id: check-added-large-files 15 | exclude: ^(.*\/dummy.*|.*\.json)$ 16 | args: ["--maxkb=750", "--enforce-all"] 17 | - id: detect-private-key 18 | - id: check-merge-conflict 19 | 20 | - repo: https://github.com/asottile/pyupgrade 21 | rev: v3.15.0 22 | hooks: 23 | - id: pyupgrade 24 | args: [--py310-plus] 25 | name: Upgrade code to Python 3.10+ 26 | 27 | - repo: https://github.com/myint/docformatter 28 | rev: v1.7.5 29 | hooks: 30 | - id: docformatter 31 | args: [--in-place, --wrap-summaries=115, --wrap-descriptions=120] 32 | 33 | - repo: https://github.com/asottile/yesqa 34 | rev: v1.5.0 35 | hooks: 36 | - id: yesqa 37 | name: Unused noqa 38 | 39 | - repo: https://github.com/astral-sh/ruff-pre-commit 40 | rev: v0.1.9 41 | hooks: 42 | - id: ruff 43 | args: [ --fix ] 44 | - id: ruff-format 45 | 46 | - repo: https://github.com/asottile/blacken-docs 47 | rev: 1.16.0 48 | hooks: 49 | - id: blacken-docs 50 | args: [--line-length=120] 51 | additional_dependencies: [black==22.1.0] 52 | 53 | - repo: https://github.com/executablebooks/mdformat 54 | rev: 0.7.17 55 | hooks: 56 | - id: mdformat 57 | additional_dependencies: 58 | - mdformat-gfm 59 | - mdformat_frontmatter 60 | exclude: CHANGELOG.md 61 | 62 | - repo: local 63 | hooks: 64 | - id: unit_test 65 | name: Unit test 66 | language: system 67 | entry: poetry run pytest 68 | pass_filenames: false 69 | always_run: true 70 | types: [python] 71 | stages: [manual] 72 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | igor.magalhaes.r@gmail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. 
This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 | 
95 | ### 3. Temporary Ban
96 | 
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 | 
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 | 
106 | ### 4. Permanent Ban
107 | 
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 | 
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 | 
115 | ## Attribution
116 | 
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 | 
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 | 
124 | [homepage]: https://www.contributor-covenant.org
125 | 
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to FastAPI-boilerplate
2 | 
3 | Thank you for your interest in contributing to FastAPI-boilerplate! This guide is meant to make it easy for you to get started.
4 | Contributions are appreciated, even if it's just reporting bugs, improving documentation, or answering questions. To contribute a feature, follow the steps below.
5 | 
6 | ## Setting Up Your Development Environment
7 | 
8 | ### Cloning the Repository
9 | Start by forking and cloning the FastAPI-boilerplate repository:
10 | 
11 | 1. **Fork the Repository**: Begin by forking the project repository. You can do this by visiting https://github.com/igormagalhaesr/FastAPI-boilerplate and clicking the "Fork" button.
12 | 1. **Create a Feature Branch**: Once you've forked the repo, create a branch for your feature by running `git checkout -b feature/fooBar`.
13 | 1. **Testing Changes**: Ensure that your changes do not break existing functionality by running the tests. In the root folder, execute `poetry run python -m pytest` to run them.
14 | 
15 | ### Using Poetry for Dependency Management
16 | FastAPI-boilerplate uses Poetry for managing dependencies. If you don't have Poetry installed, follow the instructions on the [official Poetry website](https://python-poetry.org/docs/).
17 | 
18 | Once Poetry is installed, navigate to the cloned repository and install the dependencies:
19 | ```sh
20 | cd FastAPI-boilerplate
21 | poetry install
22 | ```
23 | 
24 | ### Activating the Virtual Environment
25 | Poetry creates a virtual environment for your project.
Activate it using:
26 | 
27 | ```sh
28 | poetry shell
29 | ```
30 | 
31 | ## Making Contributions
32 | 
33 | ### Coding Standards
34 | - Follow PEP 8 guidelines.
35 | - Write meaningful tests for new features or bug fixes.
36 | 
37 | ### Testing with Pytest
38 | FastAPI-boilerplate uses pytest for testing. Run tests using:
39 | ```sh
40 | poetry run pytest
41 | ```
42 | 
43 | ### Linting
44 | Use mypy for type checking:
45 | ```sh
46 | mypy src
47 | ```
48 | 
49 | Use ruff for style:
50 | ```sh
51 | ruff check --fix
52 | ruff format
53 | ```
54 | 
55 | Ensure your code passes linting before submitting.
56 | 
57 | ### Using pre-commit for Better Code Quality
58 | 
59 | pre-commit helps identify simple issues before code reaches review. By running automated checks, it ensures code quality and consistency.
60 | 
61 | 1. **Install Pre-commit**:
62 |    - **Installation**: Install pre-commit in your development environment. Use the command `pip install pre-commit`.
63 |    - **Setting Up Hooks**: After installing pre-commit, set up the hooks with `pre-commit install`. This command will install hooks into your `.git/` directory which will automatically check your commits for issues.
64 | 1. **Committing Your Changes**:
65 |    After making your changes, use `git commit -am 'Add some fooBar'` to commit them. Pre-commit will run automatically on your files when you commit, ensuring that they meet the required standards.
66 |    Note: If pre-commit identifies issues, it may block your commit. Fix these issues and commit again. This ensures that all contributions are of high quality.
67 | 1. **Pushing Changes and Creating Pull Request**:
68 |    Push your changes to the branch using `git push origin feature/fooBar`.
69 |    Visit your fork on GitHub and create a new Pull Request to the main repository.
70 | 
71 | ### Additional Notes
72 | 
73 | - **Stay Updated**: Keep your fork updated with the main repository to avoid merge conflicts. Regularly fetch and merge changes from the upstream repository.
74 | - **Adhere to Project Conventions**: Follow the coding style, conventions, and commit message guidelines of the project.
75 | - **Open Communication**: Feel free to ask questions or discuss your ideas by opening an issue or in discussions.
76 | 
77 | ## Submitting Your Contributions
78 | 
79 | ### Creating a Pull Request
80 | After making your changes:
81 | 
82 | - Push your changes to your fork.
83 | - Open a pull request with a clear description of your changes.
84 | - Update the README.md if necessary.
85 | 
86 | 
87 | ### Code Reviews
88 | - Address any feedback from code reviews.
89 | - Once approved, your contributions will be merged into the main branch.
90 | 
91 | ## Code of Conduct
92 | Please adhere to our [Code of Conduct](CODE_OF_CONDUCT.md) to maintain a welcoming and inclusive environment.
93 | 
94 | Thank you for contributing to FastAPI-boilerplate🚀
95 | 
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # --------- requirements ---------
2 | 
3 | FROM python:3.11 as requirements-stage
4 | 
5 | WORKDIR /tmp
6 | 
7 | RUN pip install poetry poetry-plugin-export
8 | 
9 | COPY ./pyproject.toml ./poetry.lock* /tmp/
10 | 
11 | RUN poetry export -f requirements.txt --output requirements.txt --without-hashes
12 | 
13 | 
14 | # --------- final image build ---------
15 | FROM python:3.11
16 | 
17 | WORKDIR /code
18 | 
19 | COPY --from=requirements-stage /tmp/requirements.txt /code/requirements.txt
20 | 
21 | RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
22 | 
23 | COPY ./src/app /code/app
24 | 
25 | # -------- swap the comments below to run with gunicorn --------
26 | CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
27 | # CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
28 | 
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2023 Igor Magalhães
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |

Fast FastAPI boilerplate

2 |

3 | Yet another template to speed your FastAPI development up. 4 |

5 | 6 |

7 | 8 | Blue Rocket with FastAPI Logo as its window. There is a word FAST written 9 | 10 |

11 | 12 |

13 | 14 | Python 15 | 16 | 17 | FastAPI 18 | 19 | 20 | Pydantic 21 | 22 | 23 | PostgreSQL 24 | 25 | 26 | Redis 27 | 28 | 29 | Docker 30 | 31 | 32 | NGINX 33 | 34 |

35 | 
36 | ## 0. About
37 | 
38 | **FastAPI boilerplate** creates an extendable async API using FastAPI, Pydantic V2, SQLAlchemy 2.0 and PostgreSQL:
39 | 
40 | - [`FastAPI`](https://fastapi.tiangolo.com): modern Python web framework for building APIs
41 | - [`Pydantic V2`](https://docs.pydantic.dev/2.4/): the most widely used Python data validation library, rewritten in Rust [`(5x-50x faster)`](https://docs.pydantic.dev/latest/blog/pydantic-v2-alpha/)
42 | - [`SQLAlchemy 2.0`](https://docs.sqlalchemy.org/en/20/changelog/whatsnew_20.html): Python SQL toolkit and Object Relational Mapper
43 | - [`PostgreSQL`](https://www.postgresql.org): The World's Most Advanced Open Source Relational Database
44 | - [`Redis`](https://redis.io): open source, in-memory data store used by millions as a cache, message broker and more.
45 | - [`ARQ`](https://arq-docs.helpmanual.io): job queues and RPC in Python with asyncio and redis.
46 | - [`Docker Compose`](https://docs.docker.com/compose/): with a single command, create and start all the services from your configuration.
47 | - [`NGINX`](https://nginx.org/en/): high-performance, low-resource-consumption web server used for reverse proxy and load balancing.
48 | 
49 | > \[!TIP\]
50 | > If you want the `SQLModel` version instead, head to [SQLModel-boilerplate](https://github.com/igorbenav/SQLModel-boilerplate).
51 | 
52 | ## 1. Features
53 | 
54 | - ⚡️ Fully async
55 | - 🚀 Pydantic V2 and SQLAlchemy 2.0
56 | - 🔐 User authentication with JWT
57 | - 🍪 Cookie based refresh token
58 | - 🏬 Easy redis caching
59 | - 👜 Easy client-side caching
60 | - 🚦 ARQ integration for task queue
61 | - ⚙️ Efficient and robust queries with fastcrud
62 | - ⎘ Out of the box offset and cursor pagination support with fastcrud
63 | - 🛑 Rate Limiter dependency
64 | - 👮 FastAPI docs behind authentication and hidden based on the environment
65 | - 🦾 Easily extendable
66 | - 🤸‍♂️ Flexible
67 | - 🚚 Easy running with docker compose
68 | - ⚖️ NGINX Reverse Proxy and Load Balancing
69 | 
70 | ## 2. Contents
71 | 
72 | 0. [About](#0-about)
73 | 1. [Features](#1-features)
74 | 1. [Contents](#2-contents)
75 | 1. [Prerequisites](#3-prerequisites)
76 |    1. [Environment Variables (.env)](#31-environment-variables-env)
77 |    1. [Docker Compose](#32-docker-compose-preferred)
78 |    1. [From Scratch](#33-from-scratch)
79 | 1. [Usage](#4-usage)
80 |    1. [Docker Compose](#41-docker-compose)
81 |    1. [From Scratch](#42-from-scratch)
82 |       1. [Packages](#421-packages)
83 |       1. [Running PostgreSQL With Docker](#422-running-postgresql-with-docker)
84 |       1. [Running Redis with Docker](#423-running-redis-with-docker)
85 |       1. [Running the API](#424-running-the-api)
86 |    1. [Creating the first superuser](#43-creating-the-first-superuser)
87 |    1. [Database Migrations](#44-database-migrations)
88 | 1. [Extending](#5-extending)
89 |    1. [Project Structure](#51-project-structure)
90 |    1. [Database Model](#52-database-model)
91 |    1. [SQLAlchemy Models](#53-sqlalchemy-models)
92 |    1. [Pydantic Schemas](#54-pydantic-schemas)
93 |    1. [Alembic Migrations](#55-alembic-migrations)
94 |    1. [CRUD](#56-crud)
95 |    1. [Routes](#57-routes)
96 |       1. [Paginated Responses](#571-paginated-responses)
97 |       1. [HTTP Exceptions](#572-http-exceptions)
98 |    1. [Caching](#58-caching)
99 |    1. [More Advanced Caching](#59-more-advanced-caching)
100 |    1. [ARQ Job Queues](#510-arq-job-queues)
101 |    1. [Rate Limiting](#511-rate-limiting)
102 |    1. [JWT Authentication](#512-jwt-authentication)
103 |    1. [Running](#513-running)
104 |    1.
[Create Application](#514-create-application)
105 |    1. [Opting Out of Services](#515-opting-out-of-services)
106 | 1. [Running in Production](#6-running-in-production)
107 |    1. [Uvicorn Workers with Gunicorn](#61-uvicorn-workers-with-gunicorn)
108 |    1. [Running With NGINX](#62-running-with-nginx)
109 |       1. [One Server](#621-one-server)
110 |       1. [Multiple Servers](#622-multiple-servers)
111 | 1. [Testing](#7-testing)
112 | 1. [Contributing](#8-contributing)
113 | 1. [References](#9-references)
114 | 1. [License](#10-license)
115 | 1. [Contact](#11-contact)
116 | 
117 | ______________________________________________________________________
118 | 
119 | ## 3. Prerequisites
120 | 
121 | ### 3.0 Start
122 | 
123 | Start by using the template, naming the repository whatever you want.
124 | 
125 | 

126 | clicking use this template button, then create a new repository option 127 |

128 | 
129 | Then clone your newly created repository (I'm using the base repository in this example):
130 | 
131 | ```sh
132 | git clone https://github.com/igormagalhaesr/FastAPI-boilerplate
133 | ```
134 | 
135 | > \[!TIP\]
136 | > If you are in a hurry, you may use one of the following templates (containing a `.env`, `docker-compose.yml` and `Dockerfile`):
137 | 
138 | - [Running locally with uvicorn](https://gist.github.com/igorbenav/48ad745120c3f77817e094f3a609111a)
139 | - [Running in staging with gunicorn managing uvicorn workers](https://gist.github.com/igorbenav/d0518d4f6bdfb426d4036090f74905ee)
140 | - [Running in production with NGINX](https://gist.github.com/igorbenav/232c3b73339d6ca74e2bf179a5ef48a1)
141 | 
142 | > \[!WARNING\]
143 | > Do not forget to place `docker-compose.yml` and `Dockerfile` in the `root` folder, while `.env` should be in the `src` folder.
144 | 
145 | ### 3.1 Environment Variables (.env)
146 | 
147 | Then create a `.env` file inside the `src` directory:
148 | 
149 | ```sh
150 | touch .env
151 | ```
152 | 
153 | Inside `.env`, create the following app settings variables:
154 | 
155 | ```
156 | # ------------- app settings -------------
157 | APP_NAME="Your app name here"
158 | APP_DESCRIPTION="Your app description here"
159 | APP_VERSION="0.1"
160 | CONTACT_NAME="Your name"
161 | CONTACT_EMAIL="Your email"
162 | LICENSE_NAME="The license you picked"
163 | ```
164 | 
165 | For the database ([`if you don't have a database yet, click here`](#422-running-postgresql-with-docker)), create:
166 | 
167 | ```
168 | # ------------- database -------------
169 | POSTGRES_USER="your_postgres_user"
170 | POSTGRES_PASSWORD="your_password"
171 | POSTGRES_SERVER="your_server" # default "localhost", if using docker compose you should use "db"
172 | POSTGRES_PORT=5432 # default "5432", if using docker compose you should use "5432"
173 | POSTGRES_DB="your_db"
174 | ```
175 | 
176 | For database administration using PGAdmin, create the following variables in the `.env` file:
177 | 
178 | ```
179 | # ------------- pgadmin -------------
180 | PGADMIN_DEFAULT_EMAIL="your_email_address"
181 | PGADMIN_DEFAULT_PASSWORD="your_password"
182 | PGADMIN_LISTEN_PORT=80
183 | ```
184 | 
185 | To connect to the database, log into the PGAdmin console with the values specified in `PGADMIN_DEFAULT_EMAIL` and `PGADMIN_DEFAULT_PASSWORD`.
186 | 
187 | Once in the main PGAdmin screen, click Add Server:
188 | 
189 | ![pgadmin-connect](https://github.com/igorbenav/docs-images/blob/main/289698727-e15693b6-fae9-4ec6-a597-e70ab6f44133-3.png?raw=true)
190 | 
191 | 1. Hostname/address is `db` (if using containers)
192 | 1. Is the value you specified in `POSTGRES_PORT`
193 | 1. Leave this value as `postgres`
194 | 1. Is the value you specified in `POSTGRES_USER`
195 | 1.
Is the value you specified in `POSTGRES_PASSWORD` 196 | 197 | For crypt: 198 | Start by running 199 | 200 | ```sh 201 | openssl rand -hex 32 202 | ``` 203 | 204 | And then create in `.env`: 205 | 206 | ``` 207 | # ------------- crypt ------------- 208 | SECRET_KEY= # result of openssl rand -hex 32 209 | ALGORITHM= # pick an algorithm, default HS256 210 | ACCESS_TOKEN_EXPIRE_MINUTES= # minutes until token expires, default 30 211 | REFRESH_TOKEN_EXPIRE_DAYS= # days until token expires, default 7 212 | ``` 213 | 214 | Then for the first admin user: 215 | 216 | ``` 217 | # ------------- admin ------------- 218 | ADMIN_NAME="your_name" 219 | ADMIN_EMAIL="your_email" 220 | ADMIN_USERNAME="your_username" 221 | ADMIN_PASSWORD="your_password" 222 | ``` 223 | 224 | For redis caching: 225 | 226 | ``` 227 | # ------------- redis cache------------- 228 | REDIS_CACHE_HOST="your_host" # default "localhost", if using docker compose you should use "redis" 229 | REDIS_CACHE_PORT=6379 # default "6379", if using docker compose you should use "6379" 230 | ``` 231 | 232 | And for client-side caching: 233 | 234 | ``` 235 | # ------------- redis client-side cache ------------- 236 | CLIENT_CACHE_MAX_AGE=30 # default "30" 237 | ``` 238 | 239 | For ARQ Job Queues: 240 | 241 | ``` 242 | # ------------- redis queue ------------- 243 | REDIS_QUEUE_HOST="your_host" # default "localhost", if using docker compose you should use "redis" 244 | REDIS_QUEUE_PORT=6379 # default "6379", if using docker compose you should use "6379" 245 | ``` 246 | 247 | > \[!WARNING\] 248 | > You may use the same redis for both caching and queue while developing, but the recommendation is using two separate containers for production. 249 | 250 | To create the first tier: 251 | 252 | ``` 253 | # ------------- first tier ------------- 254 | TIER_NAME="free" 255 | ``` 256 | 257 | For the rate limiter: 258 | 259 | ``` 260 | # ------------- redis rate limit ------------- 261 | REDIS_RATE_LIMIT_HOST="localhost" # default="localhost", if using docker compose you should use "redis" 262 | REDIS_RATE_LIMIT_PORT=6379 # default=6379, if using docker compose you should use "6379" 263 | 264 | 265 | # ------------- default rate limit settings ------------- 266 | DEFAULT_RATE_LIMIT_LIMIT=10 # default=10 267 | DEFAULT_RATE_LIMIT_PERIOD=3600 # default=3600 268 | ``` 269 | 270 | And Finally the environment: 271 | 272 | ``` 273 | # ------------- environment ------------- 274 | ENVIRONMENT="local" 275 | ``` 276 | 277 | `ENVIRONMENT` can be one of `local`, `staging` and `production`, defaults to local, and changes the behavior of api `docs` endpoints: 278 | 279 | - **local:** `/docs`, `/redoc` and `/openapi.json` available 280 | - **staging:** `/docs`, `/redoc` and `/openapi.json` available for superusers 281 | - **production:** `/docs`, `/redoc` and `/openapi.json` not available 282 | 283 | ### 3.2 Docker Compose (preferred) 284 | 285 | To do it using docker compose, ensure you have docker and docker compose installed, then: 286 | While in the base project directory (FastAPI-boilerplate here), run: 287 | 288 | ```sh 289 | docker compose up 290 | ``` 291 | 292 | You should have a `web` container, `postgres` container, a `worker` container and a `redis` container running. 293 | Then head to `http://127.0.0.1:8000/docs`. 294 | 295 | ### 3.3 From Scratch 296 | 297 | Install poetry: 298 | 299 | ```sh 300 | pip install poetry 301 | ``` 302 | 303 | ## 4. Usage 304 | 305 | ### 4.1 Docker Compose 306 | 307 | If you used docker compose, your setup is done. 
You just need to ensure that when you run (while in the base folder): 308 | 309 | ```sh 310 | docker compose up 311 | ``` 312 | 313 | You get the following outputs (in addition to many other outputs): 314 | 315 | ```sh 316 | fastapi-boilerplate-worker-1 | ... redis_version=x.x.x mem_usage=999K clients_connected=1 db_keys=0 317 | ... 318 | fastapi-boilerplate-db-1 | ... [1] LOG: database system is ready to accept connections 319 | ... 320 | fastapi-boilerplate-web-1 | INFO: Application startup complete. 321 | ``` 322 | 323 | So you may skip to [5. Extending](#5-extending). 324 | 325 | ### 4.2 From Scratch 326 | 327 | #### 4.2.1. Packages 328 | 329 | In the `root` directory (`FastAPI-boilerplate` if you didn't change anything), run to install required packages: 330 | 331 | ```sh 332 | poetry install 333 | ``` 334 | 335 | Ensuring it ran without any problem. 336 | 337 | #### 4.2.2. Running PostgreSQL With Docker 338 | 339 | > \[!NOTE\] 340 | > If you already have a PostgreSQL running, you may skip this step. 341 | 342 | Install docker if you don't have it yet, then run: 343 | 344 | ```sh 345 | docker pull postgres 346 | ``` 347 | 348 | And pick the port, name, user and password, replacing the fields: 349 | 350 | ```sh 351 | docker run -d \ 352 | -p {PORT}:{PORT} \ 353 | --name {NAME} \ 354 | -e POSTGRES_PASSWORD={PASSWORD} \ 355 | -e POSTGRES_USER={USER} \ 356 | postgres 357 | ``` 358 | 359 | Such as: 360 | 361 | ```sh 362 | docker run -d \ 363 | -p 5432:5432 \ 364 | --name postgres \ 365 | -e POSTGRES_PASSWORD=1234 \ 366 | -e POSTGRES_USER=postgres \ 367 | postgres 368 | ``` 369 | 370 | #### 4.2.3. Running redis With Docker 371 | 372 | > \[!NOTE\] 373 | > If you already have a redis running, you may skip this step. 374 | 375 | Install docker if you don't have it yet, then run: 376 | 377 | ```sh 378 | docker pull redis:alpine 379 | ``` 380 | 381 | And pick the name and port, replacing the fields: 382 | 383 | ```sh 384 | docker run -d \ 385 | --name {NAME} \ 386 | -p {PORT}:{PORT} \ 387 | redis:alpine 388 | ``` 389 | 390 | Such as 391 | 392 | ```sh 393 | docker run -d \ 394 | --name redis \ 395 | -p 6379:6379 \ 396 | redis:alpine 397 | ``` 398 | 399 | #### 4.2.4. Running the API 400 | 401 | While in the `root` folder, run to start the application with uvicorn server: 402 | 403 | ```sh 404 | poetry run uvicorn src.app.main:app --reload 405 | ``` 406 | 407 | > \[!TIP\] 408 | > The --reload flag enables auto-reload once you change (and save) something in the project 409 | 410 | ### 4.3 Creating the first superuser 411 | 412 | #### 4.3.1 Docker Compose 413 | 414 | > \[!WARNING\] 415 | > Make sure DB and tables are created before running create_superuser (db should be running and the api should run at least once before) 416 | 417 | If you are using docker compose, you should uncomment this part of the docker-compose.yml: 418 | 419 | ``` 420 | #-------- uncomment to create first superuser -------- 421 | # create_superuser: 422 | # build: 423 | # context: . 424 | # dockerfile: Dockerfile 425 | # env_file: 426 | # - ./src/.env 427 | # depends_on: 428 | # - db 429 | # command: python -m src.scripts.create_first_superuser 430 | # volumes: 431 | # - ./src:/code/src 432 | ``` 433 | 434 | Getting: 435 | 436 | ``` 437 | #-------- uncomment to create first superuser -------- 438 | create_superuser: 439 | build: 440 | context: . 
441 |       dockerfile: Dockerfile
442 |     env_file:
443 |       - ./src/.env
444 |     depends_on:
445 |       - db
446 |     command: python -m src.scripts.create_first_superuser
447 |     volumes:
448 |       - ./src:/code/src
449 | ```
450 | 
451 | While in the base project folder, run the following to start the services:
452 | 
453 | ```sh
454 | docker-compose up -d
455 | ```
456 | 
457 | It will run the create_superuser script automatically as well, but if you ever want to rerun it:
458 | 
459 | ```sh
460 | docker-compose run --rm create_superuser
461 | ```
462 | 
463 | To stop the create_superuser service:
464 | 
465 | ```sh
466 | docker-compose stop create_superuser
467 | ```
468 | 
469 | #### 4.3.2 From Scratch
470 | 
471 | While in the `root` folder, run (after you have started the application at least once to create the tables):
472 | 
473 | ```sh
474 | poetry run python -m src.scripts.create_first_superuser
475 | ```
476 | 
477 | #### 4.3.3 Creating the first tier
478 | 
479 | > \[!WARNING\]
480 | > Make sure DB and tables are created before running create_tier (db should be running and the api should have run at least once before).
481 | 
482 | Creating the first tier is similar: just replace the `create_superuser` service with `create_tier` (or the `create_first_superuser` script with `create_first_tier`). If using `docker compose`, do not forget to uncomment the `create_tier` service in `docker-compose.yml`.
483 | 
484 | ### 4.4 Database Migrations
485 | 
486 | > \[!WARNING\]
487 | > To create the tables if you did not create the endpoints, ensure that you import the models in `src/app/models/__init__.py`. This step is crucial to create the new tables.
488 | 
489 | If you are using the db in docker, you need to change this in `docker-compose.yml` to run migrations:
490 | 
491 | ```sh
492 | db:
493 |   image: postgres:13
494 |   env_file:
495 |     - ./src/.env
496 |   volumes:
497 |     - postgres-data:/var/lib/postgresql/data
498 |   # -------- replace with comment to run migrations with docker --------
499 |   expose:
500 |     - "5432"
501 |   # ports:
502 |   #   - 5432:5432
503 | ```
504 | 
505 | Getting:
506 | 
507 | ```sh
508 | db:
509 |   ...
510 |   # expose:
511 |   #   - "5432"
512 |   ports:
513 |     - 5432:5432
514 | ```
515 | 
516 | While in the `src` folder, run Alembic migrations:
517 | 
518 | ```sh
519 | poetry run alembic revision --autogenerate
520 | ```
521 | 
522 | And to apply the migration:
523 | 
524 | ```sh
525 | poetry run alembic upgrade head
526 | ```
527 | 
528 | > [!NOTE]
529 | > If you do not have poetry, you may run it without poetry after running `pip install alembic`
530 | 
531 | ## 5. Extending
532 | 
533 | ### 5.1 Project Structure
534 | 
535 | First, you may want to take a look at the project structure and understand what each file is doing.
536 | 
537 | ```sh
538 | .
539 | ├── Dockerfile # Dockerfile for building the application container.
540 | ├── docker-compose.yml # Docker Compose file for defining multi-container applications.
541 | ├── pyproject.toml # Poetry configuration file with project metadata and dependencies.
542 | ├── README.md # Project README providing information and instructions.
543 | ├── LICENSE.md # License file for the project.
544 | │
545 | ├── tests # Unit and integration tests for the application.
546 | │   ├── helpers # Helper functions for tests.
547 | │   │   ├── generators.py # Helper functions for generating test data.
548 | │   │   └── mocks.py # Mock functions for testing.
549 | │   ├── __init__.py
550 | │   ├── conftest.py # Configuration and fixtures for pytest.
551 | │   └── test_user.py # Test cases for user-related functionality.
552 | │ 553 | └── src # Source code directory. 554 | ├── __init__.py # Initialization file for the src package. 555 | ├── alembic.ini # Configuration file for Alembic (database migration tool). 556 | ├── poetry.lock # Poetry lock file specifying exact versions of dependencies. 557 | │ 558 | ├── app # Main application directory. 559 | │ ├── __init__.py # Initialization file for the app package. 560 | │ ├── main.py # Main entry point of the FastAPI application. 561 | │ │ 562 | │ │ 563 | │ ├── api # Folder containing API-related logic. 564 | │ │ ├── __init__.py 565 | │ │ ├── dependencies.py # Defines dependencies for use across API endpoints. 566 | │ │ │ 567 | │ │ └── v1 # Version 1 of the API. 568 | │ │ ├── __init__.py 569 | │ │ ├── login.py # API route for user login. 570 | │ │ ├── logout.py # API route for user logout. 571 | │ │ ├── posts.py # API routes for post operations. 572 | │ │ ├── rate_limits.py # API routes for rate limiting functionalities. 573 | │ │ ├── tasks.py # API routes for task management. 574 | │ │ ├── tiers.py # API routes for user tier functionalities. 575 | │ │ └── users.py # API routes for user management. 576 | │ │ 577 | │ ├── core # Core utilities and configurations for the application. 578 | │ │ ├── __init__.py 579 | │ │ ├── config.py # Configuration settings for the application. 580 | │ │ ├── logger.py # Configuration for application logging. 581 | │ │ ├── schemas.py # Pydantic schemas for data validation. 582 | │ │ ├── security.py # Security utilities, such as password hashing. 583 | │ │ ├── setup.py # Setup file for the FastAPI app instance. 584 | │ │ │ 585 | │ │ ├── db # Core Database related modules. 586 | │ │ │ ├── __init__.py 587 | │ │ │ ├── crud_token_blacklist.py # CRUD operations for token blacklist. 588 | │ │ │ ├── database.py # Database connectivity and session management. 589 | │ │ │ ├── models.py # Core Database models. 590 | │ │ │ └── token_blacklist.py # Model for token blacklist functionality. 591 | │ │ │ 592 | │ │ ├── exceptions # Custom exception classes. 593 | │ │ │ ├── __init__.py 594 | │ │ │ ├── cache_exceptions.py # Exceptions related to cache operations. 595 | │ │ │ └── http_exceptions.py # HTTP-related exceptions. 596 | │ │ │ 597 | │ │ ├── utils # Utility functions and helpers. 598 | │ │ │ ├── __init__.py 599 | │ │ │ ├── cache.py # Cache-related utilities. 600 | │ │ │ ├── queue.py # Utilities for task queue management. 601 | │ │ │ └── rate_limit.py # Rate limiting utilities. 602 | │ │ │ 603 | │ │ └── worker # Worker script for background tasks. 604 | │ │ ├── __init__.py 605 | │ │ ├── settings.py # Worker configuration and settings. 606 | │ │ └── functions.py # Async task definitions and management. 607 | │ │ 608 | │ ├── crud # CRUD operations for the application. 609 | │ │ ├── __init__.py 610 | │ │ ├── crud_base.py # Base class for CRUD operations. 611 | │ │ ├── crud_posts.py # CRUD operations for posts. 612 | │ │ ├── crud_rate_limit.py # CRUD operations for rate limiting. 613 | │ │ ├── crud_tier.py # CRUD operations for user tiers. 614 | │ │ ├── crud_users.py # CRUD operations for users. 615 | │ │ └── helper.py # Helper functions for CRUD operations. 616 | │ │ 617 | │ ├── logs # Directory for log files. 618 | │ │ └── app.log # Log file for the application. 619 | │ │ 620 | │ ├── middleware # Middleware components for the application. 621 | │ │ └── client_cache_middleware.py # Middleware for client-side caching. 622 | │ │ 623 | │ ├── models # ORM models for the application. 
624 | │ │ ├── __init__.py 625 | │ │ ├── post.py # ORM model for posts. 626 | │ │ ├── rate_limit.py # ORM model for rate limiting. 627 | │ │ ├── tier.py # ORM model for user tiers. 628 | │ │ └── user.py # ORM model for users. 629 | │ │ 630 | │ └── schemas # Pydantic schemas for data validation. 631 | │ ├── __init__.py 632 | │ ├── job.py # Schema for background jobs. 633 | │ ├── post.py # Schema for post data. 634 | │ ├── rate_limit.py # Schema for rate limiting data. 635 | │ ├── tier.py # Schema for user tier data. 636 | │ └── user.py # Schema for user data. 637 | │ 638 | ├── migrations # Alembic migration scripts for database changes. 639 | │ ├── README 640 | │ ├── env.py # Environment configuration for Alembic. 641 | │ ├── script.py.mako # Template script for Alembic migrations. 642 | │ │ 643 | │ └── versions # Individual migration scripts. 644 | │ └── README.MD 645 | │ 646 | └── scripts # Utility scripts for the application. 647 | ├── __init__.py 648 | ├── create_first_superuser.py # Script to create the first superuser. 649 | └── create_first_tier.py # Script to create the first user tier. 650 | ``` 651 | 652 | ### 5.2 Database Model 653 | 654 | Create the new entities and relationships and add them to the model
655 | ![diagram](https://user-images.githubusercontent.com/43156212/284426387-bdafc637-0473-4b71-890d-29e79da288cf.png) 656 | 657 | #### 5.2.1 Token Blacklist 658 | 659 | Note that this table is used to blacklist the `JWT` tokens (it's how you log a user out)
660 | ![diagram](https://user-images.githubusercontent.com/43156212/284426382-b2f3c0ca-b8ea-4f20-b47e-de1bad2ca283.png)
661 | 
662 | ### 5.3 SQLAlchemy Models
663 | 
664 | Inside `app/models`, create a new `entity.py` for each new entity (replacing entity with the name) and define the attributes according to [SQLAlchemy 2.0 standards](https://docs.sqlalchemy.org/en/20/orm/mapping_styles.html#orm-mapping-styles):
665 | 
666 | > \[!WARNING\]
667 | > Note that since it inherits from `Base`, the new model is mapped as a python `dataclass`, so optional attributes (arguments with a default value) should be defined after required attributes.
668 | 
669 | ```python
670 | from sqlalchemy import String, DateTime
671 | from sqlalchemy.orm import Mapped, mapped_column, relationship
672 | 
673 | from app.core.db.database import Base
674 | 
675 | 
676 | class Entity(Base):
677 |     __tablename__ = "entity"
678 | 
679 |     id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False)
680 |     name: Mapped[str] = mapped_column(String(30))
681 |     ...
682 | ```
683 | 
684 | ### 5.4 Pydantic Schemas
685 | 
686 | Inside `app/schemas`, create a new `entity.py` for each new entity (replacing entity with the name) and create the schemas according to [Pydantic V2](https://docs.pydantic.dev/latest/#pydantic-examples) standards:
687 | 
688 | ```python
689 | from datetime import datetime
690 | from typing import Annotated
691 | 
692 | from pydantic import BaseModel, ConfigDict, Field
693 | 
694 | 
695 | class EntityBase(BaseModel):
696 |     name: Annotated[
697 |         str,
698 |         Field(min_length=2, max_length=30, examples=["Entity Name"]),
699 |     ]
700 | 
701 | 
702 | class Entity(EntityBase):
703 |     ...
704 | 
705 | 
706 | class EntityRead(EntityBase):
707 |     ...
708 | 
709 | 
710 | class EntityCreate(EntityBase):
711 |     ...
712 | 
713 | 
714 | class EntityCreateInternal(EntityCreate):
715 |     ...
716 | 
717 | 
718 | class EntityUpdate(BaseModel):
719 |     ...
720 | 
721 | 
722 | class EntityUpdateInternal(BaseModel):
723 |     ...
724 | 
725 | class EntityDelete(BaseModel):
726 |     model_config = ConfigDict(extra="forbid")
727 | 
728 |     is_deleted: bool
729 |     deleted_at: datetime
730 | ```
731 | 
732 | ### 5.5 Alembic Migrations
733 | 
734 | > \[!WARNING\]
735 | > To create the tables if you did not create the endpoints, ensure that you import the models in `src/app/models/__init__.py`. This step is crucial so that the new tables are created.
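A minimal sketch of that import hookup (assuming the `Entity` model from section 5.3 lives in `src/app/models/entity.py`):

```python
# src/app/models/__init__.py
# Importing the model modules here registers their tables on Base.metadata,
# which is what Alembic's autogenerate inspects.
# (do the same for the other model modules: post, rate_limit, tier, user)
from .entity import Entity  # noqa: F401
```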
736 | 
737 | Then, while in the `src` folder, run Alembic migrations:
738 | 
739 | ```sh
740 | poetry run alembic revision --autogenerate
741 | ```
742 | 
743 | And to apply the migration:
744 | 
745 | ```sh
746 | poetry run alembic upgrade head
747 | ```
748 | 
749 | ### 5.6 CRUD
750 | 
751 | Inside `app/crud`, create a new `crud_entities.py` inheriting from `FastCRUD` for each new entity:
752 | 
753 | ```python
754 | from fastcrud import FastCRUD
755 | 
756 | from app.models.entity import Entity
757 | from app.schemas.entity import EntityCreateInternal, EntityUpdate, EntityUpdateInternal, EntityDelete
758 | 
759 | CRUDEntity = FastCRUD[Entity, EntityCreateInternal, EntityUpdate, EntityUpdateInternal, EntityDelete]
760 | crud_entity = CRUDEntity(Entity)
761 | ```
762 | 
763 | So, for users:
764 | 
765 | ```python
766 | # crud_users.py
767 | from app.models.user import User
768 | from app.schemas.user import UserCreateInternal, UserUpdate, UserUpdateInternal, UserDelete
769 | 
770 | CRUDUser = FastCRUD[User, UserCreateInternal, UserUpdate, UserUpdateInternal, UserDelete]
771 | crud_users = CRUDUser(User)
772 | ```
773 | 
774 | #### 5.6.1 Get
775 | 
776 | When actually using the crud in an endpoint, to get data you just pass the database connection and the attributes as kwargs:
777 | 
778 | ```python
779 | # Here I'm getting the first user with email == user.email (email is unique in this case)
780 | user = await crud_users.get(db=db, email=user.email)
781 | ```
782 | 
783 | #### 5.6.2 Get Multi
784 | 
785 | To get a list of objects with the attributes, you should use `get_multi`:
786 | 
787 | ```python
788 | # Here I'm getting at most 10 users with the name 'User Userson' except for the first 3
789 | users = await crud_users.get_multi(db=db, offset=3, limit=10, name="User Userson")
790 | ```
791 | 
792 | > \[!WARNING\]
793 | > Note that get_multi returns a python `dict`.
794 | 
795 | Which will return a python dict with the following structure:
796 | 
797 | ```javascript
798 | {
799 |   "data": [
800 |     {
801 |       "id": 4,
802 |       "name": "User Userson",
803 |       "username": "userson4",
804 |       "email": "user.userson4@example.com",
805 |       "profile_image_url": "https://profileimageurl.com"
806 |     },
807 |     {
808 |       "id": 5,
809 |       "name": "User Userson",
810 |       "username": "userson5",
811 |       "email": "user.userson5@example.com",
812 |       "profile_image_url": "https://profileimageurl.com"
813 |     }
814 |   ],
815 |   "total_count": 2,
816 |   "has_more": false,
817 |   "page": 1,
818 |   "items_per_page": 10
819 | }
820 | ```
821 | 
822 | #### 5.6.3 Create
823 | 
824 | To create, you pass a `CreateSchemaType` object with the attributes, such as a `UserCreate` pydantic schema:
825 | 
826 | ```python
827 | from app.schemas.user import UserCreate
828 | 
829 | # Creating the object
830 | user_internal = UserCreate(name="user", username="myusername", email="user@example.com")
831 | 
832 | # Passing the object to be created
833 | await crud_users.create(db=db, object=user_internal)
834 | ```
835 | 
836 | #### 5.6.4 Exists
837 | 
838 | To just check if there is at least one row that matches a certain set of attributes, you should use `exists`:
839 | 
840 | ```python
841 | # This queries only the email variable
842 | # It returns True if there's at least one match or False if there is none
843 | await crud_users.exists(db=db, email="user@example.com")
844 | ```
845 | 
846 | #### 5.6.5 Count
847 | 
848 | You can also get the count of rows that match a certain filter:
849 | 
850 | ```python
851 | # Here I'm getting the count of users with the name 'User Userson'
852 | user_count = await crud_users.count(db=db, name="User Userson")
853 | ```
854 | 
855 | #### 5.6.6 Update
856 | 
857 | To update, you pass an `object` (which may be a `pydantic schema` or just a regular `dict`) plus the kwargs.
858 | The rows that match your `kwargs` are updated with the contents of `object`.
859 | 
860 | ```python
861 | # Here I'm updating the user with username == "myusername".
862 | # I'll change their name to "Updated Name"
863 | await crud_users.update(db=db, object={"name": "Updated Name"}, username="myusername")
864 | ```
865 | 
866 | #### 5.6.7 Delete
867 | 
868 | To delete we have two options:
869 | 
870 | - db_delete: actually deletes the row from the database
871 | - delete:
872 |   - adds `"is_deleted": True` and `deleted_at: datetime.now(UTC)` if the model inherits from `PersistentDeletion` (performs a soft delete), but keeps the object in the database.
873 |   - actually deletes the row from the database if the model does not inherit from `PersistentDeletion`
874 | 
875 | ```python
876 | # Here I'll just change is_deleted to True
877 | await crud_users.delete(db=db, username="myusername")
878 | 
879 | # Here I actually delete it from the database
880 | await crud_users.db_delete(db=db, username="myusername")
881 | ```
882 | 
883 | #### 5.6.8 Get Joined
884 | 
885 | To retrieve data with a join operation, you can use the `get_joined` method from your CRUD module. Here's how to do it:
886 | 
887 | ```python
888 | # Fetch a single record with a join on another model (e.g., User and Tier).
889 | result = await crud_users.get_joined(
890 |     db=db,  # The SQLAlchemy async session.
891 |     join_model=Tier,  # The model to join with (e.g., Tier).
892 |     schema_to_select=UserSchema,  # Pydantic schema for selecting User model columns (optional).
893 |     join_schema_to_select=TierSchema,  # Pydantic schema for selecting Tier model columns (optional).
894 | )
895 | ```
896 | 
897 | **Relevant Parameters:**
898 | 
899 | - `join_model`: The model you want to join with (e.g., Tier).
900 | - `join_prefix`: Optional prefix to be added to all columns of the joined model. If None, no prefix is added.
901 | - `join_on`: SQLAlchemy Join object for specifying the ON clause of the join. If None, the join condition is auto-detected based on foreign keys.
902 | - `schema_to_select`: A Pydantic schema to select specific columns from the primary model (e.g., UserSchema).
903 | - `join_schema_to_select`: A Pydantic schema to select specific columns from the joined model (e.g., TierSchema).
904 | - `join_type`: Specifies the type of join operation to perform. Can be "left" for a left outer join or "inner" for an inner join. Default "left".
905 | - `kwargs`: Filters to apply to the primary query.
906 | 
907 | This method allows you to perform a join operation, select columns from both models, and retrieve a single record.
908 | 
909 | #### 5.6.9 Get Multi Joined
910 | 
911 | Similarly, to retrieve multiple records with a join operation, you can use the `get_multi_joined` method. Here's how:
912 | 
913 | ```python
914 | # Retrieve a list of objects with a join on another model (e.g., User and Tier).
915 | result = await crud_users.get_multi_joined(
916 |     db=db,  # The SQLAlchemy async session.
917 |     join_model=Tier,  # The model to join with (e.g., Tier).
918 |     join_prefix="tier_",  # Optional prefix for joined model columns.
919 |     join_on=and_(User.tier_id == Tier.id, User.is_superuser == True),  # Custom join condition.
920 |     schema_to_select=UserSchema,  # Pydantic schema for selecting User model columns.
921 |     join_schema_to_select=TierSchema,  # Pydantic schema for selecting Tier model columns.
922 |     username="john_doe",  # Additional filter parameters.
923 | )
924 | ```
925 | 
926 | **Relevant Parameters:**
927 | 
928 | - `join_model`: The model you want to join with (e.g., Tier).
929 | - `join_prefix`: Optional prefix to be added to all columns of the joined model. If None, no prefix is added.
930 | - `join_on`: SQLAlchemy Join object for specifying the ON clause of the join. If None, the join condition is auto-detected based on foreign keys.
931 | - `schema_to_select`: A Pydantic schema to select specific columns from the primary model (e.g., UserSchema).
932 | - `join_schema_to_select`: A Pydantic schema to select specific columns from the joined model (e.g., TierSchema).
933 | - `join_type`: Specifies the type of join operation to perform. Can be "left" for a left outer join or "inner" for an inner join. Default "left".
934 | - `offset`: The offset (number of records to skip) for pagination. Default 0.
935 | - `limit`: The limit (maximum number of records to return) for pagination. Default 100.
936 | - `kwargs`: Filters to apply to the primary query.
937 | 
938 | 
939 | #### More Efficient Selecting
940 | 
941 | For the `get` and `get_multi` methods we have the option to define a `schema_to_select` attribute, which is what actually makes the queries more efficient. When you pass a `pydantic schema` (preferred) or a list of the names of the attributes in `schema_to_select` to the `get` or `get_multi` methods, only the attributes in the schema will be selected.
942 | 
943 | ```python
944 | from app.schemas.user import UserRead
945 | 
946 | # Here it's selecting all of the user's data
947 | await crud_users.get(db=db, username="myusername")
948 | 
949 | # Now it's only selecting the data that is in UserRead.
950 | # Since that's my response_model, it's all I need
951 | await crud_users.get(db=db, username="myusername", schema_to_select=UserRead)
952 | ```
953 |
954 | ### 5.7 Routes
955 |
956 | Inside `app/api/v1`, create a new `entities.py` file and add the desired routes:
957 |
958 | ```python
959 | from typing import Annotated
960 |
961 | from fastapi import APIRouter, Depends, Request
962 | from sqlalchemy.ext.asyncio import AsyncSession
963 |
964 | from app.core.db.database import async_get_db
965 | from app.schemas.entity import EntityRead
966 |
967 | ...
968 |
969 | router = APIRouter(tags=["entities"])
970 |
971 | @router.get("/entities/{id}", response_model=EntityRead)
972 | async def read_entity(request: Request, id: int, db: Annotated[AsyncSession, Depends(async_get_db)]):
973 |     entity = await crud_entities.get(db=db, id=id)
974 |
975 |     return entity
976 |
977 |
978 | ...
979 | ```
980 |
981 | Then in `app/api/v1/__init__.py` add the router, such as:
982 |
983 | ```python
984 | from fastapi import APIRouter
985 | from app.api.v1.entities import router as entities_router
986 |
987 | ...
988 |
989 | router = APIRouter(prefix="/v1")  # this should be there already
990 | ...
991 | router.include_router(entities_router)
992 | ```
993 |
994 | #### 5.7.1 Paginated Responses
995 |
996 | With the `get_multi` method we get a python `dict` with full support for pagination:
997 |
998 | ```javascript
999 | {
1000 |   "data": [
1001 |     {
1002 |       "id": 4,
1003 |       "name": "User Userson",
1004 |       "username": "userson4",
1005 |       "email": "user.userson4@example.com",
1006 |       "profile_image_url": "https://profileimageurl.com"
1007 |     },
1008 |     {
1009 |       "id": 5,
1010 |       "name": "User Userson",
1011 |       "username": "userson5",
1012 |       "email": "user.userson5@example.com",
1013 |       "profile_image_url": "https://profileimageurl.com"
1014 |     }
1015 |   ],
1016 |   "total_count": 2,
1017 |   "has_more": false,
1018 |   "page": 1,
1019 |   "items_per_page": 10
1020 | }
1021 | ```
1022 |
1023 | And in the endpoint, we can import from `fastcrud.paginated` the following functions and Pydantic Schema:
1024 |
1025 | ```python
1026 | from fastcrud.paginated import (
1027 |     PaginatedListResponse,  # What you'll use as a response_model to validate
1028 |     paginated_response,  # Creates a paginated response based on the parameters
1029 |     compute_offset,  # Calculate the offset for pagination ((page - 1) * items_per_page)
1030 | )
1031 | ```
1032 |
1033 | Then let's create the endpoint:
1034 |
1035 | ```python
1036 | from fastapi import Request
1037 |
1038 | from app.schemas.entity import EntityRead
1039 |
1040 | ...
1041 |
1042 |
1043 | @router.get("/entities", response_model=PaginatedListResponse[EntityRead])
1044 | async def read_entities(
1045 |     request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], page: int = 1, items_per_page: int = 10
1046 | ):
1047 |     entities_data = await crud_entities.get_multi(
1048 |         db=db,
1049 |         offset=compute_offset(page, items_per_page),
1050 |         limit=items_per_page,
1051 |         schema_to_select=EntityRead,
1052 |         is_deleted=False,
1053 |     )
1054 |
1055 |     return paginated_response(crud_data=entities_data, page=page, items_per_page=items_per_page)
1056 | ```
1057 |
1058 | #### 5.7.2 HTTP Exceptions
1059 |
1060 | To add exceptions, you may just import from `app/core/exceptions/http_exceptions` and optionally add a detail:
1061 |
1062 | ```python
1063 | from app.core.exceptions.http_exceptions import NotFoundException
1064 |
1065 | # If you want to specify the detail, just add the message
1066 | if not user:
1067 |     raise NotFoundException("User not found")
1068 |
1069 | # Or you may just use the default message
1070 | if not post:
1071 |     raise NotFoundException()
1072 | ```
1073 |
1074 | **The predefined possibilities in http_exceptions are the following:**
1075 |
1076 | - `CustomException`: 500 internal error
1077 | - `BadRequestException`: 400 bad request
1078 | - `NotFoundException`: 404 not found
1079 | - `ForbiddenException`: 403 forbidden
1080 | - `UnauthorizedException`: 401 unauthorized
1081 | - `UnprocessableEntityException`: 422 unprocessable entity
1082 | - `DuplicateValueException`: 422 unprocessable entity
1083 | - `RateLimitException`: 429 too many requests
1084 |
1085 | ### 5.8 Caching
1086 |
1087 | The `cache` decorator allows you to cache the results of FastAPI endpoint functions, enhancing response times and reducing the load on your application by storing and retrieving data in a cache.
1088 |
1089 | Caching the response of an endpoint is really simple: just apply the `cache` decorator to the endpoint function.
1090 |
1091 | > \[!WARNING\]
1092 | > Note that you should always pass `request` as a parameter to your endpoint function if you plan to use the `cache` decorator.
1093 |
1094 | ```python
1095 | ...
1096 | from app.core.utils.cache import cache
1097 |
1098 |
1099 | @app.get("/sample/{my_id}")
1100 | @cache(key_prefix="sample_data", expiration=3600, resource_id_name="my_id")
1101 | async def sample_endpoint(request: Request, my_id: int):
1102 |     # Endpoint logic here
1103 |     return {"data": "my_data"}
1104 | ```
1105 |
1106 | The way it works is:
1107 |
1108 | - the data is saved in redis with the following cache key: `sample_data:{my_id}`
1109 | - then the time to expire is set to 3600 seconds (the default)
1110 |
1111 | Another option is not passing the `resource_id_name`, but passing the `resource_id_type` (default int):
1112 |
1113 | ```python
1114 | ...
1115 | from app.core.utils.cache import cache
1116 |
1117 |
1118 | @app.get("/sample/{my_id}")
1119 | @cache(key_prefix="sample_data", resource_id_type=int)
1120 | async def sample_endpoint(request: Request, my_id: int):
1121 |     # Endpoint logic here
1122 |     return {"data": "my_data"}
1123 | ```
1124 |
1125 | In this case, what will happen is:
1126 |
1127 | - the `resource_id` will be inferred from the keyword arguments (`my_id` in this case)
1128 | - the data is saved in redis with the following cache key: `sample_data:{my_id}`
1129 | - then the time to expire is set to 3600 seconds (the default)
1130 |
1131 | Passing `resource_id_name` is usually preferred.
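
To make the key construction concrete, here is a minimal sketch of the inference and key composition described above. This is an illustration under stated assumptions, not the boilerplate's actual implementation (which lives in `app/core/utils/cache.py`); the helper names are hypothetical.

```python
from typing import Any


def infer_resource_id(kwargs: dict[str, Any], resource_id_type: type = int) -> Any:
    """Hypothetical helper: return the first keyword argument whose value
    matches resource_id_type, mimicking the inference described above."""
    for value in kwargs.values():
        if isinstance(value, resource_id_type):
            return value
    raise ValueError("No keyword argument matches resource_id_type")


def compose_cache_key(key_prefix: str, resource_id: Any) -> str:
    """Hypothetical helper: build the Redis key, e.g. 'sample_data:17'."""
    return f"{key_prefix}:{resource_id}"


# For sample_endpoint(request=..., my_id=17) with key_prefix="sample_data":
# infer_resource_id({"my_id": 17}) -> 17
# compose_cache_key("sample_data", 17) -> "sample_data:17"
```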
1132 |
1133 | ### 5.9 More Advanced Caching
1134 |
1135 | The behaviour of the `cache` decorator changes based on the request method of your endpoint.
1136 | It caches the result when applied to a **GET** endpoint, and it invalidates the cache with this `key_prefix` and `id` when applied to other endpoints (**PATCH**, **DELETE**).
1137 |
1138 | #### Invalidating Extra Keys
1139 |
1140 | If you also want to invalidate a cache entry stored under a different key, you can use the decorator with the `to_invalidate_extra` variable.
1141 |
1142 | In the following example, I want to invalidate the cached post when I delete it, but I also want to invalidate the cache for that user's list of posts, so it will not be out of sync.
1143 |
1144 | ```python
1145 | # The cache here will be saved as "{username}_posts:{username}":
1146 | @router.get("/{username}/posts", response_model=List[PostRead])
1147 | @cache(key_prefix="{username}_posts", resource_id_name="username")
1148 | async def read_posts(request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)]):
1149 |     ...
1150 |
1151 |
1152 | ...
1153 |
1154 | # Invalidating cache for the former endpoint by just passing the key_prefix and id as a dictionary:
1155 | @router.delete("/{username}/post/{id}")
1156 | @cache(
1157 |     "{username}_post_cache",
1158 |     resource_id_name="id",
1159 |     to_invalidate_extra={"{username}_posts": "{username}"},  # also invalidate "{username}_posts:{username}" cache
1160 | )
1161 | async def erase_post(
1162 |     request: Request,
1163 |     username: str,
1164 |     id: int,
1165 |     current_user: Annotated[UserRead, Depends(get_current_user)],
1166 |     db: Annotated[AsyncSession, Depends(async_get_db)],
1167 | ):
1168 |     ...
1169 |
1170 |
1171 | # And now I'll also invalidate when I update the post:
1172 | @router.patch("/{username}/post/{id}", response_model=PostRead)
1173 | @cache("{username}_post_cache", resource_id_name="id", to_invalidate_extra={"{username}_posts": "{username}"})
1174 | async def patch_post(
1175 |     request: Request,
1176 |     username: str,
1177 |     id: int,
1178 |     values: PostUpdate,
1179 |     current_user: Annotated[UserRead, Depends(get_current_user)],
1180 |     db: Annotated[AsyncSession, Depends(async_get_db)],
1181 | ):
1182 |     ...
1183 | ```
1184 |
1185 | > \[!WARNING\]
1186 | > Note that adding `to_invalidate_extra` will not work for **GET** requests.
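
For intuition, here is a rough sketch (a hypothetical helper, not the decorator's actual internals) of how a `to_invalidate_extra` mapping could be resolved into the concrete Redis keys to delete, using the request's path parameters:

```python
def extra_keys_to_invalidate(to_invalidate_extra: dict[str, str], **path_params: str) -> list[str]:
    """Resolve each '{placeholder}' pair into a concrete 'prefix:id' Redis key."""
    return [
        f"{prefix.format(**path_params)}:{resource_id.format(**path_params)}"
        for prefix, resource_id in to_invalidate_extra.items()
    ]


# With username="userson4", the erase_post example above would also delete:
# extra_keys_to_invalidate({"{username}_posts": "{username}"}, username="userson4")
# -> ["userson4_posts:userson4"]
```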
1187 |
1188 | #### Invalidate Extra By Pattern
1189 |
1190 | Let's assume we have an endpoint with a paginated response, such as:
1191 |
1192 | ```python
1193 | @router.get("/{username}/posts", response_model=PaginatedListResponse[PostRead])
1194 | @cache(
1195 |     key_prefix="{username}_posts:page_{page}:items_per_page:{items_per_page}",
1196 |     resource_id_name="username",
1197 |     expiration=60,
1198 | )
1199 | async def read_posts(
1200 |     request: Request,
1201 |     username: str,
1202 |     db: Annotated[AsyncSession, Depends(async_get_db)],
1203 |     page: int = 1,
1204 |     items_per_page: int = 10,
1205 | ):
1206 |     db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False)
1207 |     if not db_user:
1208 |         raise HTTPException(status_code=404, detail="User not found")
1209 |
1210 |     posts_data = await crud_posts.get_multi(
1211 |         db=db,
1212 |         offset=compute_offset(page, items_per_page),
1213 |         limit=items_per_page,
1214 |         schema_to_select=PostRead,
1215 |         created_by_user_id=db_user["id"],
1216 |         is_deleted=False,
1217 |     )
1218 |
1219 |     return paginated_response(crud_data=posts_data, page=page, items_per_page=items_per_page)
1220 | ```
1221 |
1222 | Just passing `to_invalidate_extra` will not work to invalidate this cache, since the key will change based on the `page` and `items_per_page` values.
1223 | To overcome this, we may use the `pattern_to_invalidate_extra` parameter:
1224 |
1225 | ```python
1226 | @router.patch("/{username}/post/{id}")
1227 | @cache("{username}_post_cache", resource_id_name="id", pattern_to_invalidate_extra=["{username}_posts:*"])
1228 | async def patch_post(
1229 |     request: Request,
1230 |     username: str,
1231 |     id: int,
1232 |     values: PostUpdate,
1233 |     current_user: Annotated[UserRead, Depends(get_current_user)],
1234 |     db: Annotated[AsyncSession, Depends(async_get_db)],
1235 | ):
1236 |     ...
1237 | ```
1238 |
1239 | Now it will invalidate all caches with a key that matches the pattern `"{username}_posts:*"`, which will work for the paginated responses.
1240 |
1241 | > \[!CAUTION\]
1242 | > Using `pattern_to_invalidate_extra` can be resource-intensive on large datasets. Use it judiciously and consider the potential impact on Redis performance. Be cautious with patterns that could match a large number of keys, as deleting many keys simultaneously may impact the performance of the Redis server.
1243 |
1244 | #### Client-side Caching
1245 |
1246 | For `client-side caching`, all you have to do is let the `Settings` class defined in `app/core/config.py` inherit from the `ClientSideCacheSettings` class. You can set the `CLIENT_CACHE_MAX_AGE` value in `.env`; it defaults to 60 seconds.
1247 |
1248 | ### 5.10 ARQ Job Queues
1249 |
1250 | Depending on the problem your API is solving, you might want to implement a job queue. A job queue allows you to run tasks in the background, and is usually aimed at functions that require longer run times and don't directly impact the user response in your frontend. As a rule of thumb, if a task takes more than 2 seconds to run, can be executed asynchronously, and its result is not needed for the next step of the user's interaction, then it is a good candidate for the job queue.
1251 |
1252 | > [!TIP]
1253 | > Very common candidates for background functions are calls to and from LLM endpoints (e.g. OpenAI or Openrouter). This is because they can span tens of seconds and often need to be further parsed and saved; see the sketch below.
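
As a concrete (hypothetical) illustration of that tip: a task like the one below blocks on a slow network call, so it belongs in the worker rather than in the request path. The endpoint URL and payload shape follow OpenAI's public chat-completions API; the model name and the environment variable for the API key are assumptions, and `httpx` is already a project dependency. Like any other task, it would be registered in `WorkerSettings.functions`, as shown in the next subsection.

```python
import os

import httpx


async def summarize_text(ctx, text: str) -> str:
    """Hypothetical ARQ task: call an LLM endpoint and return the summary."""
    async with httpx.AsyncClient(timeout=60) as client:
        response = await client.post(
            "https://api.openai.com/v1/chat/completions",
            headers={"Authorization": f"Bearer {os.environ['OPENAI_API_KEY']}"},  # assumed env var
            json={
                "model": "gpt-4o-mini",  # assumed model name
                "messages": [{"role": "user", "content": f"Summarize: {text}"}],
            },
        )
        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]
```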
1254 |
1255 | #### Background task creation
1256 |
1257 | For simple background tasks, you can just create a function in the `app/core/worker/functions.py` file. For more complex tasks, we recommend creating a new file in the `app/core/worker` directory.
1258 |
1259 | ```python
1260 | async def sample_background_task(ctx, name: str) -> str:
1261 |     await asyncio.sleep(5)
1262 |     return f"Task {name} is complete!"
1263 | ```
1264 |
1265 | Then add the function to the `functions` variable of the `WorkerSettings` class in `app/core/worker/settings.py` to make it available to the worker. If you created a new file in the `app/core/worker` directory, then simply import this function in the `app/core/worker/settings.py` file:
1266 |
1267 | ```python
1268 | from .functions import sample_background_task
1269 | from .your_module import sample_complex_background_task
1270 |
1271 | class WorkerSettings:
1272 |     functions = [sample_background_task, sample_complex_background_task]
1273 |     ...
1274 | ```
1275 |
1276 | #### Add the task to an endpoint
1277 |
1278 | Once you have created the background task, you can add it to any endpoint of your choice to be enqueued. The best practice is to enqueue the task in a **POST** endpoint, while having a **GET** endpoint to get more information on the task. For more details on how job results are handled, check the [ARQ docs](https://arq-docs.helpmanual.io/#job-results).
1279 |
1280 | ```python
1281 | @router.post("/task", response_model=Job, status_code=201)
1282 | async def create_task(message: str):
1283 |     job = await queue.pool.enqueue_job("sample_background_task", message)
1284 |     return {"id": job.job_id}
1285 |
1286 |
1287 | @router.get("/task/{task_id}")
1288 | async def get_task(task_id: str):
1289 |     job = ArqJob(task_id, queue.pool)
1290 |     return await job.info()
1291 | ```
1292 |
1293 | And finally, run the worker in parallel to your FastAPI application.
1294 |
1295 | > [!IMPORTANT]
1296 | > For any change to the `sample_background_task` to be reflected in the worker, you need to restart the worker (e.g. the docker container).
1297 |
1298 | If you are using `docker compose`, the worker is already running.
1299 | If you are doing it from scratch, run the following while in the `root` folder:
1300 |
1301 | ```sh
1302 | poetry run arq src.app.core.worker.settings.WorkerSettings
1303 | ```
1304 |
1305 | #### Database session with background tasks
1306 |
1307 | With time, your background functions will grow into "workflows" of increasing complexity and requirements. You will probably need a database session to get, create, update, or delete data as part of such a workflow.
1308 |
1309 | To do this, you can add the database session to the `ctx` object in the `startup` and `shutdown` functions in `app/core/worker/functions.py`, like in the example below:
1310 |
1311 | ```python
1312 | from arq.worker import Worker
1313 | from ...core.db.database import async_get_db
1314 |
1315 | async def startup(ctx: Worker) -> None:
1316 |     ctx["db"] = await anext(async_get_db())
1317 |     logging.info("Worker Started")
1318 |
1319 |
1320 | async def shutdown(ctx: Worker) -> None:
1321 |     await ctx["db"].close()
1322 |     logging.info("Worker end")
1323 | ```
1324 |
1325 | This will allow you to have the async database session always available in any background function and automatically close it on worker shutdown.
Once you have this database session, you can use it as follows:
1326 |
1327 | ```python
1328 | from arq.worker import Worker
1329 |
1330 | async def your_background_function(
1331 |     ctx: Worker,
1332 |     post_id: int,
1333 |     ...
1334 | ) -> Any:
1335 |     db = ctx["db"]
1336 |     post = await crud_posts.get(db=db, schema_to_select=PostRead, id=post_id)
1337 |     ...
1338 | ```
1339 |
1340 | > [!WARNING]
1341 | > When using database sessions, you will want to use Pydantic objects. However, these objects don't work well with the serialization required by ARQ tasks, so they will be retrieved as dictionaries inside the task.
1342 |
1343 | ### 5.11 Rate Limiting
1344 |
1345 | To limit how many times a user can make a request in a certain interval of time (very useful to create subscription plans or just to protect your API against DDoS), you may just use the `rate_limiter_dependency` dependency:
1346 |
1347 | ```python
1348 | from fastapi import Depends
1349 |
1350 | from app.api.dependencies import rate_limiter_dependency
1351 | from app.core.utils import queue
1352 | from app.schemas.job import Job
1353 |
1354 |
1355 | @router.post("/task", response_model=Job, status_code=201, dependencies=[Depends(rate_limiter_dependency)])
1356 | async def create_task(message: str):
1357 |     job = await queue.pool.enqueue_job("sample_background_task", message)
1358 |     return {"id": job.job_id}
1359 | ```
1360 |
1361 | By default, if no token is passed in the header (that is, the user is not authenticated), the user will be limited by their IP address with the default `limit` (how many times the user can make this request every period) and `period` (time in seconds) defined in `.env`.
1362 |
1363 | Even though this is useful, the real power comes from creating `tiers` (categories of users) and standard `rate_limits` (`limits` and `periods` defined for specific `paths`, that is, endpoints) for these tiers.
1364 |
1365 | All of the `tier` and `rate_limit` models, schemas, and endpoints are already created in the respective folders (and usable only by superusers). You may use the `create_tier` script to create the first tier (it uses the `.env` variable `TIER_NAME`, which is all you need to create a tier) or just use the API.
1366 |
1367 | Here I'll create a `free` tier:
1368 |
1369 |

1370 | passing name = free to api request body 1371 |

1372 | 1373 | And a `pro` tier: 1374 | 1375 |

1376 | passing name = pro to api request body 1377 |

1378 |
1379 | Then I'll associate a `rate_limit` for the path `api/v1/tasks/task` for each of them:
1380 |
1381 | > \[!WARNING\]
1382 | > Do not forget to add `api/v1/...` or any other prefix to the beginning of your path. For the structure of this boilerplate, paths start with `api/v1/`.
1383 |
1384 | 1 request every hour (3600 seconds) for the free tier:
1385 |
1386 |

1387 | passing path=api/v1/tasks/task, limit=1, period=3600, name=api_v1_tasks:1:3600 to free tier rate limit 1388 |

1389 | 1390 | 10 requests every hour for the pro tier: 1391 | 1392 |

1393 | passing path=api/v1/tasks/task, limit=10, period=3600, name=api_v1_tasks:10:3600 to pro tier rate limit 1394 |

1395 |
1396 | Now let's read all the tiers available (`GET api/v1/tiers`):
1397 |
1398 | ```javascript
1399 | {
1400 |   "data": [
1401 |     {
1402 |       "name": "free",
1403 |       "id": 1,
1404 |       "created_at": "2023-11-11T05:57:25.420360"
1405 |     },
1406 |     {
1407 |       "name": "pro",
1408 |       "id": 2,
1409 |       "created_at": "2023-11-12T00:40:00.759847"
1410 |     }
1411 |   ],
1412 |   "total_count": 2,
1413 |   "has_more": false,
1414 |   "page": 1,
1415 |   "items_per_page": 10
1416 | }
1417 | ```
1418 |
1419 | And read the `rate_limits` for the `pro` tier to ensure it's working (`GET api/v1/tier/pro/rate_limits`):
1420 |
1421 | ```javascript
1422 | {
1423 |   "data": [
1424 |     {
1425 |       "path": "api_v1_tasks_task",
1426 |       "limit": 10,
1427 |       "period": 3600,
1428 |       "id": 1,
1429 |       "tier_id": 2,
1430 |       "name": "api_v1_tasks:10:3600"
1431 |     }
1432 |   ],
1433 |   "total_count": 1,
1434 |   "has_more": false,
1435 |   "page": 1,
1436 |   "items_per_page": 10
1437 | }
1438 | ```
1439 |
1440 | Now, whenever an authenticated user makes a `POST` request to `api/v1/tasks/task`, they'll use the quota defined by their tier.
1441 | You may check this by getting the token from the `api/v1/login` endpoint, then passing it in the request header:
1442 |
1443 | ```sh
1444 | curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=test' \
1445 |   -H 'Authorization: Bearer '
1446 | ```
1447 |
1448 | > \[!TIP\]
1449 | > Since the `rate_limiter_dependency` dependency uses the `get_optional_user` dependency instead of `get_current_user`, it does not require authentication, but it will take the user's tier into account when a token is passed in the header. If you want to require authentication, also add the `get_current_user` dependency.
1450 |
1451 | To change a user's tier, you may just use the `PATCH api/v1/user/{username}/tier` endpoint.
1452 | Note that, for flexibility (since this is a boilerplate), it's not necessary to provide a `tier_id` when creating a user, but you should probably assign every user to a certain tier (say, `free`) once they are created.
1453 |
1454 | > \[!WARNING\]
1455 | > If a user does not have a `tier`, or the tier does not have a defined `rate limit` for the path while a token is still passed with the request, the default `limit` and `period` will be used and a warning will be logged to `app/logs`.
1456 |
1457 | ### 5.12 JWT Authentication
1458 |
1459 | #### 5.12.1 Details
1460 |
1461 | The JWT setup in this boilerplate works in the following way:
1462 |
1463 | 1. **JWT Access Tokens:** you access protected resources by passing this token in the request header.
1464 | 1. **Refresh Tokens:** you use this type of token to get an `access token`, which you'll use to access protected resources.
1465 |
1466 | The `access token` is short lived (default 30 minutes) to reduce the damage of a potential leak. The `refresh token`, on the other hand, is long lived (default 7 days), and you use it to renew your `access token` without the need to provide username and password every time it expires.
1467 |
1468 | Since the `refresh token` lasts for a longer time, it's stored as a cookie in a secure way:
1469 |
1470 | ```python
1471 | # app/api/v1/login
1472 |
1473 | ...
1474 | response.set_cookie(
1475 |     key="refresh_token",
1476 |     value=refresh_token,
1477 |     httponly=True,  # Prevent access through JavaScript
1478 |     secure=True,  # Ensure cookie is sent over HTTPS only
1479 |     samesite="Lax",  # Default to Lax for reasonable balance between security and usability
1480 |     max_age=number_of_seconds,  # Set a max age for the cookie
1481 | )
1482 | ...
1483 | ```
1484 |
1485 | You may change it to suit your needs. The possible options for `samesite` are:
1486 |
1487 | - `Lax`: Cookies will be sent in top-level navigations (like clicking on a link to go to another site), but not in API requests or images loaded from other sites.
1488 | - `Strict`: Cookies are sent only on top-level navigations from the same site that set the cookie, enhancing privacy but potentially disrupting user sessions.
1489 | - `None`: Cookies will be sent with both same-site and cross-site requests.
1490 |
1491 | #### 5.12.2 Usage
1492 |
1493 | What the client should do is:
1494 |
1495 | - `Login`: Send credentials to `/api/v1/login`. Store the returned access token in memory for subsequent requests.
1496 | - `Accessing Protected Routes`: Include the access token in the Authorization header.
1497 | - `Token Renewal`: On access token expiry, the front end should automatically call `/api/v1/refresh` for a new token.
1498 | - `Login Again`: If the refresh token is expired, credentials should be sent to `/api/v1/login` again, storing the new access token in memory.
1499 | - `Logout`: Call `/api/v1/logout` to end the session securely.
1500 |
1501 | This authentication setup provides a robust, secure, and user-friendly way to handle user sessions in your API applications.
1502 |
1503 | ### 5.13 Running
1504 |
1505 | If you are using docker compose, just running the following command should ensure everything is working:
1506 |
1507 | ```sh
1508 | docker compose up
1509 | ```
1510 |
1511 | If you are doing it from scratch, ensure your postgres and redis are running, then,
1512 | while in the `root` folder, run the following to start the application with the uvicorn server:
1513 |
1514 | ```sh
1515 | poetry run uvicorn src.app.main:app --reload
1516 | ```
1517 |
1518 | And for the worker:
1519 |
1520 | ```sh
1521 | poetry run arq src.app.core.worker.settings.WorkerSettings
1522 | ```
1523 | ### 5.14 Create Application
1524 |
1525 | If you want to stop tables from being created every time you run the API, you should disable this here:
1526 |
1527 | ```python
1528 | # app/main.py
1529 |
1530 | from .api import router
1531 | from .core.config import settings
1532 | from .core.setup import create_application
1533 |
1534 | # create_tables_on_start defaults to True
1535 | app = create_application(router=router, settings=settings, create_tables_on_start=False)
1536 | ```
1537 |
1538 | This `create_application` function is defined in `app/core/setup.py`, and it's a flexible way to configure the behavior of your application.
1539 |
1540 | A few examples:
1541 |
1542 | - Deactivate or password protect `/docs`
1543 | - Add client-side cache middleware
1544 | - Add startup and shutdown event handlers for cache, queue, and rate limit
1545 |
1546 | ### 5.15 Opting Out of Services
1547 |
1548 | To opt out of services (like `Redis`, `Queue`, `Rate Limiter`), head to the `Settings` class in `src/app/core/config`:
1549 |
1550 | ```python
1551 | # src/app/core/config
1552 | import os
1553 | from enum import Enum
1554 |
1555 | from pydantic_settings import BaseSettings
1556 | from starlette.config import Config
1557 |
1558 | current_file_dir = os.path.dirname(os.path.realpath(__file__))
1559 | env_path = os.path.join(current_file_dir, "..", "..", ".env")
1560 | config = Config(env_path)
1561 | ...
1562 |
1563 | class Settings(
1564 |     AppSettings,
1565 |     PostgresSettings,
1566 |     CryptSettings,
1567 |     FirstUserSettings,
1568 |     TestSettings,
1569 |     RedisCacheSettings,
1570 |     ClientSideCacheSettings,
1571 |     RedisQueueSettings,
1572 |     RedisRateLimiterSettings,
1573 |     DefaultRateLimitSettings,
1574 |     EnvironmentSettings,
1575 | ):
1576 |     pass
1577 |
1578 |
1579 | settings = Settings()
1580 | ```
1581 |
1582 | And remove the settings classes of the services you do not need. For example, without using redis (removed `Cache`, `Queue`, and `Rate limit`):
1583 |
1584 | ```python
1585 | class Settings(
1586 |     AppSettings,
1587 |     PostgresSettings,
1588 |     CryptSettings,
1589 |     FirstUserSettings,
1590 |     TestSettings,
1591 |     ClientSideCacheSettings,
1592 |     DefaultRateLimitSettings,
1593 |     EnvironmentSettings,
1594 | ):
1595 |     pass
1596 | ```
1597 |
1598 | Then comment or remove the services you do not want from `docker-compose.yml`. Here, I removed the `redis` and `worker` services (note that `redis` is also gone from `web`'s `depends_on` and from the `volumes` section):
1599 |
1600 | ```yml
1601 | version: '3.8'
1602 |
1603 | services:
1604 |   web:
1605 |     build:
1606 |       context: .
1607 |       dockerfile: Dockerfile
1608 |     # -------- replace with comment to run with gunicorn --------
1609 |     command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
1610 |     # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
1611 |     env_file:
1612 |       - ./src/.env
1613 |     # -------- replace with comment if you are using nginx --------
1614 |     ports:
1615 |       - "8000:8000"
1616 |     # expose:
1617 |     #  - "8000"
1618 |     depends_on:
1619 |       - db
1621 |     volumes:
1622 |       - ./src/app:/code/app
1623 |       - ./src/.env:/code/.env
1624 |   db:
1625 |     image: postgres:13
1626 |     env_file:
1627 |       - ./src/.env
1628 |     volumes:
1629 |       - postgres-data:/var/lib/postgresql/data
1630 |     # -------- replace with comment to run migrations with docker --------
1631 |     expose:
1632 |       - "5432"
1633 |     # ports:
1634 |     #  - 5432:5432
1635 |
1636 | volumes:
1637 |   postgres-data:
1639 |   #pgadmin-data:
1640 | ```
1641 |
1642 | ## 6. Running in Production
1643 |
1644 | ### 6.1 Uvicorn Workers with Gunicorn
1645 |
1646 | In production, you may want to run using gunicorn to manage uvicorn workers:
1647 |
1648 | ```sh
1649 | command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
1650 | ```
1651 |
1652 | Here it's running with 4 workers, but you should tune this number based on how many cores your machine has.
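
If you are unsure where to start, the Gunicorn documentation suggests `(2 x number_of_cores) + 1` workers as a reasonable default; a quick way to compute it:

```python
# Gunicorn's suggested starting point for the worker count.
import multiprocessing

workers = (2 * multiprocessing.cpu_count()) + 1
print(workers)  # e.g. 9 on a 4-core machine
```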
1653 |
1654 | To switch to gunicorn when using docker compose, just swap which line is commented.
1655 | This part in `docker-compose.yml`:
1656 |
1657 | ```YAML
1658 | # docker-compose.yml
1659 |
1660 | # -------- replace with comment to run with gunicorn --------
1661 | command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
1662 | # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
1663 | ```
1664 |
1665 | Should be changed to:
1666 |
1667 | ```YAML
1668 | # docker-compose.yml
1669 |
1670 | # -------- replace with comment to run with uvicorn --------
1671 | # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
1672 | command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
1673 | ```
1674 |
1675 | And the same in `Dockerfile`.
1676 | This part:
1677 |
1678 | ```Dockerfile
1679 | # Dockerfile
1680 |
1681 | CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
1682 | # CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
1683 | ```
1684 |
1685 | Should be changed to:
1686 |
1687 | ```Dockerfile
1688 | # Dockerfile
1689 |
1690 | # CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
1691 | CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
1692 | ```
1693 |
1694 | > \[!CAUTION\]
1695 | > Do not forget to set the `ENVIRONMENT` in `.env` to `production` unless you want the API docs to be public.
1696 |
1697 | ### 6.2 Running with NGINX
1698 |
1699 | NGINX is a high-performance web server, known for its stability, rich feature set, simple configuration, and low resource consumption. NGINX acts as a reverse proxy, that is, it receives client requests, forwards them to the FastAPI server (running via Uvicorn or Gunicorn), and then passes the responses back to the clients.
1700 |
1701 | To run with NGINX, you start by uncommenting the following part in your `docker-compose.yml`:
1702 |
1703 | ```YAML
1704 | # docker-compose.yml
1705 |
1706 | ...
1707 | # -------- uncomment to run with nginx --------
1708 | # nginx:
1709 | #   image: nginx:latest
1710 | #   ports:
1711 | #     - "80:80"
1712 | #   volumes:
1713 | #     - ./default.conf:/etc/nginx/conf.d/default.conf
1714 | #   depends_on:
1715 | #     - web
1716 | ...
1717 | ```
1718 |
1719 | Which should be changed to:
1720 |
1721 | ```YAML
1722 | # docker-compose.yml
1723 |
1724 | ...
1725 | #-------- uncomment to run with nginx --------
1726 | nginx:
1727 |   image: nginx:latest
1728 |   ports:
1729 |     - "80:80"
1730 |   volumes:
1731 |     - ./default.conf:/etc/nginx/conf.d/default.conf
1732 |   depends_on:
1733 |     - web
1734 | ...
1735 | ```
1736 |
1737 | Then comment the following part:
1738 |
1739 | ```YAML
1740 | # docker-compose.yml
1741 |
1742 | services:
1743 |   web:
1744 |     ...
1745 |     # -------- Both of the following should be commented to run with nginx --------
1746 |     command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
1747 |     # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
1748 | ```
1749 |
1750 | Which becomes:
1751 |
1752 | ```YAML
1753 | # docker-compose.yml
1754 |
1755 | services:
1756 |   web:
1757 |     ...
1758 |     # -------- Both of the following should be commented to run with nginx --------
1759 |     # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
1760 |     # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000
1761 | ```
1762 |
1763 | Then pick the way you want to run (uvicorn, or gunicorn managing uvicorn workers) in `Dockerfile`.
1764 | Uncomment the one you want and comment out the other:
1765 |
1766 | ```Dockerfile
1767 | # Dockerfile
1768 |
1769 | CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
1770 | # CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"]
1771 | ```
1772 |
1773 | And finally head to `http://localhost/docs`.
1774 |
1775 | #### 6.2.1 One Server
1776 |
1777 | If you want to run with one server only, your setup should be ready. Just make sure the only uncommented part in `default.conf` is:
1778 |
1779 | ```conf
1780 | # default.conf
1781 |
1782 | # ---------------- Running With One Server ----------------
1783 | server {
1784 |     listen 80;
1785 |
1786 |     location / {
1787 |         proxy_pass http://web:8000;
1788 |         proxy_set_header Host $host;
1789 |         proxy_set_header X-Real-IP $remote_addr;
1790 |         proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
1791 |         proxy_set_header X-Forwarded-Proto $scheme;
1792 |     }
1793 | }
1794 | ```
1795 |
1796 | So just type on your browser: `http://localhost/docs`.
1797 |
1798 | #### 6.2.2 Multiple Servers
1799 |
1800 | NGINX can distribute incoming network traffic across multiple servers, improving the efficiency and capacity utilization of your application.
1801 |
1802 | To run with multiple servers, just comment the `Running With One Server` part in `default.conf` and uncomment the other one:
1803 |
1804 | ```conf
1805 | # default.conf
1806 |
1807 | # ---------------- Running With One Server ----------------
1808 | ...
1809 |
1810 | # ---------------- To Run with Multiple Servers, Uncomment below ----------------
1811 | upstream fastapi_app {
1812 |     server fastapi1:8000;  # Replace with actual server names or IP addresses
1813 |     server fastapi2:8000;
1814 |     # Add more servers as needed
1815 | }
1816 |
1817 | server {
1818 |     listen 80;
1819 |
1820 |     location / {
1821 |         proxy_pass http://fastapi_app;
1822 |         proxy_set_header Host $host;
1823 |         proxy_set_header X-Real-IP $remote_addr;
1824 |         proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
1825 |         proxy_set_header X-Forwarded-Proto $scheme;
1826 |     }
1827 | }
1828 | ```
1829 |
1830 | And finally, on your browser: `http://localhost/docs`.
1831 |
1832 | > \[!WARNING\]
1833 | > Note that we are using `fastapi1:8000` and `fastapi2:8000` as examples; you should replace them with the actual names of your services and the ports they're running on.
1834 |
1835 | ## 7. Testing
1836 |
1837 | While in the tests folder, create your test file with the name "test\_{entity}.py", replacing entity with what you're testing:
1838 |
1839 | ```sh
1840 | touch test_items.py
1841 | ```
1842 |
1843 | Finally, create your tests (you may want to copy the structure in `test_user.py`).
1844 |
1845 | Now, to run:
1846 |
1847 | ### 7.1 Docker Compose
1848 |
1849 | First you need to uncomment the following part in the `docker-compose.yml` file:
1850 |
1851 | ```YAML
1852 | #-------- uncomment to run tests --------
1853 | # pytest:
1854 | #   build:
1855 | #     context: .
1856 | #     dockerfile: Dockerfile
1857 | #   env_file:
1858 | #     - ./src/.env
1859 | #   depends_on:
1860 | #     - db
1861 | #     - redis
1862 | #   command: python -m pytest ./tests
1863 | #   volumes:
1864 | #     - .:/code
1865 | ```
1866 |
1867 | You'll get:
1868 |
1869 | ```YAML
1870 | #-------- uncomment to run tests --------
1871 | pytest:
1872 |   build:
1873 |     context: .
1874 |     dockerfile: Dockerfile
1875 |   env_file:
1876 |     - ./src/.env
1877 |   depends_on:
1878 |     - db
1879 |     - redis
1880 |   command: python -m pytest ./tests
1881 |   volumes:
1882 |     - .:/code
1883 | ```
1884 |
1885 | Start the Docker Compose services:
1886 |
1887 | ```sh
1888 | docker-compose up -d
1889 | ```
1890 |
1891 | It will automatically run the tests, but if you want to run them again later:
1892 |
1893 | ```sh
1894 | docker-compose run --rm pytest
1895 | ```
1896 |
1897 | ### 7.2 From Scratch
1898 |
1899 | While in the `root` folder, run:
1900 |
1901 | ```sh
1902 | poetry run python -m pytest
1903 | ```
1904 |
1905 | ## 8. Contributing
1906 |
1907 | Read [contributing](CONTRIBUTING.md).
1908 |
1909 | ## 9. References
1910 |
1911 | This project was inspired by a few others; it's based on them with things changed to the way I like (and Pydantic and SQLAlchemy updated).
1912 |
1913 | - [`Full Stack FastAPI and PostgreSQL`](https://github.com/tiangolo/full-stack-fastapi-postgresql) by @tiangolo himself
1914 | - [`FastAPI Microservices`](https://github.com/Kludex/fastapi-microservices) by @kludex, which heavily inspired this boilerplate
1915 | - [`Async Web API with FastAPI + SQLAlchemy 2.0`](https://github.com/rhoboro/async-fastapi-sqlalchemy) for sqlalchemy 2.0 ORM examples
1916 | - [`FastAPI Rocket Boilerplate`](https://github.com/asacristani/fastapi-rocket-boilerplate/tree/main) for docker compose
1917 |
1918 | ## 10. License
1919 |
1920 | [`MIT`](LICENSE.md)
1921 |
1922 | ## 11. Contact
1923 |
1924 | Benav Labs – [benav.io](https://benav.io)
1925 | [github.com/benavlabs](https://github.com/benavlabs/)
1926 |
1927 |
1928 | 1929 | Powered by Benav Labs - benav.io 1930 | -------------------------------------------------------------------------------- /default.conf: -------------------------------------------------------------------------------- 1 | # ---------------- Running With One Server ---------------- 2 | server { 3 | listen 80; 4 | 5 | location / { 6 | proxy_pass http://web:8000; 7 | proxy_set_header Host $host; 8 | proxy_set_header X-Real-IP $remote_addr; 9 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 10 | proxy_set_header X-Forwarded-Proto $scheme; 11 | } 12 | } 13 | 14 | 15 | # # ---------------- To Run with Multiple Servers, Uncomment below ---------------- 16 | # upstream fastapi_app { 17 | # server fastapi1:8000; # Replace with actual server names or IP addresses 18 | # server fastapi2:8000; 19 | # # Add more servers as needed 20 | # } 21 | 22 | # server { 23 | # listen 80; 24 | 25 | # location / { 26 | # proxy_pass http://fastapi_app; 27 | # proxy_set_header Host $host; 28 | # proxy_set_header X-Real-IP $remote_addr; 29 | # proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 30 | # proxy_set_header X-Forwarded-Proto $scheme; 31 | # } 32 | # } 33 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | web: 5 | build: 6 | context: . 7 | dockerfile: Dockerfile 8 | # -------- replace with comment to run with gunicorn -------- 9 | command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload 10 | # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 11 | env_file: 12 | - ./src/.env 13 | # -------- replace with comment if you are using nginx -------- 14 | ports: 15 | - "8000:8000" 16 | # expose: 17 | # - "8000" 18 | depends_on: 19 | - db 20 | - redis 21 | volumes: 22 | - ./src/app:/code/app 23 | - ./src/.env:/code/.env 24 | 25 | worker: 26 | build: 27 | context: . 28 | dockerfile: Dockerfile 29 | command: arq app.core.worker.settings.WorkerSettings 30 | env_file: 31 | - ./src/.env 32 | depends_on: 33 | - db 34 | - redis 35 | volumes: 36 | - ./src/app:/code/app 37 | - ./src/.env:/code/.env 38 | 39 | db: 40 | image: postgres:13 41 | env_file: 42 | - ./src/.env 43 | volumes: 44 | - postgres-data:/var/lib/postgresql/data 45 | # -------- replace with comment to run migrations with docker -------- 46 | expose: 47 | - "5432" 48 | # ports: 49 | # - 5432:5432 50 | 51 | redis: 52 | image: redis:alpine 53 | volumes: 54 | - redis-data:/data 55 | expose: 56 | - "6379" 57 | 58 | #-------- uncomment to run with pgadmin -------- 59 | # pgadmin: 60 | # container_name: pgadmin4 61 | # image: dpage/pgadmin4:latest 62 | # restart: always 63 | # ports: 64 | # - "5050:80" 65 | # volumes: 66 | # - pgadmin-data:/var/lib/pgadmin 67 | # env_file: 68 | # - ./src/.env 69 | # depends_on: 70 | # - db 71 | 72 | #-------- uncomment to run with nginx -------- 73 | # nginx: 74 | # image: nginx:latest 75 | # ports: 76 | # - "80:80" 77 | # volumes: 78 | # - ./default.conf:/etc/nginx/conf.d/default.conf 79 | # depends_on: 80 | # - web 81 | 82 | #-------- uncomment to create first superuser -------- 83 | # create_superuser: 84 | # build: 85 | # context: . 
86 | # dockerfile: Dockerfile 87 | # env_file: 88 | # - ./src/.env 89 | # depends_on: 90 | # - db 91 | # - web 92 | # command: python -m src.scripts.create_first_superuser 93 | # volumes: 94 | # - ./src:/code/src 95 | 96 | #-------- uncomment to run tests -------- 97 | # pytest: 98 | # build: 99 | # context: . 100 | # dockerfile: Dockerfile 101 | # env_file: 102 | # - ./src/.env 103 | # depends_on: 104 | # - db 105 | # - redis 106 | # command: python -m pytest ./tests 107 | # volumes: 108 | # - .:/code 109 | 110 | #-------- uncomment to create first tier -------- 111 | # create_tier: 112 | # build: 113 | # context: . 114 | # dockerfile: Dockerfile 115 | # env_file: 116 | # - ./src/.env 117 | # depends_on: 118 | # - db 119 | # - web 120 | # command: python -m src.scripts.create_first_tier 121 | # volumes: 122 | # - ./src:/code/src 123 | 124 | volumes: 125 | postgres-data: 126 | redis-data: 127 | #pgadmin-data: 128 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.11 3 | warn_return_any = True 4 | warn_unused_configs = True 5 | ignore_missing_imports = True 6 | 7 | [mypy-src.app.*] 8 | disallow_untyped_defs = True 9 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "fastapi-boilerplate" 3 | version = "0.1.0" 4 | description = "A fully Async FastAPI boilerplate using SQLAlchemy and Pydantic 2" 5 | authors = ["Igor Magalhaes "] 6 | license = "MIT" 7 | readme = "README.md" 8 | packages = [{ include = "src" }] 9 | 10 | [tool.poetry.dependencies] 11 | python = "~3.11" 12 | python-dotenv = "^1.0.0" 13 | pydantic = { extras = ["email"], version = "^2.6.1" } 14 | fastapi = "^0.109.1" 15 | uvicorn = "^0.27.0" 16 | uvloop = "^0.19.0" 17 | httptools = "^0.6.1" 18 | uuid = "^1.30" 19 | alembic = "^1.13.1" 20 | asyncpg = "^0.29.0" 21 | SQLAlchemy-Utils = "^0.41.1" 22 | python-jose = "^3.3.0" 23 | SQLAlchemy = "^2.0.25" 24 | pytest = "^7.4.2" 25 | python-multipart = "^0.0.9" 26 | greenlet = "^2.0.2" 27 | httpx = "^0.26.0" 28 | pydantic-settings = "^2.0.3" 29 | redis = "^5.0.1" 30 | arq = "^0.25.0" 31 | gunicorn = "^22.0.0" 32 | bcrypt = "^4.1.1" 33 | faker = "^26.0.0" 34 | psycopg2-binary = "^2.9.9" 35 | pytest-mock = "^3.14.0" 36 | fastcrud = "^0.15.5" 37 | 38 | 39 | [build-system] 40 | requires = ["poetry-core"] 41 | build-backend = "poetry.core.masonry.api" 42 | 43 | [tool.ruff] 44 | target-version = "py311" 45 | line-length = 120 46 | fix = true 47 | select = [ 48 | # https://docs.astral.sh/ruff/rules/#pyflakes-f 49 | "F", # Pyflakes 50 | # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w 51 | "E", # pycodestyle 52 | "W", # Warning 53 | # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 54 | # https://docs.astral.sh/ruff/rules/#mccabe-c90 55 | "C", # Complexity (mccabe+) & comprehensions 56 | # https://docs.astral.sh/ruff/rules/#pyupgrade-up 57 | "UP", # pyupgrade 58 | # https://docs.astral.sh/ruff/rules/#isort-i 59 | "I", # isort 60 | ] 61 | ignore = [ 62 | # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w 63 | "E402", # module level import not at top of file 64 | # https://docs.astral.sh/ruff/rules/#pyupgrade-up 65 | "UP006", # use-pep585-annotation 66 | "UP007", # use-pep604-annotation 67 | "E741", # Ambiguous variable name 68 | # "UP035", # deprecated-assertion 69 | ] 
70 | [tool.ruff.per-file-ignores] 71 | "__init__.py" = [ 72 | "F401", # unused import 73 | "F403", # star imports 74 | ] 75 | 76 | [tool.ruff.mccabe] 77 | max-complexity = 24 78 | 79 | [tool.ruff.pydocstyle] 80 | convention = "numpy" 81 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/__init__.py -------------------------------------------------------------------------------- /src/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = migrations 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to migrations/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
53 | 54 | # set to 'true' to search source files recursively 55 | # in each "version_locations" directory 56 | # new in Alembic version 1.10 57 | # recursive_version_locations = false 58 | 59 | # the output encoding used when revision files 60 | # are written from script.py.mako 61 | # output_encoding = utf-8 62 | 63 | sqlalchemy.url = driver://user:pass@localhost/dbname 64 | 65 | 66 | [post_write_hooks] 67 | # post_write_hooks defines scripts or Python functions that are run 68 | # on newly generated revision scripts. See the documentation for further 69 | # detail and examples 70 | 71 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 72 | # hooks = black 73 | # black.type = console_scripts 74 | # black.entrypoint = black 75 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 76 | 77 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 78 | # hooks = ruff 79 | # ruff.type = exec 80 | # ruff.executable = %(here)s/.venv/bin/ruff 81 | # ruff.options = --fix REVISION_SCRIPT_FILENAME 82 | 83 | # Logging configuration 84 | [loggers] 85 | keys = root,sqlalchemy,alembic 86 | 87 | [handlers] 88 | keys = console 89 | 90 | [formatters] 91 | keys = generic 92 | 93 | [logger_root] 94 | level = WARN 95 | handlers = console 96 | qualname = 97 | 98 | [logger_sqlalchemy] 99 | level = WARN 100 | handlers = 101 | qualname = sqlalchemy.engine 102 | 103 | [logger_alembic] 104 | level = INFO 105 | handlers = 106 | qualname = alembic 107 | 108 | [handler_console] 109 | class = StreamHandler 110 | args = (sys.stderr,) 111 | level = NOTSET 112 | formatter = generic 113 | 114 | [formatter_generic] 115 | format = %(levelname)-5.5s [%(name)s] %(message)s 116 | datefmt = %H:%M:%S 117 | -------------------------------------------------------------------------------- /src/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/__init__.py -------------------------------------------------------------------------------- /src/app/api/__init__.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from ..api.v1 import router as v1_router 4 | 5 | router = APIRouter(prefix="/api") 6 | router.include_router(v1_router) 7 | -------------------------------------------------------------------------------- /src/app/api/dependencies.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Any 2 | 3 | from fastapi import Depends, HTTPException, Request 4 | from sqlalchemy.ext.asyncio import AsyncSession 5 | 6 | from ..core.config import settings 7 | from ..core.db.database import async_get_db 8 | from ..core.exceptions.http_exceptions import ForbiddenException, RateLimitException, UnauthorizedException 9 | from ..core.logger import logging 10 | from ..core.security import TokenType, oauth2_scheme, verify_token 11 | from ..core.utils.rate_limit import rate_limiter 12 | from ..crud.crud_rate_limit import crud_rate_limits 13 | from ..crud.crud_tier import crud_tiers 14 | from ..crud.crud_users import crud_users 15 | from ..models.user import User 16 | from ..schemas.rate_limit import sanitize_path 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | DEFAULT_LIMIT = settings.DEFAULT_RATE_LIMIT_LIMIT 21 | DEFAULT_PERIOD = settings.DEFAULT_RATE_LIMIT_PERIOD 22 | 23 | 24 | async def 
get_current_user( 25 | token: Annotated[str, Depends(oauth2_scheme)], db: Annotated[AsyncSession, Depends(async_get_db)] 26 | ) -> dict[str, Any] | None: 27 | token_data = await verify_token(token, TokenType.ACCESS, db) 28 | if token_data is None: 29 | raise UnauthorizedException("User not authenticated.") 30 | 31 | if "@" in token_data.username_or_email: 32 | user: dict | None = await crud_users.get(db=db, email=token_data.username_or_email, is_deleted=False) 33 | else: 34 | user = await crud_users.get(db=db, username=token_data.username_or_email, is_deleted=False) 35 | 36 | if user: 37 | return user 38 | 39 | raise UnauthorizedException("User not authenticated.") 40 | 41 | 42 | async def get_optional_user(request: Request, db: AsyncSession = Depends(async_get_db)) -> dict | None: 43 | token = request.headers.get("Authorization") 44 | if not token: 45 | return None 46 | 47 | try: 48 | token_type, _, token_value = token.partition(" ") 49 | if token_type.lower() != "bearer" or not token_value: 50 | return None 51 | 52 | token_data = await verify_token(token_value, TokenType.ACCESS, db) 53 | if token_data is None: 54 | return None 55 | 56 | return await get_current_user(token_value, db=db) 57 | 58 | except HTTPException as http_exc: 59 | if http_exc.status_code != 401: 60 | logger.error(f"Unexpected HTTPException in get_optional_user: {http_exc.detail}") 61 | return None 62 | 63 | except Exception as exc: 64 | logger.error(f"Unexpected error in get_optional_user: {exc}") 65 | return None 66 | 67 | 68 | async def get_current_superuser(current_user: Annotated[dict, Depends(get_current_user)]) -> dict: 69 | if not current_user["is_superuser"]: 70 | raise ForbiddenException("You do not have enough privileges.") 71 | 72 | return current_user 73 | 74 | 75 | async def rate_limiter_dependency( 76 | request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], user: User | None = Depends(get_optional_user) 77 | ) -> None: 78 | if hasattr(request.app.state, "initialization_complete"): 79 | await request.app.state.initialization_complete.wait() 80 | 81 | path = sanitize_path(request.url.path) 82 | if user: 83 | user_id = user["id"] 84 | tier = await crud_tiers.get(db, id=user["tier_id"]) 85 | if tier: 86 | rate_limit = await crud_rate_limits.get(db=db, tier_id=tier["id"], path=path) 87 | if rate_limit: 88 | limit, period = rate_limit["limit"], rate_limit["period"] 89 | else: 90 | logger.warning( 91 | f"User {user_id} with tier '{tier['name']}' has no specific rate limit for path '{path}'. \ 92 | Applying default rate limit." 93 | ) 94 | limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD 95 | else: 96 | logger.warning(f"User {user_id} has no assigned tier. 
Applying default rate limit.") 97 | limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD 98 | else: 99 | user_id = request.client.host 100 | limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD 101 | 102 | is_limited = await rate_limiter.is_rate_limited(db=db, user_id=user_id, path=path, limit=limit, period=period) 103 | if is_limited: 104 | raise RateLimitException("Rate limit exceeded.") 105 | -------------------------------------------------------------------------------- /src/app/api/v1/__init__.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from .login import router as login_router 4 | from .logout import router as logout_router 5 | from .posts import router as posts_router 6 | from .rate_limits import router as rate_limits_router 7 | from .tasks import router as tasks_router 8 | from .tiers import router as tiers_router 9 | from .users import router as users_router 10 | 11 | router = APIRouter(prefix="/v1") 12 | router.include_router(login_router) 13 | router.include_router(logout_router) 14 | router.include_router(users_router) 15 | router.include_router(posts_router) 16 | router.include_router(tasks_router) 17 | router.include_router(tiers_router) 18 | router.include_router(rate_limits_router) 19 | -------------------------------------------------------------------------------- /src/app/api/v1/login.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Annotated 3 | 4 | from fastapi import APIRouter, Depends, Request, Response 5 | from fastapi.security import OAuth2PasswordRequestForm 6 | from sqlalchemy.ext.asyncio import AsyncSession 7 | 8 | from ...core.config import settings 9 | from ...core.db.database import async_get_db 10 | from ...core.exceptions.http_exceptions import UnauthorizedException 11 | from ...core.schemas import Token 12 | from ...core.security import ( 13 | ACCESS_TOKEN_EXPIRE_MINUTES, 14 | authenticate_user, 15 | create_access_token, 16 | create_refresh_token, 17 | verify_token, 18 | ) 19 | 20 | router = APIRouter(tags=["login"]) 21 | 22 | 23 | @router.post("/login", response_model=Token) 24 | async def login_for_access_token( 25 | response: Response, 26 | form_data: Annotated[OAuth2PasswordRequestForm, Depends()], 27 | db: Annotated[AsyncSession, Depends(async_get_db)], 28 | ) -> dict[str, str]: 29 | user = await authenticate_user(username_or_email=form_data.username, password=form_data.password, db=db) 30 | if not user: 31 | raise UnauthorizedException("Wrong username, email or password.") 32 | 33 | access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) 34 | access_token = await create_access_token(data={"sub": user["username"]}, expires_delta=access_token_expires) 35 | 36 | refresh_token = await create_refresh_token(data={"sub": user["username"]}) 37 | max_age = settings.REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60 38 | 39 | response.set_cookie( 40 | key="refresh_token", value=refresh_token, httponly=True, secure=True, samesite="Lax", max_age=max_age 41 | ) 42 | 43 | return {"access_token": access_token, "token_type": "bearer"} 44 | 45 | 46 | @router.post("/refresh") 47 | async def refresh_access_token(request: Request, db: AsyncSession = Depends(async_get_db)) -> dict[str, str]: 48 | refresh_token = request.cookies.get("refresh_token") 49 | if not refresh_token: 50 | raise UnauthorizedException("Refresh token missing.") 51 | 52 | user_data = await verify_token(refresh_token, db) 53 | if not 
user_data: 54 | raise UnauthorizedException("Invalid refresh token.") 55 | 56 | new_access_token = await create_access_token(data={"sub": user_data.username_or_email}) 57 | return {"access_token": new_access_token, "token_type": "bearer"} 58 | -------------------------------------------------------------------------------- /src/app/api/v1/logout.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends, Response, Cookie 2 | from jose import JWTError 3 | from sqlalchemy.ext.asyncio import AsyncSession 4 | from typing import Optional 5 | 6 | from ...core.db.database import async_get_db 7 | from ...core.exceptions.http_exceptions import UnauthorizedException 8 | from ...core.security import blacklist_tokens, oauth2_scheme 9 | 10 | router = APIRouter(tags=["login"]) 11 | 12 | 13 | @router.post("/logout") 14 | async def logout( 15 | response: Response, 16 | access_token: str = Depends(oauth2_scheme), 17 | refresh_token: Optional[str] = Cookie(None, alias="refresh_token"), 18 | db: AsyncSession = Depends(async_get_db) 19 | ) -> dict[str, str]: 20 | try: 21 | if not refresh_token: 22 | raise UnauthorizedException("Refresh token not found") 23 | 24 | await blacklist_tokens( 25 | access_token=access_token, 26 | refresh_token=refresh_token, 27 | db=db 28 | ) 29 | response.delete_cookie(key="refresh_token") 30 | 31 | return {"message": "Logged out successfully"} 32 | 33 | except JWTError: 34 | raise UnauthorizedException("Invalid token.") 35 | -------------------------------------------------------------------------------- /src/app/api/v1/posts.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Any 2 | 3 | from fastapi import APIRouter, Depends, Request 4 | from fastcrud.paginated import PaginatedListResponse, compute_offset, paginated_response 5 | from sqlalchemy.ext.asyncio import AsyncSession 6 | 7 | from ...api.dependencies import get_current_superuser, get_current_user 8 | from ...core.db.database import async_get_db 9 | from ...core.exceptions.http_exceptions import ForbiddenException, NotFoundException 10 | from ...core.utils.cache import cache 11 | from ...crud.crud_posts import crud_posts 12 | from ...crud.crud_users import crud_users 13 | from ...schemas.post import PostCreate, PostCreateInternal, PostRead, PostUpdate 14 | from ...schemas.user import UserRead 15 | 16 | router = APIRouter(tags=["posts"]) 17 | 18 | 19 | @router.post("/{username}/post", response_model=PostRead, status_code=201) 20 | async def write_post( 21 | request: Request, 22 | username: str, 23 | post: PostCreate, 24 | current_user: Annotated[UserRead, Depends(get_current_user)], 25 | db: Annotated[AsyncSession, Depends(async_get_db)], 26 | ) -> PostRead: 27 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False) 28 | if db_user is None: 29 | raise NotFoundException("User not found") 30 | 31 | if current_user["id"] != db_user["id"]: 32 | raise ForbiddenException() 33 | 34 | post_internal_dict = post.model_dump() 35 | post_internal_dict["created_by_user_id"] = db_user["id"] 36 | 37 | post_internal = PostCreateInternal(**post_internal_dict) 38 | created_post: PostRead = await crud_posts.create(db=db, object=post_internal) 39 | return created_post 40 | 41 | 42 | @router.get("/{username}/posts", response_model=PaginatedListResponse[PostRead]) 43 | @cache( 44 | key_prefix="{username}_posts:page_{page}:items_per_page:{items_per_page}", 45 | 
resource_id_name="username", 46 | expiration=60, 47 | ) 48 | async def read_posts( 49 | request: Request, 50 | username: str, 51 | db: Annotated[AsyncSession, Depends(async_get_db)], 52 | page: int = 1, 53 | items_per_page: int = 10, 54 | ) -> dict: 55 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False) 56 | if not db_user: 57 | raise NotFoundException("User not found") 58 | 59 | posts_data = await crud_posts.get_multi( 60 | db=db, 61 | offset=compute_offset(page, items_per_page), 62 | limit=items_per_page, 63 | schema_to_select=PostRead, 64 | created_by_user_id=db_user["id"], 65 | is_deleted=False, 66 | ) 67 | 68 | response: dict[str, Any] = paginated_response(crud_data=posts_data, page=page, items_per_page=items_per_page) 69 | return response 70 | 71 | 72 | @router.get("/{username}/post/{id}", response_model=PostRead) 73 | @cache(key_prefix="{username}_post_cache", resource_id_name="id") 74 | async def read_post( 75 | request: Request, username: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] 76 | ) -> dict: 77 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False) 78 | if db_user is None: 79 | raise NotFoundException("User not found") 80 | 81 | db_post: PostRead | None = await crud_posts.get( 82 | db=db, schema_to_select=PostRead, id=id, created_by_user_id=db_user["id"], is_deleted=False 83 | ) 84 | if db_post is None: 85 | raise NotFoundException("Post not found") 86 | 87 | return db_post 88 | 89 | 90 | @router.patch("/{username}/post/{id}") 91 | @cache("{username}_post_cache", resource_id_name="id", pattern_to_invalidate_extra=["{username}_posts:*"]) 92 | async def patch_post( 93 | request: Request, 94 | username: str, 95 | id: int, 96 | values: PostUpdate, 97 | current_user: Annotated[UserRead, Depends(get_current_user)], 98 | db: Annotated[AsyncSession, Depends(async_get_db)], 99 | ) -> dict[str, str]: 100 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False) 101 | if db_user is None: 102 | raise NotFoundException("User not found") 103 | 104 | if current_user["id"] != db_user["id"]: 105 | raise ForbiddenException() 106 | 107 | db_post = await crud_posts.get(db=db, schema_to_select=PostRead, id=id, is_deleted=False) 108 | if db_post is None: 109 | raise NotFoundException("Post not found") 110 | 111 | await crud_posts.update(db=db, object=values, id=id) 112 | return {"message": "Post updated"} 113 | 114 | 115 | @router.delete("/{username}/post/{id}") 116 | @cache("{username}_post_cache", resource_id_name="id", to_invalidate_extra={"{username}_posts": "{username}"}) 117 | async def erase_post( 118 | request: Request, 119 | username: str, 120 | id: int, 121 | current_user: Annotated[UserRead, Depends(get_current_user)], 122 | db: Annotated[AsyncSession, Depends(async_get_db)], 123 | ) -> dict[str, str]: 124 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False) 125 | if db_user is None: 126 | raise NotFoundException("User not found") 127 | 128 | if current_user["id"] != db_user["id"]: 129 | raise ForbiddenException() 130 | 131 | db_post = await crud_posts.get(db=db, schema_to_select=PostRead, id=id, is_deleted=False) 132 | if db_post is None: 133 | raise NotFoundException("Post not found") 134 | 135 | await crud_posts.delete(db=db, id=id) 136 | 137 | return {"message": "Post deleted"} 138 | 139 | 140 | @router.delete("/{username}/db_post/{id}", 
dependencies=[Depends(get_current_superuser)]) 141 | @cache("{username}_post_cache", resource_id_name="id", to_invalidate_extra={"{username}_posts": "{username}"}) 142 | async def erase_db_post( 143 | request: Request, username: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] 144 | ) -> dict[str, str]: 145 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username, is_deleted=False) 146 | if db_user is None: 147 | raise NotFoundException("User not found") 148 | 149 | db_post = await crud_posts.get(db=db, schema_to_select=PostRead, id=id, is_deleted=False) 150 | if db_post is None: 151 | raise NotFoundException("Post not found") 152 | 153 | await crud_posts.db_delete(db=db, id=id) 154 | return {"message": "Post deleted from the database"} 155 | -------------------------------------------------------------------------------- /src/app/api/v1/rate_limits.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Any 2 | 3 | from fastapi import APIRouter, Depends, Request 4 | from fastcrud.paginated import PaginatedListResponse, compute_offset, paginated_response 5 | from sqlalchemy.ext.asyncio import AsyncSession 6 | 7 | from ...api.dependencies import get_current_superuser 8 | from ...core.db.database import async_get_db 9 | from ...core.exceptions.http_exceptions import DuplicateValueException, NotFoundException 10 | from ...crud.crud_rate_limit import crud_rate_limits 11 | from ...crud.crud_tier import crud_tiers 12 | from ...schemas.rate_limit import RateLimitCreate, RateLimitCreateInternal, RateLimitRead, RateLimitUpdate 13 | 14 | router = APIRouter(tags=["rate_limits"]) 15 | 16 | 17 | @router.post("/tier/{tier_name}/rate_limit", dependencies=[Depends(get_current_superuser)], status_code=201) 18 | async def write_rate_limit( 19 | request: Request, tier_name: str, rate_limit: RateLimitCreate, db: Annotated[AsyncSession, Depends(async_get_db)] 20 | ) -> RateLimitRead: 21 | db_tier = await crud_tiers.get(db=db, name=tier_name) 22 | if not db_tier: 23 | raise NotFoundException("Tier not found") 24 | 25 | rate_limit_internal_dict = rate_limit.model_dump() 26 | rate_limit_internal_dict["tier_id"] = db_tier["id"] 27 | 28 | db_rate_limit = await crud_rate_limits.exists(db=db, name=rate_limit_internal_dict["name"]) 29 | if db_rate_limit: 30 | raise DuplicateValueException("Rate Limit Name not available") 31 | 32 | rate_limit_internal = RateLimitCreateInternal(**rate_limit_internal_dict) 33 | created_rate_limit: RateLimitRead = await crud_rate_limits.create(db=db, object=rate_limit_internal) 34 | return created_rate_limit 35 | 36 | 37 | @router.get("/tier/{tier_name}/rate_limits", response_model=PaginatedListResponse[RateLimitRead]) 38 | async def read_rate_limits( 39 | request: Request, 40 | tier_name: str, 41 | db: Annotated[AsyncSession, Depends(async_get_db)], 42 | page: int = 1, 43 | items_per_page: int = 10, 44 | ) -> dict: 45 | db_tier = await crud_tiers.get(db=db, name=tier_name) 46 | if not db_tier: 47 | raise NotFoundException("Tier not found") 48 | 49 | rate_limits_data = await crud_rate_limits.get_multi( 50 | db=db, 51 | offset=compute_offset(page, items_per_page), 52 | limit=items_per_page, 53 | schema_to_select=RateLimitRead, 54 | tier_id=db_tier["id"], 55 | ) 56 | 57 | response: dict[str, Any] = paginated_response(crud_data=rate_limits_data, page=page, items_per_page=items_per_page) 58 | return response 59 | 60 | 61 | @router.get("/tier/{tier_name}/rate_limit/{id}", 
response_model=RateLimitRead) 62 | async def read_rate_limit( 63 | request: Request, tier_name: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] 64 | ) -> dict: 65 | db_tier = await crud_tiers.get(db=db, name=tier_name) 66 | if not db_tier: 67 | raise NotFoundException("Tier not found") 68 | 69 | db_rate_limit: dict | None = await crud_rate_limits.get( 70 | db=db, schema_to_select=RateLimitRead, tier_id=db_tier["id"], id=id 71 | ) 72 | if db_rate_limit is None: 73 | raise NotFoundException("Rate Limit not found") 74 | 75 | return db_rate_limit 76 | 77 | 78 | @router.patch("/tier/{tier_name}/rate_limit/{id}", dependencies=[Depends(get_current_superuser)]) 79 | async def patch_rate_limit( 80 | request: Request, 81 | tier_name: str, 82 | id: int, 83 | values: RateLimitUpdate, 84 | db: Annotated[AsyncSession, Depends(async_get_db)], 85 | ) -> dict[str, str]: 86 | db_tier = await crud_tiers.get(db=db, name=tier_name) 87 | if db_tier is None: 88 | raise NotFoundException("Tier not found") 89 | 90 | db_rate_limit = await crud_rate_limits.get(db=db, schema_to_select=RateLimitRead, tier_id=db_tier["id"], id=id) 91 | if db_rate_limit is None: 92 | raise NotFoundException("Rate Limit not found") 93 | 94 | db_rate_limit_path = await crud_rate_limits.exists(db=db, tier_id=db_tier["id"], path=values.path) 95 | if db_rate_limit_path: 96 | raise DuplicateValueException("There is already a rate limit for this path") 97 | 98 | db_rate_limit_name = await crud_rate_limits.exists(db=db, tier_id=db_tier["id"], name=values.name) 99 | if db_rate_limit_name: 100 | raise DuplicateValueException("There is already a rate limit with this name") 101 | 102 | await crud_rate_limits.update(db=db, object=values, id=db_rate_limit["id"]) 103 | return {"message": "Rate Limit updated"} 104 | 105 | 106 | @router.delete("/tier/{tier_name}/rate_limit/{id}", dependencies=[Depends(get_current_superuser)]) 107 | async def erase_rate_limit( 108 | request: Request, tier_name: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] 109 | ) -> dict[str, str]: 110 | db_tier = await crud_tiers.get(db=db, name=tier_name) 111 | if not db_tier: 112 | raise NotFoundException("Tier not found") 113 | 114 | db_rate_limit = await crud_rate_limits.get(db=db, schema_to_select=RateLimitRead, tier_id=db_tier["id"], id=id) 115 | if db_rate_limit is None: 116 | raise NotFoundException("Rate Limit not found") 117 | 118 | await crud_rate_limits.delete(db=db, id=db_rate_limit["id"]) 119 | return {"message": "Rate Limit deleted"} 120 | -------------------------------------------------------------------------------- /src/app/api/v1/tasks.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from arq.jobs import Job as ArqJob 4 | from fastapi import APIRouter, Depends 5 | 6 | from ...api.dependencies import rate_limiter_dependency 7 | from ...core.utils import queue 8 | from ...schemas.job import Job 9 | 10 | router = APIRouter(prefix="/tasks", tags=["tasks"]) 11 | 12 | 13 | @router.post("/task", response_model=Job, status_code=201, dependencies=[Depends(rate_limiter_dependency)]) 14 | async def create_task(message: str) -> dict[str, str]: 15 | """Create a new background task. 16 | 17 | Parameters 18 | ---------- 19 | message: str 20 | The message or data to be processed by the task. 21 | 22 | Returns 23 | ------- 24 | dict[str, str] 25 | A dictionary containing the ID of the created task. 
26 | """ 27 | job = await queue.pool.enqueue_job("sample_background_task", message) # type: ignore 28 | return {"id": job.job_id} 29 | 30 | 31 | @router.get("/task/{task_id}") 32 | async def get_task(task_id: str) -> dict[str, Any] | None: 33 | """Get information about a specific background task. 34 | 35 | Parameters 36 | ---------- 37 | task_id: str 38 | The ID of the task. 39 | 40 | Returns 41 | ------- 42 | Optional[dict[str, Any]] 43 | A dictionary containing information about the task if found, or None otherwise. 44 | """ 45 | job = ArqJob(task_id, queue.pool) 46 | job_info = await job.info() 47 | return vars(job_info) if job_info is not None else None 48 | -------------------------------------------------------------------------------- /src/app/api/v1/tiers.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Any 2 | 3 | from fastapi import APIRouter, Depends, Request 4 | from fastcrud.paginated import PaginatedListResponse, compute_offset, paginated_response 5 | from sqlalchemy.ext.asyncio import AsyncSession 6 | 7 | from ...api.dependencies import get_current_superuser 8 | from ...core.db.database import async_get_db 9 | from ...core.exceptions.http_exceptions import DuplicateValueException, NotFoundException 10 | from ...crud.crud_tier import crud_tiers 11 | from ...schemas.tier import TierCreate, TierCreateInternal, TierRead, TierUpdate 12 | 13 | router = APIRouter(tags=["tiers"]) 14 | 15 | 16 | @router.post("/tier", dependencies=[Depends(get_current_superuser)], status_code=201) 17 | async def write_tier( 18 | request: Request, tier: TierCreate, db: Annotated[AsyncSession, Depends(async_get_db)] 19 | ) -> TierRead: 20 | tier_internal_dict = tier.model_dump() 21 | db_tier = await crud_tiers.exists(db=db, name=tier_internal_dict["name"]) 22 | if db_tier: 23 | raise DuplicateValueException("Tier Name not available") 24 | 25 | tier_internal = TierCreateInternal(**tier_internal_dict) 26 | created_tier: TierRead = await crud_tiers.create(db=db, object=tier_internal) 27 | return created_tier 28 | 29 | 30 | @router.get("/tiers", response_model=PaginatedListResponse[TierRead]) 31 | async def read_tiers( 32 | request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], page: int = 1, items_per_page: int = 10 33 | ) -> dict: 34 | tiers_data = await crud_tiers.get_multi( 35 | db=db, offset=compute_offset(page, items_per_page), limit=items_per_page, schema_to_select=TierRead 36 | ) 37 | 38 | response: dict[str, Any] = paginated_response(crud_data=tiers_data, page=page, items_per_page=items_per_page) 39 | return response 40 | 41 | 42 | @router.get("/tier/{name}", response_model=TierRead) 43 | async def read_tier(request: Request, name: str, db: Annotated[AsyncSession, Depends(async_get_db)]) -> dict: 44 | db_tier: TierRead | None = await crud_tiers.get(db=db, schema_to_select=TierRead, name=name) 45 | if db_tier is None: 46 | raise NotFoundException("Tier not found") 47 | 48 | return db_tier 49 | 50 | 51 | @router.patch("/tier/{name}", dependencies=[Depends(get_current_superuser)]) 52 | async def patch_tier( 53 | request: Request, values: TierUpdate, name: str, db: Annotated[AsyncSession, Depends(async_get_db)] 54 | ) -> dict[str, str]: 55 | db_tier = await crud_tiers.get(db=db, schema_to_select=TierRead, name=name) 56 | if db_tier is None: 57 | raise NotFoundException("Tier not found") 58 | 59 | await crud_tiers.update(db=db, object=values, name=name) 60 | return {"message": "Tier updated"} 61 | 62 | 63 | @router.delete("/tier/{name}", 
dependencies=[Depends(get_current_superuser)]) 64 | async def erase_tier(request: Request, name: str, db: Annotated[AsyncSession, Depends(async_get_db)]) -> dict[str, str]: 65 | db_tier = await crud_tiers.get(db=db, schema_to_select=TierRead, name=name) 66 | if db_tier is None: 67 | raise NotFoundException("Tier not found") 68 | 69 | await crud_tiers.delete(db=db, name=name) 70 | return {"message": "Tier deleted"} 71 | -------------------------------------------------------------------------------- /src/app/api/v1/users.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Any 2 | 3 | from fastapi import APIRouter, Depends, Request 4 | from fastcrud.paginated import PaginatedListResponse, compute_offset, paginated_response 5 | from sqlalchemy.ext.asyncio import AsyncSession 6 | 7 | from ...api.dependencies import get_current_superuser, get_current_user 8 | from ...core.db.database import async_get_db 9 | from ...core.exceptions.http_exceptions import DuplicateValueException, ForbiddenException, NotFoundException 10 | from ...core.security import blacklist_token, get_password_hash, oauth2_scheme 11 | from ...crud.crud_rate_limit import crud_rate_limits 12 | from ...crud.crud_tier import crud_tiers 13 | from ...crud.crud_users import crud_users 14 | from ...models.tier import Tier 15 | from ...schemas.tier import TierRead 16 | from ...schemas.user import UserCreate, UserCreateInternal, UserRead, UserTierUpdate, UserUpdate 17 | 18 | router = APIRouter(tags=["users"]) 19 | 20 | 21 | @router.post("/user", response_model=UserRead, status_code=201) 22 | async def write_user( 23 | request: Request, user: UserCreate, db: Annotated[AsyncSession, Depends(async_get_db)] 24 | ) -> UserRead: 25 | email_row = await crud_users.exists(db=db, email=user.email) 26 | if email_row: 27 | raise DuplicateValueException("Email is already registered") 28 | 29 | username_row = await crud_users.exists(db=db, username=user.username) 30 | if username_row: 31 | raise DuplicateValueException("Username not available") 32 | 33 | user_internal_dict = user.model_dump() 34 | user_internal_dict["hashed_password"] = get_password_hash(password=user_internal_dict["password"]) 35 | del user_internal_dict["password"] 36 | 37 | user_internal = UserCreateInternal(**user_internal_dict) 38 | created_user: UserRead = await crud_users.create(db=db, object=user_internal) 39 | return created_user 40 | 41 | 42 | @router.get("/users", response_model=PaginatedListResponse[UserRead]) 43 | async def read_users( 44 | request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], page: int = 1, items_per_page: int = 10 45 | ) -> dict: 46 | users_data = await crud_users.get_multi( 47 | db=db, 48 | offset=compute_offset(page, items_per_page), 49 | limit=items_per_page, 50 | schema_to_select=UserRead, 51 | is_deleted=False, 52 | ) 53 | 54 | response: dict[str, Any] = paginated_response(crud_data=users_data, page=page, items_per_page=items_per_page) 55 | return response 56 | 57 | 58 | @router.get("/user/me/", response_model=UserRead) 59 | async def read_users_me(request: Request, current_user: Annotated[UserRead, Depends(get_current_user)]) -> UserRead: 60 | return current_user 61 | 62 | 63 | @router.get("/user/{username}", response_model=UserRead) 64 | async def read_user(request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)]) -> dict: 65 | db_user: UserRead | None = await crud_users.get( 66 | db=db, schema_to_select=UserRead, username=username, 
is_deleted=False 67 | ) 68 | if db_user is None: 69 | raise NotFoundException("User not found") 70 | 71 | return db_user 72 | 73 | 74 | @router.patch("/user/{username}") 75 | async def patch_user( 76 | request: Request, 77 | values: UserUpdate, 78 | username: str, 79 | current_user: Annotated[UserRead, Depends(get_current_user)], 80 | db: Annotated[AsyncSession, Depends(async_get_db)], 81 | ) -> dict[str, str]: 82 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username) 83 | if db_user is None: 84 | raise NotFoundException("User not found") 85 | 86 | if db_user["username"] != current_user["username"]: 87 | raise ForbiddenException() 88 | 89 | if values.username != db_user["username"]: 90 | existing_username = await crud_users.exists(db=db, username=values.username) 91 | if existing_username: 92 | raise DuplicateValueException("Username not available") 93 | 94 | if values.email != db_user["email"]: 95 | existing_email = await crud_users.exists(db=db, email=values.email) 96 | if existing_email: 97 | raise DuplicateValueException("Email is already registered") 98 | 99 | await crud_users.update(db=db, object=values, username=username) 100 | return {"message": "User updated"} 101 | 102 | 103 | @router.delete("/user/{username}") 104 | async def erase_user( 105 | request: Request, 106 | username: str, 107 | current_user: Annotated[UserRead, Depends(get_current_user)], 108 | db: Annotated[AsyncSession, Depends(async_get_db)], 109 | token: str = Depends(oauth2_scheme), 110 | ) -> dict[str, str]: 111 | db_user = await crud_users.get(db=db, schema_to_select=UserRead, username=username) 112 | if not db_user: 113 | raise NotFoundException("User not found") 114 | 115 | if username != current_user["username"]: 116 | raise ForbiddenException() 117 | 118 | await crud_users.delete(db=db, username=username) 119 | await blacklist_token(token=token, db=db) 120 | return {"message": "User deleted"} 121 | 122 | 123 | @router.delete("/db_user/{username}", dependencies=[Depends(get_current_superuser)]) 124 | async def erase_db_user( 125 | request: Request, 126 | username: str, 127 | db: Annotated[AsyncSession, Depends(async_get_db)], 128 | token: str = Depends(oauth2_scheme), 129 | ) -> dict[str, str]: 130 | db_user = await crud_users.exists(db=db, username=username) 131 | if not db_user: 132 | raise NotFoundException("User not found") 133 | 134 | await crud_users.db_delete(db=db, username=username) 135 | await blacklist_token(token=token, db=db) 136 | return {"message": "User deleted from the database"} 137 | 138 | 139 | @router.get("/user/{username}/rate_limits", dependencies=[Depends(get_current_superuser)]) 140 | async def read_user_rate_limits( 141 | request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)] 142 | ) -> dict[str, Any]: 143 | db_user: dict | None = await crud_users.get(db=db, username=username, schema_to_select=UserRead) 144 | if db_user is None: 145 | raise NotFoundException("User not found") 146 | 147 | if db_user["tier_id"] is None: 148 | db_user["tier_rate_limits"] = [] 149 | return db_user 150 | 151 | db_tier = await crud_tiers.get(db=db, id=db_user["tier_id"]) 152 | if db_tier is None: 153 | raise NotFoundException("Tier not found") 154 | 155 | db_rate_limits = await crud_rate_limits.get_multi(db=db, tier_id=db_tier["id"]) 156 | 157 | db_user["tier_rate_limits"] = db_rate_limits["data"] 158 | 159 | return db_user 160 | 161 | 162 | @router.get("/user/{username}/tier") 163 | async def read_user_tier( 164 | request: Request, username: str, 
db: Annotated[AsyncSession, Depends(async_get_db)] 165 | ) -> dict | None: 166 | db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) 167 | if db_user is None: 168 | raise NotFoundException("User not found") 169 | 170 | db_tier = await crud_tiers.exists(db=db, id=db_user["tier_id"]) 171 | if not db_tier: 172 | raise NotFoundException("Tier not found") 173 | 174 | joined: dict = await crud_users.get_joined( 175 | db=db, 176 | join_model=Tier, 177 | join_prefix="tier_", 178 | schema_to_select=UserRead, 179 | join_schema_to_select=TierRead, 180 | username=username, 181 | ) 182 | 183 | return joined 184 | 185 | 186 | @router.patch("/user/{username}/tier", dependencies=[Depends(get_current_superuser)]) 187 | async def patch_user_tier( 188 | request: Request, username: str, values: UserTierUpdate, db: Annotated[AsyncSession, Depends(async_get_db)] 189 | ) -> dict[str, str]: 190 | db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) 191 | if db_user is None: 192 | raise NotFoundException("User not found") 193 | 194 | db_tier = await crud_tiers.get(db=db, id=values.tier_id) 195 | if db_tier is None: 196 | raise NotFoundException("Tier not found") 197 | 198 | await crud_users.update(db=db, object=values, username=username) 199 | return {"message": f"User {db_user['name']} Tier updated"} 200 | -------------------------------------------------------------------------------- /src/app/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/core/__init__.py -------------------------------------------------------------------------------- /src/app/core/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from enum import Enum 3 | 4 | from pydantic_settings import BaseSettings 5 | from pydantic import SecretStr 6 | from starlette.config import Config 7 | 8 | current_file_dir = os.path.dirname(os.path.realpath(__file__)) 9 | env_path = os.path.join(current_file_dir, "..", "..", ".env") 10 | config = Config(env_path) 11 | 12 | 13 | class AppSettings(BaseSettings): 14 | APP_NAME: str = config("APP_NAME", default="FastAPI app") 15 | APP_DESCRIPTION: str | None = config("APP_DESCRIPTION", default=None) 16 | APP_VERSION: str | None = config("APP_VERSION", default=None) 17 | LICENSE_NAME: str | None = config("LICENSE", default=None) 18 | CONTACT_NAME: str | None = config("CONTACT_NAME", default=None) 19 | CONTACT_EMAIL: str | None = config("CONTACT_EMAIL", default=None) 20 | 21 | 22 | class CryptSettings(BaseSettings): 23 | SECRET_KEY: SecretStr = config("SECRET_KEY", cast=SecretStr) 24 | ALGORITHM: str = config("ALGORITHM", default="HS256") 25 | ACCESS_TOKEN_EXPIRE_MINUTES: int = config("ACCESS_TOKEN_EXPIRE_MINUTES", default=30) 26 | REFRESH_TOKEN_EXPIRE_DAYS: int = config("REFRESH_TOKEN_EXPIRE_DAYS", default=7) 27 | 28 | 29 | class DatabaseSettings(BaseSettings): 30 | pass 31 | 32 | 33 | class SQLiteSettings(DatabaseSettings): 34 | SQLITE_URI: str = config("SQLITE_URI", default="./sql_app.db") 35 | SQLITE_SYNC_PREFIX: str = config("SQLITE_SYNC_PREFIX", default="sqlite:///") 36 | SQLITE_ASYNC_PREFIX: str = config("SQLITE_ASYNC_PREFIX", default="sqlite+aiosqlite:///") 37 | 38 | 39 | class MySQLSettings(DatabaseSettings): 40 | MYSQL_USER: str = config("MYSQL_USER", default="username") 41 | MYSQL_PASSWORD: str = config("MYSQL_PASSWORD", 
default="password") 42 | MYSQL_SERVER: str = config("MYSQL_SERVER", default="localhost") 43 | MYSQL_PORT: int = config("MYSQL_PORT", default=3306) 44 | MYSQL_DB: str = config("MYSQL_DB", default="dbname") 45 | MYSQL_URI: str = f"{MYSQL_USER}:{MYSQL_PASSWORD}@{MYSQL_SERVER}:{MYSQL_PORT}/{MYSQL_DB}" 46 | MYSQL_SYNC_PREFIX: str = config("MYSQL_SYNC_PREFIX", default="mysql://") 47 | MYSQL_ASYNC_PREFIX: str = config("MYSQL_ASYNC_PREFIX", default="mysql+aiomysql://") 48 | MYSQL_URL: str | None = config("MYSQL_URL", default=None) 49 | 50 | 51 | class PostgresSettings(DatabaseSettings): 52 | POSTGRES_USER: str = config("POSTGRES_USER", default="postgres") 53 | POSTGRES_PASSWORD: str = config("POSTGRES_PASSWORD", default="postgres") 54 | POSTGRES_SERVER: str = config("POSTGRES_SERVER", default="localhost") 55 | POSTGRES_PORT: int = config("POSTGRES_PORT", default=5432) 56 | POSTGRES_DB: str = config("POSTGRES_DB", default="postgres") 57 | POSTGRES_SYNC_PREFIX: str = config("POSTGRES_SYNC_PREFIX", default="postgresql://") 58 | POSTGRES_ASYNC_PREFIX: str = config("POSTGRES_ASYNC_PREFIX", default="postgresql+asyncpg://") 59 | POSTGRES_URI: str = f"{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_SERVER}:{POSTGRES_PORT}/{POSTGRES_DB}" 60 | POSTGRES_URL: str | None = config("POSTGRES_URL", default=None) 61 | 62 | 63 | class FirstUserSettings(BaseSettings): 64 | ADMIN_NAME: str = config("ADMIN_NAME", default="admin") 65 | ADMIN_EMAIL: str = config("ADMIN_EMAIL", default="admin@admin.com") 66 | ADMIN_USERNAME: str = config("ADMIN_USERNAME", default="admin") 67 | ADMIN_PASSWORD: str = config("ADMIN_PASSWORD", default="!Ch4ng3Th1sP4ssW0rd!") 68 | 69 | 70 | class TestSettings(BaseSettings): 71 | ... 72 | 73 | 74 | class RedisCacheSettings(BaseSettings): 75 | REDIS_CACHE_HOST: str = config("REDIS_CACHE_HOST", default="localhost") 76 | REDIS_CACHE_PORT: int = config("REDIS_CACHE_PORT", default=6379) 77 | REDIS_CACHE_URL: str = f"redis://{REDIS_CACHE_HOST}:{REDIS_CACHE_PORT}" 78 | 79 | 80 | class ClientSideCacheSettings(BaseSettings): 81 | CLIENT_CACHE_MAX_AGE: int = config("CLIENT_CACHE_MAX_AGE", default=60) 82 | 83 | 84 | class RedisQueueSettings(BaseSettings): 85 | REDIS_QUEUE_HOST: str = config("REDIS_QUEUE_HOST", default="localhost") 86 | REDIS_QUEUE_PORT: int = config("REDIS_QUEUE_PORT", default=6379) 87 | 88 | 89 | class RedisRateLimiterSettings(BaseSettings): 90 | REDIS_RATE_LIMIT_HOST: str = config("REDIS_RATE_LIMIT_HOST", default="localhost") 91 | REDIS_RATE_LIMIT_PORT: int = config("REDIS_RATE_LIMIT_PORT", default=6379) 92 | REDIS_RATE_LIMIT_URL: str = f"redis://{REDIS_RATE_LIMIT_HOST}:{REDIS_RATE_LIMIT_PORT}" 93 | 94 | 95 | class DefaultRateLimitSettings(BaseSettings): 96 | DEFAULT_RATE_LIMIT_LIMIT: int = config("DEFAULT_RATE_LIMIT_LIMIT", default=10) 97 | DEFAULT_RATE_LIMIT_PERIOD: int = config("DEFAULT_RATE_LIMIT_PERIOD", default=3600) 98 | 99 | 100 | class EnvironmentOption(Enum): 101 | LOCAL = "local" 102 | STAGING = "staging" 103 | PRODUCTION = "production" 104 | 105 | 106 | class EnvironmentSettings(BaseSettings): 107 | ENVIRONMENT: EnvironmentOption = config("ENVIRONMENT", default="local") 108 | 109 | 110 | class Settings( 111 | AppSettings, 112 | PostgresSettings, 113 | CryptSettings, 114 | FirstUserSettings, 115 | TestSettings, 116 | RedisCacheSettings, 117 | ClientSideCacheSettings, 118 | RedisQueueSettings, 119 | RedisRateLimiterSettings, 120 | DefaultRateLimitSettings, 121 | EnvironmentSettings, 122 | ): 123 | pass 124 | 125 | 126 | settings = Settings() 127 | 
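Every field in the settings classes above is resolved through the `Config(env_path)` reader declared at the top of this module, so a single `.env` file next to `src/` can override any group. A minimal sketch of such a file (the variable names are the ones this module reads; the values are purely illustrative):

```
# src/.env -- illustrative values only, adjust for your deployment
APP_NAME="FastAPI app"
SECRET_KEY="change-this-to-a-long-random-string"
POSTGRES_USER="postgres"
POSTGRES_PASSWORD="postgres"
POSTGRES_SERVER="localhost"
POSTGRES_PORT=5432
POSTGRES_DB="postgres"
REDIS_CACHE_HOST="localhost"
REDIS_QUEUE_HOST="localhost"
REDIS_RATE_LIMIT_HOST="localhost"
ENVIRONMENT="local"
```

Because `Settings` inherits from all the groups, derived values such as `POSTGRES_URI` are assembled from these variables at import time; note that `SECRET_KEY` is the one field with no default, so it must be present.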
-------------------------------------------------------------------------------- /src/app/core/db/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/core/db/__init__.py -------------------------------------------------------------------------------- /src/app/core/db/crud_token_blacklist.py: -------------------------------------------------------------------------------- 1 | from fastcrud import FastCRUD 2 | 3 | from ..db.token_blacklist import TokenBlacklist 4 | from ..schemas import TokenBlacklistCreate, TokenBlacklistUpdate 5 | 6 | CRUDTokenBlacklist = FastCRUD[TokenBlacklist, TokenBlacklistCreate, TokenBlacklistUpdate, TokenBlacklistUpdate, None, None] 7 | crud_token_blacklist = CRUDTokenBlacklist(TokenBlacklist) 8 | -------------------------------------------------------------------------------- /src/app/core/db/database.py: -------------------------------------------------------------------------------- 1 | from collections.abc import AsyncGenerator 2 | 3 | from sqlalchemy.ext.asyncio import create_async_engine 4 | from sqlalchemy.ext.asyncio.session import AsyncSession 5 | from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass, sessionmaker 6 | 7 | from ..config import settings 8 | 9 | 10 | class Base(DeclarativeBase, MappedAsDataclass): 11 | pass 12 | 13 | 14 | DATABASE_URI = settings.POSTGRES_URI 15 | DATABASE_PREFIX = settings.POSTGRES_ASYNC_PREFIX 16 | DATABASE_URL = f"{DATABASE_PREFIX}{DATABASE_URI}" 17 | 18 | async_engine = create_async_engine(DATABASE_URL, echo=False, future=True) 19 | 20 | local_session = sessionmaker(bind=async_engine, class_=AsyncSession, expire_on_commit=False) 21 | 22 | 23 | async def async_get_db() -> AsyncGenerator[AsyncSession, None]: 24 | async_session = local_session 25 | async with async_session() as db: 26 | yield db 27 | -------------------------------------------------------------------------------- /src/app/core/db/models.py: -------------------------------------------------------------------------------- 1 | import uuid as uuid_pkg 2 | from datetime import UTC, datetime 3 | 4 | from sqlalchemy import Boolean, Column, DateTime, text 5 | from sqlalchemy.dialects.postgresql import UUID 6 | 7 | 8 | class UUIDMixin: 9 | uuid: uuid_pkg.UUID = Column( 10 | UUID, primary_key=True, default=uuid_pkg.uuid4, server_default=text("gen_random_uuid()") 11 | ) 12 | 13 | 14 | class TimestampMixin: 15 | created_at: datetime = Column(DateTime, default=lambda: datetime.now(UTC), server_default=text("current_timestamp(0)")) 16 | updated_at: datetime = Column( 17 | DateTime, nullable=True, onupdate=lambda: datetime.now(UTC), server_default=text("current_timestamp(0)") 18 | ) 19 | 20 | 21 | class SoftDeleteMixin: 22 | deleted_at: datetime = Column(DateTime, nullable=True) 23 | is_deleted: bool = Column(Boolean, default=False) 24 | -------------------------------------------------------------------------------- /src/app/core/db/token_blacklist.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from sqlalchemy import DateTime, String 4 | from sqlalchemy.orm import Mapped, mapped_column 5 | 6 | from .database import Base 7 | 8 | 9 | class TokenBlacklist(Base): 10 | __tablename__ = "token_blacklist" 11 | 12 | id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) 13 | token: Mapped[str] = mapped_column(String, unique=True, index=True) 14 | expires_at: 
Mapped[datetime] = mapped_column(DateTime) 15 | -------------------------------------------------------------------------------- /src/app/core/exceptions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/core/exceptions/__init__.py -------------------------------------------------------------------------------- /src/app/core/exceptions/cache_exceptions.py: -------------------------------------------------------------------------------- 1 | class CacheIdentificationInferenceError(Exception): 2 | def __init__(self, message: str = "Could not infer id for resource being cached.") -> None: 3 | self.message = message 4 | super().__init__(self.message) 5 | 6 | 7 | class InvalidRequestError(Exception): 8 | def __init__(self, message: str = "Type of request not supported.") -> None: 9 | self.message = message 10 | super().__init__(self.message) 11 | 12 | 13 | class MissingClientError(Exception): 14 | def __init__(self, message: str = "Client is None.") -> None: 15 | self.message = message 16 | super().__init__(self.message) 17 | -------------------------------------------------------------------------------- /src/app/core/exceptions/http_exceptions.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa 2 | from fastcrud.exceptions.http_exceptions import ( 3 | CustomException, 4 | BadRequestException, 5 | NotFoundException, 6 | ForbiddenException, 7 | UnauthorizedException, 8 | UnprocessableEntityException, 9 | DuplicateValueException, 10 | RateLimitException, 11 | ) 12 | -------------------------------------------------------------------------------- /src/app/core/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from logging.handlers import RotatingFileHandler 4 | 5 | LOG_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs") 6 | if not os.path.exists(LOG_DIR): 7 | os.makedirs(LOG_DIR) 8 | 9 | LOG_FILE_PATH = os.path.join(LOG_DIR, "app.log") 10 | 11 | LOGGING_LEVEL = logging.INFO 12 | LOGGING_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 13 | 14 | logging.basicConfig(level=LOGGING_LEVEL, format=LOGGING_FORMAT) 15 | 16 | file_handler = RotatingFileHandler(LOG_FILE_PATH, maxBytes=10485760, backupCount=5) 17 | file_handler.setLevel(LOGGING_LEVEL) 18 | file_handler.setFormatter(logging.Formatter(LOGGING_FORMAT)) 19 | 20 | logging.getLogger("").addHandler(file_handler) 21 | -------------------------------------------------------------------------------- /src/app/core/schemas.py: -------------------------------------------------------------------------------- 1 | import uuid as uuid_pkg 2 | from datetime import UTC, datetime 3 | from typing import Any 4 | 5 | from pydantic import BaseModel, Field, field_serializer 6 | 7 | 8 | class HealthCheck(BaseModel): 9 | name: str 10 | version: str 11 | description: str 12 | 13 | 14 | # -------------- mixins -------------- 15 | class UUIDSchema(BaseModel): 16 | uuid: uuid_pkg.UUID = Field(default_factory=uuid_pkg.uuid4) 17 | 18 | 19 | class TimestampSchema(BaseModel): 20 | created_at: datetime = Field(default_factory=lambda: datetime.now(UTC).replace(tzinfo=None)) 21 | updated_at: datetime | None = Field(default=None) 22 | 23 | @field_serializer("created_at") 24 | def serialize_dt(self, created_at: datetime | None, _info: Any) -> str | None: 25 | if created_at 
is not None: 26 | return created_at.isoformat() 27 | 28 | return None 29 | 30 | @field_serializer("updated_at") 31 | def serialize_updated_at(self, updated_at: datetime | None, _info: Any) -> str | None: 32 | if updated_at is not None: 33 | return updated_at.isoformat() 34 | 35 | return None 36 | 37 | 38 | class PersistentDeletion(BaseModel): 39 | deleted_at: datetime | None = Field(default=None) 40 | is_deleted: bool = False 41 | 42 | @field_serializer("deleted_at") 43 | def serialize_dates(self, deleted_at: datetime | None, _info: Any) -> str | None: 44 | if deleted_at is not None: 45 | return deleted_at.isoformat() 46 | 47 | return None 48 | 49 | 50 | # -------------- token -------------- 51 | class Token(BaseModel): 52 | access_token: str 53 | token_type: str 54 | 55 | 56 | class TokenData(BaseModel): 57 | username_or_email: str 58 | 59 | 60 | class TokenBlacklistBase(BaseModel): 61 | token: str 62 | expires_at: datetime 63 | 64 | 65 | class TokenBlacklistCreate(TokenBlacklistBase): 66 | pass 67 | 68 | 69 | class TokenBlacklistUpdate(TokenBlacklistBase): 70 | pass 71 | -------------------------------------------------------------------------------- /src/app/core/security.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from datetime import UTC, datetime, timedelta 3 | from typing import Any, Literal 4 | 5 | import bcrypt 6 | from fastapi.security import OAuth2PasswordBearer 7 | from jose import JWTError, jwt 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | from pydantic import SecretStr 10 | 11 | from ..crud.crud_users import crud_users 12 | from .config import settings 13 | from .db.crud_token_blacklist import crud_token_blacklist 14 | from .schemas import TokenBlacklistCreate, TokenData 15 | 16 | 17 | SECRET_KEY: SecretStr = settings.SECRET_KEY 18 | ALGORITHM = settings.ALGORITHM 19 | ACCESS_TOKEN_EXPIRE_MINUTES = settings.ACCESS_TOKEN_EXPIRE_MINUTES 20 | REFRESH_TOKEN_EXPIRE_DAYS = settings.REFRESH_TOKEN_EXPIRE_DAYS 21 | 22 | oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/login") 23 | 24 | 25 | class TokenType(str, Enum): 26 | ACCESS = "access" 27 | REFRESH = "refresh" 28 | 29 | async def verify_password(plain_password: str, hashed_password: str) -> bool: 30 | correct_password: bool = bcrypt.checkpw(plain_password.encode(), hashed_password.encode()) 31 | return correct_password 32 | 33 | 34 | def get_password_hash(password: str) -> str: 35 | hashed_password: str = bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() 36 | return hashed_password 37 | 38 | 39 | async def authenticate_user(username_or_email: str, password: str, db: AsyncSession) -> dict[str, Any] | Literal[False]: 40 | if "@" in username_or_email: 41 | db_user: dict | None = await crud_users.get(db=db, email=username_or_email, is_deleted=False) 42 | else: 43 | db_user = await crud_users.get(db=db, username=username_or_email, is_deleted=False) 44 | 45 | if not db_user: 46 | return False 47 | 48 | elif not await verify_password(password, db_user["hashed_password"]): 49 | return False 50 | 51 | return db_user 52 | 53 | 54 | async def create_access_token(data: dict[str, Any], expires_delta: timedelta | None = None) -> str: 55 | to_encode = data.copy() 56 | if expires_delta: 57 | expire = datetime.now(UTC).replace(tzinfo=None) + expires_delta 58 | else: 59 | expire = datetime.now(UTC).replace(tzinfo=None) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) 60 | to_encode.update({"exp": expire, "token_type": TokenType.ACCESS}) 61 | 
encoded_jwt: str = jwt.encode(to_encode, SECRET_KEY.get_secret_value(), algorithm=ALGORITHM) 62 | return encoded_jwt 63 | 64 | 65 | async def create_refresh_token(data: dict[str, Any], expires_delta: timedelta | None = None) -> str: 66 | to_encode = data.copy() 67 | if expires_delta: 68 | expire = datetime.now(UTC).replace(tzinfo=None) + expires_delta 69 | else: 70 | expire = datetime.now(UTC).replace(tzinfo=None) + timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS) 71 | to_encode.update({"exp": expire, "token_type": TokenType.REFRESH}) 72 | encoded_jwt: str = jwt.encode(to_encode, SECRET_KEY.get_secret_value(), algorithm=ALGORITHM) 73 | return encoded_jwt 74 | 75 | 76 | async def verify_token(token: str, expected_token_type: TokenType, db: AsyncSession) -> TokenData | None: 77 | """Verify a JWT token and return TokenData if valid. 78 | 79 | Parameters 80 | ---------- 81 | token: str 82 | The JWT token to be verified. 83 | expected_token_type: TokenType 84 | The expected type of token (access or refresh) 85 | db: AsyncSession 86 | Database session for performing database operations. 87 | 88 | Returns 89 | ------- 90 | TokenData | None 91 | TokenData instance if the token is valid, None otherwise. 92 | """ 93 | is_blacklisted = await crud_token_blacklist.exists(db, token=token) 94 | if is_blacklisted: 95 | return None 96 | 97 | try: 98 | payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) 99 | username_or_email: str = payload.get("sub") 100 | token_type: str = payload.get("token_type") 101 | 102 | if username_or_email is None or token_type != expected_token_type: 103 | return None 104 | 105 | return TokenData(username_or_email=username_or_email) 106 | 107 | except JWTError: 108 | return None 109 | 110 | 111 | async def blacklist_tokens(access_token: str, refresh_token: str, db: AsyncSession) -> None: 112 | """Blacklist both access and refresh tokens. 113 | 114 | Parameters 115 | ---------- 116 | access_token: str 117 | The access token to blacklist 118 | refresh_token: str 119 | The refresh token to blacklist 120 | db: AsyncSession 121 | Database session for performing database operations. 
122 | """ 123 | for token in [access_token, refresh_token]: 124 | payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) 125 | expires_at = datetime.fromtimestamp(payload.get("exp")) 126 | await crud_token_blacklist.create( 127 | db, 128 | object=TokenBlacklistCreate( 129 | token=token, 130 | expires_at=expires_at 131 | ) 132 | ) 133 | 134 | async def blacklist_token(token: str, db: AsyncSession) -> None: 135 | payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) 136 | expires_at = datetime.fromtimestamp(payload.get("exp")) 137 | await crud_token_blacklist.create( 138 | db, 139 | object=TokenBlacklistCreate( 140 | token=token, 141 | expires_at=expires_at 142 | ) 143 | ) 144 | -------------------------------------------------------------------------------- /src/app/core/setup.py: -------------------------------------------------------------------------------- 1 | from collections.abc import AsyncGenerator, Callable 2 | from contextlib import _AsyncGeneratorContextManager, asynccontextmanager 3 | from typing import Any 4 | 5 | import anyio 6 | import fastapi 7 | import redis.asyncio as redis 8 | from arq import create_pool 9 | from arq.connections import RedisSettings 10 | from fastapi import APIRouter, Depends, FastAPI 11 | from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html 12 | from fastapi.openapi.utils import get_openapi 13 | 14 | from ..api.dependencies import get_current_superuser 15 | from ..core.utils.rate_limit import rate_limiter 16 | from ..middleware.client_cache_middleware import ClientCacheMiddleware 17 | from ..models import * 18 | from .config import ( 19 | AppSettings, 20 | ClientSideCacheSettings, 21 | DatabaseSettings, 22 | EnvironmentOption, 23 | EnvironmentSettings, 24 | RedisCacheSettings, 25 | RedisQueueSettings, 26 | RedisRateLimiterSettings, 27 | settings, 28 | ) 29 | from .db.database import Base 30 | from .db.database import async_engine as engine 31 | from .utils import cache, queue 32 | 33 | 34 | # -------------- database -------------- 35 | async def create_tables() -> None: 36 | async with engine.begin() as conn: 37 | await conn.run_sync(Base.metadata.create_all) 38 | 39 | 40 | # -------------- cache -------------- 41 | async def create_redis_cache_pool() -> None: 42 | cache.pool = redis.ConnectionPool.from_url(settings.REDIS_CACHE_URL) 43 | cache.client = redis.Redis.from_pool(cache.pool) # type: ignore 44 | 45 | 46 | async def close_redis_cache_pool() -> None: 47 | await cache.client.aclose() # type: ignore 48 | 49 | 50 | # -------------- queue -------------- 51 | async def create_redis_queue_pool() -> None: 52 | queue.pool = await create_pool(RedisSettings(host=settings.REDIS_QUEUE_HOST, port=settings.REDIS_QUEUE_PORT)) 53 | 54 | 55 | async def close_redis_queue_pool() -> None: 56 | await queue.pool.aclose() # type: ignore 57 | 58 | 59 | # -------------- rate limit -------------- 60 | async def create_redis_rate_limit_pool() -> None: 61 | rate_limiter.initialize(settings.REDIS_RATE_LIMIT_URL) # type: ignore 62 | 63 | 64 | async def close_redis_rate_limit_pool() -> None: 65 | await rate_limiter.client.aclose() # type: ignore 66 | 67 | 68 | # -------------- application -------------- 69 | async def set_threadpool_tokens(number_of_tokens: int = 100) -> None: 70 | limiter = anyio.to_thread.current_default_thread_limiter() 71 | limiter.total_tokens = number_of_tokens 72 | 73 | 74 | def lifespan_factory( 75 | settings: ( 76 | DatabaseSettings 77 | | RedisCacheSettings 78 | | AppSettings 79 
| | ClientSideCacheSettings 80 | | RedisQueueSettings 81 | | RedisRateLimiterSettings 82 | | EnvironmentSettings 83 | ), 84 | create_tables_on_start: bool = True, 85 | ) -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]]: 86 | """Factory to create a lifespan async context manager for a FastAPI app.""" 87 | 88 | @asynccontextmanager 89 | async def lifespan(app: FastAPI) -> AsyncGenerator: 90 | from asyncio import Event 91 | 92 | initialization_complete = Event() 93 | app.state.initialization_complete = initialization_complete 94 | 95 | await set_threadpool_tokens() 96 | 97 | try: 98 | if isinstance(settings, RedisCacheSettings): 99 | await create_redis_cache_pool() 100 | 101 | if isinstance(settings, RedisQueueSettings): 102 | await create_redis_queue_pool() 103 | 104 | if isinstance(settings, RedisRateLimiterSettings): 105 | await create_redis_rate_limit_pool() 106 | 107 | if create_tables_on_start: 108 | await create_tables() 109 | 110 | initialization_complete.set() 111 | 112 | yield 113 | 114 | finally: 115 | if isinstance(settings, RedisCacheSettings): 116 | await close_redis_cache_pool() 117 | 118 | if isinstance(settings, RedisQueueSettings): 119 | await close_redis_queue_pool() 120 | 121 | if isinstance(settings, RedisRateLimiterSettings): 122 | await close_redis_rate_limit_pool() 123 | 124 | return lifespan 125 | 126 | 127 | # -------------- application -------------- 128 | def create_application( 129 | router: APIRouter, 130 | settings: ( 131 | DatabaseSettings 132 | | RedisCacheSettings 133 | | AppSettings 134 | | ClientSideCacheSettings 135 | | RedisQueueSettings 136 | | RedisRateLimiterSettings 137 | | EnvironmentSettings 138 | ), 139 | create_tables_on_start: bool = True, 140 | **kwargs: Any, 141 | ) -> FastAPI: 142 | """Creates and configures a FastAPI application based on the provided settings. 143 | 144 | This function initializes a FastAPI application and configures it with various settings 145 | and handlers based on the type of the `settings` object provided. 146 | 147 | Parameters 148 | ---------- 149 | router : APIRouter 150 | The APIRouter object containing the routes to be included in the FastAPI application. 151 | 152 | settings 153 | An instance representing the settings for configuring the FastAPI application. 154 | It determines the configuration applied: 155 | 156 | - AppSettings: Configures basic app metadata like name, description, contact, and license info. 157 | - DatabaseSettings: Adds event handlers for initializing database tables during startup. 158 | - RedisCacheSettings: Sets up event handlers for creating and closing a Redis cache pool. 159 | - ClientSideCacheSettings: Integrates middleware for client-side caching. 160 | - RedisQueueSettings: Sets up event handlers for creating and closing a Redis queue pool. 161 | - RedisRateLimiterSettings: Sets up event handlers for creating and closing a Redis rate limiter pool. 162 | - EnvironmentSettings: Conditionally sets documentation URLs and integrates custom routes for API documentation 163 | based on the environment type. 164 | 165 | create_tables_on_start : bool 166 | A flag to indicate whether to create database tables on application startup. 167 | Defaults to True. 168 | 169 | **kwargs 170 | Additional keyword arguments passed directly to the FastAPI constructor. 171 | 172 | Returns 173 | ------- 174 | FastAPI 175 | A fully configured FastAPI application instance. 
176 | 177 | The function configures the FastAPI application with different features and behaviors 178 | based on the provided settings. It includes setting up database connections, Redis pools 179 | for caching, queue, and rate limiting, client-side caching, and customizing the API documentation 180 | based on the environment settings. 181 | """ 182 | # --- before creating application --- 183 | if isinstance(settings, AppSettings): 184 | to_update = { 185 | "title": settings.APP_NAME, 186 | "description": settings.APP_DESCRIPTION, 187 | "contact": {"name": settings.CONTACT_NAME, "email": settings.CONTACT_EMAIL}, 188 | "license_info": {"name": settings.LICENSE_NAME}, 189 | } 190 | kwargs.update(to_update) 191 | 192 | if isinstance(settings, EnvironmentSettings): 193 | kwargs.update({"docs_url": None, "redoc_url": None, "openapi_url": None}) 194 | 195 | lifespan = lifespan_factory(settings, create_tables_on_start=create_tables_on_start) 196 | 197 | application = FastAPI(lifespan=lifespan, **kwargs) 198 | application.include_router(router) 199 | 200 | if isinstance(settings, ClientSideCacheSettings): 201 | application.add_middleware(ClientCacheMiddleware, max_age=settings.CLIENT_CACHE_MAX_AGE) 202 | 203 | if isinstance(settings, EnvironmentSettings): 204 | if settings.ENVIRONMENT != EnvironmentOption.PRODUCTION: 205 | docs_router = APIRouter() 206 | if settings.ENVIRONMENT != EnvironmentOption.LOCAL: 207 | docs_router = APIRouter(dependencies=[Depends(get_current_superuser)]) 208 | 209 | @docs_router.get("/docs", include_in_schema=False) 210 | async def get_swagger_documentation() -> fastapi.responses.HTMLResponse: 211 | return get_swagger_ui_html(openapi_url="/openapi.json", title="docs") 212 | 213 | @docs_router.get("/redoc", include_in_schema=False) 214 | async def get_redoc_documentation() -> fastapi.responses.HTMLResponse: 215 | return get_redoc_html(openapi_url="/openapi.json", title="docs") 216 | 217 | @docs_router.get("/openapi.json", include_in_schema=False) 218 | async def openapi() -> dict[str, Any]: 219 | out: dict = get_openapi(title=application.title, version=application.version, routes=application.routes) 220 | return out 221 | 222 | application.include_router(docs_router) 223 | 224 | return application 225 | -------------------------------------------------------------------------------- /src/app/core/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/core/utils/__init__.py -------------------------------------------------------------------------------- /src/app/core/utils/cache.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import json 3 | import re 4 | from collections.abc import Callable 5 | from typing import Any 6 | 7 | from fastapi import Request, Response 8 | from fastapi.encoders import jsonable_encoder 9 | from redis.asyncio import ConnectionPool, Redis 10 | 11 | from ..exceptions.cache_exceptions import CacheIdentificationInferenceError, InvalidRequestError, MissingClientError 12 | 13 | pool: ConnectionPool | None = None 14 | client: Redis | None = None 15 | 16 | 17 | def _infer_resource_id(kwargs: dict[str, Any], resource_id_type: type | tuple[type, ...]) -> int | str: 18 | """Infer the resource ID from a dictionary of keyword arguments. 19 | 20 | Parameters 21 | ---------- 22 | kwargs: Dict[str, Any] 23 | A dictionary of keyword arguments. 
24 | resource_id_type: Union[type, Tuple[type, ...]] 25 | The expected type of the resource ID, which can be integer (int) or a string (str). 26 | 27 | Returns 28 | ------- 29 | Union[None, int, str] 30 | The inferred resource ID. If it cannot be inferred or does not match the expected type, it returns None. 31 | 32 | Note 33 | ---- 34 | - When `resource_id_type` is `int`, the function looks for an argument with the key 'id'. 35 | - When `resource_id_type` is `str`, it attempts to infer the resource ID as a string. 36 | """ 37 | resource_id: int | str | None = None 38 | for arg_name, arg_value in kwargs.items(): 39 | if isinstance(arg_value, resource_id_type): 40 | if (resource_id_type is int) and ("id" in arg_name): 41 | resource_id = arg_value 42 | 43 | elif (resource_id_type is int) and ("id" not in arg_name): 44 | pass 45 | 46 | elif resource_id_type is str: 47 | resource_id = arg_value 48 | 49 | if resource_id is None: 50 | raise CacheIdentificationInferenceError 51 | 52 | return resource_id 53 | 54 | 55 | def _extract_data_inside_brackets(input_string: str) -> list[str]: 56 | """Extract data inside curly brackets from a given string using regular expressions. 57 | 58 | Parameters 59 | ---------- 60 | input_string: str 61 | The input string in which to find data enclosed within curly brackets. 62 | 63 | Returns 64 | ------- 65 | List[str] 66 | A list of strings containing the data found inside the curly brackets within the input string. 67 | 68 | Example 69 | ------- 70 | >>> _extract_data_inside_brackets("The {quick} brown {fox} jumps over the {lazy} dog.") 71 | ['quick', 'fox', 'lazy'] 72 | """ 73 | data_inside_brackets = re.findall(r"{(.*?)}", input_string) 74 | return data_inside_brackets 75 | 76 | 77 | def _construct_data_dict(data_inside_brackets: list[str], kwargs: dict[str, Any]) -> dict[str, Any]: 78 | """Construct a dictionary based on data inside brackets and keyword arguments. 79 | 80 | Parameters 81 | ---------- 82 | data_inside_brackets: List[str] 83 | A list of keys inside brackets. 84 | kwargs: Dict[str, Any] 85 | A dictionary of keyword arguments. 86 | 87 | Returns 88 | ------- 89 | Dict[str, Any]: A dictionary with keys from data_inside_brackets and corresponding values from kwargs. 90 | """ 91 | data_dict = {} 92 | for key in data_inside_brackets: 93 | data_dict[key] = kwargs[key] 94 | return data_dict 95 | 96 | 97 | def _format_prefix(prefix: str, kwargs: dict[str, Any]) -> str: 98 | """Format a prefix using keyword arguments. 99 | 100 | Parameters 101 | ---------- 102 | prefix: str 103 | The prefix template to be formatted. 104 | kwargs: Dict[str, Any] 105 | A dictionary of keyword arguments. 106 | 107 | Returns 108 | ------- 109 | str: The formatted prefix. 110 | """ 111 | data_inside_brackets = _extract_data_inside_brackets(prefix) 112 | data_dict = _construct_data_dict(data_inside_brackets, kwargs) 113 | formatted_prefix = prefix.format(**data_dict) 114 | return formatted_prefix 115 | 116 | 117 | def _format_extra_data(to_invalidate_extra: dict[str, str], kwargs: dict[str, Any]) -> dict[str, Any]: 118 | """Format extra data based on provided templates and keyword arguments. 119 | 120 | This function takes a dictionary of templates and their associated values and a dictionary of keyword arguments. 121 | It formats the templates with the corresponding values from the keyword arguments and returns a dictionary 122 | where keys are the formatted templates and values are the associated keyword argument values. 
123 | 124 | Parameters 125 | ---------- 126 | to_invalidate_extra: Dict[str, str] 127 | A dictionary where keys are templates and values are the associated values. 128 | kwargs: Dict[str, Any] 129 | A dictionary of keyword arguments. 130 | 131 | Returns 132 | ------- 133 | Dict[str, Any]: A dictionary where keys are formatted templates and values 134 | are associated keyword argument values. 135 | """ 136 | formatted_extra = {} 137 | for prefix, id_template in to_invalidate_extra.items(): 138 | formatted_prefix = _format_prefix(prefix, kwargs) 139 | id = _extract_data_inside_brackets(id_template)[0] 140 | formatted_extra[formatted_prefix] = kwargs[id] 141 | 142 | return formatted_extra 143 | 144 | 145 | async def _delete_keys_by_pattern(pattern: str) -> None: 146 | """Delete keys from Redis that match a given pattern using the SCAN command. 147 | 148 | This function iteratively scans the Redis key space for keys that match a specific pattern 149 | and deletes them. It uses the SCAN command to efficiently find keys, which is more 150 | performance-friendly compared to the KEYS command, especially for large datasets. 151 | 152 | The function scans the key space in an iterative manner using a cursor-based approach. 153 | It retrieves a batch of keys matching the pattern on each iteration and deletes them 154 | until no matching keys are left. 155 | 156 | Parameters 157 | ---------- 158 | pattern: str 159 | The pattern to match keys against. The pattern can include wildcards, 160 | such as '*' for matching any character sequence. Example: 'user:*' 161 | 162 | Notes 163 | ----- 164 | - The SCAN command is used with a count of 100 to retrieve keys in batches. 165 | This count can be adjusted based on the size of your dataset and Redis performance. 166 | 167 | - The function uses the delete command to remove keys in bulk. If the dataset 168 | is extremely large, consider implementing additional logic to handle bulk deletion 169 | more efficiently. 170 | 171 | - Be cautious with patterns that could match a large number of keys, as deleting 172 | many keys simultaneously may impact the performance of the Redis server. 173 | """ 174 | if client is None: 175 | raise MissingClientError 176 | 177 | cursor = None # None = not started; SCAN cursors begin at 0 and the scan is complete once 0 is returned 178 | while cursor != 0: 179 | cursor, keys = await client.scan(cursor or 0, match=pattern, count=100) 180 | if keys: 181 | await client.delete(*keys) 182 | 183 | 184 | def cache( 185 | key_prefix: str, 186 | resource_id_name: Any = None, 187 | expiration: int = 3600, 188 | resource_id_type: type | tuple[type, ...] = int, 189 | to_invalidate_extra: dict[str, Any] | None = None, 190 | pattern_to_invalidate_extra: list[str] | None = None, 191 | ) -> Callable: 192 | """Cache decorator for FastAPI endpoints. 193 | 194 | This decorator enables caching the results of FastAPI endpoint functions to improve response times 195 | and reduce the load on the application by storing and retrieving data in a cache. 196 | 197 | Parameters 198 | ---------- 199 | key_prefix: str 200 | A unique prefix to identify the cache key. 201 | resource_id_name: Any, optional 202 | The name of the resource ID argument in the decorated function. If provided, it is used directly; 203 | otherwise, the resource ID is inferred from the function's arguments. 204 | expiration: int, optional 205 | The expiration time for the cached data in seconds. Defaults to 3600 seconds (1 hour). 206 | resource_id_type: Union[type, Tuple[type, ...]], default int 207 | The expected type of the resource ID. 
208 |         This can be a single type (e.g., int) or a tuple of types (e.g., (int, str)).
209 |         Defaults to int. This is used only if resource_id_name is not provided.
210 |     to_invalidate_extra: Dict[str, Any] | None, optional
211 |         A dictionary where keys are cache key prefixes and values are templates for cache key suffixes.
212 |         These keys are invalidated when the decorated function is called with a method other than GET.
213 |     pattern_to_invalidate_extra: List[str] | None, optional
214 |         A list of string patterns for cache keys that should be invalidated when the decorated function is called.
215 |         This allows for bulk invalidation of cache keys based on a matching pattern.
216 | 
217 |     Returns
218 |     -------
219 |     Callable
220 |         A decorator function that can be applied to FastAPI endpoint functions.
221 | 
222 |     Example usage
223 |     -------------
224 | 
225 |     ```python
226 |     from fastapi import FastAPI, Request
227 |     from my_module import cache  # Replace with your actual module and imports
228 | 
229 |     app = FastAPI()
230 | 
231 |     # Define a sample endpoint with caching
232 |     @app.get("/sample/{resource_id}")
233 |     @cache(key_prefix="sample_data", expiration=3600, resource_id_type=int)
234 |     async def sample_endpoint(request: Request, resource_id: int):
235 |         # Your endpoint logic here
236 |         return {"data": "your_data"}
237 |     ```
238 | 
239 |     This decorator caches the response data of the endpoint function using a unique cache key.
240 |     The cached data is retrieved for GET requests, and the cache is invalidated for other types of requests.
241 | 
242 |     Advanced Example Usage
243 |     ----------------------
244 |     ```python
245 |     from fastapi import FastAPI, Request
246 |     from my_module import cache
247 | 
248 |     app = FastAPI()
249 | 
250 | 
251 |     @app.get("/users/{user_id}/items")
252 |     @cache(key_prefix="user_items", resource_id_name="user_id", expiration=1200)
253 |     async def read_user_items(request: Request, user_id: int):
254 |         # Endpoint logic to fetch user's items
255 |         return {"items": "user specific items"}
256 | 
257 | 
258 |     @app.put("/items/{item_id}")
259 |     @cache(
260 |         key_prefix="item_data",
261 |         resource_id_name="item_id",
262 |         to_invalidate_extra={"user_items": "{user_id}"},
263 |         pattern_to_invalidate_extra=["user_*_items:*"],
264 |     )
265 |     async def update_item(request: Request, item_id: int, data: dict, user_id: int):
266 |         # Update logic for an item
267 |         # Invalidate both the specific item cache and all user-specific item lists
268 |         return {"status": "updated"}
269 |     ```
270 | 
271 |     In this example:
272 |     - When reading user items, the response is cached under a key formed with 'user_items' prefix and 'user_id'.
273 |     - When updating an item, the cache for this specific item (under 'item_data:item_id') and all caches with keys
274 |       starting with 'user_{user_id}_items:' are invalidated. The `to_invalidate_extra` parameter specifically targets
275 |       the cache for user-specific item lists, while `pattern_to_invalidate_extra` allows bulk invalidation of all keys
276 |       matching the pattern 'user_*_items:*', covering all users.
277 | 
278 |     Note
279 |     ----
280 |     - `resource_id_type` is used only if `resource_id_name` is not provided.
281 |     - `to_invalidate_extra` and `pattern_to_invalidate_extra` are used for cache invalidation on methods other than GET.
282 |     - Using `pattern_to_invalidate_extra` can be resource-intensive on large datasets. Use it judiciously and
283 |       consider the potential impact on Redis performance.
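284 | 
285 |     Dynamic Prefix Example
286 |     ----------------------
287 |     The `key_prefix` itself may contain placeholders in curly brackets; they are resolved
288 |     from the decorated function's keyword arguments before the cache key is built
289 |     (illustrative endpoint, not one defined in this project):
290 | 
291 |     ```python
292 |     @app.get("/users/{username}/profile")
293 |     @cache(key_prefix="{username}_profile", resource_id_name="username")
294 |     async def read_profile(request: Request, username: str):
295 |         # Stored under the cache key "<username>_profile:<username>"
296 |         return {"profile": "data"}
297 |     ```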
298 |     """
299 | 
300 |     def wrapper(func: Callable) -> Callable:
301 |         @functools.wraps(func)
302 |         async def inner(request: Request, *args: Any, **kwargs: Any) -> Response:
303 |             if client is None:
304 |                 raise MissingClientError
305 | 
306 |             if resource_id_name:
307 |                 resource_id = kwargs[resource_id_name]
308 |             else:
309 |                 resource_id = _infer_resource_id(kwargs=kwargs, resource_id_type=resource_id_type)
310 | 
311 |             formatted_key_prefix = _format_prefix(key_prefix, kwargs)
312 |             cache_key = f"{formatted_key_prefix}:{resource_id}"
313 |             if request.method == "GET":
314 |                 if to_invalidate_extra is not None or pattern_to_invalidate_extra is not None:
315 |                     raise InvalidRequestError
316 | 
317 |                 cached_data = await client.get(cache_key)
318 |                 if cached_data:
319 |                     return json.loads(cached_data.decode())
320 | 
321 |             result = await func(request, *args, **kwargs)
322 | 
323 |             if request.method == "GET":
324 |                 serializable_data = jsonable_encoder(result)
325 |                 serialized_data = json.dumps(serializable_data)
326 | 
327 |                 await client.set(cache_key, serialized_data)
328 |                 await client.expire(cache_key, expiration)
329 | 
330 |             else:
331 |                 await client.delete(cache_key)
332 |                 if to_invalidate_extra is not None:
333 |                     formatted_extra = _format_extra_data(to_invalidate_extra, kwargs)
334 |                     for prefix, extra_id in formatted_extra.items():
335 |                         extra_cache_key = f"{prefix}:{extra_id}"
336 |                         await client.delete(extra_cache_key)
337 | 
338 |                 if pattern_to_invalidate_extra is not None:
339 |                     for pattern in pattern_to_invalidate_extra:
340 |                         formatted_pattern = _format_prefix(pattern, kwargs)
341 |                         await _delete_keys_by_pattern(formatted_pattern + "*")
342 | 
343 |             return result
344 | 
345 |         return inner
346 | 
347 |     return wrapper
348 | 
--------------------------------------------------------------------------------
/src/app/core/utils/queue.py:
--------------------------------------------------------------------------------
1 | from arq.connections import ArqRedis
2 | 
3 | pool: ArqRedis | None = None
4 | 
--------------------------------------------------------------------------------
/src/app/core/utils/rate_limit.py:
--------------------------------------------------------------------------------
1 | from datetime import UTC, datetime
2 | from typing import Optional
3 | 
4 | from redis.asyncio import ConnectionPool, Redis
5 | from sqlalchemy.ext.asyncio import AsyncSession
6 | 
7 | from ...core.logger import logging
8 | from ...schemas.rate_limit import sanitize_path
9 | 
10 | logger = logging.getLogger(__name__)
11 | 
12 | 
13 | class RateLimiter:
14 |     _instance: Optional["RateLimiter"] = None
15 |     pool: Optional[ConnectionPool] = None
16 |     client: Optional[Redis] = None
17 | 
18 |     def __new__(cls):
19 |         if cls._instance is None:
20 |             cls._instance = super().__new__(cls)
21 |         return cls._instance
22 | 
23 |     @classmethod
24 |     def initialize(cls, redis_url: str) -> None:
25 |         instance = cls()
26 |         if instance.pool is None:
27 |             instance.pool = ConnectionPool.from_url(redis_url)
28 |             instance.client = Redis(connection_pool=instance.pool)
29 | 
30 |     @classmethod
31 |     def get_client(cls) -> Redis:
32 |         instance = cls()
33 |         if instance.client is None:
34 |             logger.error("Redis client is not initialized.")
35 |             raise Exception("Redis client is not initialized.")
36 |         return instance.client
37 | 
38 |     async def is_rate_limited(self, db: AsyncSession, user_id: int, path: str, limit: int, period: int) -> bool:
39 |         client = self.get_client()
40 |         current_timestamp = 
int(datetime.now(UTC).timestamp()) 41 | window_start = current_timestamp - (current_timestamp % period) 42 | 43 | sanitized_path = sanitize_path(path) 44 | key = f"ratelimit:{user_id}:{sanitized_path}:{window_start}" 45 | 46 | try: 47 | current_count = await client.incr(key) 48 | if current_count == 1: 49 | await client.expire(key, period) 50 | 51 | if current_count > limit: 52 | return True 53 | 54 | except Exception as e: 55 | logger.exception(f"Error checking rate limit for user {user_id} on path {path}: {e}") 56 | raise e 57 | 58 | return False 59 | 60 | 61 | rate_limiter = RateLimiter() 62 | -------------------------------------------------------------------------------- /src/app/core/worker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/core/worker/__init__.py -------------------------------------------------------------------------------- /src/app/core/worker/functions.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | import uvloop 5 | from arq.worker import Worker 6 | 7 | asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) 8 | 9 | logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") 10 | 11 | 12 | # -------- background tasks -------- 13 | async def sample_background_task(ctx: Worker, name: str) -> str: 14 | await asyncio.sleep(5) 15 | return f"Task {name} is complete!" 16 | 17 | 18 | # -------- base functions -------- 19 | async def startup(ctx: Worker) -> None: 20 | logging.info("Worker Started") 21 | 22 | 23 | async def shutdown(ctx: Worker) -> None: 24 | logging.info("Worker end") 25 | -------------------------------------------------------------------------------- /src/app/core/worker/settings.py: -------------------------------------------------------------------------------- 1 | from arq.connections import RedisSettings 2 | 3 | from ...core.config import settings 4 | from .functions import sample_background_task, shutdown, startup 5 | 6 | REDIS_QUEUE_HOST = settings.REDIS_QUEUE_HOST 7 | REDIS_QUEUE_PORT = settings.REDIS_QUEUE_PORT 8 | 9 | 10 | class WorkerSettings: 11 | functions = [sample_background_task] 12 | redis_settings = RedisSettings(host=REDIS_QUEUE_HOST, port=REDIS_QUEUE_PORT) 13 | on_startup = startup 14 | on_shutdown = shutdown 15 | handle_signals = False 16 | -------------------------------------------------------------------------------- /src/app/crud/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/crud/__init__.py -------------------------------------------------------------------------------- /src/app/crud/crud_posts.py: -------------------------------------------------------------------------------- 1 | from fastcrud import FastCRUD 2 | 3 | from ..models.post import Post 4 | from ..schemas.post import PostCreateInternal, PostDelete, PostUpdate, PostUpdateInternal 5 | 6 | CRUDPost = FastCRUD[Post, PostCreateInternal, PostUpdate, PostUpdateInternal, PostDelete, None] 7 | crud_posts = CRUDPost(Post) 8 | -------------------------------------------------------------------------------- /src/app/crud/crud_rate_limit.py: -------------------------------------------------------------------------------- 1 | from fastcrud 
import FastCRUD 2 | 3 | from ..models.rate_limit import RateLimit 4 | from ..schemas.rate_limit import RateLimitCreateInternal, RateLimitDelete, RateLimitUpdate, RateLimitUpdateInternal 5 | 6 | CRUDRateLimit = FastCRUD[RateLimit, RateLimitCreateInternal, RateLimitUpdate, RateLimitUpdateInternal, RateLimitDelete, None] 7 | crud_rate_limits = CRUDRateLimit(RateLimit) 8 | -------------------------------------------------------------------------------- /src/app/crud/crud_tier.py: -------------------------------------------------------------------------------- 1 | from fastcrud import FastCRUD 2 | 3 | from ..models.tier import Tier 4 | from ..schemas.tier import TierCreateInternal, TierDelete, TierUpdate, TierUpdateInternal 5 | 6 | CRUDTier = FastCRUD[Tier, TierCreateInternal, TierUpdate, TierUpdateInternal, TierDelete, None] 7 | crud_tiers = CRUDTier(Tier) 8 | -------------------------------------------------------------------------------- /src/app/crud/crud_users.py: -------------------------------------------------------------------------------- 1 | from fastcrud import FastCRUD 2 | 3 | from ..models.user import User 4 | from ..schemas.user import UserCreateInternal, UserDelete, UserUpdate, UserUpdateInternal 5 | 6 | CRUDUser = FastCRUD[User, UserCreateInternal, UserUpdate, UserUpdateInternal, UserDelete, None] 7 | crud_users = CRUDUser(User) 8 | -------------------------------------------------------------------------------- /src/app/main.py: -------------------------------------------------------------------------------- 1 | from .api import router 2 | from .core.config import settings 3 | from .core.setup import create_application 4 | 5 | app = create_application(router=router, settings=settings) 6 | -------------------------------------------------------------------------------- /src/app/middleware/client_cache_middleware.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI, Request, Response 2 | from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint 3 | 4 | 5 | class ClientCacheMiddleware(BaseHTTPMiddleware): 6 | """Middleware to set the `Cache-Control` header for client-side caching on all responses. 7 | 8 | Parameters 9 | ---------- 10 | app: FastAPI 11 | The FastAPI application instance. 12 | max_age: int, optional 13 | Duration (in seconds) for which the response should be cached. Defaults to 60 seconds. 14 | 15 | Attributes 16 | ---------- 17 | max_age: int 18 | Duration (in seconds) for which the response should be cached. 19 | 20 | Methods 21 | ------- 22 | async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: 23 | Process the request and set the `Cache-Control` header in the response. 24 | 25 | Note 26 | ---- 27 | - The `Cache-Control` header instructs clients (e.g., browsers) 28 | to cache the response for the specified duration. 29 | """ 30 | 31 | def __init__(self, app: FastAPI, max_age: int = 60) -> None: 32 | super().__init__(app) 33 | self.max_age = max_age 34 | 35 | async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: 36 | """Process the request and set the `Cache-Control` header in the response. 37 | 38 | Parameters 39 | ---------- 40 | request: Request 41 | The incoming request. 42 | call_next: RequestResponseEndpoint 43 | The next middleware or route handler in the processing chain. 44 | 45 | Returns 46 | ------- 47 | Response 48 | The response object with the `Cache-Control` header set. 
49 | 50 | Note 51 | ---- 52 | - This method is automatically called by Starlette for processing the request-response cycle. 53 | """ 54 | response: Response = await call_next(request) 55 | response.headers["Cache-Control"] = f"public, max-age={self.max_age}" 56 | return response 57 | -------------------------------------------------------------------------------- /src/app/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .post import Post 2 | from .rate_limit import RateLimit 3 | from .tier import Tier 4 | from .user import User 5 | -------------------------------------------------------------------------------- /src/app/models/post.py: -------------------------------------------------------------------------------- 1 | import uuid as uuid_pkg 2 | from datetime import UTC, datetime 3 | 4 | from sqlalchemy import DateTime, ForeignKey, String 5 | from sqlalchemy.orm import Mapped, mapped_column 6 | 7 | from ..core.db.database import Base 8 | 9 | 10 | class Post(Base): 11 | __tablename__ = "post" 12 | 13 | id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) 14 | created_by_user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) 15 | title: Mapped[str] = mapped_column(String(30)) 16 | text: Mapped[str] = mapped_column(String(63206)) 17 | uuid: Mapped[uuid_pkg.UUID] = mapped_column(default_factory=uuid_pkg.uuid4, primary_key=True, unique=True) 18 | media_url: Mapped[str | None] = mapped_column(String, default=None) 19 | 20 | created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) 21 | updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) 22 | deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) 23 | is_deleted: Mapped[bool] = mapped_column(default=False, index=True) 24 | -------------------------------------------------------------------------------- /src/app/models/rate_limit.py: -------------------------------------------------------------------------------- 1 | from datetime import UTC, datetime 2 | 3 | from sqlalchemy import DateTime, ForeignKey, Integer, String 4 | from sqlalchemy.orm import Mapped, mapped_column 5 | 6 | from ..core.db.database import Base 7 | 8 | 9 | class RateLimit(Base): 10 | __tablename__ = "rate_limit" 11 | 12 | id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) 13 | tier_id: Mapped[int] = mapped_column(ForeignKey("tier.id"), index=True) 14 | name: Mapped[str] = mapped_column(String, nullable=False, unique=True) 15 | path: Mapped[str] = mapped_column(String, nullable=False) 16 | limit: Mapped[int] = mapped_column(Integer, nullable=False) 17 | period: Mapped[int] = mapped_column(Integer, nullable=False) 18 | 19 | created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) 20 | updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) 21 | -------------------------------------------------------------------------------- /src/app/models/tier.py: -------------------------------------------------------------------------------- 1 | from datetime import UTC, datetime 2 | 3 | from sqlalchemy import DateTime, String 4 | from sqlalchemy.orm import Mapped, mapped_column 5 | 6 | from ..core.db.database import Base 7 | 8 | 9 | class Tier(Base): 10 | __tablename__ = 
"tier" 11 | 12 | id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) 13 | name: Mapped[str] = mapped_column(String, nullable=False, unique=True) 14 | 15 | created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) 16 | updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) 17 | -------------------------------------------------------------------------------- /src/app/models/user.py: -------------------------------------------------------------------------------- 1 | import uuid as uuid_pkg 2 | from datetime import UTC, datetime 3 | 4 | from sqlalchemy import DateTime, ForeignKey, String 5 | from sqlalchemy.orm import Mapped, mapped_column 6 | 7 | from ..core.db.database import Base 8 | 9 | 10 | class User(Base): 11 | __tablename__ = "user" 12 | 13 | id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) 14 | 15 | name: Mapped[str] = mapped_column(String(30)) 16 | username: Mapped[str] = mapped_column(String(20), unique=True, index=True) 17 | email: Mapped[str] = mapped_column(String(50), unique=True, index=True) 18 | hashed_password: Mapped[str] = mapped_column(String) 19 | 20 | profile_image_url: Mapped[str] = mapped_column(String, default="https://profileimageurl.com") 21 | uuid: Mapped[uuid_pkg.UUID] = mapped_column(default_factory=uuid_pkg.uuid4, primary_key=True, unique=True) 22 | created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) 23 | updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) 24 | deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) 25 | is_deleted: Mapped[bool] = mapped_column(default=False, index=True) 26 | is_superuser: Mapped[bool] = mapped_column(default=False) 27 | 28 | tier_id: Mapped[int | None] = mapped_column(ForeignKey("tier.id"), index=True, default=None, init=False) 29 | -------------------------------------------------------------------------------- /src/app/schemas/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/app/schemas/__init__.py -------------------------------------------------------------------------------- /src/app/schemas/job.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class Job(BaseModel): 5 | id: str 6 | -------------------------------------------------------------------------------- /src/app/schemas/post.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Annotated 3 | 4 | from pydantic import BaseModel, ConfigDict, Field 5 | 6 | from ..core.schemas import PersistentDeletion, TimestampSchema, UUIDSchema 7 | 8 | 9 | class PostBase(BaseModel): 10 | title: Annotated[str, Field(min_length=2, max_length=30, examples=["This is my post"])] 11 | text: Annotated[str, Field(min_length=1, max_length=63206, examples=["This is the content of my post."])] 12 | 13 | 14 | class Post(TimestampSchema, PostBase, UUIDSchema, PersistentDeletion): 15 | media_url: Annotated[ 16 | str | None, 17 | Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.postimageurl.com"], 
default=None),
18 |     ]
19 |     created_by_user_id: int
20 | 
21 | 
22 | class PostRead(BaseModel):
23 |     id: int
24 |     title: Annotated[str, Field(min_length=2, max_length=30, examples=["This is my post"])]
25 |     text: Annotated[str, Field(min_length=1, max_length=63206, examples=["This is the content of my post."])]
26 |     media_url: Annotated[
27 |         str | None,
28 |         Field(examples=["https://www.postimageurl.com"], default=None),
29 |     ]
30 |     created_by_user_id: int
31 |     created_at: datetime
32 | 
33 | 
34 | class PostCreate(PostBase):
35 |     model_config = ConfigDict(extra="forbid")
36 | 
37 |     media_url: Annotated[
38 |         str | None,
39 |         Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.postimageurl.com"], default=None),
40 |     ]
41 | 
42 | 
43 | class PostCreateInternal(PostCreate):
44 |     created_by_user_id: int
45 | 
46 | 
47 | class PostUpdate(BaseModel):
48 |     model_config = ConfigDict(extra="forbid")
49 | 
50 |     title: Annotated[str | None, Field(min_length=2, max_length=30, examples=["This is my updated post"], default=None)]
51 |     text: Annotated[
52 |         str | None,
53 |         Field(min_length=1, max_length=63206, examples=["This is the updated content of my post."], default=None),
54 |     ]
55 |     media_url: Annotated[
56 |         str | None,
57 |         Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.postimageurl.com"], default=None),
58 |     ]
59 | 
60 | 
61 | class PostUpdateInternal(PostUpdate):
62 |     updated_at: datetime
63 | 
64 | 
65 | class PostDelete(BaseModel):
66 |     model_config = ConfigDict(extra="forbid")
67 | 
68 |     is_deleted: bool
69 |     deleted_at: datetime
70 | 
--------------------------------------------------------------------------------
/src/app/schemas/rate_limit.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Annotated
3 | 
4 | from pydantic import BaseModel, ConfigDict, Field, field_validator
5 | 
6 | from ..core.schemas import TimestampSchema
7 | 
8 | 
9 | def sanitize_path(path: str) -> str:
10 |     return path.strip("/").replace("/", "_")
11 | 
12 | 
13 | class RateLimitBase(BaseModel):
14 |     path: Annotated[str, Field(examples=["users"])]
15 |     limit: Annotated[int, Field(examples=[5])]
16 |     period: Annotated[int, Field(examples=[60])]
17 | 
18 |     @field_validator("path")
19 |     def validate_and_sanitize_path(cls, v: str) -> str:
20 |         return sanitize_path(v)
21 | 
22 | 
23 | class RateLimit(TimestampSchema, RateLimitBase):
24 |     tier_id: int
25 |     name: Annotated[str | None, Field(default=None, examples=["users:5:60"])]
26 | 
27 | 
28 | class RateLimitRead(RateLimitBase):
29 |     id: int
30 |     tier_id: int
31 |     name: str
32 | 
33 | 
34 | class RateLimitCreate(RateLimitBase):
35 |     model_config = ConfigDict(extra="forbid")
36 | 
37 |     name: Annotated[str | None, Field(default=None, examples=["api_v1_users:5:60"])]
38 | 
39 | 
40 | class RateLimitCreateInternal(RateLimitCreate):
41 |     tier_id: int
42 | 
43 | 
44 | class RateLimitUpdate(BaseModel):
45 |     path: str | None = Field(default=None)
46 |     limit: int | None = None
47 |     period: int | None = None
48 |     name: str | None = None
49 | 
50 |     @field_validator("path")
51 |     def validate_and_sanitize_path(cls, v: str | None) -> str | None:
52 |         return sanitize_path(v) if v is not None else None
53 | 
54 | 
55 | class RateLimitUpdateInternal(RateLimitUpdate):
56 |     updated_at: datetime
57 | 
58 | 
59 | class RateLimitDelete(BaseModel):
60 |     pass
61 | 
--------------------------------------------------------------------------------
/src/app/schemas/tier.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Annotated
3 | 
4 | from pydantic import BaseModel, Field
5 | 
6 | from ..core.schemas import TimestampSchema
7 | 
8 | 
9 | class TierBase(BaseModel):
10 |     name: Annotated[str, Field(examples=["free"])]
11 | 
12 | 
13 | class Tier(TimestampSchema, TierBase):
14 |     pass
15 | 
16 | 
17 | class TierRead(TierBase):
18 |     id: int
19 |     created_at: datetime
20 | 
21 | 
22 | class TierCreate(TierBase):
23 |     pass
24 | 
25 | 
26 | class TierCreateInternal(TierCreate):
27 |     pass
28 | 
29 | 
30 | class TierUpdate(BaseModel):
31 |     name: str | None = None
32 | 
33 | 
34 | class TierUpdateInternal(TierUpdate):
35 |     updated_at: datetime
36 | 
37 | 
38 | class TierDelete(BaseModel):
39 |     pass
40 | 
--------------------------------------------------------------------------------
/src/app/schemas/user.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Annotated
3 | 
4 | from pydantic import BaseModel, ConfigDict, EmailStr, Field
5 | 
6 | from ..core.schemas import PersistentDeletion, TimestampSchema, UUIDSchema
7 | 
8 | 
9 | class UserBase(BaseModel):
10 |     name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])]
11 |     username: Annotated[str, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"])]
12 |     email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])]
13 | 
14 | 
15 | class User(TimestampSchema, UserBase, UUIDSchema, PersistentDeletion):
16 |     profile_image_url: Annotated[str, Field(default="https://www.profileimageurl.com")]
17 |     hashed_password: str
18 |     is_superuser: bool = False
19 |     tier_id: int | None = None
20 | 
21 | 
22 | class UserRead(BaseModel):
23 |     id: int
24 | 
25 |     name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])]
26 |     username: Annotated[str, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"])]
27 |     email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])]
28 |     profile_image_url: str
29 |     tier_id: int | None
30 | 
31 | 
32 | class UserCreate(UserBase):
33 |     model_config = ConfigDict(extra="forbid")
34 | 
35 |     password: Annotated[str, Field(pattern=r"^.{8,}$", examples=["Str1ngst!"])]
36 | 
37 | 
38 | class UserCreateInternal(UserBase):
39 |     hashed_password: str
40 | 
41 | 
42 | class UserUpdate(BaseModel):
43 |     model_config = ConfigDict(extra="forbid")
44 | 
45 |     name: Annotated[str | None, Field(min_length=2, max_length=30, examples=["User Userberg"], default=None)]
46 |     username: Annotated[
47 |         str | None, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userberg"], default=None)
48 |     ]
49 |     email: Annotated[EmailStr | None, Field(examples=["user.userberg@example.com"], default=None)]
50 |     profile_image_url: Annotated[
51 |         str | None,
52 |         Field(
53 |             pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.profileimageurl.com"], default=None
54 |         ),
55 |     ]
56 | 
57 | 
58 | class UserUpdateInternal(UserUpdate):
59 |     updated_at: datetime
60 | 
61 | 
62 | class UserTierUpdate(BaseModel):
63 |     tier_id: int
64 | 
65 | 
66 | class UserDelete(BaseModel):
67 |     model_config = ConfigDict(extra="forbid")
68 | 
69 |     is_deleted: bool
70 |     deleted_at: datetime
71 | 
72 | 
73 | class UserRestoreDeleted(BaseModel):
74 |     is_deleted: bool
75 | 
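The schema family above (Base/Create/Read/Update/Delete variants per model) plugs directly into the FastCRUD instances defined under src/app/crud. A minimal usage sketch, assuming the `crud_users` instance from src/app/crud/crud_users.py and an open `AsyncSession` named `db`; `register_user` is a hypothetical helper, not part of the repository:

```python
# Illustrative sketch only. It shows how these schemas separate transport-level
# input (UserCreate, plain password) from persistence-level input
# (UserCreateInternal, hashed password) before handing off to FastCRUD.
from sqlalchemy.ext.asyncio import AsyncSession

from src.app.core.security import get_password_hash
from src.app.crud.crud_users import crud_users
from src.app.schemas.user import UserCreate, UserCreateInternal


async def register_user(db: AsyncSession, payload: UserCreate):
    # Hash the plain password and persist the user via the FastCRUD instance.
    internal = UserCreateInternal(
        name=payload.name,
        username=payload.username,
        email=payload.email,
        hashed_password=get_password_hash(payload.password),
    )
    return await crud_users.create(db=db, object=internal)
```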
-------------------------------------------------------------------------------- /src/migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 2 | -------------------------------------------------------------------------------- /src/migrations/env.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import importlib 3 | import pkgutil 4 | from logging.config import fileConfig 5 | 6 | from alembic import context 7 | from app.core.config import settings 8 | from app.core.db.database import Base 9 | from sqlalchemy import pool 10 | from sqlalchemy.engine import Connection 11 | from sqlalchemy.ext.asyncio import async_engine_from_config 12 | 13 | # this is the Alembic Config object, which provides 14 | # access to the values within the .ini file in use. 15 | config = context.config 16 | 17 | config.set_main_option( 18 | "sqlalchemy.url", 19 | f"{settings.POSTGRES_ASYNC_PREFIX}{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@{settings.POSTGRES_SERVER}:{settings.POSTGRES_PORT}/{settings.POSTGRES_DB}", 20 | ) 21 | 22 | # Interpret the config file for Python logging. 23 | # This line sets up loggers basically. 24 | if config.config_file_name is not None: 25 | fileConfig(config.config_file_name) 26 | 27 | # add your model's MetaData object here 28 | # Auto-import all models in app.models 29 | def import_models(package_name): 30 | package = importlib.import_module(package_name) 31 | for _, module_name, _ in pkgutil.walk_packages(package.__path__, package.__name__ + "."): 32 | importlib.import_module(module_name) 33 | 34 | # Load all models dynamically 35 | import_models("app.models") 36 | target_metadata = Base.metadata 37 | 38 | # other values from the config, defined by the needs of env.py, 39 | # can be acquired: 40 | # my_important_option = config.get_main_option("my_important_option") 41 | # ... etc. 42 | 43 | 44 | def run_migrations_offline() -> None: 45 | """Run migrations in 'offline' mode. 46 | 47 | This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By 48 | skipping the Engine creation we don't even need a DBAPI to be available. 49 | 50 | Calls to context.execute() here emit the given string to the script output. 
51 | """ 52 | url = config.get_main_option("sqlalchemy.url") 53 | context.configure( 54 | url=url, 55 | target_metadata=target_metadata, 56 | literal_binds=True, 57 | dialect_opts={"paramstyle": "named"}, 58 | ) 59 | 60 | with context.begin_transaction(): 61 | context.run_migrations() 62 | 63 | 64 | def do_run_migrations(connection: Connection) -> None: 65 | context.configure(connection=connection, target_metadata=target_metadata) 66 | 67 | with context.begin_transaction(): 68 | context.run_migrations() 69 | 70 | 71 | async def run_async_migrations() -> None: 72 | """In this scenario we need to create an Engine and associate a connection with the context.""" 73 | 74 | connectable = async_engine_from_config( 75 | config.get_section(config.config_ini_section, {}), 76 | prefix="sqlalchemy.", 77 | poolclass=pool.NullPool, 78 | ) 79 | 80 | async with connectable.connect() as connection: 81 | await connection.run_sync(do_run_migrations) 82 | 83 | await connectable.dispose() 84 | 85 | 86 | def run_migrations_online() -> None: 87 | """Run migrations in 'online' mode.""" 88 | 89 | asyncio.run(run_async_migrations()) 90 | 91 | 92 | if context.is_offline_mode(): 93 | run_migrations_offline() 94 | else: 95 | run_migrations_online() 96 | -------------------------------------------------------------------------------- /src/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | ${imports if imports else ""} 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = ${repr(up_revision)} 16 | down_revision: Union[str, None] = ${repr(down_revision)} 17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 19 | 20 | 21 | def upgrade() -> None: 22 | ${upgrades if upgrades else "pass"} 23 | 24 | 25 | def downgrade() -> None: 26 | ${downgrades if downgrades else "pass"} 27 | -------------------------------------------------------------------------------- /src/migrations/versions/README.MD: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/migrations/versions/README.MD -------------------------------------------------------------------------------- /src/scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/src/scripts/__init__.py -------------------------------------------------------------------------------- /src/scripts/create_first_superuser.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import uuid 4 | from datetime import UTC, datetime 5 | 6 | from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String, Table, insert, select 7 | from sqlalchemy.dialects.postgresql import UUID 8 | 9 | from ..app.core.config import settings 10 | from ..app.core.db.database import AsyncSession, async_engine, local_session 11 | from ..app.core.security import get_password_hash 12 | from ..app.models.user import User 13 | 14 | 
logging.basicConfig(level=logging.INFO) 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | async def create_first_user(session: AsyncSession) -> None: 19 | try: 20 | name = settings.ADMIN_NAME 21 | email = settings.ADMIN_EMAIL 22 | username = settings.ADMIN_USERNAME 23 | hashed_password = get_password_hash(settings.ADMIN_PASSWORD) 24 | 25 | query = select(User).filter_by(email=email) 26 | result = await session.execute(query) 27 | user = result.scalar_one_or_none() 28 | 29 | if user is None: 30 | metadata = MetaData() 31 | user_table = Table( 32 | "user", 33 | metadata, 34 | Column("id", Integer, primary_key=True, autoincrement=True, nullable=False), 35 | Column("name", String(30), nullable=False), 36 | Column("username", String(20), nullable=False, unique=True, index=True), 37 | Column("email", String(50), nullable=False, unique=True, index=True), 38 | Column("hashed_password", String, nullable=False), 39 | Column("profile_image_url", String, default="https://profileimageurl.com"), 40 | Column("uuid", UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, unique=True), 41 | Column("created_at", DateTime(timezone=True), default=lambda: datetime.now(UTC), nullable=False), 42 | Column("updated_at", DateTime), 43 | Column("deleted_at", DateTime), 44 | Column("is_deleted", Boolean, default=False, index=True), 45 | Column("is_superuser", Boolean, default=False), 46 | Column("tier_id", Integer, ForeignKey("tier.id"), index=True), 47 | ) 48 | 49 | data = { 50 | "name": name, 51 | "email": email, 52 | "username": username, 53 | "hashed_password": hashed_password, 54 | "is_superuser": True, 55 | } 56 | 57 | stmt = insert(user_table).values(data) 58 | async with async_engine.connect() as conn: 59 | await conn.execute(stmt) 60 | await conn.commit() 61 | 62 | logger.info(f"Admin user {username} created successfully.") 63 | 64 | else: 65 | logger.info(f"Admin user {username} already exists.") 66 | 67 | except Exception as e: 68 | logger.error(f"Error creating admin user: {e}") 69 | 70 | 71 | async def main(): 72 | async with local_session() as session: 73 | await create_first_user(session) 74 | 75 | 76 | if __name__ == "__main__": 77 | loop = asyncio.get_event_loop() 78 | loop.run_until_complete(main()) 79 | -------------------------------------------------------------------------------- /src/scripts/create_first_tier.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from sqlalchemy import select 5 | 6 | from ..app.core.config import config 7 | from ..app.core.db.database import AsyncSession, local_session 8 | from ..app.models.tier import Tier 9 | 10 | logging.basicConfig(level=logging.INFO) 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | async def create_first_tier(session: AsyncSession) -> None: 15 | try: 16 | tier_name = config("TIER_NAME", default="free") 17 | 18 | query = select(Tier).where(Tier.name == tier_name) 19 | result = await session.execute(query) 20 | tier = result.scalar_one_or_none() 21 | 22 | if tier is None: 23 | session.add(Tier(name=tier_name)) 24 | await session.commit() 25 | logger.info(f"Tier '{tier_name}' created successfully.") 26 | 27 | else: 28 | logger.info(f"Tier '{tier_name}' already exists.") 29 | 30 | except Exception as e: 31 | logger.error(f"Error creating tier: {e}") 32 | 33 | 34 | async def main(): 35 | async with local_session() as session: 36 | await create_first_tier(session) 37 | 38 | 39 | if __name__ == "__main__": 40 | loop = asyncio.get_event_loop() 41 | 
loop.run_until_complete(main()) 42 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benavlabs/FastAPI-boilerplate/d224aad172a9a82710860402a30bacacb7125509/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Generator 2 | 3 | import pytest 4 | from faker import Faker 5 | from fastapi.testclient import TestClient 6 | from sqlalchemy import create_engine 7 | from sqlalchemy.orm import sessionmaker 8 | from sqlalchemy.orm.session import Session 9 | 10 | from src.app.core.config import settings 11 | from src.app.main import app 12 | 13 | DATABASE_URI = settings.POSTGRES_URI 14 | DATABASE_PREFIX = settings.POSTGRES_SYNC_PREFIX 15 | 16 | sync_engine = create_engine(DATABASE_PREFIX + DATABASE_URI) 17 | local_session = sessionmaker(autocommit=False, autoflush=False, bind=sync_engine) 18 | 19 | 20 | fake = Faker() 21 | 22 | 23 | @pytest.fixture(scope="session") 24 | def client() -> Generator[TestClient, Any, None]: 25 | with TestClient(app) as _client: 26 | yield _client 27 | app.dependency_overrides = {} 28 | sync_engine.dispose() 29 | 30 | 31 | @pytest.fixture 32 | def db() -> Generator[Session, Any, None]: 33 | session = local_session() 34 | yield session 35 | session.close() 36 | 37 | 38 | def override_dependency(dependency: Callable[..., Any], mocked_response: Any) -> None: 39 | app.dependency_overrides[dependency] = lambda: mocked_response 40 | -------------------------------------------------------------------------------- /tests/helpers/generators.py: -------------------------------------------------------------------------------- 1 | import uuid as uuid_pkg 2 | 3 | from sqlalchemy.orm import Session 4 | 5 | from src.app import models 6 | from src.app.core.security import get_password_hash 7 | from tests.conftest import fake 8 | 9 | 10 | def create_user(db: Session, is_super_user: bool = False) -> models.User: 11 | _user = models.User( 12 | name=fake.name(), 13 | username=fake.user_name(), 14 | email=fake.email(), 15 | hashed_password=get_password_hash(fake.password()), 16 | profile_image_url=fake.image_url(), 17 | uuid=uuid_pkg.uuid4(), 18 | is_superuser=is_super_user, 19 | ) 20 | 21 | db.add(_user) 22 | db.commit() 23 | db.refresh(_user) 24 | 25 | return _user 26 | 27 | -------------------------------------------------------------------------------- /tests/helpers/mocks.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from fastapi.encoders import jsonable_encoder 4 | 5 | from src.app import models 6 | from tests.conftest import fake 7 | 8 | 9 | def get_current_user(user: models.User) -> dict[str, Any]: 10 | return jsonable_encoder(user) 11 | 12 | 13 | def oauth2_scheme() -> str: 14 | token = fake.sha256() 15 | if isinstance(token, bytes): 16 | token = token.decode("utf-8") 17 | return token # type: ignore 18 | -------------------------------------------------------------------------------- /tests/test_user.py: -------------------------------------------------------------------------------- 1 | from fastapi import status 2 | from fastapi.testclient import TestClient 3 | from pytest_mock import MockerFixture 4 | from sqlalchemy.orm import Session 5 | 6 | from 
src.app.api.dependencies import get_current_user 7 | from src.app.api.v1.users import oauth2_scheme 8 | from tests.conftest import fake, override_dependency 9 | 10 | from .helpers import generators, mocks 11 | 12 | 13 | def test_post_user(client: TestClient) -> None: 14 | response = client.post( 15 | "/api/v1/user", 16 | json={ 17 | "name": fake.name(), 18 | "username": fake.user_name(), 19 | "email": fake.email(), 20 | "password": fake.password(), 21 | }, 22 | ) 23 | assert response.status_code == status.HTTP_201_CREATED 24 | 25 | 26 | def test_get_user(db: Session, client: TestClient) -> None: 27 | user = generators.create_user(db) 28 | 29 | response = client.get(f"/api/v1/user/{user.username}") 30 | assert response.status_code == status.HTTP_200_OK 31 | 32 | response_data = response.json() 33 | 34 | assert response_data["id"] == user.id 35 | assert response_data["username"] == user.username 36 | 37 | 38 | def test_get_multiple_users(db: Session, client: TestClient) -> None: 39 | for _ in range(5): 40 | generators.create_user(db) 41 | 42 | response = client.get("/api/v1/users") 43 | assert response.status_code == status.HTTP_200_OK 44 | 45 | response_data = response.json()["data"] 46 | assert len(response_data) >= 5 47 | 48 | 49 | def test_update_user(db: Session, client: TestClient) -> None: 50 | user = generators.create_user(db) 51 | new_name = fake.name() 52 | 53 | override_dependency(get_current_user, mocks.get_current_user(user)) 54 | 55 | response = client.patch(f"/api/v1/user/{user.username}", json={"name": new_name}) 56 | assert response.status_code == status.HTTP_200_OK 57 | 58 | 59 | def test_delete_user(db: Session, client: TestClient, mocker: MockerFixture) -> None: 60 | user = generators.create_user(db) 61 | 62 | override_dependency(get_current_user, mocks.get_current_user(user)) 63 | override_dependency(oauth2_scheme, mocks.oauth2_scheme()) 64 | 65 | mocker.patch("src.app.core.security.jwt.decode", return_value={"sub": user.username, "exp": 9999999999}) 66 | 67 | response = client.delete(f"/api/v1/user/{user.username}") 68 | assert response.status_code == status.HTTP_200_OK 69 | 70 | 71 | def test_delete_db_user(db: Session, mocker: MockerFixture, client: TestClient) -> None: 72 | user = generators.create_user(db) 73 | super_user = generators.create_user(db, is_super_user=True) 74 | 75 | override_dependency(get_current_user, mocks.get_current_user(super_user)) 76 | override_dependency(oauth2_scheme, mocks.oauth2_scheme()) 77 | 78 | mocker.patch("src.app.core.security.jwt.decode", return_value={"sub": user.username, "exp": 9999999999}) 79 | 80 | response = client.delete(f"/api/v1/db_user/{user.username}") 81 | assert response.status_code == status.HTTP_200_OK 82 | --------------------------------------------------------------------------------
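The suite above exercises the happy path of each endpoint; validation failures are left untested. A hedged sketch of one such test, reusing the `/api/v1/user` route and `client` fixture already shown (the expected 422 status assumes FastAPI's standard request-validation behavior, and the rejected username is grounded in the `^[a-z0-9]+$` pattern on `UserBase.username`):

```python
# Sketch of an additional test, following the conventions of tests/test_user.py.
from fastapi import status
from fastapi.testclient import TestClient

from tests.conftest import fake


def test_post_user_rejects_invalid_username(client: TestClient) -> None:
    response = client.post(
        "/api/v1/user",
        json={
            "name": fake.name(),
            "username": "Not A Valid Username!",  # violates the ^[a-z0-9]+$ pattern
            "email": fake.email(),
            "password": fake.password(),
        },
    )
    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
```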