├── .editorconfig ├── .gitignore ├── .pre-commit-config.yaml ├── Makefile ├── README.md ├── README_ru.md ├── backend ├── .env.template ├── README.md ├── README_ru.md ├── alembic.ini ├── application │ ├── __init__.py │ ├── common │ │ ├── __init__.py │ │ └── container.py │ └── web │ │ ├── __init__.py │ │ ├── application.py │ │ ├── base.py │ │ ├── health_check │ │ ├── __init__.py │ │ └── api.py │ │ ├── main.py │ │ ├── upload_file │ │ ├── __init__.py │ │ ├── api.py │ │ ├── exceptions.py │ │ └── models │ │ │ ├── __init__.py │ │ │ └── upload_file.py │ │ └── urls.py ├── common │ ├── __init__.py │ ├── base.py │ ├── config.py │ └── services │ │ ├── __init__.py │ │ ├── random_re.py │ │ └── storage.py ├── docker │ ├── Dockerfile │ └── entrypoint.sh ├── docs │ └── assets │ │ ├── ArchitectureEN.svg │ │ └── ArchitectureRU.svg ├── domain │ ├── __init__.py │ └── upload_file │ │ ├── __init__.py │ │ ├── exceptions.py │ │ ├── repos │ │ ├── __init__.py │ │ └── uploaded_file.py │ │ └── use_cases │ │ ├── __init__.py │ │ ├── delete_file.py │ │ └── upload_file.py ├── infrastructure │ ├── __init__.py │ ├── database │ │ ├── __init__.py │ │ ├── alembic │ │ │ ├── env.py │ │ │ ├── script.py.mako │ │ │ └── versions │ │ │ │ └── __init__.py │ │ ├── base.py │ │ ├── database.py │ │ └── scripts │ │ │ ├── makemigrations.sh │ │ │ └── migrate.sh │ ├── services │ │ ├── __init__.py │ │ ├── random_re_rstr.py │ │ ├── storage_mock.py │ │ └── storage_s3.py │ └── upload_file │ │ ├── __init__.py │ │ └── repos │ │ ├── __init__.py │ │ └── upload_file_db_repo.py ├── libs │ ├── __init__.py │ └── punq │ │ ├── __init__.py │ │ └── _compat.py ├── poetry.lock ├── pyproject.toml ├── setup.cfg └── tests │ ├── __init__.py │ ├── conftest.py │ ├── test_application │ ├── __init__.py │ └── test_web │ │ ├── __init__.py │ │ ├── test_health_check.py │ │ └── test_upload_file.py │ └── test_domain │ ├── __init__.py │ └── test_upload_file │ ├── __init__.py │ └── test_use_cases.py ├── docker-compose.override.yml ├── docker-compose.test.yml └── docker-compose.yml /.editorconfig: -------------------------------------------------------------------------------- 1 | [*] 2 | charset = utf-8 3 | end_of_line = lf 4 | 5 | indent_size = 2 6 | indent_style = space 7 | 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | 11 | [*.py] 12 | indent_size = 4 13 | 14 | [Makefile] 15 | indent_style = tab 16 | indent_size = 4 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # pytype static type analyzer 135 | .pytype/ 136 | 137 | # Cython debug symbols 138 | cython_debug/ 139 | 140 | .idea/ 141 | .vscode/ 142 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # https://pre-commit.com/ 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.1.0 5 | hooks: 6 | - id: trailing-whitespace 7 | - id: end-of-file-fixer 8 | - id: debug-statements 9 | - id: mixed-line-ending 10 | args: [ '--fix=lf' ] 11 | description: Forces to replace line ending by the UNIX 'lf' character. 12 | - id: check-merge-conflict 13 | - id: check-ast 14 | 15 | - repo: https://github.com/editorconfig-checker/editorconfig-checker.python 16 | rev: 2.4.0 17 | hooks: 18 | - id: editorconfig-checker 19 | 20 | - repo: https://github.com/compilerla/conventional-pre-commit 21 | rev: v1.2.0 22 | hooks: 23 | - id: conventional-pre-commit 24 | stages: [ commit-msg ] 25 | 26 | - repo: https://github.com/markdownlint/markdownlint 27 | rev: v0.11.0 28 | hooks: 29 | - id: markdownlint 30 | 31 | # Тут свои прекоммиты, которые запускаются в отдельных docker-контейнерах. 
32 | # Для них нужно отрезать папку (название проекта) в начале каждого аргумента, что делает sed 33 | # 34 | # ATTENTION: Если вылетает ошибка 123 или 1, вероятнее всего, контейнер не запущен 35 | - repo: local 36 | hooks: 37 | # ========================================= Backend ========================================== 38 | - id: backend-docker 39 | name: backend-docker 40 | entry: docker-compose up --build -d backend 41 | verbose: true 42 | language: system 43 | files: ^backend/.*.(py|pyi)$ 44 | pass_filenames: false 45 | fail_fast: true 46 | 47 | - id: backend-black 48 | name: backend-black 49 | entry: bash -c 'echo "$@" | sed "s/^backend\///g" | sed "s/\sbackend\// /g" | 50 | xargs docker-compose exec -T backend black' -- 51 | verbose: true 52 | language: system 53 | files: ^backend/.*.(py|pyi)$ 54 | 55 | - id: backend-isort 56 | name: backend-isort 57 | entry: bash -c 'echo "$@" | sed "s/^backend\///g" | sed "s/\sbackend\// /g" | 58 | xargs docker-compose exec -T backend isort --sp pyproject.toml' -- 59 | language: system 60 | files: ^backend/.*.(py|pyi)$ 61 | 62 | # Mypy работает со всеми файлами 63 | - id: backend-mypy 64 | name: backend-mypy 65 | entry: docker-compose exec -T backend mypy --show-error-codes --config-file setup.cfg . 66 | language: system 67 | files: ^backend/.*.(py|pyi)$ 68 | pass_filenames: false 69 | 70 | - id: backend-flake8 71 | name: backend-flake8 72 | entry: bash -c 'echo "$@" | sed "s/^backend\///g" | sed "s/\sbackend\// /g" | 73 | xargs docker-compose exec -T backend flake8 --config setup.cfg' -- 74 | language: system 75 | files: ^backend/.*.(py|pyi)$ 76 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # --------------------------------- COMMON ---------------------------------- # 3 | ############################################################################### 4 | 5 | # Цветной принт в консоль 6 | highlight_color=$(shell echo -e "\033[0;34m") 7 | success_color=$(shell echo -e "\033[0;32m") 8 | default_color=$(shell echo -e "\033[0m") 9 | 10 | define echo 11 | @echo '$(highlight_color)$(1)$(default_color)' 12 | endef 13 | 14 | define echo_success 15 | @echo '$(success_color)$(1)$(default_color)' 16 | endef 17 | 18 | 19 | ############################################################################### 20 | # --------------------------------- GENERAL --------------------------------- # 21 | ############################################################################### 22 | 23 | # Заведение репозитория для разработки 24 | init:: pre-commit 25 | 26 | @$(call echo_success,'Configured successfully!') 27 | 28 | # Установка pre-commit 29 | pre-commit:: 30 | @$(call echo,'[general]: Installing pre-commit...') 31 | @pip install pre-commit --no-input 32 | @$(call echo,'[general]: Configuring pre-commit...') 33 | @pre-commit install --install-hooks 34 | @pre-commit install --hook-type commit-msg 35 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # fast-api-template 2 | 3 | [![Русский язык](https://raw.githubusercontent.com/stevenrskelton/flag-icon/master/png/16/country-4x3/bg.png) Русский язык](/README_ru.md) 4 | 5 | ## Rationale 6 | 7 | I decided to create this project for enlightening purposes to spread 8 | the application 
of good practices for running and maintaining a Python project. 9 | 10 | ## Best practices used in this project 11 | 12 | ### [pre-commit][PreCommitLink] 13 | 14 | This framework makes it much easier to work with hooks in git. 15 | 16 | It can be used to set up a number of commit quality checks - 17 | ranging from formatting, linting, and code validity 18 | to the messages of those commits. 19 | 20 | ### [black][BlackLink] 21 | 22 | This is an uncompromising python file formatter, which in my opinion is better 23 | than the alternatives autopep8 and yapf. 24 | 25 | I have not been able to get black and isort to agree with each other completely, 26 | so I have to live with black's PEP8 violations. 27 | 28 | ### [isort][IsortLink] 29 | 30 | A tool for sorting imports in python files. 31 | 32 | ### [mypy][MypyLink] 33 | 34 | A static code analyzer for Python. 35 | It allows you to use type annotations to validate code before it runs. 36 | 37 | If the repository contains invalid python code, for example: 38 | 39 | ```python 40 | from typing import Any 41 | def sum_str_and_int(string: str, number: int) -> Any: 42 | return string + number 43 | ``` 44 | 45 | Then mypy will generate the following error: 46 | 47 | ```bash 48 | $ mypy . 49 | a.py:3: error: Unsupported operand types for + ("str" and "int") 50 | Found 1 error in 1 file (checked 1 source file) 51 | ``` 52 | 53 | If you take into account that mypy can be configured very strictly, 54 | you can be sure that obviously broken code, 55 | which the developer may have written through carelessness, will not leak into the repository. 56 | 57 | In my practice, using mypy greatly reduces the number of such 58 | errors during refactoring and reworking of existing functionality. 59 | 60 | ### [flake8][Flake8Link] 61 | 62 | Flake8 is a linter. Linters help to keep the code consistent 63 | and easy to understand. Flake8's functionality is extended by adding plugins 64 | that introduce new code validation rules. 65 | 66 | ### [poetry][PoetryLink] 67 | 68 | I'm a firm believer that at this moment **poetry** 69 | is the best Python package manager. 70 | 71 | If you don't use poetry... use poetry... 72 | 73 | ### Clean Architecture 74 | 75 | There has been a lot of discussion about Clean Architecture. 76 | I believe it is something every developer should know, 77 | but don't be too fanatical about it. 78 | 79 | ### Dependency Injection 80 | 81 | Dependency injection is an approach that allows an object 82 | to receive the other objects it depends on at construction time. 83 | 84 | For some reason this approach is not actively used in Python projects, 85 | although it is far from being a novelty in programming. 86 | 87 | The [punq][PunqLink] library was chosen and slightly modified as a DI solution. 88 | 89 | I made this decision after watching 90 | a [presentation by Alexander Shibaev, Tinkoff][DIConferenceLink]. 91 | In it he reviewed the existing python libraries and frameworks 92 | that allow the use of DI and explained why they settled on **punq**. 93 | 94 | ## Further reading 95 | 96 | You can enjoy the clean architecture 97 | by navigating to the [backend](./backend) folder. 98 | 99 | ## Development 100 | 101 | ### Initial configuration after cloning the repository 102 | 103 | Configuration of the development environment 104 | is performed with a single command: `make`. 105 | 106 | It will install and configure **pre-commit**.
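## Appendix: dependency injection in a nutshell

For a taste of how the DI approach described above looks in code, here is a minimal, hypothetical sketch of the punq wiring pattern: an implementation is registered against an interface, and a use case gets its constructor dependencies resolved from their type annotations. The names (`IGreeter`, `ConsoleGreeter`, `GreetUseCase`) are illustrative only and assume upstream punq's API; the template's real wiring lives in `backend/application/common/container.py`, which uses a slightly modified copy of punq vendored under `backend/libs/punq`.

```python
from typing import Protocol

import punq


class IGreeter(Protocol):
    """Interface the business logic depends on."""

    def greet(self, name: str) -> str:
        ...


class ConsoleGreeter:
    """Concrete implementation registered at the application entry point."""

    def greet(self, name: str) -> str:
        return f"Hello, {name}!"


class GreetUseCase:
    """Use case receives its dependency through the constructor."""

    def __init__(self, greeter: IGreeter) -> None:
        self._greeter = greeter

    def execute(self, name: str) -> str:
        return self._greeter.greet(name)


container = punq.Container()
# Bind the interface to an implementation; tests could bind a mock instead.
container.register(IGreeter, factory=ConsoleGreeter)
# Register the use case itself; punq resolves its annotated dependencies.
container.register(GreetUseCase)

print(container.resolve(GreetUseCase).execute("world"))  # Hello, world!
```

In this template the same pattern is used to register, for example, `IStorage` against `StorageS3` for the web entry point, while tests can substitute `StorageMock`.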
107 | 108 | [PreCommitLink]: https://commonmark.org/help/tutorial/07-links.html "pre-commit" 109 | [BlackLink]: https://github.com/psf/black "Black" 110 | [IsortLink]: https://github.com/PyCQA/isort "isort" 111 | [MypyLink]: https://mypy.readthedocs.io/ "mypy" 112 | [Flake8Link]: https://github.com/pycqa/flake8 "flake8" 113 | [PoetryLink]: https://python-poetry.org/ "Poetry" 114 | [PunqLink]: https://github.com/bobthemighty/punq "punq" 115 | [DIConferenceLink]: https://www.youtube.com/watch?v=3Z_3yCgVKkM 116 | [PyenvSuggestedBuildEnvironment]: https://github.com/pyenv/pyenv/wiki#suggested-build-environment 117 | [PyenvCommonBuildProblems]: https://github.com/pyenv/pyenv/wiki/Common-build-problems 118 | -------------------------------------------------------------------------------- /README_ru.md: -------------------------------------------------------------------------------- 1 | # fast-api-template 2 | 3 | [![English Language](https://raw.githubusercontent.com/stevenrskelton/flag-icon/master/png/16/country-4x3/us.png) English Language](/README_ru.md) 4 | 5 | ## Мотивация 6 | 7 | Я решил создать данный проект в просветительских целях для распространения 8 | применения хороших практик ведения проекта на Python и поддержании его 9 | архитектуры. 10 | 11 | ## "Best"-практики, использованные в данном проекте 12 | 13 | ### [pre-commit][PreCommitLink] 14 | 15 | Данный фреймворк значительно облегчает работу с хуками в git. 16 | 17 | С его помощью можно настроить ряд проверок качества коммитов - 18 | начиная от форматирования, линтинга, валидности кода, 19 | заканчивая сообщениями этих коммитов. 20 | 21 | ### [black][BlackLink] 22 | 23 | Это бескомпромиссный форматтер python-файлов, который по моему мнению лучше, 24 | чем другие альтернативы - autopep8 и yapf. 25 | 26 | У меня не получилось на своём опыте подружить их вместе с isort, поэтому 27 | приходится мириться с нарушениями PEP8 по вине black и довольствоваться 28 | лишь однородным форматированием кода. 29 | 30 | ### [isort][IsortLink] 31 | 32 | Инструмент для умного сортирования импортов в python-файлах. 33 | 34 | ### [mypy][MypyLink] 35 | 36 | Статический анализатор кода из мира Python. Он позволяет использовать 37 | аннотации типов для преждевременной проверки работоспособности кода. 38 | 39 | Если в репозитории будет невалидный python-код, например: 40 | 41 | ```python 42 | from typing import Any 43 | def sum_str_and_int(string: str, number: int) -> Any: 44 | return string + number 45 | ``` 46 | 47 | То mypy выдаст следующую ошибку: 48 | 49 | ```bash 50 | $ mypy . 51 | a.py:3: error: Unsupported operand types for + ("str" and "int") 52 | Found 1 error in 1 file (checked 1 source file) 53 | ``` 54 | 55 | Если принять во внимание, что mypy можно строжайше настроить, то можно быть 56 | уверенным в том, что в репозиторий не прольётся очевидно неработающий код, 57 | который разработчик мог допустить по невнимательности. 58 | 59 | На моей практике использование mypy значительно уменьшает количество таких 60 | ошибок во время рефакторинга и переработок существующего функционала. 61 | 62 | ### [flake8][Flake8Link] 63 | 64 | Flake8 - линтер. Линтеры помогают поддерживать код однородным и легким для 65 | восприятия. Функционал Flake8 расширяется путём добавления плагинов, 66 | которые вносят новые правила проверки кода. 67 | 68 | ### [poetry][PoetryLink] 69 | 70 | На данный момент - это самый лучший менеджер Python пакетов. 71 | 72 | Если вы не используете poetry... используйте poetry... 
73 | 74 | ### Чистая архитектура 75 | 76 | Много дискуссий было по поводу Чистой Архитектуры. 77 | Я считаю, что это то, что должен знать каждый разработчик, 78 | но использовал не из фанатичных побуждений. 79 | 80 | ### Dependency Injection - Внедрение зависимостей 81 | 82 | Внедрение зависимостей - это подход, позволяющий объекту получать 83 | другие объекты, от которых он зависит, на этапе конструирования. 84 | 85 | Данный подход почему-то не очень активно используется в Python проектах, хотя 86 | он - далеко не новшество в программировании. 87 | 88 | В качестве DI решения была выбрана и 89 | слегка допилена библиотека [punq][PunqLink]. 90 | 91 | Данное решение мною было принято после просмотра 92 | [выступления][DIConferenceLink] Александра Шибаева, Тинькофф. 93 | В нём он рассмотрел существующие python-библиотеки и фреймворки, 94 | позволяющие использовать DI и объяснил, почему они остановились на punq. 95 | 96 | ## Далее 97 | 98 | Вы можете преисполниться чистой архитектурой, 99 | перейдя в папку [backend](./backend) 100 | 101 | ## Разработка 102 | 103 | ### Первоначальная конфигурация после клонирования репозитория 104 | 105 | Конфигурация окружения для разработки выполняется одной командой: `make`. 106 | 107 | Это установит и настроит **pre-commit**. 108 | 109 | [PreCommitLink]: https://commonmark.org/help/tutorial/07-links.html "pre-commit" 110 | [BlackLink]: https://github.com/psf/black "Black" 111 | [IsortLink]: https://github.com/PyCQA/isort "isort" 112 | [MypyLink]: https://mypy.readthedocs.io/ "mypy" 113 | [Flake8Link]: https://github.com/pycqa/flake8 "flake8" 114 | [PoetryLink]: https://python-poetry.org/ "Poetry" 115 | [PunqLink]: https://github.com/bobthemighty/punq "punq" 116 | [DIConferenceLink]: https://www.youtube.com/watch?v=3Z_3yCgVKkM 117 | [PyenvSuggestedBuildEnvironment]: https://github.com/pyenv/pyenv/wiki#suggested-build-environment 118 | [PyenvCommonBuildProblems]: https://github.com/pyenv/pyenv/wiki/Common-build-problems 119 | -------------------------------------------------------------------------------- /backend/.env.template: -------------------------------------------------------------------------------- 1 | # Security Warning! Do not commit this file to any VCS! 2 | # This is a local file to speed up development process, 3 | # so you don't have to change your environment variables. 4 | # 5 | # This is not applied to `.env.template`! 6 | # Template files must be committed to the VCS, but must not contain 7 | # any secret values. 
8 | 9 | # === General === 10 | 11 | DOMAIN_NAME=localhost 12 | 13 | # === Database === 14 | 15 | # Used both by backend and PostgreSQL 16 | POSTGRES_DB=postgres 17 | POSTGRES_USER=postgres 18 | POSTGRES_PASSWORD=postgres 19 | 20 | # Used only by backend: 21 | # If you need to run the server or tests locally, 22 | # run `ifconfig`, find `docker0` and paste its `inet` IP address here 23 | BACKEND_DATABASE_HOST=backend_db 24 | BACKEND_DATABASE_PORT=5432 25 | 26 | AWS_ACCESS_KEY_ID=test_aws_access_key_id 27 | AWS_SECRET_ACCESS_KEY=test_aws_secret_access_key 28 | AWS_STORAGE_BUCKET_NAME=testbucket 29 | AWS_S3_ENDPOINT_URL=http://localhost:9000/ 30 | 31 | MINIO_ROOT_USER=test_aws_access_key_id 32 | MINIO_ROOT_PASSWORD=test_aws_secret_access_key 33 | -------------------------------------------------------------------------------- /backend/README.md: -------------------------------------------------------------------------------- 1 | # backend 2 | 3 | [![Русский язык](https://raw.githubusercontent.com/stevenrskelton/flag-icon/master/png/16/country-4x3/bg.png) Русский язык](./README_ru.md) 4 | 5 | ## Project structure 6 | 7 | This project follows the principles of clean architecture. 8 | The business logic does not depend on web frameworks, databases 9 | or any other technologies directly. 10 | 11 | ![I love to draw schematics](./docs/assets/ArchitectureEN.svg) 12 | 13 | Having considered many possible file structures for the project, 14 | I settled on a simplified option that Robert Smallshire suggested in 15 | [his talk on DDD implementation][ProjectStructureLink] 16 | at the EuroPython conference. 17 | 18 | We have 3 main modules with strictly separated areas of responsibility: 19 | 20 | | Module's name | Area of responsibility | 21 | |------------------|--------------------------| 22 | | `domain` | Domain logic | 23 | | `infrastructure` | Service implementations | 24 | | `application` | Application endpoints | 25 | 26 | ## Fighting external dependencies in the business logic layer 27 | 28 | Business logic scenarios depend on the interfaces of different services, 29 | which allows different implementations of these services to be specified 30 | for different entry points. 31 | 32 | Thus, production uses the StorageS3 implementation of the IStorage interface, 33 | while unit tests use StorageMock. This is made easy by the DI library punq, 34 | which makes injecting dependencies very simple. 35 | 36 | You can take a look at the DI container creation in 37 | [application/common/container.py](./application/common/container.py) 38 | and its usage in the 39 | [application/web/upload_file/api.py](./application/web/upload_file/api.py) file. 40 | 41 | [ProjectStructureLink]: https://youtu.be/Ru2T4fu3bGQ?t=2878 42 | -------------------------------------------------------------------------------- /backend/README_ru.md: -------------------------------------------------------------------------------- 1 | # backend 2 | 3 | [![English Language](https://raw.githubusercontent.com/stevenrskelton/flag-icon/master/png/16/country-4x3/us.png) English Language](./README.md) 4 | 5 | ## Структура проекта 6 | 7 | Данный проект следует принципам чистой архитектуры. Бизнес-логика не зависит 8 | от веб-фреймворков, баз данных и любых других технологий напрямую.
9 | 10 | ![Люблю рисовать схемки](./docs/assets/ArchitectureRU.svg) 11 | 12 | Рассматривая множество структур файлов в проекте, я остановился на упрощённом 13 | до чистой архитектуры варианте, который предложил Robert Smallshire в своём 14 | [докладе][ProjectStructureLink] о реализации DDD на конференции europython. 15 | 16 | У нас есть 3 основных модуля, у которых 17 | строжайшим образом разделены зоны ответственности: 18 | 19 | | Название модуля | Зона ответственности | 20 | |------------------|--------------------------| 21 | | `domain` | Бизнес-логика | 22 | | `infrastructure` | Имплементации сервисов | 23 | | `application` | Точки входа в приложение | 24 | 25 | ## Борьба с внешними зависимостями в слое бизнес-логики 26 | 27 | Сценарии бизнес-логики зависят от интерфейсов различных сервисов, что позволяет 28 | для различных точек входа указывать разные реализации этих сервисов. 29 | 30 | Так, на проде будет использоваться StorageS3 реализация интерфейса IStorage, 31 | тогда как при unit-тестировании - StorageMock. Спасибо DI библиотеке punq, 32 | которая позволяет очень просто внедрять зависимости. 33 | 34 | Вы можете взглянуть на создание DI контейнера 35 | [application/common/container.py](./application/common/container.py) 36 | и его использование в файле 37 | [application/web/upload_file/api.py](./application/web/upload_file/api.py). 38 | 39 | [ProjectStructureLink]: https://youtu.be/Ru2T4fu3bGQ?t=2878 40 | -------------------------------------------------------------------------------- /backend/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = infrastructure/database/alembic 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # sys.path path, will be prepended to sys.path if present. 11 | # defaults to the current working directory. 12 | prepend_sys_path = . 13 | 14 | # timezone to use when rendering the date within the migration file 15 | # as well as the filename. 16 | # If specified, requires the python-dateutil library that can be 17 | # installed by adding `alembic[tz]` to the pip requirements 18 | # string value is passed to dateutil.tz.gettz() 19 | # leave blank for localtime 20 | # timezone = 21 | 22 | # max length of characters to apply to the 23 | # "slug" field 24 | # truncate_slug_length = 40 25 | 26 | # set to 'true' to run the environment during 27 | # the 'revision' command, regardless of autogenerate 28 | # revision_environment = false 29 | 30 | # set to 'true' to allow .pyc and .pyo files without 31 | # a source .py file to be detected as revisions in the 32 | # versions/ directory 33 | # sourceless = false 34 | 35 | # version location specification; this defaults 36 | # to alembic/versions. When using multiple version 37 | # directories, initial revisions must be specified with --version-path 38 | # version_locations = %(here)s/bar %(here)s/bat alembic/versions 39 | 40 | # version path separator; As mentioned above, this is the character used to split 41 | # version_locations. 
Valid values are: 42 | # 43 | # version_path_separator = : 44 | # version_path_separator = ; 45 | # version_path_separator = space 46 | version_path_separator = os # default: use os.pathsep 47 | 48 | # the output encoding used when revision files 49 | # are written from script.py.mako 50 | # output_encoding = utf-8 51 | 52 | # sqlalchemy.url = postgresql://fastapi:fastapi@localhost:5432/fastapi 53 | 54 | 55 | # Logging configuration 56 | [loggers] 57 | keys = root,sqlalchemy,alembic 58 | 59 | [handlers] 60 | keys = console 61 | 62 | [formatters] 63 | keys = generic 64 | 65 | [logger_root] 66 | level = WARN 67 | handlers = console 68 | qualname = 69 | 70 | [logger_sqlalchemy] 71 | level = WARN 72 | handlers = 73 | qualname = sqlalchemy.engine 74 | 75 | [logger_alembic] 76 | level = INFO 77 | handlers = 78 | qualname = alembic 79 | 80 | [handler_console] 81 | class = StreamHandler 82 | args = (sys.stderr,) 83 | level = NOTSET 84 | formatter = generic 85 | 86 | [formatter_generic] 87 | format = %(levelname)-5.5s [%(name)s] %(message)s 88 | datefmt = %H:%M:%S 89 | -------------------------------------------------------------------------------- /backend/application/__init__.py: -------------------------------------------------------------------------------- 1 | """Это слой архитектуры, отвечающий за точки входа в сервис. 2 | 3 | Тут могут быть веб-фреймворк, celery, командная строка. 4 | """ 5 | -------------------------------------------------------------------------------- /backend/application/common/__init__.py: -------------------------------------------------------------------------------- 1 | """Общие модули, используемые на всех точках входа в приложение.""" 2 | -------------------------------------------------------------------------------- /backend/application/common/container.py: -------------------------------------------------------------------------------- 1 | """Dependency-injection контейнер.""" 2 | from functools import lru_cache 3 | 4 | from common.base import UseCaseMeta 5 | from common.config import AppConfig, AWSConfig, DatabaseConfig 6 | from common.services.random_re import IRandomRe 7 | from common.services.storage import IStorage 8 | from domain.upload_file.repos import IUploadedFileRepo 9 | from infrastructure.database import DatabaseResource 10 | from infrastructure.services.random_re_rstr import RandomReXeger 11 | from infrastructure.services.storage_s3 import StorageS3 12 | from infrastructure.upload_file.repos import UploadedFileDBRepo 13 | from libs import punq 14 | 15 | 16 | @lru_cache(1) 17 | def get_container() -> punq.Container: 18 | """Singleton фабрика DI контейнера.""" 19 | return _initialize_container() 20 | 21 | 22 | def _initialize_container() -> punq.Container: 23 | """Инициализация DI контейнера.""" 24 | container = punq.Container(reassignments_prohibited=True) 25 | 26 | # Config 27 | container.register(AWSConfig, instance=AWSConfig()) 28 | container.register(AppConfig, instance=AppConfig()) 29 | container.register(DatabaseConfig, instance=DatabaseConfig()) 30 | 31 | # Resources 32 | container.register(DatabaseResource, factory=DatabaseResource) 33 | 34 | # Services 35 | container.register(IRandomRe, factory=RandomReXeger) # type: ignore[misc] 36 | container.register(IStorage, factory=StorageS3) # type: ignore[misc] 37 | 38 | # Repos 39 | container.register(IUploadedFileRepo, factory=UploadedFileDBRepo) # type: ignore[misc] 40 | 41 | # Use Cases 42 | for use_case in UseCaseMeta.registered_use_cases: 43 | container.register(use_case) 44 | 45 | return 
container 46 | 47 | 48 | __all__ = [ 49 | "get_container", 50 | ] 51 | -------------------------------------------------------------------------------- /backend/application/web/__init__.py: -------------------------------------------------------------------------------- 1 | """Входная точка, веб-сервер на основе FastAPI.""" 2 | -------------------------------------------------------------------------------- /backend/application/web/application.py: -------------------------------------------------------------------------------- 1 | """Создание и конфигурация FastAPI приложения.""" 2 | from fastapi import FastAPI, Request 3 | from fastapi.exceptions import RequestValidationError 4 | from starlette.middleware.cors import CORSMiddleware 5 | from starlette.responses import JSONResponse, PlainTextResponse 6 | 7 | from application.common.container import get_container 8 | from common.base import DomainException 9 | from common.config import AppConfig 10 | 11 | from .base import HttpExceptionMeta 12 | 13 | 14 | def add_cors_middleware(app: FastAPI, app_config: AppConfig) -> None: 15 | """Добавление разрешённых хостов в заголовок CORS_ORIGINS.""" 16 | origins = [] 17 | if app_config.cors_origins: 18 | origins_raw = app_config.cors_origins.split(",") 19 | for origin in origins_raw: 20 | use_origin = origin.strip() 21 | origins.append(use_origin) 22 | app.add_middleware( 23 | CORSMiddleware, 24 | allow_origins=origins, 25 | allow_credentials=True, 26 | allow_methods=["*"], 27 | allow_headers=["*"], 28 | ) 29 | 30 | 31 | async def create_app() -> FastAPI: 32 | """Создание и конфигурация FastAPI приложения.""" 33 | from . import urls 34 | 35 | container = get_container() 36 | container.finalize() # Закрываем DI контейнер для изменений 37 | 38 | app_config = container.resolve(AppConfig) 39 | app = FastAPI(title=app_config.project_name) 40 | app.include_router(urls.router) 41 | add_cors_middleware(app, app_config) 42 | 43 | @app.exception_handler(RequestValidationError) 44 | async def validation_exception_handler(_: Request, exc: Exception) -> PlainTextResponse: 45 | return PlainTextResponse(str(exc), status_code=400) 46 | 47 | @app.exception_handler(DomainException) 48 | async def domain_exception_handler(_: Request, exc: DomainException) -> JSONResponse: 49 | http_exception = HttpExceptionMeta.registered_exceptions[exc.__class__] 50 | return JSONResponse( 51 | { 52 | "message": http_exception.message, 53 | "reason": http_exception.reason, 54 | }, 55 | status_code=http_exception.status, 56 | ) 57 | 58 | return app 59 | -------------------------------------------------------------------------------- /backend/application/web/base.py: -------------------------------------------------------------------------------- 1 | """Базовые классы, приводящие функционал домена к читаемому виду в web.""" 2 | from typing import Any, Protocol, Type 3 | 4 | from common.base import DomainException 5 | 6 | 7 | class _HttpException(Protocol): 8 | message: str 9 | reason: str 10 | status: int 11 | domain_exception: Type[DomainException] 12 | 13 | 14 | class HttpExceptionMeta(type): 15 | """Метакласс для маппинга доменных исключений к HTTP ответам.""" 16 | 17 | registered_exceptions: dict[Type[DomainException], Type[_HttpException]] = {} 18 | 19 | def __new__(mcs, name: str, bases: tuple[type, ...], dct: dict[str, Any]) -> type: 20 | """Сохраняем сценарий в список.""" 21 | mcs._validate_dct(name, dct) 22 | http_exception_class = super().__new__(mcs, name, bases, dct) 23 | mcs.registered_exceptions[ 24 | dct["domain_exception"] 
25 | ] = http_exception_class # type: ignore[assignment] 26 | return http_exception_class 27 | 28 | @staticmethod 29 | def _validate_dct(name: str, dct: dict[str, Any]) -> None: 30 | required_fields = { 31 | "message": str, 32 | "status": int, 33 | "reason": str, 34 | "domain_exception": DomainException, 35 | } 36 | for field, field_class in required_fields.items(): 37 | if field_class in {int, str}: 38 | if not isinstance(dct[field], field_class): 39 | raise ValueError( 40 | "{}: '{}' is not an instance of '{}'".format(name, dct[field], field_class) 41 | ) 42 | elif field_class is DomainException: 43 | if not issubclass(dct[field], field_class): 44 | raise ValueError( 45 | "{}: '{}' is not a subclass of '{}'".format(name, dct[field], field_class) 46 | ) 47 | else: 48 | raise ValueError 49 | 50 | 51 | __all__ = [ 52 | "HttpExceptionMeta", 53 | ] 54 | -------------------------------------------------------------------------------- /backend/application/web/health_check/__init__.py: -------------------------------------------------------------------------------- 1 | """Обычный health check.""" 2 | -------------------------------------------------------------------------------- /backend/application/web/health_check/api.py: -------------------------------------------------------------------------------- 1 | """Обычный health check.""" 2 | from typing import Any 3 | 4 | from fastapi.routing import APIRouter 5 | from starlette import status 6 | from starlette.responses import Response 7 | 8 | router = APIRouter() 9 | 10 | 11 | @router.get("/health-check") 12 | def health_check() -> Any: 13 | """Обычный health check.""" 14 | return Response(status_code=status.HTTP_200_OK) 15 | -------------------------------------------------------------------------------- /backend/application/web/main.py: -------------------------------------------------------------------------------- 1 | """Поднятие web сервера с использованием FastAPI и uvicorn.""" 2 | import asyncio 3 | 4 | import uvicorn # type: ignore[import] 5 | 6 | from .application import create_app 7 | 8 | app = asyncio.run(create_app()) 9 | 10 | if __name__ == "__main__": 11 | uvicorn.run(app=app, host="0.0.0.0", port=8000, log_level="info") # noqa: S104 12 | -------------------------------------------------------------------------------- /backend/application/web/upload_file/__init__.py: -------------------------------------------------------------------------------- 1 | """Прокидывание приложения загрузки файлов в FastAPI.""" 2 | 3 | from . import exceptions # noqa: F401 4 | -------------------------------------------------------------------------------- /backend/application/web/upload_file/api.py: -------------------------------------------------------------------------------- 1 | """API приложения загрузки файлов.""" 2 | from http import HTTPStatus 3 | from uuid import UUID 4 | 5 | from fastapi import Depends, File, Path, UploadFile 6 | from fastapi.routing import APIRouter 7 | 8 | from application.common.container import get_container 9 | from domain.upload_file import use_cases 10 | from libs.punq import Container 11 | 12 | from . 
import models 13 | 14 | router = APIRouter() 15 | 16 | 17 | @router.post("/", status_code=HTTPStatus.OK, response_model=models.UploadFileResponse) 18 | async def upload_file_view( 19 | image: UploadFile = File(...), container: Container = Depends(get_container) # noqa: B008 20 | ) -> models.UploadFileResponse: 21 | """Endpoint загрузки файла.""" 22 | uploaded_file = await container.resolve(use_cases.UploadFileUseCase).execute(image) 23 | return models.UploadFileResponse.parse_obj( 24 | { 25 | "uuid": uploaded_file.uuid, 26 | "url": uploaded_file.url, 27 | "key": uploaded_file.key, 28 | "last_modified": uploaded_file.last_modified, 29 | "content_length": uploaded_file.content_length, 30 | "upload_path": uploaded_file.upload_path, 31 | } 32 | ) 33 | 34 | 35 | @router.delete("/{uuid}", status_code=HTTPStatus.NO_CONTENT) 36 | async def delete_file_view( 37 | uuid: UUID = Path(...), container: Container = Depends(get_container) # noqa: B008 38 | ) -> None: 39 | """Endpoint загрузки файла.""" 40 | await container.resolve(use_cases.DeleteFileUseCase).execute(uuid) 41 | -------------------------------------------------------------------------------- /backend/application/web/upload_file/exceptions.py: -------------------------------------------------------------------------------- 1 | """Исключения приложения загрузки файлов.""" 2 | from http import HTTPStatus 3 | 4 | from domain.upload_file.exceptions import FileDoesNotExistError 5 | 6 | from ..base import HttpExceptionMeta 7 | 8 | 9 | class FileDoesNotExistHttpError(metaclass=HttpExceptionMeta): 10 | """Файл не найден.""" 11 | 12 | message = "Файл не найден." 13 | reason = "file_does_not_exist" 14 | status = HTTPStatus.NOT_FOUND 15 | 16 | domain_exception = FileDoesNotExistError 17 | 18 | 19 | __all__ = [ 20 | "FileDoesNotExistHttpError", 21 | ] 22 | -------------------------------------------------------------------------------- /backend/application/web/upload_file/models/__init__.py: -------------------------------------------------------------------------------- 1 | """Pydantic-модели http запросов и ответов приложения загрузки файлов.""" 2 | from .upload_file import UploadFileResponse 3 | 4 | __all__ = [ 5 | "UploadFileResponse", 6 | ] 7 | -------------------------------------------------------------------------------- /backend/application/web/upload_file/models/upload_file.py: -------------------------------------------------------------------------------- 1 | """Модели запросов/ответов загрузки файла.""" 2 | from datetime import datetime 3 | from uuid import UUID 4 | 5 | from pydantic import BaseModel 6 | 7 | 8 | class UploadFileResponse(BaseModel): 9 | """Ответ на запрос загрузки файла.""" 10 | 11 | uuid: UUID 12 | url: str 13 | key: str 14 | last_modified: datetime 15 | content_length: int 16 | upload_path: str 17 | -------------------------------------------------------------------------------- /backend/application/web/urls.py: -------------------------------------------------------------------------------- 1 | """Подключение необходимых FastAPI маршрутизаторов в один корневой.""" 2 | from fastapi.routing import APIRouter 3 | 4 | from .health_check.api import router as router_health_check 5 | from .upload_file.api import router as router_upload_file 6 | 7 | router = APIRouter() 8 | router.include_router(router_health_check, prefix="/api/v1", tags=["health_check"]) 9 | router.include_router(router_upload_file, prefix="/api/v1", tags=["hello_world"]) 10 | -------------------------------------------------------------------------------- 
/backend/common/__init__.py: -------------------------------------------------------------------------------- 1 | """Общие модули, используемые на всём слое доменной-логики.""" 2 | -------------------------------------------------------------------------------- /backend/common/base.py: -------------------------------------------------------------------------------- 1 | """Базовые классы, необходимые для облегчённой конфигурации dependency-injection.""" 2 | from typing import Any, Type 3 | 4 | 5 | class DomainException(Exception): 6 | """Исключение доменной логики. 7 | 8 | Им удобно собирать все доменные исключения. 9 | """ 10 | 11 | registered_exceptions: list[Type["DomainException"]] = [] 12 | 13 | def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None: 14 | """Сохраняем доменное исключение в список.""" 15 | super().__init_subclass__(**kwargs) 16 | cls.registered_exceptions.append(cls) 17 | 18 | 19 | class UseCaseMeta(type): 20 | """Мета-класс сценария. 21 | 22 | Им удобно собирать все сценарии и регистрировать их в DI контейнер. 23 | """ 24 | 25 | registered_use_cases: list[type] = [] 26 | 27 | def __new__(mcs, *args: list[Any], **kwargs: dict[str, Any]) -> type: 28 | """Сохраняем сценарий в список.""" 29 | use_case_class = super().__new__(mcs, *args, **kwargs) 30 | UseCaseMeta.registered_use_cases.append(use_case_class) 31 | return use_case_class 32 | 33 | 34 | __all__ = [ 35 | "DomainException", 36 | "UseCaseMeta", 37 | ] 38 | -------------------------------------------------------------------------------- /backend/common/config.py: -------------------------------------------------------------------------------- 1 | """Конфигурация приложения на основе констант и переменных окружения.""" 2 | from pathlib import Path 3 | 4 | from pydantic import BaseSettings, Field 5 | 6 | BASE_DIR = Path(__file__).parent.parent.parent 7 | 8 | 9 | class AppConfig(BaseSettings): 10 | """Конфигурация приложения.""" 11 | 12 | class Config: # noqa: D106 13 | env_file = ".env" 14 | env_file_encoding = "utf-8" 15 | 16 | project_name: str = Field(env="PROJECT_NAME", default="backend") 17 | cors_origins: str = Field(env="BACKEND_CORS_ORIGINS", default="") 18 | sentry_dsn = Field(env="SENTRY_DSN", default="") 19 | base_dir = BASE_DIR 20 | 21 | 22 | class DatabaseConfig(BaseSettings): 23 | """Конфигурация БД.""" 24 | 25 | class Config: # noqa: D106 26 | env_file = ".env" 27 | env_file_encoding = "utf-8" 28 | 29 | protocol: str = "postgresql+asyncpg" 30 | database: str = Field(env="POSTGRES_DB") 31 | username: str = Field(env="POSTGRES_USER") 32 | password: str = Field(env="POSTGRES_PASSWORD") 33 | host: str = Field(env="BACKEND_DATABASE_HOST") 34 | port: int = Field(env="BACKEND_DATABASE_PORT", cast=int) 35 | 36 | # Список приложений, которые используют SQLAlchemy для декларации моделей 37 | # Нужно для Alembic миграций 38 | apps: list[str] = [] 39 | 40 | @property 41 | def database_url(self) -> str: 42 | """URL подключения к БД.""" 43 | return "{protocol}://{username}:{password}@{host}:{port}/{database}".format( 44 | protocol=self.protocol, 45 | username=self.username, 46 | password=self.password, 47 | host=self.host, 48 | port=self.port, 49 | database=self.database, 50 | ) 51 | 52 | 53 | class AWSConfig(BaseSettings): 54 | """Конфигурация S3.""" 55 | 56 | class Config: # noqa: D106 57 | env_file = ".env" 58 | env_file_encoding = "utf-8" 59 | 60 | access_key_id: str = Field(env="AWS_ACCESS_KEY_ID") 61 | secret_access_key: str = Field(env="AWS_SECRET_ACCESS_KEY") 62 | storage_bucket_name: str = 
Field(env="AWS_STORAGE_BUCKET_NAME") 63 | endpoint_url: str = Field(env="AWS_S3_ENDPOINT_URL") 64 | 65 | 66 | __all__ = [ 67 | "AWSConfig", 68 | "AppConfig", 69 | "DatabaseConfig", 70 | ] 71 | -------------------------------------------------------------------------------- /backend/common/services/__init__.py: -------------------------------------------------------------------------------- 1 | """Интерфейсы сервисов, а также вспомогательные типы, используемые в приложении.""" 2 | from .random_re import IRandomRe 3 | from .storage import FileMeta, IAsyncFile, IStorage 4 | 5 | __all__ = [ 6 | "FileMeta", 7 | "IAsyncFile", 8 | "IRandomRe", 9 | "IStorage", 10 | ] 11 | -------------------------------------------------------------------------------- /backend/common/services/random_re.py: -------------------------------------------------------------------------------- 1 | """Интерфейс сервиса генерации случайных строк по паттерну регулярного выражения.""" 2 | from abc import abstractmethod 3 | from typing import Protocol 4 | 5 | 6 | class IRandomRe(Protocol): 7 | """Интерфейс сервиса генерации случайных строк по паттерну регулярного выражения.""" 8 | 9 | @abstractmethod 10 | def execute(self, re_pattern: str) -> str: 11 | """Генерация случайной строки по паттерну регулярного выражения.""" 12 | raise NotImplementedError 13 | -------------------------------------------------------------------------------- /backend/common/services/storage.py: -------------------------------------------------------------------------------- 1 | """Интерфейс сервиса, взаимодействующим с хранилищем файлов.""" 2 | from abc import abstractmethod 3 | from datetime import datetime 4 | from typing import NamedTuple, Protocol, Union 5 | 6 | 7 | class FileMeta(NamedTuple): 8 | """Метаданные файлов, загруженных в хранилище.""" 9 | 10 | url: str 11 | key: str 12 | filename: str 13 | last_modified: datetime 14 | content_length: int 15 | upload_path: str 16 | 17 | 18 | class IAsyncFile(Protocol): 19 | """Протокол асинхронных бинарных потоков для файлов.""" 20 | 21 | filename: str 22 | 23 | async def write(self, data: Union[bytes, str]) -> None: 24 | """Запись в поток.""" 25 | 26 | async def read(self, size: int = -1) -> Union[bytes, str]: 27 | """Чтение из потока.""" 28 | 29 | async def seek(self, offset: int) -> None: 30 | """Перемещение позиции в потоке.""" 31 | 32 | async def close(self) -> None: 33 | """Закрытие потока.""" 34 | 35 | 36 | class IStorage(Protocol): 37 | """Интерфейс сервиса, взаимодействующим с хранилищем файлов.""" 38 | 39 | @abstractmethod 40 | async def upload_file(self, file: IAsyncFile, upload_path: str) -> FileMeta: 41 | """Загрузка файла в хранилище.""" 42 | raise NotImplementedError 43 | 44 | @abstractmethod 45 | async def delete_file(self, file_meta: FileMeta) -> None: 46 | """Удаление файла из хранилища.""" 47 | raise NotImplementedError 48 | -------------------------------------------------------------------------------- /backend/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile uses multi-stage build to customize DEV and PROD images: 2 | # https://docs.docker.com/develop/develop-images/multistage-build/ 3 | 4 | FROM python:3.10.2-slim-buster AS development_build 5 | 6 | ARG ENVIRONMENT 7 | 8 | ENV ENVIRONMENT=${ENVIRONMENT} \ 9 | # build: 10 | BUILD_ONLY_PACKAGES='wget' \ 11 | # python: 12 | PYTHONFAULTHANDLER=1 \ 13 | PYTHONUNBUFFERED=1 \ 14 | PYTHONHASHSEED=random \ 15 | PYTHONDONTWRITEBYTECODE=1 \ 16 | # pip: 17 | 
PIP_NO_CACHE_DIR=off \ 18 | PIP_DISABLE_PIP_VERSION_CHECK=on \ 19 | PIP_DEFAULT_TIMEOUT=100 \ 20 | # dockerize: 21 | DOCKERIZE_VERSION=v0.6.1 \ 22 | # tini: 23 | TINI_VERSION=v0.19.0 \ 24 | # poetry: 25 | POETRY_NO_INTERACTION=1 \ 26 | POETRY_VIRTUALENVS_CREATE=false \ 27 | POETRY_CACHE_DIR='/var/cache/pypoetry' \ 28 | PATH="$PATH:/root/.poetry/bin" 29 | 30 | # System deps: 31 | RUN apt-get update && apt-get upgrade -y \ 32 | && apt-get install --no-install-recommends -y \ 33 | bash \ 34 | tar \ 35 | build-essential \ 36 | curl \ 37 | gettext \ 38 | git \ 39 | libpq-dev \ 40 | # Defining build-time-only dependencies: 41 | $BUILD_ONLY_PACKAGES \ 42 | # Installing `dockerize` utility: 43 | # https://github.com/jwilder/dockerize 44 | && wget "https://github.com/jwilder/dockerize/releases/download/${DOCKERIZE_VERSION}/dockerize-linux-amd64-${DOCKERIZE_VERSION}.tar.gz" \ 45 | && tar -C /usr/local/bin -xzvf "dockerize-linux-amd64-${DOCKERIZE_VERSION}.tar.gz" \ 46 | && rm "dockerize-linux-amd64-${DOCKERIZE_VERSION}.tar.gz" && dockerize --version \ 47 | # Installing `tini` utility: 48 | # https://github.com/krallin/tini 49 | && wget -O /usr/local/bin/tini "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini" \ 50 | && chmod +x /usr/local/bin/tini && tini --version \ 51 | # Installing `poetry` package manager: 52 | # https://github.com/python-poetry/poetry 53 | && pip install -Iv poetry==1.1.12 \ 54 | && poetry --version \ 55 | # Removing build-time-only dependencies: 56 | && apt-get remove -y $BUILD_ONLY_PACKAGES \ 57 | # Cleaning cache: 58 | && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ 59 | && apt-get clean -y && rm -rf /var/lib/apt/lists/* 60 | 61 | WORKDIR /code 62 | 63 | # This is a special case. We need to run this script as an entry point: 64 | COPY ./docker/entrypoint.sh /docker-entrypoint.sh 65 | 66 | # Setting up proper permissions: 67 | RUN chmod +x '/docker-entrypoint.sh' \ 68 | && groupadd -r web && useradd -d /code -r -g web web \ 69 | && chown web:web -R /code 70 | 71 | # Copy only requirements, to cache them in docker layer 72 | COPY --chown=web:web ./poetry.lock ./pyproject.toml /code/ 73 | 74 | # Project initialization: 75 | RUN echo "$ENVIRONMENT" && poetry version \ 76 | && poetry install \ 77 | $(if [ "$ENVIRONMENT" = 'production' ]; then echo '--no-dev'; fi) \ 78 | --no-interaction --no-ansi \ 79 | # Upgrading pip, it is insecure, remove after `pip@21.1` 80 | && poetry run pip install -U pip \ 81 | # Cleaning poetry installation's cache for production: 82 | && if [ "$ENVIRONMENT" = 'production' ]; then rm -rf "$POETRY_CACHE_DIR"; fi 83 | 84 | # We customize how our app is loaded with the custom entrypoint: 85 | ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"] 86 | 87 | # The following stage is only for Prod: 88 | # https://wemake-django-template.readthedocs.io/en/latest/pages/template/production.html 89 | FROM development_build AS production_build 90 | COPY --chown=web:web . 
/code 91 | -------------------------------------------------------------------------------- /backend/docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | set -o errexit 4 | set -o nounset 5 | 6 | readonly cmd="$*" 7 | 8 | postgres_ready() { 9 | # Check that postgres is up and running on port `5432`: 10 | dockerize -wait "tcp://${BACKEND_DATABASE_HOST}:${BACKEND_DATABASE_PORT}" -timeout 10s 11 | } 12 | 13 | # We need this line to make sure that this container is started 14 | # after the one with postgres: 15 | until postgres_ready; do 16 | echo >&2 'Postgres is unavailable - sleeping' 17 | done 18 | 19 | # It is also possible to wait for other services as well: redis, elastic, mongo 20 | echo >&2 'Postgres is up - continuing...' 21 | 22 | # Applying migrations 23 | echo >&2 'Applying migrations...' 24 | sh infrastructure/database/scripts/migrate.sh head 25 | 26 | # Evaluating passed command (do not touch): 27 | # shellcheck disable=SC2086 28 | exec $cmd 29 | -------------------------------------------------------------------------------- /backend/docs/assets/ArchitectureEN.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 |
[ArchitectureEN.svg — diagram text labels: Client → Endpoints (for example, HTTP) → Business Logic: Use Cases, Models, Service Interfaces, Repository Interfaces → Service Implementations, Repository Implementations → External Dependencies]
-------------------------------------------------------------------------------- /backend/docs/assets/ArchitectureRU.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 |
[ArchitectureRU.svg — текстовые подписи схемы: Клиент → Точка входа (например, веб-фреймворк) → Бизнес-Логика: Сценарии, Модели, Интерфейсы Сервисов, Интерфейсы Репозиториев → Реализации Сервисов, Реализации Репозиториев → Внешние Зависимости]
4 | -------------------------------------------------------------------------------- /backend/domain/__init__.py: -------------------------------------------------------------------------------- 1 | """Это слой архитектуры, отвечающий за доменную логику.""" 2 | -------------------------------------------------------------------------------- /backend/domain/upload_file/__init__.py: -------------------------------------------------------------------------------- 1 | """Доменная логика приложения загрузки файлов.""" 2 | -------------------------------------------------------------------------------- /backend/domain/upload_file/exceptions.py: -------------------------------------------------------------------------------- 1 | """Доменные исключения приложения загрузки файлов.""" 2 | from common.base import DomainException 3 | 4 | 5 | class FileDoesNotExistError(DomainException): 6 | """Файл не найден.""" 7 | 8 | 9 | __all__ = [ 10 | "FileDoesNotExistError", 11 | ] 12 | -------------------------------------------------------------------------------- /backend/domain/upload_file/repos/__init__.py: -------------------------------------------------------------------------------- 1 | """Репозитории и модели приложения загрузки файлов.""" 2 | from .uploaded_file import IUploadedFileRepo, UploadedFile 3 | 4 | __all__ = [ 5 | "IUploadedFileRepo", 6 | "UploadedFile", 7 | ] 8 | -------------------------------------------------------------------------------- /backend/domain/upload_file/repos/uploaded_file.py: -------------------------------------------------------------------------------- 1 | """Репозиторий и модель загруженного файла.""" 2 | from abc import abstractmethod 3 | from dataclasses import dataclass 4 | from datetime import datetime 5 | from typing import Optional, Protocol 6 | from uuid import UUID 7 | 8 | 9 | @dataclass 10 | class UploadedFile: 11 | """Загруженный файл.""" 12 | 13 | url: str 14 | key: str 15 | filename: str 16 | last_modified: datetime 17 | content_length: int 18 | upload_path: str 19 | uuid: Optional[UUID] = None 20 | 21 | 22 | class IUploadedFileRepo(Protocol): 23 | """Интерфейс репозиторий загруженных файлов.""" 24 | 25 | @abstractmethod 26 | async def delete(self, file: UploadedFile) -> None: 27 | """Удаление конкретного файла.""" 28 | 29 | @abstractmethod 30 | async def create(self, file: UploadedFile) -> UploadedFile: 31 | """Создание файла.""" 32 | 33 | @abstractmethod 34 | async def get(self, uuid: UUID) -> Optional[UploadedFile]: 35 | """Получение конкретного файла.""" 36 | 37 | 38 | __all__ = [ 39 | "IUploadedFileRepo", 40 | "UploadedFile", 41 | ] 42 | -------------------------------------------------------------------------------- /backend/domain/upload_file/use_cases/__init__.py: -------------------------------------------------------------------------------- 1 | """Сценарии использования приложения загрузки файлов.""" 2 | from .delete_file import DeleteFileUseCase 3 | from .upload_file import UploadFileUseCase 4 | 5 | __all__ = [ 6 | "DeleteFileUseCase", 7 | "UploadFileUseCase", 8 | ] 9 | -------------------------------------------------------------------------------- /backend/domain/upload_file/use_cases/delete_file.py: -------------------------------------------------------------------------------- 1 | """Сценарий загрузки файла в хранилище файлов.""" 2 | from uuid import UUID 3 | 4 | from common.base import UseCaseMeta 5 | from common.services.storage import FileMeta, IStorage 6 | from domain.upload_file.exceptions import FileDoesNotExistError 7 | from 
domain.upload_file.repos.uploaded_file import IUploadedFileRepo 8 | 9 | 10 | class DeleteFileUseCase(metaclass=UseCaseMeta): 11 | """Сценарий удаления файла из хранилища файлов.""" 12 | 13 | def __init__(self, storage_service: IStorage, file_repo: IUploadedFileRepo) -> None: 14 | """Создание экземпляра с сохранением конфигурации.""" 15 | self._storage_service = storage_service 16 | self._upload_path = "h/w" 17 | self._file_repo = file_repo 18 | 19 | async def execute(self, file_uuid: UUID) -> None: 20 | """Удаление файла из хранилища файлов.""" 21 | file = await self._file_repo.get(file_uuid) 22 | if file is None: 23 | raise FileDoesNotExistError() 24 | 25 | meta = FileMeta( 26 | url=file.url, 27 | key=file.key, 28 | filename=file.filename, 29 | last_modified=file.last_modified, 30 | content_length=file.content_length, 31 | upload_path=file.upload_path, 32 | ) 33 | 34 | await self._storage_service.delete_file(meta) 35 | await self._file_repo.delete(file) 36 | 37 | 38 | __all__ = [ 39 | "DeleteFileUseCase", 40 | ] 41 | -------------------------------------------------------------------------------- /backend/domain/upload_file/use_cases/upload_file.py: -------------------------------------------------------------------------------- 1 | """Сценарий загрузки файла в хранилище файлов.""" 2 | from common.base import UseCaseMeta 3 | from common.services.storage import IAsyncFile, IStorage 4 | from domain.upload_file.repos.uploaded_file import IUploadedFileRepo, UploadedFile 5 | 6 | 7 | class UploadFileUseCase(metaclass=UseCaseMeta): 8 | """Сценарий загрузки файла в хранилище файлов.""" 9 | 10 | def __init__(self, storage_service: IStorage, file_repo: IUploadedFileRepo) -> None: 11 | """Создание экземпляра с сохранением конфигурации.""" 12 | self._storage_service = storage_service 13 | self._upload_path = "h/w" 14 | self._file_repo = file_repo 15 | 16 | async def execute(self, upload_file: IAsyncFile) -> UploadedFile: 17 | """Загрузка файла в хранилище файлов.""" 18 | file_meta = await self._storage_service.upload_file(upload_file, self._upload_path) 19 | return await self._file_repo.create( 20 | UploadedFile( 21 | url=file_meta.url, 22 | key=file_meta.key, 23 | filename=file_meta.filename, 24 | last_modified=file_meta.last_modified, 25 | content_length=file_meta.content_length, 26 | upload_path=file_meta.upload_path, 27 | ) 28 | ) 29 | -------------------------------------------------------------------------------- /backend/infrastructure/__init__.py: -------------------------------------------------------------------------------- 1 | """Это слой архитектуры, отвечающий за инфраструктурную имплементацию. 2 | 3 | Сюда входят реализации репозиториев и сервисов. 
4 | """ 5 | -------------------------------------------------------------------------------- /backend/infrastructure/database/__init__.py: -------------------------------------------------------------------------------- 1 | """Подключение к базе данных с использованием SQLAlchemy.""" 2 | from .database import DatabaseResource 3 | 4 | __all__ = [ 5 | "DatabaseResource", 6 | ] 7 | -------------------------------------------------------------------------------- /backend/infrastructure/database/alembic/env.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import sys 3 | from logging.config import fileConfig 4 | from pathlib import Path 5 | 6 | from alembic import context 7 | from sqlalchemy import engine_from_config, pool 8 | 9 | BASE_DIR = Path(__file__).parent.parent.parent.absolute() 10 | sys.path.append(str(BASE_DIR)) 11 | 12 | from common.config import AppConfig, DatabaseConfig 13 | 14 | app_config = AppConfig() 15 | database_config = DatabaseConfig() 16 | 17 | # this is the Alembic Config object, which provides 18 | # access to the values within the .ini file in use. 19 | config = context.config 20 | dsn = database_config.database_url.replace("+asyncpg", "") 21 | print(dsn) 22 | # Interpret the config file for Python logging. 23 | # This line sets up loggers basically. 24 | fileConfig(config.config_file_name) 25 | # set endpoint from config 26 | config.set_main_option("sqlalchemy.url", dsn) 27 | 28 | # add your model's MetaData object here 29 | # for 'autogenerate' support 30 | # from myapp import mymodel 31 | # target_metadata = mymodel.Base.metadata 32 | 33 | target_metadata = [] 34 | for app in database_config.apps: 35 | try: 36 | i = importlib.import_module("infrastructure.{}.models".format(app)) 37 | except ModuleNotFoundError: 38 | continue 39 | 40 | if len(target_metadata) > 0: 41 | continue 42 | elif hasattr(i, "__all__") and len(i.__all__) > 0: 43 | model = i.__all__[0] 44 | target_metadata.append(getattr(i, model).metadata) 45 | 46 | 47 | # target_metadata = [Base.metadata] 48 | 49 | 50 | # other values from the config, defined by the needs of env.py, 51 | # can be acquired: 52 | # my_important_option = config.get_main_option("my_important_option") 53 | # ... etc. 54 | 55 | 56 | def run_migrations_offline(): 57 | """Run migrations in 'offline' mode. 58 | 59 | This configures the context with just a URL 60 | and not an Engine, though an Engine is acceptable 61 | here as well. By skipping the Engine creation 62 | we don't even need a DBAPI to be available. 63 | 64 | Calls to context.execute() here emit the given string to the 65 | script output. 66 | """ 67 | url = config.get_main_option("sqlalchemy.url") 68 | context.configure(url=url, target_metadata=target_metadata, literal_binds=True) 69 | 70 | with context.begin_transaction(): 71 | context.run_migrations() 72 | 73 | 74 | def run_migrations_online(): 75 | """Run migrations in 'online' mode. 76 | 77 | In this scenario we need to create an Engine 78 | and associate a connection with the context. 
79 | """ 80 | connectable = engine_from_config( 81 | config.get_section(config.config_ini_section), 82 | prefix="sqlalchemy.", 83 | poolclass=pool.NullPool, 84 | ) 85 | 86 | with connectable.connect() as connection: 87 | context.configure(connection=connection, target_metadata=target_metadata) 88 | 89 | with context.begin_transaction(): 90 | context.run_migrations() 91 | 92 | 93 | if context.is_offline_mode(): 94 | run_migrations_offline() 95 | else: 96 | run_migrations_online() 97 | -------------------------------------------------------------------------------- /backend/infrastructure/database/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | """ 7 | from alembic import op 8 | import sqlalchemy as sa 9 | ${imports if imports else ""} 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = ${repr(up_revision)} 13 | down_revision = ${repr(down_revision)} 14 | branch_labels = ${repr(branch_labels)} 15 | depends_on = ${repr(depends_on)} 16 | 17 | 18 | def upgrade(): 19 | ${upgrades if upgrades else "pass"} 20 | 21 | 22 | def downgrade(): 23 | ${downgrades if downgrades else "pass"} 24 | -------------------------------------------------------------------------------- /backend/infrastructure/database/alembic/versions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/infrastructure/database/alembic/versions/__init__.py -------------------------------------------------------------------------------- /backend/infrastructure/database/base.py: -------------------------------------------------------------------------------- 1 | """Базовый класс декларативного описания ORM моделей SQLAlchemy.""" 2 | from sqlalchemy.ext.declarative import declarative_base, declared_attr 3 | 4 | 5 | class CustomBase: 6 | """Базовый класс декларативного описания ORM моделей SQLAlchemy.""" 7 | 8 | @declared_attr 9 | def __tablename__(cls) -> str: # noqa 10 | return cls.__name__.lower() 11 | 12 | __mapper_args__ = {"eager_defaults": True} 13 | 14 | 15 | Base = declarative_base(cls=CustomBase) 16 | 17 | 18 | __all__ = [ 19 | "Base", 20 | ] 21 | -------------------------------------------------------------------------------- /backend/infrastructure/database/database.py: -------------------------------------------------------------------------------- 1 | """Подключение к базе данных с использованием SQLAlchemy.""" 2 | from contextlib import asynccontextmanager 3 | from typing import AsyncGenerator, cast 4 | 5 | from sqlalchemy import create_engine, orm 6 | from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine 7 | from sqlalchemy_utils import create_database, database_exists, drop_database # type: ignore[import] 8 | 9 | from common.config import DatabaseConfig 10 | 11 | from .base import Base 12 | 13 | 14 | class DatabaseResource: 15 | """Подключение к базе данных с использованием SQLAlchemy.""" 16 | 17 | def __init__(self, database_config: DatabaseConfig) -> None: 18 | """Создание экземпляра с сохранением конфигурации.""" 19 | self._database_name = database_config.database 20 | self._database_url = database_config.database_url 21 | self._engine = create_async_engine(self._database_url) 22 | self._session_factory = orm.scoped_session( 23 | orm.sessionmaker( # type: 
ignore 24 | autocommit=False, autoflush=False, bind=self._engine, class_=AsyncSession 25 | ) 26 | ) 27 | 28 | @property 29 | def _sync_db_url(self) -> str: 30 | """Костыль для использования sqlalchemy-utils. Оно не работает с асинхронным движком.""" 31 | return self._database_url.replace("+asyncpg", "") 32 | 33 | def create_database(self) -> None: 34 | """Создание БД при условии её отсутствия. 35 | 36 | Следует использовать только для тестов. 37 | """ 38 | if not database_exists(self._sync_db_url): 39 | create_database(self._sync_db_url) 40 | 41 | def create_tables(self) -> None: 42 | """Создание всех таблиц с помощью использования декларативного стиля SQLAlchemy. 43 | 44 | Следует использовать только для быстрого запуска интеграционных тестов. 45 | Это куда быстрее, чем применять миграции. Минус в том, что мы так не тестируем миграции. 46 | """ 47 | engine = create_engine(self._sync_db_url) 48 | Base.metadata.create_all(engine) 49 | engine.dispose() 50 | 51 | async def clear_tables(self) -> None: 52 | """Удаление записей из всех таблиц БД. 53 | 54 | Следует использовать только для тестов. 55 | 56 | На данный момент не реализовано. 57 | """ 58 | raise NotImplementedError 59 | 60 | def drop_database(self) -> None: 61 | """Дроп БД при условии её наличия. 62 | 63 | Следует использовать только для тестов. 64 | """ 65 | if database_exists(self._sync_db_url): 66 | drop_database(self._sync_db_url) 67 | 68 | @asynccontextmanager 69 | async def session(self) -> AsyncGenerator[AsyncSession, None]: 70 | """Получение экземпляра сессии для потока/event-loop-а.""" 71 | session = cast(AsyncSession, self._session_factory()) 72 | try: 73 | yield session 74 | except Exception as err: 75 | await session.rollback() 76 | raise err 77 | finally: 78 | await session.close() 79 | -------------------------------------------------------------------------------- /backend/infrastructure/database/scripts/makemigrations.sh: -------------------------------------------------------------------------------- 1 | read -p "Enter revision's name: " name 2 | 3 | alembic revision --autogenerate -m "$name" 4 | -------------------------------------------------------------------------------- /backend/infrastructure/database/scripts/migrate.sh: -------------------------------------------------------------------------------- 1 | input_variable=$1 2 | 3 | case $input_variable in 4 | '--help') 5 | echo "This is a help to use migrate command" 6 | echo "--help or -h will show help" 7 | echo "Also you can use this input to introduce revision or head" 8 | break 9 | ;; 10 | *) 11 | PYTHONPATH=$(pwd) 12 | echo $PYTHONPATH 13 | alembic upgrade $input_variable 14 | break 15 | ;; 16 | esac 17 | -------------------------------------------------------------------------------- /backend/infrastructure/services/__init__.py: -------------------------------------------------------------------------------- 1 | """Сборник всех реализаций интерфейсов сервисов.""" 2 | from .random_re_rstr import RandomReXeger 3 | from .storage_mock import StorageMock 4 | from .storage_s3 import StorageS3 5 | 6 | __all__ = [ 7 | "RandomReXeger", 8 | "StorageMock", 9 | "StorageS3", 10 | ] 11 | -------------------------------------------------------------------------------- /backend/infrastructure/services/random_re_rstr.py: -------------------------------------------------------------------------------- 1 | """Реализация сервиса генерации случайных строк по паттерну регулярного выражения.""" 2 | from rstr import xeger # type: ignore[import] 3 | 4 | from 
common.services.random_re import IRandomRe 5 | 6 | 7 | class RandomReXeger(IRandomRe): 8 | """Сервис генерации случайных строк по паттерну регулярного выражения. 9 | 10 | Использует rstr под капотом. 11 | """ 12 | 13 | def execute(self, re_pattern: str) -> str: 14 | """Генерация случайной строки по паттерну регулярного выражения.""" 15 | return xeger(re_pattern) 16 | -------------------------------------------------------------------------------- /backend/infrastructure/services/storage_mock.py: -------------------------------------------------------------------------------- 1 | """Реализация сервиса взаимодействия с хранилищем файлов для тестов.""" 2 | from datetime import datetime 3 | 4 | from common.config import AWSConfig 5 | from common.services.random_re import IRandomRe 6 | from common.services.storage import FileMeta, IAsyncFile, IStorage 7 | 8 | 9 | class StorageMock(IStorage): 10 | """Мок хранилища файлов для тестов.""" 11 | 12 | def __init__(self, aws_config: AWSConfig, random_re: IRandomRe) -> None: 13 | """Создание экземпляра с сохранением конфигурации.""" 14 | self.aws_config = aws_config 15 | self.random_re = random_re 16 | 17 | async def upload_file(self, file: IAsyncFile, upload_path: str) -> FileMeta: 18 | """Эмуляция загрузки файла в хранилище.""" 19 | random_str = self.random_re.execute("[a-zA-Z0-9]{60}") 20 | if upload_path[-1] != "/": 21 | upload_path += "/" 22 | 23 | file_url = self.aws_config.endpoint_url + upload_path + random_str 24 | 25 | return FileMeta( 26 | url=file_url, 27 | key=random_str, 28 | filename=file.filename, 29 | last_modified=datetime.now(), 30 | content_length=1, 31 | upload_path=upload_path, 32 | ) 33 | 34 | async def delete_file(self, file_meta: FileMeta) -> None: 35 | """Эмуляция удаления файла в хранилище.""" 36 | return None 37 | -------------------------------------------------------------------------------- /backend/infrastructure/services/storage_s3.py: -------------------------------------------------------------------------------- 1 | """Реализация сервиса взаимодействия с хранилищем файлов, подобным S3 Bucket.""" 2 | from datetime import datetime 3 | from typing import TypedDict 4 | 5 | from aioboto3.session import Session # type: ignore[import] 6 | 7 | from common.config import AWSConfig 8 | from common.services.random_re import IRandomRe 9 | from common.services.storage import FileMeta, IAsyncFile, IStorage 10 | 11 | 12 | class _ResponseMetadata(TypedDict): 13 | RequestId: str 14 | HostId: str 15 | HTTPStatusCode: int 16 | HTTPHeaders: dict 17 | RetryAttempts: int 18 | 19 | 20 | class _ResponseHeadObject(TypedDict): 21 | ResponseMetadata: _ResponseMetadata 22 | AcceptRanges: str 23 | LastModified: datetime 24 | ContentLength: int 25 | ETag: str 26 | ContentType: str 27 | Metadata: dict 28 | 29 | 30 | class StorageS3(IStorage): 31 | """Сервис взаимодействия с хранилищем файлов, подобным S3 Bucket.""" 32 | 33 | def __init__(self, aws_config: AWSConfig, random_re: IRandomRe) -> None: 34 | """Создание экземпляра с сохранением конфигурации.""" 35 | self.aws_config = aws_config 36 | self.random_re = random_re 37 | 38 | self.service_name = "s3" 39 | self.endpoint_url = self.aws_config.endpoint_url 40 | self.bucket = self.aws_config.storage_bucket_name 41 | self.aws_access_key_id = self.aws_config.access_key_id 42 | self.aws_secret_access_key = self.aws_config.secret_access_key 43 | 44 | self.session_options = { 45 | "aws_access_key_id": self.aws_access_key_id, 46 | "aws_secret_access_key": self.aws_secret_access_key, 47 | } 48 |
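# NOTE: the option dicts below repeat the credentials from session_options and add the service name and endpoint from AWSConfig; client_options is what `self.session.client(...)` is opened with in upload_file() and delete_file().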
self.client_options = { 49 | "service_name": self.service_name, 50 | "endpoint_url": self.endpoint_url, 51 | "aws_access_key_id": self.aws_access_key_id, 52 | "aws_secret_access_key": self.aws_secret_access_key, 53 | } 54 | self.resource_options = { 55 | "service_name": self.service_name, 56 | "endpoint_url": self.endpoint_url, 57 | "aws_access_key_id": self.aws_access_key_id, 58 | "aws_secret_access_key": self.aws_secret_access_key, 59 | } 60 | 61 | self.session: Session = Session(**self.session_options) 62 | 63 | async def upload_file(self, file: IAsyncFile, upload_path: str) -> FileMeta: 64 | """Загрузка файла в S3 Bucket.""" 65 | random_str = self.random_re.execute("[a-zA-Z0-9]{60}") 66 | if upload_path[-1] != "/": 67 | upload_path += "/" 68 | file_key = upload_path + random_str 69 | 70 | await file.seek(0) 71 | async with self.session.client(**self.client_options) as s3: 72 | await s3.upload_fileobj(file, self.bucket, file_key) 73 | meta: _ResponseHeadObject = await s3.head_object(Bucket=self.bucket, Key=file_key) 74 | file_url = self.aws_config.endpoint_url + upload_path + random_str 75 | 76 | return FileMeta( 77 | url=file_url, 78 | key=random_str, 79 | filename=file.filename, 80 | last_modified=meta["LastModified"], 81 | content_length=meta["ContentLength"], 82 | upload_path=upload_path, 83 | ) 84 | 85 | async def delete_file(self, file_meta: FileMeta) -> None: 86 | """Удаление файла из S3 Bucket.""" 87 | file_key = file_meta.upload_path + file_meta.key 88 | async with self.session.client(**self.client_options) as s3: 89 | await s3.delete_object(Bucket=self.bucket, Key=file_key) 90 | -------------------------------------------------------------------------------- /backend/infrastructure/upload_file/__init__.py: -------------------------------------------------------------------------------- 1 | """Инфраструктурная реализация приложения загруженных файлов.""" 2 | -------------------------------------------------------------------------------- /backend/infrastructure/upload_file/repos/__init__.py: -------------------------------------------------------------------------------- 1 | """Модели и репозитории приложения загруженных файлов.""" 2 | from .upload_file_db_repo import UploadedFileDBRepo 3 | 4 | __all__ = [ 5 | "UploadedFileDBRepo", 6 | ] 7 | -------------------------------------------------------------------------------- /backend/infrastructure/upload_file/repos/upload_file_db_repo.py: -------------------------------------------------------------------------------- 1 | """Модель и репозиторий загруженных файлов на SQLAlchemy.""" 2 | from datetime import datetime 3 | from typing import Optional 4 | from uuid import UUID, uuid4 5 | 6 | from sqlalchemy import Column, DateTime, Integer, String 7 | from sqlalchemy.dialects.postgresql import UUID as AlchemyUUID 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | 10 | from domain.upload_file.repos import IUploadedFileRepo, UploadedFile 11 | from infrastructure.database.base import Base 12 | from infrastructure.database.database import DatabaseResource 13 | 14 | 15 | class UploadedFileDBModel(Base): 16 | """SQLAlchemy модель загруженного файла.""" 17 | 18 | url: str = Column(String) 19 | key: str = Column(String) 20 | filename: str = Column(String) 21 | last_modified: datetime = Column(DateTime) 22 | content_length: int = Column(Integer) 23 | upload_path: str = Column(String) 24 | uuid: Optional[UUID] = Column(AlchemyUUID(as_uuid=True), primary_key=True, default=uuid4) 25 | 26 | @classmethod 27 | def from_model(cls, file:
UploadedFile) -> "UploadedFileDBModel": 28 | """Приведение из бизнес-модели.""" 29 | return cls( 30 | url=file.url, 31 | key=file.key, 32 | filename=file.filename, 33 | last_modified=file.last_modified, 34 | content_length=file.content_length, 35 | upload_path=file.upload_path, 36 | uuid=file.uuid, 37 | ) 38 | 39 | def to_model(self) -> UploadedFile: 40 | """Приведение к бизнес-модели.""" 41 | return UploadedFile( 42 | url=self.url, 43 | key=self.key, 44 | filename=self.filename, 45 | last_modified=self.last_modified, 46 | content_length=self.content_length, 47 | upload_path=self.upload_path, 48 | uuid=self.uuid, 49 | ) 50 | 51 | 52 | class UploadedFileDBRepo(IUploadedFileRepo): 53 | """Репозиторий загруженных файлов, работающий с помощью SQLAlchemy.""" 54 | 55 | def __init__(self, db_resource: DatabaseResource) -> None: 56 | """Пробрасываем сессию.""" 57 | self._session = db_resource.session 58 | 59 | async def delete(self, file: UploadedFile) -> None: 60 | """Удаление конкретного файла.""" 61 | session: AsyncSession 62 | async with self._session() as session: 63 | db_file = await session.get(UploadedFileDBModel, file.uuid) 64 | if db_file is None: 65 | raise 66 | await session.delete(db_file) 67 | await session.commit() 68 | 69 | async def create(self, file: UploadedFile) -> UploadedFile: 70 | """Создание файла.""" 71 | session: AsyncSession 72 | async with self._session() as session: 73 | db_file = UploadedFileDBModel.from_model(file) 74 | session.add(db_file) 75 | await session.flush() 76 | 77 | file = db_file.to_model() 78 | await session.commit() 79 | 80 | return file 81 | 82 | async def get(self, uuid: UUID) -> Optional[UploadedFile]: 83 | """Получение конкретного файла.""" 84 | session: AsyncSession 85 | async with self._session() as session: 86 | db_file = await session.get(UploadedFileDBModel, uuid) 87 | 88 | if db_file is None: 89 | return None 90 | 91 | return db_file.to_model() 92 | -------------------------------------------------------------------------------- /backend/libs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/libs/__init__.py -------------------------------------------------------------------------------- /backend/libs/punq/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Punq is a dependency injection library you can understand. 3 | 4 | https://github.com/Hulvdan/punq forked from https://github.com/bobthemighty/punq 5 | """ 6 | 7 | import inspect 8 | from collections import defaultdict 9 | from enum import Enum 10 | from typing import Any, Callable, List, NamedTuple, Type, TypeVar, Union, get_type_hints 11 | 12 | from pkg_resources import DistributionNotFound, get_distribution 13 | 14 | from ._compat import ensure_forward_ref, is_generic_list 15 | 16 | try: # pragma no cover 17 | __version__ = get_distribution(__name__).version 18 | except DistributionNotFound: # pragma no cover 19 | # package is not installed 20 | pass 21 | 22 | 23 | T_co = TypeVar("T_co", covariant=True) 24 | 25 | 26 | class AbstractClassException(Exception): 27 | """ 28 | Raised when factory returns Abstract class. 29 | Examples: 30 | >>> from abc import ABC, abstractmethod 31 | >>> from punq import Container 32 | ... class IClient(ABC): 33 | ... @abstractmethod 34 | ... def execute(self): 35 | ... raise NotImplementedError 36 | ... class Client(IClient): 37 | ... 
pass 38 | >>> container = Container() 39 | >>> container.register(IClient, Client) 40 | """ 41 | 42 | 43 | class AbstractFactoryReturnClassException(Exception): 44 | """ 45 | Raised when factory returns Abstract class. 46 | Examples: 47 | >>> from abc import ABC, abstractmethod 48 | >>> from punq import Container 49 | ... class IClient(ABC): 50 | ... @abstractmethod 51 | ... def execute(self): 52 | ... raise NotImplementedError 53 | ... def get_client_s3_implementation() -> IClient: 54 | ... return ClientS3() 55 | >>> container = Container() 56 | >>> container.register(IClient, factory=get_client_s3_implementation) 57 | """ 58 | 59 | 60 | class FinalizedException(Exception): 61 | """ 62 | Raised when `finalize` method of container was called 63 | and later tried to register smth. 64 | 65 | Examples: 66 | >>> from punq import Container 67 | ... class IClient: 68 | ... pass 69 | ... class Client(IClient): 70 | ... pass 71 | ... class IConsumer: 72 | ... pass 73 | ... class Consumer(Consumer): 74 | ... pass 75 | >>> container = Container() 76 | >>> container.register(IClient, Client) 77 | >>> container.finalize() 78 | >>> container.register(IConsumer, Consumer) 79 | """ 80 | 81 | 82 | class ReassignmentsProhibitedException(Exception): 83 | """ 84 | Raised when registering multiple implementations of the same abstract 85 | service again if `reassignments_prohibited` is set to `True`. 86 | 87 | Examples: 88 | >>> from punq import Container 89 | ... class IClient: 90 | ... pass 91 | ... class FirstClient(IClient): 92 | ... pass 93 | ... class SecondClient(IClient): 94 | ... pass 95 | >>> container = Container() 96 | >>> container.register(IClient, FirstClient) 97 | >>> container.register(IClient, SecondClient) 98 | """ 99 | 100 | 101 | class MissingDependencyException(Exception): 102 | """ 103 | Raised when a service, or one of its dependencies, is not registered. 104 | 105 | Examples: 106 | >>> import punq 107 | >>> container = punq.Container() 108 | >>> container.resolve("foo") 109 | Traceback (most recent call last): 110 | punq.MissingDependencyException: Failed to resolve implementation for foo 111 | """ 112 | 113 | 114 | class InvalidRegistrationException(Exception): 115 | """ 116 | Raised when a registration would result in an unresolvable service. 117 | """ 118 | 119 | 120 | class InvalidForwardReferenceException(Exception): 121 | """ 122 | Raised when a registered service has a forward reference that can't be 123 | resolved. 124 | 125 | Examples: 126 | In this example, we register a service with a string as a type annotation. 127 | When we try to inspect the constructor for the service we fail with an 128 | InvalidForwardReferenceException 129 | 130 | >>> from attr import dataclass 131 | >>> from punq import Container 132 | >>> @dataclass 133 | ... class Client: 134 | ... dep: 'Dependency' 135 | >>> container = Container() 136 | >>> container.register(Client) 137 | Traceback (most recent call last): 138 | ... 139 | punq.InvalidForwardReferenceException: name 'Dependency' is not defined 140 | 141 | 142 | This error can be resolved by first registering a type with the name 143 | 'Dependency' in the container. 144 | 145 | >>> class Dependency: 146 | ... pass 147 | ... 148 | >>> container.register(Dependency) 149 | 150 | >>> container.register(Client) 151 | 152 | >>> container.resolve(Client) 153 | Client(dep=) 154 | 155 | 156 | Alternatively, we can register a type using the literal key 'Dependency'. 157 | 158 | >>> class AlternativeDependency: 159 | ... pass 160 | ... 
161 | >>> container = Container() 162 | >>> container.register('Dependency', AlternativeDependency) 163 | 164 | >>> container.register(Client) 165 | 166 | >>> container.resolve(Client) 167 | Client(dep=) 168 | 169 | """ 170 | 171 | 172 | class Scope(Enum): 173 | transient = 0 174 | singleton = 1 175 | 176 | 177 | class Registration(NamedTuple): 178 | service: str 179 | scope: Scope 180 | builder: Callable[[], Any] 181 | needs: Any 182 | args: List[Any] 183 | 184 | 185 | class Empty: 186 | pass 187 | 188 | 189 | empty = Empty() 190 | 191 | 192 | def match_defaults(args, defaults): 193 | """ 194 | Matches args with their defaults in the result of getfullargspec 195 | 196 | inspect.getfullargspec returns a complex object that includes the defaults 197 | on args and kwonly args. This function takes a list of args, and a tuple of 198 | the last N defaults and returns a dict of args to defaults. 199 | """ 200 | 201 | if defaults is None: 202 | return {} 203 | 204 | offset = len(args) - len(defaults) 205 | defaults = ([None] * offset) + list(defaults) 206 | 207 | return {key: value for key, value in zip(args, defaults) if value is not None} 208 | 209 | 210 | class Registry: 211 | def __init__(self, reassignments_prohibited: bool): 212 | self.__registrations = defaultdict(list) 213 | self._localns = {} 214 | self._reassignments_prohibited = reassignments_prohibited 215 | 216 | def register_service_and_impl(self, service, scope, impl, resolve_args): 217 | """Registers a concrete implementation of an abstract service. 218 | 219 | Examples: 220 | In this example, the EmailSender type is an abstract class 221 | and SmtpEmailSender is our concrete implementation. 222 | 223 | >>> from punq import Container 224 | >>> container = Container() 225 | 226 | >>> class EmailSender: 227 | ... def send(self, msg): 228 | ... pass 229 | ... 230 | >>> class SmtpEmailSender(EmailSender): 231 | ... def send(self, msg): 232 | ... print("Sending message via smtp: " + msg) 233 | ... 234 | >>> container.register(EmailSender, SmtpEmailSender) 235 | 236 | >>> instance = container.resolve(EmailSender) 237 | >>> instance.send("Hello") 238 | Sending message via smtp: Hello 239 | """ 240 | if self._reassignments_prohibited and len(self.__registrations[service]) > 0: 241 | raise ReassignmentsProhibitedException 242 | self.__registrations[service].append( 243 | Registration(service, scope, impl, self._get_needs_for_ctor(impl), resolve_args) 244 | ) 245 | 246 | def register_service_and_instance(self, service, instance): 247 | """Register a singleton instance to implement a service. 248 | 249 | Examples: 250 | If we have an object that is expensive to construct, or that 251 | wraps a resource that must not be shared, we might choose to 252 | use a singleton instance. 253 | 254 | >>> from punq import Container 255 | >>> container = Container() 256 | 257 | >>> class DataAccessLayer: 258 | ... pass 259 | ... 260 | >>> class SqlAlchemyDataAccessLayer(DataAccessLayer): 261 | ... def __init__(self, engine: SQLAlchemy.Engine): 262 | ... pass 263 | ... 264 | >>> container.register( 265 | ... DataAccessLayer, 266 | ... instance=SqlAlchemyDataAccessLayer(create_engine("sqlite:///")) 267 | ... 
) 268 | 269 | """ 270 | if self._reassignments_prohibited and len(self.__registrations[service]) > 0: 271 | raise ReassignmentsProhibitedException 272 | self.__registrations[service].append( 273 | Registration(service, Scope.singleton, lambda: instance, {}, {}) 274 | ) 275 | 276 | def register_concrete_service(self, service, scope): 277 | """Register a service as its own implementation. 278 | 279 | Examples: 280 | If we need to register a dependency, but we don't need to 281 | abstract it, we can register it as concrete. 282 | 283 | >>> from punq import Container 284 | >>> container = Container() 285 | >>> class FileReader: 286 | ... def read(self): 287 | ... # Assorted legerdemain and rigmarole 288 | ... pass 289 | ... 290 | >>> container.register(FileReader) 291 | 292 | """ 293 | if not inspect.isclass(service): 294 | raise InvalidRegistrationException( 295 | "The service {} can't be registered as its own implementation".format(repr(service)) 296 | ) 297 | if self._reassignments_prohibited and len(self.__registrations[service]) > 0: 298 | raise ReassignmentsProhibitedException 299 | self.__registrations[service].append( 300 | Registration(service, scope, service, self._get_needs_for_ctor(service), {}) 301 | ) 302 | 303 | def purge_service(self, service): 304 | try: 305 | self.__registrations.pop(service) 306 | except KeyError: 307 | raise MissingDependencyException 308 | 309 | def build_context(self, key, existing=None): 310 | if existing is None: 311 | return ResolutionContext(key, list(self.__getitem__(key))) 312 | 313 | if key not in existing.targets: 314 | existing.targets[key] = ResolutionTarget(key, list(self.__getitem__(key))) 315 | 316 | return existing 317 | 318 | def register(self, service, factory=empty, instance=empty, scope=Scope.transient, **kwargs): 319 | resolve_args = kwargs or {} 320 | 321 | if instance is not empty: 322 | self.register_service_and_instance(service, instance) 323 | elif factory is empty: 324 | self.register_concrete_service(service, scope) 325 | elif callable(factory): 326 | self.register_service_and_impl(service, scope, factory, resolve_args) 327 | else: 328 | raise InvalidRegistrationException( 329 | f"Expected a callable factory for the service {service} but received {factory}" 330 | ) 331 | 332 | self._update_localns(service) 333 | ensure_forward_ref(self, service, factory, instance, **kwargs) 334 | 335 | def __getitem__(self, service): 336 | return self.__registrations[service] 337 | 338 | def _get_needs_for_ctor(self, cls): 339 | try: 340 | return get_type_hints(cls.__init__, None, self._localns) 341 | except NameError as e: 342 | raise InvalidForwardReferenceException(str(e)) 343 | 344 | def _update_localns(self, service): 345 | if type(service) == type: # noqa: PIE789 346 | self._localns[service.__name__] = service 347 | else: 348 | self._localns[service] = service 349 | 350 | 351 | class ResolutionTarget: 352 | def __init__(self, key, impls): 353 | self.service = key 354 | self.impls = impls 355 | 356 | @property 357 | def generic_parameter(self): 358 | return self.service.__args__[0] 359 | 360 | def is_generic_list(self): 361 | return is_generic_list(self.service) 362 | 363 | def next_impl(self): 364 | if len(self.impls) > 0: 365 | return self.impls.pop() 366 | 367 | 368 | class ResolutionContext: 369 | def __init__(self, key, impls): 370 | self.targets = {key: ResolutionTarget(key, impls)} 371 | self.cache = {} 372 | self.service = key 373 | 374 | def target(self, key): 375 | return self.targets.get(key) 376 | 377 | def has_cached(self, key): 
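# True when an instance for this service key has already been built and cached in the current resolution context.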
378 | return key in self.cache 379 | 380 | def all_registrations(self, service): 381 | return self.targets[service].impls 382 | 383 | def __getitem__(self, key): 384 | return self.cache.get(key) 385 | 386 | def __setitem__(self, key, instance): 387 | self.cache[key] = instance 388 | 389 | 390 | class Container: 391 | """ 392 | Provides dependency registration and resolution. 393 | 394 | This is the main entrypoint of the Punq library. In normal scenarios users 395 | will only need to interact with this class. 396 | """ 397 | 398 | def __init__(self, reassignments_prohibited: bool = False): 399 | self._finalized = False 400 | self.registrations = Registry(reassignments_prohibited) 401 | self.register(Container, instance=self) 402 | self._singletons = {} 403 | 404 | def register( 405 | self, 406 | service: Type[T_co], 407 | factory: Union[Empty, Callable[..., T_co]] = empty, 408 | instance: Union[Empty, T_co] = empty, 409 | scope: Scope = Scope.transient, 410 | **kwargs: dict[str, Any], 411 | ): 412 | """ 413 | Register a dependency into the container. 414 | 415 | Each registration in Punq has a "service", which is the key used for 416 | resolving dependencies, and either an "instance" that implements the 417 | service or a "factory" that understands how to create an instance on 418 | demand. 419 | 420 | Examples: 421 | If we have an object that is expensive to construct, or that 422 | wraps a resouce that must not be shared, we might choose to 423 | use a singleton instance. 424 | 425 | >>> from punq import Container 426 | >>> container = Container() 427 | 428 | >>> class DataAccessLayer: 429 | ... pass 430 | ... 431 | >>> class SqlAlchemyDataAccessLayer(DataAccessLayer): 432 | ... def __init__(self, engine: SQLAlchemy.Engine): 433 | ... pass 434 | ... 435 | >>> dal = SqlAlchemyDataAccessLayer(create_engine("sqlite:///")) 436 | >>> container.register( 437 | ... DataAccessLayer, 438 | ... instance=dal 439 | ... ) 440 | 441 | >>> assert container.resolve(DataAccessLayer) is dal 442 | 443 | If we need to register a dependency, but we don't need to 444 | abstract it, we can register it as concrete. 445 | 446 | >>> class FileReader: 447 | ... def read (self): 448 | ... # Assorted legerdemain and rigmarole 449 | ... pass 450 | ... 451 | >>> container.register(FileReader) 452 | 453 | >>> assert type(container.resolve(FileReader)) == FileReader 454 | 455 | In this example, the EmailSender type is an abstract class 456 | and SmtpEmailSender is our concrete implementation. 457 | 458 | >>> class EmailSender: 459 | ... def send(self, msg): 460 | ... pass 461 | ... 462 | >>> class SmtpEmailSender (EmailSender): 463 | ... def send(self, msg): 464 | ... print("Sending message via smtp") 465 | ... 466 | >>> container.register(EmailSender, SmtpEmailSender) 467 | 468 | >>> instance = container.resolve(EmailSender) 469 | >>> instance.send("beep") 470 | Sending message via smtp 471 | """ 472 | if self._finalized: 473 | raise FinalizedException 474 | if isinstance(factory, type): 475 | if inspect.isabstract(factory): 476 | raise AbstractClassException 477 | elif callable(factory): 478 | annotations = get_type_hints(factory) 479 | return_type = annotations["return"] 480 | if inspect.isabstract(return_type): 481 | raise AbstractFactoryReturnClassException 482 | self.registrations.register(service, factory, instance, scope, **kwargs) 483 | return self 484 | 485 | def resolve_all(self, service, **kwargs): 486 | """ 487 | Return all registrations for a given service. 
488 | 489 | Some patterns require us to use multiple implementations of an 490 | interface at the same time. 491 | 492 | Examples: 493 | 494 | In this example, we want to use multiple Authenticator instances to 495 | check a request. 496 | 497 | >>> class Authenticator: 498 | ... def matches(self, req): 499 | ... return False 500 | ... 501 | ... def authenticate(self, req): 502 | ... return False 503 | ... 504 | >>> class BasicAuthenticator(Authenticator): 505 | ... 506 | ... def matches(self, req): 507 | ... head = req.headers.get("Authorization", "") 508 | ... return head.startswith("Basic ") 509 | ... 510 | >>> class TokenAuthenticator(Authenticator): 511 | ... 512 | ... def matches(self, req): 513 | ... head = req.headers.get("Authorization", "") 514 | ... return head.startswith("Bearer ") 515 | ... 516 | >>> def authenticate_request(container, req): 517 | ... for authn in req.resolve_all(Authenticator): 518 | ... if authn.matches(req): 519 | ... return authn.authenticate(req) 520 | """ 521 | context = self.registrations.build_context(service) 522 | 523 | return [self._build_impl(x, kwargs, context) for x in context.all_registrations(service)] 524 | 525 | def purge(self, service: Any) -> None: 526 | self.registrations.purge_service(service) 527 | 528 | def finalize(self) -> None: 529 | self._finalized = True 530 | 531 | def resolve(self, service_key: Type[T_co], **kwargs: dict[str, Any]) -> T_co: 532 | context = self.registrations.build_context(service_key) 533 | 534 | return self._resolve_impl(service_key, kwargs, context) 535 | 536 | def instantiate(self, service_key, **kwargs): 537 | """ 538 | Instantiate an unregistered service 539 | """ 540 | registration = Registration( 541 | service_key, 542 | Scope.transient, 543 | service_key, 544 | self.registrations._get_needs_for_ctor(service_key), 545 | {}, 546 | ) 547 | 548 | context = ResolutionContext(service_key, [registration]) 549 | 550 | return self._build_impl(registration, kwargs, context) 551 | 552 | def _build_impl(self, registration, resolution_args, context): 553 | """Instantiate the registered service.""" 554 | 555 | spec = inspect.getfullargspec(registration.builder) 556 | target_args = spec.args 557 | 558 | args = match_defaults(spec.args, spec.defaults) 559 | args.update( 560 | { 561 | k: self._resolve_impl(v, resolution_args, context, args.get(k)) 562 | for k, v in registration.needs.items() 563 | if k != "return" and k not in registration.args and k not in resolution_args 564 | } 565 | ) 566 | args.update(registration.args) 567 | 568 | if "self" in target_args: 569 | target_args.remove("self") 570 | condensed_resolution_args = { 571 | key: resolution_args[key] for key in resolution_args if key in target_args 572 | } 573 | args.update(condensed_resolution_args or {}) 574 | 575 | result = registration.builder(**args) 576 | 577 | if registration.scope == Scope.singleton: 578 | self._singletons[registration.service] = result 579 | 580 | context[registration.service] = result 581 | 582 | return result 583 | 584 | def _resolve_impl(self, service_key, kwargs, context, default=None): 585 | 586 | context = self.registrations.build_context(service_key, context) 587 | 588 | if service_key in self._singletons: 589 | return self._singletons[service_key] 590 | 591 | if context.has_cached(service_key): 592 | return context[service_key] 593 | 594 | target = context.target(service_key) 595 | 596 | if target.is_generic_list(): 597 | return self.resolve_all(target.generic_parameter) 598 | 599 | registration = target.next_impl() 600 | 601 | 
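# No registration matched this key: fall back to the constructor's default value if one was captured, otherwise raise MissingDependencyException below.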
if registration is None and default is not None: 602 | return default 603 | 604 | if registration is None: 605 | raise MissingDependencyException( 606 | "Failed to resolve implementation for " + str(service_key) 607 | ) 608 | 609 | if service_key in registration.needs.values(): 610 | self._resolve_impl(service_key, kwargs, context) 611 | 612 | return self._build_impl(registration, kwargs, context) 613 | -------------------------------------------------------------------------------- /backend/libs/punq/_compat.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | if sys.version_info >= (3, 7, 0): 4 | from typing import ForwardRef 5 | 6 | GenericListClass = list 7 | else: 8 | from typing import List 9 | from typing import _ForwardRef as ForwardRef 10 | 11 | GenericListClass = List 12 | 13 | 14 | def is_generic_list(service): 15 | try: 16 | return service.__origin__ == GenericListClass 17 | except AttributeError: 18 | return False 19 | 20 | 21 | def ensure_forward_ref(self, service, factory, instance, **kwargs): 22 | if isinstance(service, str): 23 | self.register(ForwardRef(service), factory, instance, **kwargs) 24 | -------------------------------------------------------------------------------- /backend/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "backend" 3 | version = "0.0.0" 4 | description = "" 5 | authors = ["Алексей Чистов "] 6 | 7 | [build-system] 8 | requires = ["poetry-core>=1.0.0"] 9 | build-backend = "poetry.core.masonry.api" 10 | 11 | [tool.black] 12 | line-length = 100 13 | 14 | [tool.isort] 15 | multi_line_output = 3 16 | line_length = 100 17 | include_trailing_comma = true 18 | 19 | [tool.poetry.dependencies] 20 | python = "^3.10.2" 21 | 22 | # Server 23 | uvicorn = "^0.15.0" 24 | uvloop = "^0.16.0" 25 | 26 | # DB 27 | SQLAlchemy = {version = ">=1.4,<2", extras = ["asyncio", "mypy"]} 28 | SQLAlchemy-Utils = "^0.37.9" 29 | alembic = "^1.7.4" 30 | asyncpg = "^0.24.0" 31 | psycopg2-binary = "^2.9.1" 32 | 33 | # Web-Framework 34 | fastapi = "^0.70.0" 35 | pydantic = "^1.8.2" 36 | python-multipart = "^0.0.5" 37 | 38 | # Utils 39 | Pillow = "^8.4.0" 40 | PyJWT = "^2.3.0" 41 | aioboto3 = "^9.2.2" 42 | rstr = "^3.0.0" 43 | 44 | # Typehinting 45 | mypy-extensions = "^0.4.3" 46 | mypy = "^0.931" 47 | 48 | [tool.poetry.dev-dependencies] 49 | # Linting 50 | flake8 = "^4.0.1" 51 | flake8-alphabetize = "^0.0.17" 52 | flake8-bandit = "^2.1.2" 53 | flake8-broken-line = "^0.4.0" 54 | flake8-bugbear = "^22.1.11" 55 | flake8-builtins = "^1.5.3" 56 | flake8-class-attributes-order = "^0.1.2" 57 | flake8-comprehensions = "^3.8.0" 58 | flake8-debugger = "^4.0.0" 59 | flake8-docstrings = "^1.6.0" 60 | flake8-eradicate = "^1.2.0" 61 | flake8-pie = "^0.15.0" 62 | flake8-print = "^4.0.0" 63 | flake8-printf-formatting = "^1.1.2" 64 | flake8-pytest-style = "^1.6.0" 65 | flake8-walrus = "^1.1.0" 66 | 67 | # Formatting 68 | black = "^21.9b0" 69 | isort = "^5.9.3" 70 | 71 | # Stubs 72 | python-dotenv = "^0.19.2" 73 | 74 | # Tests 75 | httpx = "^0.20.0" 76 | pytest = "^6.2.5" 77 | pytest-asyncio = "^0.16.0" 78 | pytest-xdist = "^2.4.0" 79 | -------------------------------------------------------------------------------- /backend/setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | show-source = True 3 | statistics = False 4 | 5 | max-line-length = 100 6 | 7 | exclude = 8 | .git 9 | __pycache__ 10 | .venv 11 | .eggs 12 
| *.egg 13 | infrastructure/database/alembic/env.py 14 | 15 | # Plugins configuration 16 | # - flake8-class-attributes-order 17 | use_class_attributes_order_strict_mode = True 18 | # - flake8-docstrings 19 | docstring-convention=google 20 | 21 | ignore = 22 | # Игнорируем всё от flake8-alphabetize, оставляя только AZ400 23 | # Оно будет ругаться на сортировку __all__ 24 | # 25 | # Import statements are in the wrong order 26 | AZ100 27 | # The names in the import from are in the wrong order 28 | AZ200 29 | # Two import from statements must be combined 30 | AZ300 31 | 32 | per-file-ignores = 33 | # Отрубаем flake8-docstring проверки в Alembic миграциях 34 | infrastructure/database/alembic/versions/__init__.py:D 35 | # Отрубаем flake8-docstring проверки в библиотеках 36 | libs/*:D 37 | # Отрубаем некоторые flake8-проверки для тестов 38 | # - D100 Missing docstring in public module 39 | # - D101 Missing docstring in public class 40 | # - D104 Missing docstring in public package 41 | # - S101 Use of assert detected 42 | tests/*:D100,D101,D104,S101 43 | 44 | [mypy] 45 | # Mypy configuration: 46 | # https://mypy.readthedocs.io/en/latest/config_file.html 47 | allow_redefinition = False 48 | check_untyped_defs = True 49 | disallow_untyped_decorators = True 50 | disallow_any_explicit = False 51 | disallow_any_generics = False 52 | disallow_untyped_calls = True 53 | disallow_untyped_defs = True 54 | disallow_incomplete_defs = True 55 | ignore_errors = False 56 | ignore_missing_imports = False 57 | implicit_reexport = False 58 | local_partial_types = True 59 | strict_optional = True 60 | strict_equality = True 61 | no_implicit_optional = True 62 | warn_unused_ignores = True 63 | warn_redundant_casts = True 64 | warn_unused_configs = True 65 | warn_unreachable = True 66 | warn_no_return = True 67 | 68 | exclude = ^(docs|infrastructure/database/alembic).*$ 69 | 70 | plugins = 71 | pydantic.mypy, 72 | sqlalchemy.ext.mypy.plugin 73 | 74 | [mypy-libs.punq.*] 75 | ignore_errors = True 76 | 77 | [mypy-infrastructure.database.alembic.versions.*] 78 | ignore_errors = True 79 | 80 | [mypy-tests.*] 81 | disallow_untyped_defs = False 82 | disallow_incomplete_defs = False 83 | 84 | [tool:pytest] 85 | minversion = 6.0 86 | addopts = -ra -q 87 | testpaths = 88 | tests 89 | 90 | filterwarnings = 91 | ignore::DeprecationWarning 92 | ignore::UserWarning 93 | 94 | [coverage:report] 95 | exclude_lines = 96 | pragma: no cover 97 | Protocol 98 | @abstractclassmethod 99 | @abstractmethod 100 | @abstractproperty 101 | @abstractstaticmethod 102 | -------------------------------------------------------------------------------- /backend/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/tests/__init__.py -------------------------------------------------------------------------------- /backend/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import io 3 | import os 4 | from typing import AsyncGenerator, Callable, Generator, Union 5 | 6 | import httpx 7 | import pytest 8 | from PIL import Image # type: ignore[import] 9 | 10 | from common.services.storage import IAsyncFile, IStorage 11 | from domain.upload_file.repos import IUploadedFileRepo 12 | from infrastructure.database import DatabaseResource 13 | from infrastructure.services.storage_mock import StorageMock 14 | from libs.punq import Container 15 
| 16 | 17 | def pytest_sessionstart() -> None: 18 | """Подмена названия БД при запуске тестов.""" 19 | os.environ["POSTGRES_DB"] = "postgres_test" 20 | 21 | 22 | @pytest.fixture(scope="session") 23 | def container() -> Container: 24 | """Тут мы переопределяем сервисы на моки. 25 | 26 | Например, смс-ки, email-ы, файлы и т.п. 27 | """ 28 | from application.common.container import get_container 29 | 30 | container = get_container() 31 | 32 | container.purge(IStorage) 33 | container.register(IStorage, StorageMock) # type: ignore[misc] 34 | 35 | container.finalize() 36 | return container 37 | 38 | 39 | @pytest.fixture(scope="session", autouse=True) 40 | def _db(container: Container) -> Generator[None, None, None]: 41 | """Инициализация БД для тестов. 42 | 43 | Создание БД при старте тестов и удаление при завершении. 44 | """ 45 | db: DatabaseResource = container.resolve(DatabaseResource) 46 | db.drop_database() 47 | db.create_database() 48 | db.create_tables() 49 | yield 50 | db.drop_database() 51 | 52 | 53 | @pytest.fixture(autouse=True) 54 | def _clear_db_tables(container: Container) -> Generator[None, None, None]: 55 | """Очистка таблиц БД после каждого теста.""" 56 | yield 57 | db_resource = container.resolve(DatabaseResource) 58 | db_resource.clear_tables() 59 | 60 | 61 | @pytest.fixture(scope="session") 62 | def event_loop() -> asyncio.AbstractEventLoop: 63 | """Фикстура event-loop-а. Нужна для работы python-asyncio.""" 64 | return asyncio.get_event_loop() 65 | 66 | 67 | @pytest.fixture() 68 | async def client() -> AsyncGenerator[httpx.AsyncClient, None]: 69 | """Фикстура HTTP клиента.""" 70 | from application.web.application import create_app 71 | 72 | app = await create_app() 73 | async with httpx.AsyncClient(base_url="http://localhost:8000", app=app) as client: 74 | yield client 75 | 76 | 77 | @pytest.fixture() 78 | def image_factory() -> Callable[[str], io.BytesIO]: 79 | """Фикстура фабрики синхронных бинарных потоков.""" 80 | 81 | def _get_image(image_name: str) -> io.BytesIO: 82 | file = io.BytesIO() 83 | image = Image.new("RGBA", size=(100, 100), color=(155, 0, 0)) 84 | image.save(file, "png") 85 | file.name = image_name 86 | file.seek(0) 87 | return file 88 | 89 | return _get_image 90 | 91 | 92 | class AsyncImage: 93 | """Эмуляция асинхронного бинарного потока.""" 94 | 95 | def __init__(self, file: io.BytesIO) -> None: 96 | """Оборачивание обычного бинарного потока в виде файла.""" 97 | self._file = file 98 | self.filename: str = file.name 99 | 100 | async def write(self, data: Union[bytes, str]) -> None: 101 | """Запись в бинарный поток.""" 102 | self._file.write(data) # type: ignore 103 | 104 | async def read(self, size: int = -1) -> Union[bytes, str]: 105 | """Чтение из бинарного потока.""" 106 | return self._file.read(size) 107 | 108 | async def seek(self, offset: int) -> None: 109 | """Перемещение по бинарному потоку.""" 110 | self._file.seek(offset) 111 | 112 | async def close(self) -> None: 113 | """Закрытие бинарного потока.""" 114 | self._file.close() 115 | 116 | 117 | @pytest.fixture() 118 | def async_file_factory() -> Callable[[str], IAsyncFile]: 119 | """Фикстура фабрики асинхронных бинарных потоков.""" 120 | 121 | def _get_async_file(file_name: str) -> IAsyncFile: 122 | file = io.BytesIO() 123 | image = Image.new("RGBA", size=(100, 100), color=(155, 0, 0)) 124 | image.save(file, "png") 125 | file.name = file_name 126 | file.seek(0) 127 | return AsyncImage(file) 128 | 129 | return _get_async_file 130 | 131 | 132 | @pytest.fixture() 133 | def 
upload_file_repo(container: Container) -> IUploadedFileRepo: 134 | """Репозиторий загруженных файлов.""" 135 | return container.resolve(IUploadedFileRepo) # type: ignore[misc] 136 | -------------------------------------------------------------------------------- /backend/tests/test_application/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/tests/test_application/__init__.py -------------------------------------------------------------------------------- /backend/tests/test_application/test_web/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/tests/test_application/test_web/__init__.py -------------------------------------------------------------------------------- /backend/tests/test_application/test_web/test_health_check.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from starlette import status 3 | 4 | 5 | @pytest.mark.asyncio() 6 | class TestHealthCheck: 7 | async def test_health_check(self, client) -> None: 8 | """Проверка работы health check-а.""" 9 | response = await client.get("/api/v1/health-check", follow_redirects=True) 10 | assert status.HTTP_200_OK == response.status_code 11 | -------------------------------------------------------------------------------- /backend/tests/test_application/test_web/test_upload_file.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from domain.upload_file.repos import UploadedFile 4 | 5 | 6 | @pytest.fixture() 7 | async def uploaded_file(client, image_factory, upload_file_repo) -> UploadedFile: 8 | """Фикстура загруженного файла.""" 9 | image = image_factory("test.png") 10 | files = {"image": image} 11 | response = await client.post("/api/v1/", files=files) 12 | assert response.status_code == 200 13 | 14 | return await upload_file_repo.get(response.json()["uuid"]) 15 | 16 | 17 | @pytest.mark.asyncio() 18 | async def test_upload_file_view(client, image_factory) -> None: 19 | """Проверка endpoint-а загрузки файлов.""" 20 | image = image_factory("test.png") 21 | files = {"image": image} 22 | response = await client.post("/api/v1/", files=files) 23 | assert response.status_code == 200 24 | 25 | response_json = response.json() 26 | assert response_json["uuid"] is not None 27 | assert response_json["url"] is not None 28 | assert response_json["key"] is not None 29 | assert response_json["last_modified"] is not None 30 | assert response_json["content_length"] is not None 31 | assert response_json["upload_path"] is not None 32 | 33 | 34 | @pytest.mark.asyncio() 35 | async def test_delete_file_view_success(client, uploaded_file, upload_file_repo) -> None: 36 | """Проверка endpoint-а удаления файлов.""" 37 | assert uploaded_file.uuid is not None 38 | 39 | response = await client.delete(f"/api/v1/{uploaded_file.uuid}") 40 | assert response.status_code == 204 41 | 42 | file_from_repo = await upload_file_repo.get(uploaded_file.uuid) 43 | assert file_from_repo is None 44 | 45 | 46 | @pytest.mark.asyncio() 47 | async def test_delete_file_view_not_found(client, upload_file_repo) -> None: 48 | """Проверка endpoint-а удаления файлов.""" 49 | response = await client.delete("/api/v1/00000000-0000-0000-0000-000000000000") 50 | assert 
response.status_code == 404 51 | assert response.json()["reason"] == "file_does_not_exist" 52 | -------------------------------------------------------------------------------- /backend/tests/test_domain/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/tests/test_domain/__init__.py -------------------------------------------------------------------------------- /backend/tests/test_domain/test_upload_file/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hulvdan/fast-api-template/b5386290b4b224cbaedff6e3ff2515c9365fff7c/backend/tests/test_domain/test_upload_file/__init__.py -------------------------------------------------------------------------------- /backend/tests/test_domain/test_upload_file/test_use_cases.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from domain.upload_file.use_cases import DeleteFileUseCase, UploadFileUseCase 4 | 5 | 6 | @pytest.mark.asyncio() 7 | async def test_upload_file_use_case(container, async_file_factory, upload_file_repo) -> None: 8 | """Проверка успешной отработки сценария загрузки файлов.""" 9 | file = async_file_factory("a.png") 10 | uploaded_file = await container.resolve(UploadFileUseCase).execute(file) 11 | assert uploaded_file.uuid is not None 12 | 13 | file_from_db = await upload_file_repo.get(uploaded_file.uuid) 14 | assert file_from_db is not None 15 | 16 | 17 | @pytest.mark.asyncio() 18 | async def test_delete_file_use_case(container, async_file_factory, upload_file_repo) -> None: 19 | """Проверка успешной отработки сценария удаления файлов.""" 20 | file = async_file_factory("a.png") 21 | uploaded_file = await container.resolve(UploadFileUseCase).execute(file) 22 | assert uploaded_file.uuid is not None 23 | 24 | await container.resolve(DeleteFileUseCase).execute(uploaded_file.uuid) 25 | 26 | file_from_db = await upload_file_repo.get(uploaded_file.uuid) 27 | assert file_from_db is None 28 | -------------------------------------------------------------------------------- /docker-compose.override.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # This docker-compose file is required to bind ports in development, 3 | # since binding ports in regular compose file will ruin scaling 4 | # in production. Due to how `ports` directive is merged with two files. 5 | # 6 | # This file is ignored in production, but 7 | # it is automatically picked up in development with: 8 | # 9 | # $ docker-compose up 10 | 11 | version: "3.8" 12 | services: 13 | backend_db: 14 | ports: 15 | - "5432:5432" 16 | 17 | backend: 18 | ports: 19 | - "8000:8000" 20 | volumes: 21 | - ./backend:/code 22 | -------------------------------------------------------------------------------- /docker-compose.test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # This docker-compose file is required to bind ports in development, 3 | # since binding ports in regular compose file will ruin scaling 4 | # in production. Due to how `ports` directive is merged with two files. 
5 | # 6 | # This file is ignored in production, but 7 | # it is automatically picked up in development with: 8 | # 9 | # $ docker-compose up 10 | 11 | version: "3.8" 12 | services: 13 | backend_db: 14 | ports: 15 | - "5432:5432" 16 | 17 | backend: 18 | command: 19 | - pytest 20 | volumes: 21 | - ./backend:/code 22 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Default compose file for development and production. 3 | # Should be used directly in development. 4 | # Automatically loads `docker-compose.override.yml` if it exists. 5 | # No extra steps required. 6 | # Should be used together with `docker/docker-compose.prod.yml` 7 | # in production. 8 | 9 | version: "3.8" 10 | services: 11 | minio: 12 | image: "minio/minio" 13 | ports: 14 | - "9000:9000" 15 | volumes: 16 | - /home/hulvdan/mdata:/data 17 | networks: 18 | - webnet 19 | command: server /data 20 | env_file: backend/.env 21 | 22 | backend_db: 23 | image: "postgres:13-alpine" 24 | restart: unless-stopped 25 | volumes: 26 | - backend_pgdata:/var/lib/postgresql/data 27 | networks: 28 | - webnet 29 | env_file: backend/.env 30 | 31 | backend: 32 | <<: &backend # Image name is changed in production: 33 | image: "backend:dev" 34 | build: 35 | target: development_build 36 | context: backend 37 | dockerfile: ./docker/Dockerfile 38 | args: 39 | ENVIRONMENT: development 40 | cache_from: 41 | - "backend:dev" 42 | - "backend:latest" 43 | - "*" 44 | 45 | depends_on: 46 | - backend_db 47 | networks: 48 | - webnet 49 | env_file: backend/.env 50 | 51 | command: python -m application.web.main 52 | # healthcheck: 53 | # # We use `$$` here because: 54 | # # one `$` goes to shell, 55 | # # one `$` goes to `docker-compose.yml` escaping 56 | # test: | 57 | # /usr/bin/test $$( 58 | # /usr/bin/curl --fail http://localhost:8000/api/v1/health-check/?format=json 59 | # --write-out "%{http_code}" --silent --output /dev/null 60 | # ) -eq 200 61 | # interval: 10s 62 | # timeout: 5s 63 | # retries: 5 64 | # start_period: 30s 65 | 66 | networks: 67 | # Network for your internals, use it by default: 68 | webnet: 69 | 70 | volumes: 71 | backend_pgdata: 72 | --------------------------------------------------------------------------------