├── {{cookiecutter.project_slug}}
│   ├── tests
│   │   ├── __init__.py
│   │   ├── test_domain
│   │   │   ├── __init__.py
│   │   │   ├── test_entities
│   │   │   │   ├── __init__.py
│   │   │   │   └── test_artifact.py
│   │   │   └── test_value_objects
│   │   │       ├── __init__.py
│   │   │       └── test_era.py
│   │   ├── test_application
│   │   │   ├── __init__.py
│   │   │   └── test_use_cases
│   │   │       └── __init__.py
│   │   ├── test_infrastructure
│   │   │   ├── __init__.py
│   │   │   └── test_db
│   │   │       ├── __init__.py
│   │   │       └── models
│   │   │           ├── __init__.py
│   │   │           └── test_artifact_model.py
│   │   ├── test_integration
│   │   │   ├── __init__.py
│   │   │   └── test_api_integration.py
│   │   ├── test_presentation
│   │   │   ├── __init__.py
│   │   │   └── test_api
│   │   │       ├── __init__.py
│   │   │       └── test_controllers
│   │   │           ├── __init__.py
│   │   │           └── test_artifact_controller.py
│   │   ├── faker.py
│   │   └── conftest.py
│   ├── src
│   │   └── {{cookiecutter.project_slug}}
│   │       ├── __init__.py
│   │       ├── domain
│   │       │   ├── __init__.py
│   │       │   ├── entities
│   │       │   │   ├── __init__.py
│   │       │   │   └── artifact.py
│   │       │   ├── services
│   │       │   │   └── __init__.py
│   │       │   ├── value_objects
│   │       │   │   ├── __init__.py
│   │       │   │   ├── era.py
│   │       │   │   └── material.py
│   │       │   └── exceptions.py
│   │       ├── application
│   │       │   ├── __init__.py
│   │       │   ├── dtos
│   │       │   │   ├── __init__.py
│   │       │   │   └── artifact.py
│   │       │   ├── interfaces
│   │       │   │   ├── __init__.py
│   │       │   │   ├── db_mapper.py
│   │       │   │   ├── message_broker.py
│   │       │   │   ├── repositories.py
│   │       │   │   ├── uow.py
│   │       │   │   ├── serialization.py
│   │       │   │   ├── http_clients.py
│   │       │   │   ├── mappers.py
│   │       │   │   └── cache.py
│   │       │   ├── use_cases
│   │       │   │   ├── __init__.py
│   │       │   │   ├── save_artifact_to_cache.py
│   │       │   │   ├── save_artifact_to_repo.py
│   │       │   │   ├── get_artifact_from_cache.py
│   │       │   │   ├── get_artifact_from_repo.py
│   │       │   │   ├── publish_artifact_to_broker.py
│   │       │   │   ├── publish_artifact_to_catalog.py
│   │       │   │   ├── fetch_artifact_from_museum_api.py
│   │       │   │   └── process_artifact.py
│   │       │   ├── exceptions.py
│   │       │   └── mappers.py
│   │       ├── config
│   │       │   ├── ioc
│   │       │   │   ├── __init__.py
│   │       │   │   └── di.py
│   │       │   ├── base.py
│   │       │   ├── __init__.py
│   │       │   ├── app.py
│   │       │   ├── broker.py
│   │       │   ├── cors.py
│   │       │   ├── logging.py
│   │       │   ├── redis.py
│   │       │   ├── external_apis.py
│   │       │   └── database.py
│   │       ├── presentation
│   │       │   ├── __init__.py
│   │       │   ├── api
│   │       │   │   ├── __init__.py
│   │       │   │   └── rest
│   │       │   │       ├── __init__.py
│   │       │   │       ├── v1
│   │       │   │       │   ├── __init__.py
│   │       │   │       │   ├── exceptions.py
│   │       │   │       │   ├── controllers
│   │       │   │       │   │   ├── __init__.py
│   │       │   │       │   │   └── artifact_controller.py
│   │       │   │       │   ├── schemas
│   │       │   │       │   │   ├── __init__.py
│   │       │   │       │   │   └── responses.py
│   │       │   │       │   ├── mappers
│   │       │   │       │   │   ├── __init__.py
│   │       │   │       │   │   └── artifact_mapper.py
│   │       │   │       │   └── routers.py
│   │       │   │       ├── middlewares.py
│   │       │   │       └── error_handling.py
│   │       │   └── cli
│   │       │       └── __init__.py
│   │       ├── infrastructures
│   │       │   ├── __init__.py
│   │       │   ├── broker
│   │       │   │   ├── __init__.py
│   │       │   │   └── publisher.py
│   │       │   ├── cache
│   │       │   │   ├── __init__.py
│   │       │   │   └── redis_client.py
│   │       │   ├── db
│   │       │   │   ├── __init__.py
│   │       │   │   ├── models
│   │       │   │   │   ├── __init__.py
│   │       │   │   │   └── artifact.py
│   │       │   │   ├── migrations
│   │       │   │   │   ├── __init__.py
│   │       │   │   │   ├── script.py.mako
│   │       │   │   │   ├── versions
│   │       │   │   │   │   └── c3cca8a62218_initial_migration_create_artifacts_table.py
│   │       │   │   │   └── env.py
│   │       │   │   ├── repositories
│   │       │   │   │   ├── __init__.py
│   │       │   │   │   └── artifact.py
│   │       │   │   ├── mappers
│   │       │   │   │   ├── __init__.py
│   │       │   │   │   ├── artifact_uow_mapper.py
│   │       │   │   │   └── artifact_db_mapper.py
│   │       │   │   ├── exceptions.py
│   │       │   │   ├── session.py
│   │       │   │   ├── uow.py
│   │       │   │   └── uow_new.py
│   │       │   ├── http
│   │       │   │   ├── __init__.py
│   │       │   │   └── clients.py
│   │       │   └── mappers
│   │       │       ├── __init__.py
│   │       │       └── artifact.py
│   │       └── main.py
│   ├── scripts
│   │   ├── setup-env.sh
│   │   ├── migrate.sh
│   │   ├── init-mysql.sql
│   │   ├── init-db.sql
│   │   ├── init-sqlite.sh
│   │   └── init-db.sh
│   ├── docker-compose.override.yml
│   ├── docs
│   │   ├── environment.md
│   │   ├── migrations.md
│   │   ├── mypy-usage.md
│   │   ├── ruff-usage.md
│   │   └── docker.md
│   ├── .pre-commit-config.yaml
│   ├── .dockerignore
│   ├── .git-commit-template
│   ├── alembic.ini
│   ├── .gitignore
│   ├── env.template
│   ├── Dockerfile
│   └── Makefile
├── docs
│   ├── requirements.txt
│   ├── changelog.rst
│   ├── Makefile
│   ├── README.md
│   ├── reference
│   │   └── environment-variables.rst
│   ├── license.rst
│   ├── conf.py
│   ├── index.rst
│   └── getting-started
│       ├── installation.rst
│       └── quickstart.rst
├── .cookiecutterignore
├── .readthedocs.yaml
├── CONTRIBUTING.md
├── renovate.json
├── .editorconfig
├── LICENSE
├── cookiecutter.json
├── .github
│   └── pull_request_template.md
└── .gitignore
/{{cookiecutter.project_slug}}/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_domain/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_application/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_infrastructure/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_presentation/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_domain/test_entities/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_domain/test_value_objects/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_infrastructure/test_db/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_presentation/test_api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_application/test_use_cases/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_infrastructure/test_db/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/ioc/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/dtos/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/entities/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/services/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/cli/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_presentation/test_api/test_controllers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/value_objects/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/broker/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/cache/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/http/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/mappers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/exceptions.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/repositories/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/middlewares.py:
--------------------------------------------------------------------------------
1 | # TODO
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/controllers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx==8.2.3
2 | sphinx-rtd-theme==3.0.2
3 | sphinx-autodoc-typehints==3.5.2
4 | sphinx-copybutton==0.5.2
5 | myst-parser==4.0.1
6 | sphinxcontrib-mermaid==1.0.0
7 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/schemas/__init__.py:
--------------------------------------------------------------------------------
1 | from .responses import ArtifactResponseSchema
2 |
3 | __all__ = ["ArtifactResponseSchema"]
4 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/base.py:
--------------------------------------------------------------------------------
1 | from {{cookiecutter.project_slug}}.config.settings import Settings
2 |
3 | # Re-export Settings for backward compatibility
4 | __all__ = ["Settings"]
5 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/mappers/__init__.py:
--------------------------------------------------------------------------------
1 | from {{cookiecutter.project_slug}}.infrastructures.db.mappers.artifact_db_mapper import (
2 | ArtifactDBMapper,
3 | )
4 |
5 | __all__ = ["ArtifactDBMapper"]
6 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/mappers/__init__.py:
--------------------------------------------------------------------------------
1 | from {{cookiecutter.project_slug}}.presentation.api.rest.v1.mappers.artifact_mapper import ArtifactPresentationMapper
2 |
3 | __all__ = ["ArtifactPresentationMapper"]
4 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/scripts/setup-env.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Always recreate .env from template
4 | echo "Creating .env file from template..."
5 | cp env.template .env
6 | echo "✅ .env file created successfully!"
7 | echo "📝 Please review and modify .env file if needed"
8 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/scripts/migrate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Wait for database to be ready
4 | echo "Waiting for database to be ready..."
5 | sleep 10
6 |
7 | # Run migrations
8 | echo "Running database migrations..."
9 | poetry run alembic upgrade head
10 |
11 | echo "Migrations completed successfully!"
12 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/routers.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 |
3 | from {{cookiecutter.project_slug}}.presentation.api.rest.v1.controllers.artifact_controller import (
4 | router as artifact_router,
5 | )
6 |
7 | api_v1_router = APIRouter()
8 | api_v1_router.include_router(artifact_router)
9 |
--------------------------------------------------------------------------------
/.cookiecutterignore:
--------------------------------------------------------------------------------
1 | .venv/
2 | .venv/*
3 | .git/
4 | .git/*
5 | .idea/
6 | .idea/*
7 | .ruff_cache/
8 | .ruff_cache/*
9 | .DS_Store
10 | *.pyc
11 | __pycache__/
12 | __pycache__/*
13 | .pytest_cache/
14 | .pytest_cache/*
15 | .coverage
16 | htmlcov/
17 | dist/
18 | build/
19 | *.egg-info/
20 | node_modules/
21 | .vscode/
22 | .vscode/*
23 | *.log
24 | *.tmp
25 | *.temp
26 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/exceptions.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 |
4 | class RepositorySaveError(Exception):
5 | """Exception raised when an error occurs during repository save operation."""
6 |
7 |
8 | @final
9 | class RepositoryConflictError(Exception):
10 | """Exception raised when a conflict occurs during a repository operation."""
11 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docker-compose.override.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | services:
4 | adminer:
5 | image: adminer:latest
6 | container_name: antiques-adminer
7 | ports:
8 | - "8080:8080"
9 | environment:
10 | ADMINER_DEFAULT_SERVER: postgres
11 | depends_on:
12 | - postgres
13 | networks:
14 | - antiques-network
15 | profiles:
16 | - dev-tools
17 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/environment.md:
--------------------------------------------------------------------------------
1 | # Environment setup
2 |
3 | ## Overview
4 |
5 | The application uses environment variables for configuration.
6 | All variables are defined in the `.env` file and can be customized for different environments.
7 |
8 | ## Creating the .env file
9 |
10 | ```bash
11 | # Copy from the template
12 | make setup-env
13 |
14 | # Or copy manually
15 | cp env.template .env
16 | ```
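17 |
18 | A minimal `.env` might look like this (illustrative values only; `env.template` is the authoritative list):
19 |
20 | ```
21 | DEBUG=false
22 | BROKER_URL=amqp://guest:guest@localhost:5672/
23 | REDIS_URL=redis://:redis_password@redis:6379/0
24 | CORS_ORIGINS=["http://localhost:3000"]
25 | ```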
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | version: 2
5 |
6 | build:
7 | os: ubuntu-22.04
8 | tools:
9 | python: "3.12"
10 |
11 | sphinx:
12 | configuration: docs/conf.py
13 | fail_on_warning: false
14 |
15 | formats:
16 | - pdf
17 | - epub
18 |
19 | python:
20 | install:
21 | - requirements: docs/requirements.txt
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/exceptions.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 |
4 | @final
5 | class InvalidMaterialException(Exception):
6 | """Raised when an invalid material is provided."""
7 |
8 |
9 | @final
10 | class InvalidEraException(Exception):
11 | """Raised when an invalid era is provided."""
12 |
13 |
14 | @final
15 | class DomainValidationError(Exception):
16 | """Raised when domain entity validation fails."""
17 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_integration/test_api_integration.py:
--------------------------------------------------------------------------------
1 | from uuid import uuid4
2 |
3 | from fastapi.testclient import TestClient
4 | import pytest
5 |
6 |
7 | class TestApiIntegration:
8 | @pytest.mark.asyncio
9 | async def test_get_artifact_endpoint_success(self, client: TestClient):
10 | """Test successful artifact retrieval through API endpoint"""
11 | inventory_id = str(uuid4())
12 |
13 | with pytest.raises(Exception):
14 | client.get(f"/api/v1/artifacts/{inventory_id}")
15 |
--------------------------------------------------------------------------------
/docs/changelog.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | All notable changes to the Clean Architecture FastAPI Template will be documented in this file.
5 |
6 | The format is based on `Keep a Changelog <https://keepachangelog.com/>`_,
7 | and this project adheres to `Semantic Versioning <https://semver.org/>`_.
8 |
9 | [Unreleased]
10 | ------------
11 |
12 | [1.0.0] - 2025-10-01
13 | --------------------
14 |
15 | Added
16 | ~~~~~
17 |
18 | .. _Unreleased: https://github.com/Peopl3s/clean-architecture-fastapi-project-template/compare/v1.0.0...HEAD
19 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/faker.py:
--------------------------------------------------------------------------------
1 | from datetime import UTC, datetime
2 | from faker import Faker
3 |
4 |
5 | _faker = Faker()
6 |
7 |
8 | def get_faker() -> Faker:
9 | """Get the global Faker instance."""
10 | return _faker
11 |
12 |
13 | def uuid4() -> str:
14 | """Generate a random UUID."""
15 | return _faker.uuid4()
16 |
17 |
18 | def word() -> str:
19 | """Generate a random word."""
20 | return _faker.word()
21 |
22 |
23 | def text() -> str:
24 | """Generate random text."""
25 | return _faker.text()
26 |
27 |
28 | def date_time_this_century() -> datetime:
29 | """Generate a datetime from this century."""
30 | return _faker.date_time_this_century(tzinfo=UTC)
31 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 |
3 | # You can set these variables from the command line, and also
4 | # from the environment for the first two.
5 | SPHINXOPTS ?=
6 | SPHINXBUILD ?= sphinx-build
7 | SOURCEDIR = .
8 | BUILDDIR = _build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
20 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/exceptions.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 |
4 | @final
5 | class ArtifactNotFoundError(Exception):
6 | """Exception raised when an artifact is not found."""
7 |
8 |
9 | @final
10 | class FailedFetchArtifactMuseumAPIException(Exception):
11 | """Exception raised when fetching an artifact from the museum API fails."""
12 |
13 |
14 | @final
15 | class FailedPublishArtifactMessageBrokerException(Exception):
16 | """Exception raised when publishing an artifact to the message broker fails."""
17 |
18 |
19 | @final
20 | class FailedPublishArtifactInCatalogException(Exception):
21 | """Exception raised when publishing an artifact to the catalog fails."""
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_domain/test_value_objects/test_era.py:
--------------------------------------------------------------------------------
1 | from {{cookiecutter.project_slug}}.domain.exceptions import InvalidEraException
2 | from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
3 |
4 |
5 | class TestEra:
6 | def test_create_era_success(self):
7 | """Test successful creation of Era with valid values"""
8 | valid_eras = [
9 | "paleolithic",
10 | "neolithic",
11 | "bronze_age",
12 | "iron_age",
13 | "antiquity",
14 | "middle_ages",
15 | "modern",
16 | ]
17 |
18 | for era_value in valid_eras:
19 | era = Era(value=era_value)
20 | assert era.value == era_value
21 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/db_mapper.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol, TypeVar
3 |
4 | T = TypeVar('T')
5 |
6 |
7 | class DbMapperProtocol(Protocol[T]):
8 | """Protocol for database mappers used in Unit of Work."""
9 |
10 | @abstractmethod
11 | def insert(self, model: T) -> None:
12 | """Insert a new model into the database."""
13 | ...
14 |
15 | @abstractmethod
16 | def update(self, model: T) -> None:
17 | """Update an existing model in the database."""
18 | ...
19 |
20 | @abstractmethod
21 | def delete(self, model: T) -> None:
22 | """Delete a model from the database."""
23 | ...
24 |
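25 | # Illustrative conformance sketch (hypothetical session-backed mapper; the real
26 | # implementation is ArtifactDBMapper in infrastructures/db/mappers):
27 | #
28 | #   class SessionBackedMapper:
29 | #       def __init__(self, session): self._session = session
30 | #       def insert(self, model): self._session.add(model)      # stage an INSERT
31 | #       def update(self, model): ...                            # e.g. merge into the session
32 | #       def delete(self, model): ...                            # mark the row for deletion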
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/message_broker.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol
3 |
4 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactAdmissionNotificationDTO
5 |
6 |
7 | class MessageBrokerPublisherProtocol(Protocol):
8 | """
9 | Protocol for publishing messages to a message broker.
10 | """
11 |
12 | @abstractmethod
13 | async def publish_new_artifact(
14 | self, artifact: ArtifactAdmissionNotificationDTO
15 | ) -> None:
16 | """
17 | Publishes a new artifact admission notification to the message broker.
18 |
19 | Args:
20 | artifact: The ArtifactAdmissionNotificationDTO to publish.
21 | """
22 | ...
23 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # 🤝 Contributing
2 |
3 | Thanks for your interest in contributing! 🎉
4 | This guide is about contributing **to the template itself**.
5 | It does not cover projects that are generated from this template.
6 |
7 | ---
8 |
9 | ## Workflow
10 |
11 | 1. Fork the repository
12 | 2. Create a new feature branch
13 | 3. Make your changes in the **template files**
14 | (not in generated project code)
15 | 4. Add or update tests for new functionality
16 | 5. Run locally and ensure all checks pass (`make check`)
17 | 6. Commit and push your changes to your fork
18 | 7. Submit a pull request 🚀
19 |
20 | ---
21 |
22 | ## Development setup
23 |
24 | We use [Poetry](https://python-poetry.org/) to manage dependencies.
25 |
26 | Install development dependencies:
27 |
28 | ```bash
29 | make dev-setup
30 | ```
31 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 | ${imports if imports else ""}
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = ${repr(up_revision)}
16 | down_revision: Union[str, None] = ${repr(down_revision)}
17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19 |
20 |
21 | def upgrade() -> None:
22 | ${upgrades if upgrades else "pass"}
23 |
24 |
25 | def downgrade() -> None:
26 | ${downgrades if downgrades else "pass"}
27 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/__init__.py:
--------------------------------------------------------------------------------
1 | from {{cookiecutter.project_slug}}.config.app import AppSettings
2 | from {{cookiecutter.project_slug}}.config.base import Settings
3 | from {{cookiecutter.project_slug}}.config.broker import BrokerSettings
4 | from {{cookiecutter.project_slug}}.config.cors import CORSSettings
5 | from {{cookiecutter.project_slug}}.config.database import DatabaseSettings
6 | from {{cookiecutter.project_slug}}.config.external_apis import ExternalAPISettings
7 | from {{cookiecutter.project_slug}}.config.redis import RedisSettings
8 | from {{cookiecutter.project_slug}}.config.settings import Settings as NewSettings
9 |
10 | __all__ = [
11 | "AppSettings",
12 | "Settings", # Backward compatibility
13 | "NewSettings", # New modular settings
14 | "DatabaseSettings",
15 | "RedisSettings",
16 | "ExternalAPISettings",
17 | "BrokerSettings",
18 | "CORSSettings",
19 | ]
20 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/app.py:
--------------------------------------------------------------------------------
1 | from typing import Literal, final
2 |
3 | from pydantic import Field
4 | from pydantic_settings import BaseSettings
5 |
6 |
7 | @final
8 | class AppSettings(BaseSettings):
9 | """
10 | Application core settings.
11 |
12 | Attributes:
13 | app_name (str): Name of the application.
14 | environment (Literal["local", "dev", "development", "prod"]): Application environment.
15 | log_level (Literal["DEBUG", "INFO", "WARNING", "ERROR"]): Logging level.
16 | debug (bool): Debug mode flag.
17 | """
18 |
19 | app_name: str = "Antiquarium Service"
20 | environment: Literal["local", "dev", "development", "prod"] = "local"
21 | log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "INFO"
22 | debug: bool = Field(False, alias="DEBUG")
23 |
24 | class Config:
25 | env_file = ".env"
26 | env_file_encoding = "utf-8"
27 | extra = "ignore"
28 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": [
4 | "config:recommended",
5 | ":dependencyDashboard"
6 | ],
7 | "prHourlyLimit": 0,
8 | "prConcurrentLimit": 0,
9 | "labels": [
10 | "dependencies"
11 | ],
12 | "commitMessagePrefix": "chore(deps): ",
13 | "minimumReleaseAge": "1 day",
14 | "packageRules": [
15 | {
16 | "matchManagers": [
17 | "github-actions"
18 | ],
19 | "groupName": "GitHub Actions updates",
20 | "schedule": [
21 | "* 2 * * *"
22 | ]
23 | },
24 | {
25 | "matchManagers": [
26 | "dockerfile"
27 | ],
28 | "groupName": "Docker image updates",
29 | "schedule": [
30 | "* 2 * * *"
31 | ]
32 | },
33 | {
34 | "matchManagers": [
35 | "pip_requirements"
36 | ],
37 | "groupName": "Python dependencies",
38 | "schedule": [
39 | "* 2 * * *"
40 | ]
41 | }
42 | ]
43 | }
44 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/ioc/di.py:
--------------------------------------------------------------------------------
1 | from dishka import Provider
2 |
3 | from {{cookiecutter.project_slug}}.config.ioc.providers import (
4 | BrokerProvider,
5 | CacheProvider,
6 | DatabaseProvider,
7 | HTTPClientProvider,
8 | MapperProvider,
9 | RepositoryProvider,
10 | ServiceProvider,
11 | SettingsProvider,
12 | UseCaseProvider,
13 | UnitOfWorkProvider,
14 | )
15 |
16 |
17 | def get_providers() -> list[Provider]:
18 | """
19 | Returns a list of Dishka providers for dependency injection.
20 |
21 | Returns:
22 | list[Provider]: A list of configured providers.
23 | """
24 | return [
25 | SettingsProvider(),
26 | DatabaseProvider(),
27 | HTTPClientProvider(),
28 | BrokerProvider(),
29 | RepositoryProvider(),
30 | ServiceProvider(),
31 | MapperProvider(),
32 | CacheProvider(),
33 | UseCaseProvider(),
34 | UnitOfWorkProvider(),
35 | ]
36 |
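37 | # Illustrative wiring (assumes dishka's make_async_container entry point):
38 | #
39 | #   from dishka import make_async_container
40 | #   container = make_async_container(*get_providers())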
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # This is the top-most .editorconfig file
2 | root = true
3 |
4 | # All Files
5 | [*]
6 | charset = utf-8
7 | end_of_line = lf
8 | insert_final_newline = true
9 | indent_style = space
10 | indent_size = 2
11 | trim_trailing_whitespace = true
12 |
13 | # Python Files
14 | [*.py]
15 | max_line_length = 88
16 | indent_size = 4
17 |
18 | # Configuration files (JSON, YAML, TOML, INI)
19 | [*.{json,yaml,yml,toml,ini}]
20 | indent_size = 2
21 |
22 | # Markdown Files
23 | [*.md]
24 | trim_trailing_whitespace = false
25 | max_line_length = off
26 |
27 | # Shell Scripts
28 | [*.sh]
29 | indent_size = 2
30 |
31 | # SQL Files
32 | [*.sql]
33 | indent_size = 2
34 |
35 | # Dockerfile
36 | [Dockerfile*]
37 | indent_size = 4
38 |
39 | # Makefile
40 | [Makefile]
41 | indent_style = tab
42 |
43 | # Git files
44 | [.git*]
45 | indent_size = unset
46 | indent_style = unset
47 | trim_trailing_whitespace = unset
48 | insert_final_newline = unset
49 |
50 | # Editor-specific files
51 | [*.{vscode,sublime-project,sublime-workspace}]
52 | indent_size = unset
53 | indent_style = unset
54 | trim_trailing_whitespace = unset
55 | insert_final_newline = unset
56 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Максим Мельников
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/broker.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 | from pydantic import Field
4 | from pydantic_settings import BaseSettings
5 |
6 |
7 | @final
8 | class BrokerSettings(BaseSettings):
9 | """
10 | Message broker configuration settings.
11 |
12 | Attributes:
13 | broker_url (str): Message broker URL.
14 | broker_new_artifact_queue (str): Queue name for new artifact messages.
15 | publish_retries (int): Number of retries for publishing operations.
16 | publish_retry_backoff (float): Backoff factor for publish retries.
17 | """
18 |
19 | broker_url: str = Field(
20 | ..., alias="BROKER_URL"
21 | ) # e.g. amqp://guest:guest@localhost:5672/
22 | broker_new_artifact_queue: str = Field(
23 | "new_artifacts", alias="BROKER_NEW_ARTIFACT_QUEUE"
24 | )
25 |
26 | publish_retries: int = Field(3, alias="PUBLISH_RETRIES")
27 | publish_retry_backoff: float = Field(0.5, alias="PUBLISH_RETRY_BACKOFF")
28 |
29 | class Config:
30 | env_file = ".env"
31 | env_file_encoding = "utf-8"
32 | extra = "ignore"
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/scripts/init-mysql.sql:
--------------------------------------------------------------------------------
1 | -- MySQL initialization script for {{ cookiecutter.project_name }}
2 | -- This script is executed when the MySQL container is first created
3 |
4 | -- Create database if it doesn't exist
5 | CREATE DATABASE IF NOT EXISTS `{{ cookiecutter.database_name }}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
6 |
7 | -- Create user if it doesn't exist and grant privileges
8 | CREATE USER IF NOT EXISTS '{{ cookiecutter.database_user }}'@'%' IDENTIFIED BY '{{ cookiecutter.database_password }}';
9 |
10 | -- Grant all privileges on the database to the user
11 | GRANT ALL PRIVILEGES ON `{{ cookiecutter.database_name }}`.* TO '{{ cookiecutter.database_user }}'@'%';
12 |
13 | -- Flush privileges to apply changes
14 | FLUSH PRIVILEGES;
15 |
16 | -- Switch to the created database
17 | USE `{{ cookiecutter.database_name }}`;
18 |
19 | -- Create initial tables (if needed)
20 | -- This is where you can add any initial table creation
21 | -- Note: Alembic migrations will handle the actual schema creation
22 |
23 | -- Show current database and user
24 | SELECT DATABASE() AS current_database;
25 | SELECT CURRENT_USER() AS `current_user`;
26 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/repositories.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol
3 | from uuid import UUID
4 |
5 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
6 |
7 |
8 | class ArtifactRepositoryProtocol(Protocol):
9 | """
10 | Protocol for an artifact repository.
11 | Defines methods for retrieving and saving artifact entities.
12 | """
13 |
14 | @abstractmethod
15 | async def get_by_inventory_id(
16 | self, inventory_id: str | UUID
17 | ) -> ArtifactEntity | None:
18 | """
19 | Retrieves an artifact by its inventory ID.
20 |
21 | Args:
22 | inventory_id: The unique identifier of the artifact.
23 |
24 | Returns:
25 | The ArtifactEntity if found, otherwise None.
26 | """
27 | ...
28 |
29 | @abstractmethod
30 | async def save(self, artifact: ArtifactEntity) -> None:
31 | """
32 | Saves a new artifact or updates an existing one.
33 |
34 | Args:
35 | artifact: The ArtifactEntity to save.
36 | """
37 | ...
38 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/uow.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol
3 |
4 | from {{cookiecutter.project_slug}}.application.interfaces.repositories import ArtifactRepositoryProtocol
5 |
6 |
7 | class UnitOfWorkProtocol(Protocol):
8 | """
9 | Protocol for a Unit of Work.
10 |
11 | This protocol defines the interface for managing transactions and
12 | the lifecycle of repositories within a single business transaction.
13 | """
14 |
15 | repository: ArtifactRepositoryProtocol
16 |
17 | @abstractmethod
18 | async def __aenter__(self) -> "UnitOfWorkProtocol":
19 | """Enters the asynchronous context manager."""
20 | ...
21 |
22 | @abstractmethod
23 | async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
24 | """Exits the asynchronous context manager."""
25 | ...
26 |
27 | @abstractmethod
28 | async def commit(self) -> None:
29 | """Commits the current transaction."""
30 | ...
31 |
32 | @abstractmethod
33 | async def rollback(self) -> None:
34 | """Rolls back the current transaction."""
35 | ...
36 |
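37 | # Illustrative usage of a concrete implementation (hypothetical `uow` object,
38 | # not part of this protocol module):
39 | #
40 | #   async with uow:
41 | #       await uow.repository.save(artifact)
42 | #       await uow.commit()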
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/cors.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 | from pydantic import Field
4 | from pydantic_settings import BaseSettings
5 |
6 |
7 | @final
8 | class CORSSettings(BaseSettings):
9 | """
10 | CORS configuration settings.
11 |
12 | Attributes:
13 | cors_origins (list[str]): List of allowed CORS origins.
14 | cors_allow_credentials (bool): Whether CORS requests should support credentials.
15 | cors_allow_methods (list[str]): List of allowed CORS HTTP methods.
16 | cors_allow_headers (list[str]): List of allowed CORS HTTP headers.
17 | """
18 |
19 | cors_origins: list[str] = Field(
20 | ["http://localhost:3000", "http://localhost:8080"], alias="CORS_ORIGINS"
21 | )
22 | cors_allow_credentials: bool = Field(True, alias="CORS_ALLOW_CREDENTIALS")
23 | cors_allow_methods: list[str] = Field(
24 | ["GET", "POST", "PUT", "DELETE", "OPTIONS"], alias="CORS_ALLOW_METHODS"
25 | )
26 | cors_allow_headers: list[str] = Field(["*"], alias="CORS_ALLOW_HEADERS")
27 |
28 | class Config:
29 | env_file = ".env"
30 | env_file_encoding = "utf-8"
31 | extra = "ignore"
32 |
--------------------------------------------------------------------------------
/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_name": "My FastAPI Project",
3 | "project_slug": "{{ cookiecutter.project_name.lower().replace(' ', '_').replace('-', '_') }}",
4 | "project_description": "A modern FastAPI application with clean architecture",
5 | "author_name": "Your Name",
6 | "author_email": "your.email@example.com",
7 | "github_username": "yourusername",
8 | "version": "0.1.0",
9 | "python_version": "3.12",
10 | "database_name": "{{ cookiecutter.project_slug }}",
11 | "database_user": "{{ cookiecutter.project_slug }}_user",
12 | "database_password": "{{ cookiecutter.project_slug }}_password",
13 | "redis_password": "redis_password",
14 | "api_title": "{{ cookiecutter.project_name }} API",
15 | "api_version": "1.0.0",
16 | "api_description": "API for {{ cookiecutter.project_description }}",
17 | "domain_name": "example.com",
18 | "use_broker": ["none", "kafka", "rabbitmq", "nats"],
19 | "use_cache": ["none", "redis", "keydb", "tarantool", "dragonfly"],
20 | "use_database": ["none", "postgresql", "sqlite", "mysql"],
21 | "add_docker": "y",
22 | "add_tests": "y",
23 | "add_docs": "y",
24 | "add_precommit": "y",
25 | "license_type": ["MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "None"]
26 | }
27 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/serialization.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol
3 |
4 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
5 |
6 |
7 | class SerializationMapperProtocol(Protocol):
8 | """Protocol for serialization/deserialization of Application DTOs.
9 |
10 | This interface allows the Application layer to serialize DTOs
11 | without depending on Infrastructure implementations.
12 | """
13 |
14 | @abstractmethod
15 | def to_dict(self, dto: ArtifactDTO) -> dict:
16 | """
17 | Converts an Application DTO to a dictionary for serialization.
18 |
19 | Args:
20 | dto: The ArtifactDTO to convert.
21 |
22 | Returns:
23 | A dictionary representation of the DTO.
24 | """
25 | ...
26 |
27 | @abstractmethod
28 | def from_dict(self, data: dict) -> ArtifactDTO:
29 | """
30 | Converts a dictionary from deserialization back to an Application DTO.
31 |
32 | Args:
33 | data: The dictionary to convert.
34 |
35 | Returns:
36 | An ArtifactDTO object.
37 | """
38 | ...
39 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/value_objects/era.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import ClassVar, final
3 |
4 | from {{cookiecutter.project_slug}}.domain.exceptions import InvalidEraException
5 |
6 |
7 | @final
8 | @dataclass(frozen=True, slots=True, kw_only=True, order=True)
9 | class Era:
10 | """
11 | Value object representing a historical era.
12 |
13 | Ensures that the era value is one of the predefined allowed values.
14 | """
15 | _allowed_values: ClassVar[set[str]] = {
16 | "paleolithic",
17 | "neolithic",
18 | "bronze_age",
19 | "iron_age",
20 | "antiquity",
21 | "middle_ages",
22 | "modern",
23 | }
24 | value: str
25 |
26 | def __post_init__(self) -> None:
27 | """
28 | Validates the era value after initialization.
29 |
30 | Raises:
31 | InvalidEraException: If the provided era value is not allowed.
32 | """
33 | if self.value not in self._allowed_values:
34 | raise InvalidEraException(f"Invalid era: {self.value}")
35 |
36 | def __str__(self) -> str:
37 | """
38 | Returns the string representation of the era.
39 | """
40 | return self.value
41 |
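42 | # Example (illustrative):
43 | #
44 | #   Era(value="antiquity")    # OK
45 | #   Era(value="postmodern")   # raises InvalidEraException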
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/logging.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 |
4 | import structlog
5 |
6 |
7 | def setup_logging(level: str = "INFO") -> None:
8 | """
9 | Sets up structlog and standard library logging.
10 |
11 | Args:
12 | level: The logging level (e.g., "INFO", "DEBUG").
13 | """
14 | structlog.configure(
15 | processors=[
16 | structlog.stdlib.filter_by_level,
17 | structlog.stdlib.add_logger_name,
18 | structlog.stdlib.add_log_level,
19 | structlog.stdlib.PositionalArgumentsFormatter(),
20 | structlog.processors.TimeStamper(fmt="iso"),
21 | structlog.processors.StackInfoRenderer(),
22 | structlog.processors.format_exc_info,
23 | structlog.processors.UnicodeDecoder(),
24 | structlog.processors.JSONRenderer()
25 | ],
26 | context_class=dict,
27 | logger_factory=structlog.stdlib.LoggerFactory(),
28 | wrapper_class=structlog.stdlib.BoundLogger,
29 | cache_logger_on_first_use=True,
30 | )
31 |
32 | # Configure standard library logging
33 | logging.basicConfig(
34 | format="%(message)s",
35 | stream=sys.stdout,
36 | level=getattr(logging, level.upper()),
37 | )
38 |
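39 | # Illustrative usage:
40 | #
41 | #   setup_logging("DEBUG")
42 | #   structlog.get_logger(__name__).info("service_started")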
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/value_objects/material.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import ClassVar, final
3 |
4 | from {{cookiecutter.project_slug}}.domain.exceptions import InvalidMaterialException
5 |
6 |
7 | @final
8 | @dataclass(frozen=True, slots=True, kw_only=True, order=True)
9 | class Material:
10 | """
11 | Value object representing a material type.
12 |
13 | Ensures that the material value is one of the predefined allowed values.
14 | """
15 | _allowed_values: ClassVar[set[str]] = {
16 | "ceramic",
17 | "metal",
18 | "stone",
19 | "glass",
20 | "bone",
21 | "wood",
22 | "textile",
23 | "other",
24 | }
25 | value: str
26 |
27 | def __post_init__(self) -> None:
28 | """
29 | Validates the material value after initialization.
30 |
31 | Raises:
32 | InvalidMaterialException: If the provided material value is not allowed.
33 | """
34 | if self.value not in self._allowed_values:
35 | raise InvalidMaterialException(f"Invalid material: {self.value}")
36 |
37 | def __str__(self) -> str:
38 | """
39 | Returns the string representation of the material.
40 | """
41 | return self.value
42 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/session.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.ext.asyncio import (
2 | AsyncEngine,
3 | AsyncSession,
4 | async_sessionmaker,
5 | create_async_engine,
6 | )
7 |
8 |
9 | def create_engine(url: str, is_echo: bool = True) -> AsyncEngine:
10 | """
11 | Creates an asynchronous SQLAlchemy engine.
12 |
13 | Args:
14 | url: The database connection URL.
15 | is_echo: If True, SQL statements will be echoed to the console.
16 |
17 | Returns:
18 | An AsyncEngine instance.
19 | """
20 | return create_async_engine(
21 | url=url,
22 | echo=is_echo,
23 | pool_size=20,
24 | max_overflow=30,
25 | pool_pre_ping=True,
26 | pool_recycle=3600,
27 | connect_args={},
28 | )
29 |
30 |
31 | def get_session_factory(engine: AsyncEngine) -> async_sessionmaker[AsyncSession]:
32 | """
33 | Creates an asynchronous sessionmaker for SQLAlchemy sessions.
34 |
35 | Args:
36 | engine: The AsyncEngine instance.
37 |
38 | Returns:
39 | An async_sessionmaker configured for AsyncSession.
40 | """
41 | return async_sessionmaker(
42 | bind=engine,
43 | class_=AsyncSession,
44 | expire_on_commit=False,
45 | autoflush=False,
46 | )
47 |
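48 | # Illustrative usage (the DSN below is a placeholder):
49 | #
50 | #   engine = create_engine("postgresql+asyncpg://user:pass@localhost/db", is_echo=False)
51 | #   session_factory = get_session_factory(engine)
52 | #   async with session_factory() as session:
53 | #       ...  # issue queries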
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/scripts/init-db.sql:
--------------------------------------------------------------------------------
1 | -- Database initialization script for {{ cookiecutter.project_name }} application
2 | -- This script runs when the PostgreSQL container starts for the first time
3 |
4 | -- Create the main user
5 | CREATE USER {{ cookiecutter.database_user }} WITH PASSWORD '{{ cookiecutter.database_password }}';
6 |
7 | -- Create the database
8 | CREATE DATABASE {{ cookiecutter.database_name }} OWNER {{ cookiecutter.database_user }};
9 |
10 | -- Grant privileges
11 | GRANT ALL PRIVILEGES ON DATABASE {{ cookiecutter.database_name }} TO {{ cookiecutter.database_user }};
12 |
13 | -- Connect to the {{ cookiecutter.database_name }} database
14 | \c {{ cookiecutter.database_name }}
15 |
16 | -- Create extensions
17 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
18 | CREATE EXTENSION IF NOT EXISTS "pg_trgm";
19 |
20 | -- Set timezone
21 | SET timezone = 'UTC';
22 |
23 | -- Grant schema privileges
24 | GRANT ALL ON SCHEMA public TO {{ cookiecutter.database_user }};
25 | GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO {{ cookiecutter.database_user }};
26 | GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO {{ cookiecutter.database_user }};
27 | ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO {{ cookiecutter.database_user }};
28 | ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO {{ cookiecutter.database_user }};
29 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/mappers/artifact_mapper.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import final
3 |
4 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
5 | from {{cookiecutter.project_slug}}.presentation.api.rest.v1.schemas.responses import (
6 | ArtifactResponseSchema,
7 | EraResponseSchema,
8 | MaterialResponseSchema,
9 | )
10 |
11 |
12 | @final
13 | @dataclass(frozen=True, slots=True)
14 | class ArtifactPresentationMapper:
15 | """Mapper for converting Application DTOs to Presentation Response models.
16 |
17 | This mapper isolates the Presentation layer from direct dependencies on Application DTOs,
18 | following Clean Architecture principles.
19 | """
20 |
21 | def to_response(self, dto: ArtifactDTO) -> ArtifactResponseSchema:
22 | """Convert Application DTO to API Response model."""
23 | return ArtifactResponseSchema(
24 | inventory_id=dto.inventory_id,
25 | created_at=dto.created_at,
26 | acquisition_date=dto.acquisition_date,
27 | name=dto.name,
28 | department=dto.department,
29 | era=EraResponseSchema(value=dto.era.value),
30 | material=MaterialResponseSchema(value=dto.material.value),
31 | description=dto.description,
32 | )
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/redis.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 | from pydantic import Field, RedisDsn
4 | from pydantic_settings import BaseSettings
5 |
6 |
7 | @final
8 | class RedisSettings(BaseSettings):
9 | """
10 | Redis configuration settings.
11 |
12 | Attributes:
13 | redis_url (RedisDsn): Redis connection URL.
14 | redis_password (str): Redis password.
15 | redis_port (int): Redis port.
16 | redis_host (str): Redis host.
17 | redis_db (int): Redis database number.
18 | redis_cache_ttl (int): Time-to-live for Redis cache entries in seconds.
19 | redis_cache_prefix (str): Prefix for Redis cache keys.
20 | """
21 |
22 | redis_url: RedisDsn = Field(
23 | RedisDsn("redis://:redis_password@redis:6379/0"), alias="REDIS_URL"
24 | )
25 | redis_password: str = Field("redis_password", alias="REDIS_PASSWORD")
26 | redis_port: int = Field(6379, alias="REDIS_PORT")
27 | redis_host: str = Field("redis", alias="REDIS_HOST")
28 | redis_db: int = Field(0, alias="REDIS_DB")
29 | redis_cache_ttl: int = Field(3600, alias="REDIS_CACHE_TTL") # 1 hour default TTL
30 | redis_cache_prefix: str = Field("antiques:", alias="REDIS_CACHE_PREFIX")
31 |
32 | class Config:
33 | env_file = ".env"
34 | env_file_encoding = "utf-8"
35 | extra = "ignore"
36 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/http_clients.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol
3 | from uuid import UUID
4 |
5 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactCatalogPublicationDTO, ArtifactDTO
6 |
7 |
8 | class ExternalMuseumAPIProtocol(Protocol):
9 | """
10 | Protocol for interacting with an external museum API.
11 | """
12 |
13 | @abstractmethod
14 | async def fetch_artifact(self, inventory_id: str | UUID) -> ArtifactDTO:
15 | """
16 | Fetches an artifact from the external museum API.
17 |
18 | Args:
19 | inventory_id: The ID of the artifact to fetch.
20 |
21 | Returns:
22 | An ArtifactDTO object.
23 | """
24 | ...
25 |
26 |
27 | class PublicCatalogAPIProtocol(Protocol):
28 | """
29 | Protocol for interacting with a public catalog API.
30 | """
31 |
32 | @abstractmethod
33 | async def publish_artifact(
34 | self, artifact: ArtifactCatalogPublicationDTO
35 | ) -> str:
36 | """
37 | Publishes an artifact to the public catalog API.
38 |
39 | Args:
40 | artifact: The ArtifactCatalogPublicationDTO to publish.
41 |
42 | Returns:
43 | A string representing the publication status or ID.
44 | """
45 | ...
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/mappers.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Protocol
3 |
4 | from {{cookiecutter.project_slug}}.application.dtos.artifact import (
5 | ArtifactAdmissionNotificationDTO,
6 | ArtifactCatalogPublicationDTO,
7 | ArtifactDTO,
8 | )
9 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
10 |
11 |
12 | class DtoEntityMapperProtocol(Protocol):
13 | """Protocol for Application layer mapper (Domain Entity <-> Application DTO)."""
14 |
15 | @abstractmethod
16 | def to_dto(self, entity: ArtifactEntity) -> ArtifactDTO:
17 | """Converts a Domain Entity to an Application DTO."""
18 | ...
19 |
20 | @abstractmethod
21 | def to_entity(self, dto: ArtifactDTO) -> ArtifactEntity:
22 | """Converts an Application DTO to a Domain Entity."""
23 | ...
24 |
25 | @abstractmethod
26 | def to_notification_dto(
27 | self, entity: ArtifactEntity
28 | ) -> ArtifactAdmissionNotificationDTO:
29 | """Converts a Domain Entity to an ArtifactAdmissionNotificationDTO."""
30 | ...
31 |
32 | @abstractmethod
33 | def to_publication_dto(
34 | self, entity: ArtifactEntity
35 | ) -> ArtifactCatalogPublicationDTO:
36 | """Converts a Domain Entity to an ArtifactCatalogPublicationDTO."""
37 | ...
38 |
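To make the contract concrete, here is a minimal sketch of the entity-to-DTO direction only; the class name `ArtifactMapperSketch` is hypothetical, while the field names follow `ArtifactEntity` and `ArtifactDTO` as defined elsewhere in this template.

```python
from {{cookiecutter.project_slug}}.application.dtos.artifact import (
    ArtifactDTO,
    EraDTO,
    MaterialDTO,
)
from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity


class ArtifactMapperSketch:
    """Hypothetical, partial implementation of DtoEntityMapperProtocol."""

    def to_dto(self, entity: ArtifactEntity) -> ArtifactDTO:
        # Value objects are unwrapped into their DTO counterparts.
        return ArtifactDTO(
            inventory_id=entity.inventory_id,
            created_at=entity.created_at,
            acquisition_date=entity.acquisition_date,
            name=entity.name,
            department=entity.department,
            era=EraDTO(value=entity.era.value),
            material=MaterialDTO(value=entity.material.value),
            description=entity.description,
        )
```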
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # https://pre-commit.com/
2 |
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v4.5.0
6 | hooks:
7 | - id: trailing-whitespace
8 | - id: end-of-file-fixer
9 | - id: debug-statements
10 | - id: mixed-line-ending
11 | args: [ '--fix=lf' ]
12 |         description: Force line endings to be replaced with the UNIX 'lf' character.
13 | - id: check-merge-conflict
14 | - id: check-ast
15 |
16 | - repo: https://github.com/jorisroovers/gitlint
17 | rev: v0.19.1
18 | hooks:
19 | - id: gitlint
20 |
21 | - repo: https://github.com/astral-sh/ruff-pre-commit
22 | # Ruff version.
23 | rev: v0.12.12
24 | hooks:
25 | - id: ruff
26 | args: [ --fix, --exit-non-zero-on-fix, --exclude=tests/ ]
27 | pass_filenames: false
28 | - id: ruff-format
29 | args: [ --config=pyproject.toml ]
30 |
31 | - repo: https://github.com/pre-commit/mirrors-mypy
32 | rev: v1.5.1
33 | hooks:
34 | - id: mypy
35 | additional_dependencies: [
36 | types-requests,
37 | types-redis,
38 | types-setuptools,
39 | fastapi,
40 | pydantic,
41 | sqlalchemy,
42 | httpx,
43 | dishka,
44 | ]
45 | args: [--config-file=pyproject.toml, src/, --exclude=tests/]
46 | pass_filenames: false
47 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/external_apis.py:
--------------------------------------------------------------------------------
1 | from typing import final
2 |
3 | from pydantic import Field
4 | from pydantic_settings import BaseSettings
5 |
6 |
7 | @final
8 | class ExternalAPISettings(BaseSettings):
9 | """
10 | External API configuration settings.
11 |
12 | Attributes:
13 | museum_api_base (str): Base URL for the external museum API.
14 | catalog_api_base (str): Base URL for the public catalog API.
15 | external_api_base_url (str): Alias for museum_api_base.
16 | catalog_api_base_url (str): Alias for catalog_api_base.
17 | http_timeout (float): HTTP request timeout in seconds.
18 | """
19 |
20 | museum_api_base: str = Field(
21 | "https://api.antiquarium-museum.ru", alias="MUSEUM_API_BASE"
22 | )
23 | catalog_api_base: str = Field(
24 | "https://catalog.antiquarium-museum.ru", alias="CATALOG_API_BASE"
25 | )
26 |
27 | # Aliases for compatibility
28 | external_api_base_url: str = Field(
29 | "https://api.antiquarium-museum.ru", alias="EXTERNAL_API_BASE_URL"
30 | )
31 | catalog_api_base_url: str = Field(
32 | "https://catalog.antiquarium-museum.ru", alias="CATALOG_API_BASE_URL"
33 | )
34 |
35 | http_timeout: float = Field(10.0, alias="HTTP_TIMEOUT")
36 |
37 | class Config:
38 | env_file = ".env"
39 | env_file_encoding = "utf-8"
40 | extra = "ignore"
41 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_domain/test_entities/test_artifact.py:
--------------------------------------------------------------------------------
1 | from datetime import UTC, datetime
2 | from uuid import uuid4
3 |
4 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
5 | from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
6 | from {{cookiecutter.project_slug}}.domain.value_objects.material import Material
7 |
8 |
9 | class TestArtifactEntity:
10 | def test_create_artifact_entity_success(self):
11 | """Test successful creation of ArtifactEntity"""
12 | inventory_id = uuid4()
13 | acquisition_date = datetime(2023, 1, 1, tzinfo=UTC)
14 |
15 | artifact = ArtifactEntity(
16 | inventory_id=inventory_id,
17 | acquisition_date=acquisition_date,
18 | name="Ancient Vase",
19 | department="Archaeology",
20 | era=Era(value="antiquity"),
21 | material=Material(value="ceramic"),
22 | description="A beautiful ancient vase",
23 | )
24 |
25 | assert artifact.inventory_id == inventory_id
26 | assert artifact.acquisition_date == acquisition_date
27 | assert artifact.name == "Ancient Vase"
28 | assert artifact.department == "Archaeology"
29 | assert artifact.era.value == "antiquity"
30 | assert artifact.material.value == "ceramic"
31 | assert artifact.description == "A beautiful ancient vase"
32 | assert isinstance(artifact.created_at, datetime)
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/save_artifact_to_cache.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.interfaces.cache import CacheProtocol
8 | from {{cookiecutter.project_slug}}.application.interfaces.serialization import SerializationMapperProtocol
9 |
10 | if TYPE_CHECKING:
11 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
12 |
13 | logger = structlog.get_logger(__name__)
14 |
15 |
16 | @final
17 | @dataclass(frozen=True, slots=True, kw_only=True)
18 | class SaveArtifactToCacheUseCase:
19 | """
20 | Use case for saving an artifact to the cache.
21 | """
22 |
23 | cache_client: CacheProtocol
24 | serialization_mapper: SerializationMapperProtocol
25 |
26 | async def __call__(self, inventory_id: str, artifact_dto: ArtifactDTO) -> None:
27 | """
28 | Executes the use case to save an artifact to the cache.
29 |
30 | Args:
31 | inventory_id: The ID of the artifact to save.
32 | artifact_dto: The ArtifactDTO to save.
33 | """
34 | await self.cache_client.set(
35 | inventory_id, self.serialization_mapper.to_dict(artifact_dto)
36 | )
37 | logger.info("Artifact saved to cache", inventory_id=inventory_id)
38 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/save_artifact_to_repo.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.interfaces.mappers import DtoEntityMapperProtocol
8 | from {{cookiecutter.project_slug}}.application.interfaces.uow import UnitOfWorkProtocol
9 |
10 | if TYPE_CHECKING:
11 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
12 |
13 | logger = structlog.get_logger(__name__)
14 |
15 |
16 | @final
17 | @dataclass(frozen=True, slots=True, kw_only=True)
18 | class SaveArtifactToRepoUseCase:
19 | """
20 | Use case for saving an artifact to the repository.
21 | """
22 |
23 | uow: UnitOfWorkProtocol
24 | artifact_mapper: DtoEntityMapperProtocol
25 |
26 | async def __call__(self, artifact_dto: ArtifactDTO) -> None:
27 | """
28 | Executes the use case to save an artifact to the repository.
29 |
30 | Args:
31 | artifact_dto: The ArtifactDTO to save.
32 | """
33 | async with self.uow:
34 | artifact_entity = self.artifact_mapper.to_entity(artifact_dto)
35 | await self.uow.repository.save(artifact_entity)
36 | logger.info(
37 | "Artifact saved to repository", inventory_id=artifact_dto.inventory_id
38 | )
39 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.dockerignore:
--------------------------------------------------------------------------------
1 | # Git
2 | .git
3 | .gitignore
4 | .gitattributes
5 | .gitmodules
6 |
7 | # Python
8 | __pycache__
9 | *.pyc
10 | *.pyo
11 | *.pyd
12 | .Python
13 | *.so
14 | .tox
15 | .coverage
16 | .coverage.*
17 | .cache
18 | nosetests.xml
19 | coverage.xml
20 | *.cover
21 | *.log
22 | .pytest_cache
23 | .mypy_cache
24 | .ruff_cache
25 |
26 | # Virtual environments
27 | .env
28 | .venv
29 | env/
30 | venv/
31 | ENV/
32 | env.bak/
33 | venv.bak/
34 |
35 | # IDE
36 | .vscode/
37 | .idea/
38 | *.swp
39 | *.swo
40 | *~
41 |
42 | # OS
43 | .DS_Store
44 | .DS_Store?
45 | ._*
46 | .Spotlight-V100
47 | .Trashes
48 | ehthumbs.db
49 | Thumbs.db
50 |
51 | # Documentation
52 | docs/_build/
53 | *.md
54 | !README.md
55 |
56 | # Docker
57 | Dockerfile*
58 | docker-compose*.yml
59 | .dockerignore
60 |
61 | # CI/CD
62 | .github/
63 | .gitlab-ci.yml
64 | .travis.yml
65 | .circleci/
66 |
67 | # Build artifacts
68 | build/
69 | dist/
70 | *.egg-info/
71 |
72 | # Logs
73 | logs/
74 | *.log
75 |
76 | # Temporary files
77 | tmp/
78 | temp/
79 | .tmp/
80 |
81 | # Node.js (if any frontend assets)
82 | node_modules/
83 | npm-debug.log*
84 | yarn-debug.log*
85 | yarn-error.log*
86 |
87 | # Database
88 | *.db
89 | *.sqlite
90 | *.sqlite3
91 |
92 | # Environment files
93 | .env*
94 | !.env.template
95 |
96 | # Test artifacts
97 | htmlcov/
98 | .coverage
99 | .pytest_cache/
100 | test-results/
101 |
102 | # Local development
103 | .local/
104 | local/
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/controllers/artifact_controller.py:
--------------------------------------------------------------------------------
1 | from uuid import UUID
2 |
3 | from dishka.integrations.fastapi import FromDishka, inject
4 | from fastapi import APIRouter, Path
5 |
6 | from {{cookiecutter.project_slug}}.application.use_cases.process_artifact import ProcessArtifactUseCase
7 | from {{cookiecutter.project_slug}}.presentation.api.rest.v1.mappers.artifact_mapper import ArtifactPresentationMapper
8 | from {{cookiecutter.project_slug}}.presentation.api.rest.v1.schemas import ArtifactResponseSchema
9 |
10 | router = APIRouter(prefix="/v1/artifacts", tags=["Artifacts"])
11 |
12 |
13 | @router.get(
14 | "/{inventory_id}",
15 | response_model=ArtifactResponseSchema,
16 | summary="Get artifact by inventory ID",
17 | responses={
18 | 200: {"description": "Artifact retrieved successfully"},
19 | 400: {"description": "Bad request (e.g., invalid external API response)"},
20 | 404: {"description": "Artifact not found"},
21 | 500: {"description": "Internal server error"},
22 | 502: {"description": "Failed to notify via message broker"},
23 | },
24 | )
25 | @inject
26 | async def get_artifact(
27 | inventory_id: UUID = Path(..., description="Artifact UUID"),
28 | use_case: FromDishka[ProcessArtifactUseCase] = None,
29 | presentation_mapper: FromDishka[ArtifactPresentationMapper] = None,
30 | ) -> ArtifactResponseSchema:
31 | artifact_dto = await use_case(str(inventory_id))
32 | return presentation_mapper.to_response(artifact_dto)
33 |
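Since `main.py` mounts `api_v1_router` under the `/api` prefix, the full path is `/api/v1/artifacts/{inventory_id}`. A quick smoke test, assuming the documented default host and port (localhost:8000):

```python
import httpx

inventory_id = "123e4567-e89b-12d3-a456-426614174000"  # any valid UUID
resp = httpx.get(f"http://localhost:8000/api/v1/artifacts/{inventory_id}")
print(resp.status_code, resp.json())
```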
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/v1/schemas/responses.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from uuid import UUID
3 |
4 | from pydantic import BaseModel, ConfigDict, Field
5 |
6 |
7 | class MaterialResponseSchema(BaseModel):
8 | model_config = ConfigDict(
9 | frozen=True,
10 | extra="forbid",
11 | )
12 | value: str
13 |
14 |
15 | class EraResponseSchema(BaseModel):
16 | model_config = ConfigDict(
17 | frozen=True,
18 | extra="forbid",
19 | )
20 | value: str
21 |
22 |
23 | class ArtifactResponseSchema(BaseModel):
24 | model_config = ConfigDict(
25 | frozen=True,
26 | extra="forbid",
27 | from_attributes=True,
28 | )
29 |
30 | inventory_id: UUID = Field(..., description="Unique identifier of the artifact")
31 | created_at: datetime = Field(
32 | description="Timestamp when the artifact record was created (UTC)",
33 | )
34 | acquisition_date: datetime = Field(
35 | ..., description="Date when the artifact was acquired"
36 | )
37 | name: str = Field(..., description="Name of the artifact")
38 | department: str = Field(
39 | ...,
40 | description="Department responsible for the artifact",
41 | )
42 | era: EraResponseSchema = Field(..., description="Historical era of the artifact")
43 | material: MaterialResponseSchema = Field(..., description="Material of the artifact")
44 | description: str | None = Field(
45 | None, description="Optional description of the artifact"
46 | )
47 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Description
2 |
3 |
4 |
5 | ## Type of Change
6 |
7 |
8 |
9 | - [ ] 🐛 Bug fix (non-breaking change which fixes an issue)
10 | - [ ] ✨ New feature (non-breaking change which adds functionality)
11 | - [ ] 💥 Breaking change (fix or feature that would cause existing functionality to not work as expected)
12 | - [ ] 📚 Documentation update
13 | - [ ] 🔧 Configuration change
14 | - [ ] 🧪 Test improvements
15 | - [ ] 🏗️ Refactoring (no functional changes)
16 | - [ ] 🚀 Performance improvement
17 | - [ ] 🔒 Security improvement
18 |
19 | ## Related Issues
20 |
21 |
22 |
23 | Fixes #
24 | Related to #
25 |
26 | ## Checklist
27 |
28 |
29 |
30 | - [ ] My code follows the project's style guidelines
31 | - [ ] I have performed a self-review of my own code
32 | - [ ] I have commented my code, particularly in hard-to-understand areas
33 | - [ ] I have made corresponding changes to the documentation
34 | - [ ] My changes generate no new warnings
35 | - [ ] I have added tests that prove my fix is effective or that my feature works
36 | - [ ] New and existing unit tests pass locally with my changes
37 | - [ ] Any dependent changes have been merged and published
38 |
39 | ## Screenshots/Videos
40 |
41 |
42 |
43 | ## Additional Notes
44 |
45 |
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/migrations.md:
--------------------------------------------------------------------------------
1 | # Database migrations
2 |
3 | This project uses Alembic to manage PostgreSQL database migrations.
4 |
5 | ## Migration commands
6 |
7 | ### Creating a new migration
8 | ```bash
9 | # Create a migration with autogeneration (requires DB connection)
10 | make migration msg="Description of changes"
11 |
12 | # Or directly with alembic
13 | poetry run alembic revision --autogenerate -m "Description of changes"
14 | ```
15 |
16 | ### Applying migrations
17 | ```bash
18 | # Apply all pending migrations
19 | make migrate
20 |
21 | # Or directly
22 | poetry run alembic upgrade head
23 | ```
24 |
25 | ### Rolling back migrations
26 | ```bash
27 | # Roll back one migration
28 | make migrate-downgrade
29 |
30 | # Or directly
31 | poetry run alembic downgrade -1
32 | ```
33 |
34 | ### Viewing migration history
35 | ```bash
36 | # Show migration history
37 | make migrate-history
38 |
39 | # Show the current migration
40 | make migrate-current
41 | ```
42 |
43 | ### Other useful commands
44 | ```bash
45 | # Mark the DB as being at a specific migration (without applying it)
46 | make migrate-stamp
47 |
48 | # Show SQL for a migration (without applying it)
49 | poetry run alembic upgrade head --sql
50 | ```
51 |
52 | ## File structure
53 |
54 | - `alembic.ini` - Alembic configuration
55 | - `src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/env.py` - migration environment settings
56 | - `src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/versions/` - directory with migration files
57 | - `src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/script.py.mako` - template for new migrations
58 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/get_artifact_from_cache.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.interfaces.cache import CacheProtocol
8 | from {{cookiecutter.project_slug}}.application.interfaces.serialization import SerializationMapperProtocol
9 |
10 | if TYPE_CHECKING:
11 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
12 |
13 | logger = structlog.get_logger(__name__)
14 |
15 |
16 | @final
17 | @dataclass(frozen=True, slots=True, kw_only=True)
18 | class GetArtifactFromCacheUseCase:
19 | """
20 | Use case for retrieving an artifact from the cache.
21 | """
22 |
23 | cache_client: CacheProtocol
24 | serialization_mapper: SerializationMapperProtocol
25 |
26 | async def __call__(self, inventory_id: str) -> ArtifactDTO | None:
27 | """
28 | Executes the use case to get an artifact from the cache.
29 |
30 | Args:
31 | inventory_id: The ID of the artifact to retrieve.
32 |
33 | Returns:
34 | An ArtifactDTO if found in cache, otherwise None.
35 | """
36 | cached_artifact_data: dict | None = await self.cache_client.get(inventory_id)
37 | if cached_artifact_data:
38 | logger.info("Artifact found in cache", inventory_id=inventory_id)
39 | return self.serialization_mapper.from_dict(cached_artifact_data)
40 | logger.info("Artifact not found in cache", inventory_id=inventory_id)
41 | return None
42 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/scripts/init-sqlite.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SQLite initialization script for {{ cookiecutter.project_name }}
3 | # This script is executed when the SQLite container is first created
4 |
5 | set -e
6 |
7 | echo "Initializing SQLite database for {{ cookiecutter.project_name }}..."
8 |
9 | # Create data directory if it doesn't exist
10 | mkdir -p /data
11 |
12 | # Set database path
13 | DB_PATH="/data/{{ cookiecutter.database_name }}.db"
14 |
15 | # Create SQLite database file if it doesn't exist
16 | if [ ! -f "$DB_PATH" ]; then
17 | echo "Creating SQLite database at $DB_PATH"
18 | touch "$DB_PATH"
19 | chmod 666 "$DB_PATH"
20 |
21 | # Create initial tables using sqlite3 command
22 | sqlite3 "$DB_PATH" << 'EOF'
23 | -- Enable foreign key support
24 | PRAGMA foreign_keys = ON;
25 |
26 | -- Create initial metadata table to track database initialization
27 | CREATE TABLE IF NOT EXISTS database_metadata (
28 | id INTEGER PRIMARY KEY AUTOINCREMENT,
29 | key TEXT NOT NULL UNIQUE,
30 | value TEXT NOT NULL,
31 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
32 | );
33 |
34 | -- Insert initialization record
35 | INSERT OR IGNORE INTO database_metadata (key, value) VALUES ('initialized', '{{ cookiecutter.project_name }}');
36 | INSERT OR IGNORE INTO database_metadata (key, value) VALUES ('version', '1.0.0');
37 | INSERT OR IGNORE INTO database_metadata (key, value) VALUES ('created_at', datetime('now'));
38 |
39 | -- Show database info
40 | SELECT 'Database initialized successfully' as status;
41 | SELECT datetime('now') as initialization_time;
42 | EOF
43 |
44 | echo "SQLite database initialized successfully"
45 | else
46 | echo "SQLite database already exists at $DB_PATH"
47 | fi
48 |
49 | # Set proper permissions
50 | chmod 666 "$DB_PATH"
51 |
52 | echo "SQLite initialization completed"
53 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/mappers/artifact_uow_mapper.py:
--------------------------------------------------------------------------------
1 | """Database mapper for ArtifactModel that implements DbMapperProtocol for Unit of Work.
2 |
3 | This mapper provides CRUD operations for ArtifactModel within the Unit of Work pattern.
4 | """
5 |
6 | import logging
7 | from typing import final
8 |
9 | from sqlalchemy.ext.asyncio import AsyncSession
10 |
11 | from {{cookiecutter.project_slug}}.application.interfaces.db_mapper import DbMapperProtocol
12 | from {{cookiecutter.project_slug}}.infrastructures.db.models.artifact import ArtifactModel
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
17 | @final
18 | class ArtifactUoWMapper(DbMapperProtocol[ArtifactModel]):
19 | """
20 | Mapper for ArtifactModel that works with Unit of Work pattern.
21 |
22 | This mapper handles database operations for ArtifactModel instances
23 | tracked by the Unit of Work.
24 | """
25 |
26 | def __init__(self, session: AsyncSession):
27 | self.session = session
28 |
29 | def insert(self, model: ArtifactModel) -> None:
30 | """Insert a new ArtifactModel into the database."""
31 | self.session.add(model)
32 | logger.debug(f"Added ArtifactModel {model.inventory_id} to session")
33 |
34 |     async def update(self, model: ArtifactModel) -> None:
35 |         """Update an existing ArtifactModel in the database."""
36 |         # AsyncSession.merge is a coroutine; awaiting it re-attaches the model
37 |         # to the session and marks it dirty so the change is flushed on commit.
38 |         await self.session.merge(model)
39 |         logger.debug(f"Merged ArtifactModel {model.inventory_id} into session")
40 |
41 |     async def delete(self, model: ArtifactModel) -> None:
42 |         """Delete an ArtifactModel from the database."""
43 |         await self.session.delete(model)  # AsyncSession.delete is a coroutine too
44 |         logger.debug(f"Deleted ArtifactModel {model.inventory_id} from session")
45 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/broker/publisher.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | import json
3 | from typing import final
4 |
5 | import structlog
6 | from faststream.kafka import KafkaBroker
7 |
8 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactAdmissionNotificationDTO
9 | from {{cookiecutter.project_slug}}.application.interfaces.message_broker import MessageBrokerPublisherProtocol
10 | from {{cookiecutter.project_slug}}.infrastructures.mappers.artifact import InfrastructureArtifactMapper
11 |
12 |
13 | @final
14 | @dataclass(frozen=True, slots=True, kw_only=True)
15 | class KafkaPublisher(MessageBrokerPublisherProtocol):
16 | """
17 | Kafka implementation of the MessageBrokerPublisherProtocol.
18 | Publishes artifact admission notifications to a Kafka topic.
19 | """
20 |
21 | broker: KafkaBroker
22 | topic: str = field(default="new_artifacts")
23 | mapper: InfrastructureArtifactMapper
24 |
25 | async def publish_new_artifact(
26 | self, artifact: ArtifactAdmissionNotificationDTO
27 | ) -> None:
28 | """
29 | Publishes a new artifact admission notification to Kafka.
30 |
31 | Args:
32 | artifact: The ArtifactAdmissionNotificationDTO to publish.
33 |
34 | Raises:
35 | Exception: If publishing the message fails.
36 | """
37 | try:
38 | artifact_dict = self.mapper.to_admission_notification_dict(artifact)
39 | await self.broker.publish(
40 | key=artifact_dict["inventory_id"],
41 | message=json.dumps(artifact_dict, ensure_ascii=False),
42 | topic=self.topic,
43 | )
44 | except Exception as e:
45 | logger = structlog.get_logger(__name__)
46 | logger.error("Failed to publish artifact", error=str(e))
47 | raise
48 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/get_artifact_from_repo.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.interfaces.mappers import DtoEntityMapperProtocol
8 | from {{cookiecutter.project_slug}}.application.interfaces.repositories import ArtifactRepositoryProtocol
9 | from {{cookiecutter.project_slug}}.application.interfaces.uow import UnitOfWorkProtocol
10 |
11 | if TYPE_CHECKING:
12 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
13 |
14 | logger = structlog.get_logger(__name__)
15 |
16 |
17 | @final
18 | @dataclass(frozen=True, slots=True, kw_only=True)
19 | class GetArtifactFromRepoUseCase:
20 | """
21 | Use case for retrieving an artifact from the repository.
22 | """
23 |
24 | uow: UnitOfWorkProtocol
25 | artifact_mapper: DtoEntityMapperProtocol
26 |
27 | async def __call__(self, inventory_id: str) -> ArtifactDTO | None:
28 | """
29 | Executes the use case to get an artifact from the repository.
30 |
31 | Args:
32 | inventory_id: The ID of the artifact to retrieve.
33 |
34 | Returns:
35 | An ArtifactDTO if found in the repository, otherwise None.
36 | """
37 | async with self.uow:
38 | artifact_entity: (
39 | ArtifactEntity | None
40 | ) = await self.uow.repository.get_by_inventory_id(inventory_id)
41 | if artifact_entity:
42 | logger.info("Artifact found in repository", inventory_id=inventory_id)
43 | return self.artifact_mapper.to_dto(artifact_entity)
44 | logger.info("Artifact not found in repository", inventory_id=inventory_id)
45 | return None
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/config/database.py:
--------------------------------------------------------------------------------
1 | from typing import cast, final
2 |
3 | from pydantic import Field, PostgresDsn, computed_field
4 | from pydantic_settings import BaseSettings
5 |
6 |
7 | @final
8 | class DatabaseSettings(BaseSettings):
9 | """
10 | Database configuration settings.
11 |
12 | Attributes:
13 | postgres_user (str): PostgreSQL username.
14 | postgres_password (str): PostgreSQL password.
15 | postgres_server (str): PostgreSQL server host.
16 | postgres_port (int): PostgreSQL server port.
17 | postgres_db (str): PostgreSQL database name.
18 | """
19 |
20 | postgres_user: str = Field(..., alias="POSTGRES_USER")
21 | postgres_password: str = Field(..., alias="POSTGRES_PASSWORD")
22 | postgres_server: str = Field(..., alias="POSTGRES_SERVER")
23 | postgres_port: int = Field(5432, alias="POSTGRES_PORT")
24 | postgres_db: str = Field(..., alias="POSTGRES_DB")
25 |
26 | @computed_field
27 | def database_url(self) -> PostgresDsn:
28 | """
29 | Constructs the PostgreSQL database URL.
30 |
31 | Returns:
32 | PostgresDsn: The constructed database URL.
33 | """
34 | return PostgresDsn.build(
35 | scheme="postgresql+asyncpg",
36 | username=self.postgres_user,
37 | password=self.postgres_password,
38 | host=self.postgres_server,
39 | port=self.postgres_port,
40 | path=self.postgres_db,
41 | )
42 |
43 | @computed_field
44 | def sqlalchemy_database_uri(self) -> PostgresDsn:
45 | """
46 | Returns the SQLAlchemy compatible database URI.
47 |
48 | Returns:
49 | PostgresDsn: The SQLAlchemy database URI.
50 | """
51 | return cast("PostgresDsn", self.database_url)
52 |
53 | class Config:
54 | env_file = ".env"
55 | env_file_encoding = "utf-8"
56 | extra = "ignore"
57 |
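A short sketch of the computed URL; the constructor keyword arguments use the declared aliases, and the values are placeholders:

```python
from {{cookiecutter.project_slug}}.config.database import DatabaseSettings

settings = DatabaseSettings(
    POSTGRES_USER="app",
    POSTGRES_PASSWORD="secret",
    POSTGRES_SERVER="localhost",
    POSTGRES_DB="antiques",
)
print(settings.database_url)  # postgresql+asyncpg://app:secret@localhost:5432/antiques
```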
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/versions/c3cca8a62218_initial_migration_create_artifacts_table.py:
--------------------------------------------------------------------------------
1 | """Initial migration: create artifacts table
2 |
3 | Revision ID: c3cca8a62218
4 | Revises:
5 | Create Date: 2025-09-25 06:39:37.872475
6 |
7 | """
8 |
9 | from collections.abc import Sequence
10 | from typing import Union
11 |
12 | import sqlalchemy as sa
13 | from sqlalchemy.dialects import postgresql
14 |
15 | from alembic import op
16 |
17 | # revision identifiers, used by Alembic.
18 | revision: str = "c3cca8a62218"
19 | down_revision: str | None = None
20 | branch_labels: str | Sequence[str] | None = None
21 | depends_on: str | Sequence[str] | None = None
22 |
23 |
24 | def upgrade() -> None:
25 | # Create artifacts table
26 | op.create_table(
27 | "artifacts",
28 | sa.Column("inventory_id", postgresql.UUID(as_uuid=True), nullable=False),
29 | sa.Column(
30 | "created_at",
31 | sa.DateTime(timezone=True),
32 | nullable=False,
33 | server_default=sa.text("now()"),
34 | ),
35 | sa.Column("acquisition_date", sa.DateTime(timezone=True), nullable=False),
36 | sa.Column("name", sa.String(length=255), nullable=False),
37 | sa.Column("department", sa.String(length=255), nullable=False),
38 | sa.Column("era", sa.String(length=50), nullable=False),
39 | sa.Column("material", sa.String(length=50), nullable=False),
40 | sa.Column("description", sa.Text(), nullable=True),
41 | sa.PrimaryKeyConstraint("inventory_id"),
42 | )
43 |
44 | # Create indexes
45 | op.create_index("ix_artifacts_name", "artifacts", ["name"])
46 | op.create_index("ix_artifacts_department", "artifacts", ["department"])
47 |
48 |
49 | def downgrade() -> None:
50 | # Drop indexes
51 | op.drop_index("ix_artifacts_department", table_name="artifacts")
52 | op.drop_index("ix_artifacts_name", table_name="artifacts")
53 |
54 | # Drop table
55 | op.drop_table("artifacts")
56 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # Documentation
2 |
3 | This directory contains the Sphinx documentation for the Clean Architecture FastAPI Template.
4 |
5 | ## Building Documentation
6 |
7 | ### Prerequisites
8 |
9 | Install documentation dependencies:
10 |
11 | ```bash
12 | pip install -r requirements.txt
13 | ```
14 |
15 | ### Build HTML Documentation
16 |
17 | ```bash
18 | make html
19 | ```
20 |
21 | The documentation will be available in `_build/html/index.html`.
22 |
23 | ### Build PDF Documentation
24 |
25 | ```bash
26 | make latexpdf
27 | ```
28 |
29 | ### Clean Build Files
30 |
31 | ```bash
32 | make clean
33 | ```
34 |
35 | ## Documentation Structure
36 |
37 | ```
38 | docs/
39 | ├── conf.py # Sphinx configuration
40 | ├── index.rst # Main documentation page
41 | ├── getting-started/ # Installation and quickstart guides
42 | ├── user-guide/ # User documentation
43 | ├── development/ # Development guides
44 | ├── reference/ # Reference documentation
45 | ├── changelog.rst # Version history
46 | └── license.rst # License information
47 | ```
48 |
49 | ## Writing Documentation
50 |
51 | ### Format
52 |
53 | Documentation is written in reStructuredText (`.rst`) format.
54 |
55 | ### Adding New Pages
56 |
57 | 1. Create a new `.rst` file in the appropriate directory
58 | 2. Add it to the `toctree` in `index.rst` or parent page
59 | 3. Build and verify
60 |
61 | ### Code Examples
62 |
63 | Use code blocks with language specification:
64 |
65 | ```rst
66 | .. code-block:: python
67 |
68 | def example():
69 | pass
70 | ```
71 |
72 | ### Cross-References
73 |
74 | Link to other pages:
75 |
76 | ```rst
77 | See :doc:`getting-started/installation` for details.
78 | ```
79 |
80 | ## Live Preview
81 |
82 | For live preview during development:
83 |
84 | ```bash
85 | pip install sphinx-autobuild
86 | sphinx-autobuild . _build/html
87 | ```
88 |
89 | Then open http://localhost:8000 in your browser.
90 |
91 | ## Read the Docs
92 |
93 | Documentation is automatically built and published to Read the Docs on every commit to the main branch.
94 |
95 | Configuration: `.readthedocs.yaml` in the repository root.
96 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/interfaces/cache.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Any, Protocol, TypeVar
3 |
4 | T = TypeVar("T")
5 |
6 |
7 | class CacheProtocol(Protocol):
8 | """Protocol for cache operations.
9 |
10 | This protocol defines the interface for caching implementations.
11 |     Values are exchanged as plain dictionaries (dict[str, Any]) to support JSON-style serialization.
12 | """
13 |
14 | @abstractmethod
15 | async def get(self, key: str) -> dict[str, Any] | None:
16 | """Retrieve a value from cache by key.
17 |
18 | Args:
19 | key: Cache key to retrieve
20 |
21 | Returns:
22 | Cached dictionary data or None if not found
23 | """
24 | ...
25 |
26 | @abstractmethod
27 | async def set(self, key: str, value: dict[str, Any], ttl: int | None = None) -> bool:
28 | """Store a value in cache with optional TTL.
29 |
30 | Args:
31 | key: Cache key to store under
32 | value: Dictionary data to cache
33 | ttl: Time-to-live in seconds (None for default)
34 |
35 | Returns:
36 | True if successful, False otherwise
37 | """
38 | ...
39 |
40 | @abstractmethod
41 | async def delete(self, key: str) -> bool:
42 | """Delete a value from cache.
43 |
44 | Args:
45 | key: Cache key to delete
46 |
47 | Returns:
48 | True if key was deleted, False if key didn't exist
49 | """
50 | ...
51 |
52 | @abstractmethod
53 | async def exists(self, key: str) -> bool:
54 | """Check if a key exists in cache.
55 |
56 | Args:
57 | key: Cache key to check
58 |
59 | Returns:
60 | True if key exists, False otherwise
61 | """
62 | ...
63 |
64 | @abstractmethod
65 | async def clear(self, pattern: str) -> int:
66 | """Clear cache entries matching a pattern.
67 |
68 | Args:
69 | pattern: Pattern to match keys (e.g., 'user:*')
70 |
71 | Returns:
72 | Number of keys deleted
73 | """
74 | ...
75 |
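For unit tests, a minimal in-memory sketch that satisfies this protocol could look as follows; `ttl` is accepted but ignored here, and `fnmatch` stands in for Redis-style glob matching:

```python
from fnmatch import fnmatch
from typing import Any


class InMemoryCache:
    """Hypothetical CacheProtocol implementation backed by a plain dict."""

    def __init__(self) -> None:
        self._store: dict[str, dict[str, Any]] = {}

    async def get(self, key: str) -> dict[str, Any] | None:
        return self._store.get(key)

    async def set(
        self, key: str, value: dict[str, Any], ttl: int | None = None
    ) -> bool:
        self._store[key] = value  # ttl is ignored in this sketch
        return True

    async def delete(self, key: str) -> bool:
        return self._store.pop(key, None) is not None

    async def exists(self, key: str) -> bool:
        return key in self._store

    async def clear(self, pattern: str) -> int:
        matched = [k for k in self._store if fnmatch(k, pattern)]
        for k in matched:
            del self._store[k]
        return len(matched)
```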
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/dtos/artifact.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from datetime import UTC, datetime
3 | from typing import Literal, final
4 | from uuid import UUID
5 |
6 |
7 | @final
8 | @dataclass(frozen=True, slots=True, kw_only=True)
9 | class MaterialDTO:
10 | """Data Transfer Object for Material."""
11 | value: Literal[
12 | "ceramic",
13 | "metal",
14 | "stone",
15 | "glass",
16 | "bone",
17 | "wood",
18 | "textile",
19 | "other",
20 | ]
21 |
22 |
23 | @final
24 | @dataclass(frozen=True, slots=True, kw_only=True)
25 | class EraDTO:
26 | """Data Transfer Object for Era."""
27 | value: Literal[
28 | "paleolithic",
29 | "neolithic",
30 | "bronze_age",
31 | "iron_age",
32 | "antiquity",
33 | "middle_ages",
34 | "modern",
35 | ]
36 |
37 |
38 | @final
39 | @dataclass(frozen=True, slots=True, kw_only=True)
40 | class ArtifactDTO:
41 | """Application DTO for transferring artifact data between layers.
42 |
43 | Note: This DTO does NOT perform business validation.
44 | Business rules are enforced by the Domain Entity (ArtifactEntity).
45 | DTOs are simple data carriers for inter-layer communication.
46 | """
47 | inventory_id: UUID
48 | acquisition_date: datetime
49 | name: str
50 | department: str
51 | era: EraDTO
52 | material: MaterialDTO
53 | description: str | None = None
54 | created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
55 |
56 |
57 | @final
58 | @dataclass(frozen=True, slots=True, kw_only=True)
59 | class ArtifactAdmissionNotificationDTO:
60 | """DTO for artifact admission notifications."""
61 | inventory_id: UUID
62 | name: str
63 | acquisition_date: datetime
64 | department: str
65 |
66 |
67 | @final
68 | @dataclass(frozen=True, slots=True, kw_only=True)
69 | class ArtifactCatalogPublicationDTO:
70 | """DTO for publishing artifacts to the catalog."""
71 | inventory_id: UUID
72 | name: str
73 | era: EraDTO
74 | material: MaterialDTO
75 | description: str | None = None
76 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/domain/entities/artifact.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from datetime import UTC, datetime
3 | from typing import final
4 | from uuid import UUID
5 |
6 | from {{cookiecutter.project_slug}}.domain.exceptions import DomainValidationError
7 | from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
8 | from {{cookiecutter.project_slug}}.domain.value_objects.material import Material
9 |
10 |
11 | @final
12 | @dataclass(frozen=True, slots=True, kw_only=True)
13 | class ArtifactEntity:
14 | """Domain Entity representing an artifact with business invariants.
15 |
16 | This entity enforces business rules and maintains data integrity
17 | at the domain level, ensuring that invalid artifacts cannot exist.
18 | """
19 | inventory_id: UUID
20 | created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
21 | acquisition_date: datetime
22 | name: str
23 | department: str
24 | era: Era
25 | material: Material
26 | description: str | None = None
27 |
28 | def __post_init__(self) -> None:
29 | """
30 | Validates business invariants of the ArtifactEntity.
31 |
32 | Domain entities must protect their invariants and ensure
33 | that invalid state cannot exist in the domain model.
34 |
35 | Raises:
36 | DomainValidationError: If any business invariant is violated.
37 | """
38 | if self.acquisition_date > datetime.now(UTC):
39 | raise DomainValidationError("Acquisition date cannot be in the future")
40 | if self.acquisition_date > self.created_at:
41 | raise DomainValidationError("Acquisition date cannot be later than created_at")
42 | if len(self.name) < 2 or len(self.name) > 100:
43 | raise DomainValidationError("Name must be between 2 and 100 characters")
44 | if len(self.department) < 2 or len(self.department) > 100:
45 | raise DomainValidationError("Department must be between 2 and 100 characters")
46 | if self.description is not None and len(self.description) > 1000:
47 | raise DomainValidationError("Description must be at most 1000 characters")
48 |
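These invariants fail fast at construction time. For example (values mirror the unit test elsewhere in this template):

```python
from datetime import UTC, datetime
from uuid import uuid4

from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
from {{cookiecutter.project_slug}}.domain.exceptions import DomainValidationError
from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
from {{cookiecutter.project_slug}}.domain.value_objects.material import Material

try:
    ArtifactEntity(
        inventory_id=uuid4(),
        acquisition_date=datetime(2023, 1, 1, tzinfo=UTC),
        name="X",  # violates the 2-character minimum
        department="Archaeology",
        era=Era(value="antiquity"),
        material=Material(value="ceramic"),
    )
except DomainValidationError as exc:
    print(exc)  # Name must be between 2 and 100 characters
```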
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/mypy-usage.md:
--------------------------------------------------------------------------------
1 | # Using MyPy in the Antiques project
2 |
3 | ## Installation
4 |
5 | ```bash
6 | # Install dev dependencies (including MyPy)
7 | poetry install --no-root
8 |
9 | # Or install only MyPy
10 | poetry add --group dev mypy
11 | ```
12 |
13 | ## Main commands
14 |
15 | ### Type checking
16 |
17 | ```bash
18 | # Check the entire project
19 | poetry run mypy src/
20 |
21 | # Check a specific file
22 | poetry run mypy src/main.py
23 |
24 | # Check with detailed error codes
25 | poetry run mypy src/ --show-error-codes
26 |
27 | # Check with error context
28 | poetry run mypy src/ --show-error-context
29 |
30 | # Check only specific modules
31 | poetry run mypy src/application/ src/domain/
32 | ```
33 |
34 | ### Using Makefile
35 |
36 | ```bash
37 | # Show all available commands
38 | make help
39 |
40 | # Run type checking
41 | make type-check
42 |
43 | # Run all checks (lint + format + type check)
44 | make check
45 |
46 | # Run CI pipeline
47 | make ci
48 | ```
49 |
50 | ## MyPy configuration
51 |
52 | The main MyPy configuration is located in `pyproject.toml` under the `[tool.mypy]` section.
53 |
54 | ### Core settings
55 |
56 | - **python_version**: Python 3.12
57 | - **strict**: false (soft checking by default)
58 | - **ignore_missing_imports**: true (ignore missing imports)
59 | - **warn_return_any**: true (warn on returning Any)
60 | - **no_implicit_optional**: true (require explicit Optional)
61 |
62 | ### Per-module settings
63 |
64 | ```toml
65 | # Softer rules for tests and examples
66 | [[tool.mypy.overrides]]
67 | module = [
68 | "tests.*",
69 | "examples.*",
70 | ]
71 | disallow_untyped_defs = false
72 | disallow_incomplete_defs = false
73 | check_untyped_defs = false
74 |
75 | # Ignore missing imports for external libraries
76 | [[tool.mypy.overrides]]
77 | module = [
78 | "faststream.*",
79 | "granian.*",
80 | "structlog.*",
81 | ]
82 | ignore_missing_imports = true
83 | ```
84 |
85 | ## Pre-commit integration
86 |
87 | MyPy is automatically run on commit via pre-commit hooks:
88 |
89 | ```bash
90 | # Install pre-commit hooks
91 | pre-commit install
92 |
93 | # Run hooks on all files
94 | pre-commit run --all-files
95 |
96 | # Run only mypy
97 | pre-commit run mypy
98 | ```
99 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/publish_artifact_to_broker.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.exceptions import FailedPublishArtifactMessageBrokerException
8 | from {{cookiecutter.project_slug}}.application.interfaces.mappers import DtoEntityMapperProtocol
9 | from {{cookiecutter.project_slug}}.application.interfaces.message_broker import MessageBrokerPublisherProtocol
10 |
11 | if TYPE_CHECKING:
12 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
13 |
14 | logger = structlog.get_logger(__name__)
15 |
16 |
17 | @final
18 | @dataclass(frozen=True, slots=True, kw_only=True)
19 | class PublishArtifactToBrokerUseCase:
20 | """
21 | Use case for publishing an artifact to a message broker.
22 | """
23 |
24 | message_broker: MessageBrokerPublisherProtocol
25 | artifact_mapper: DtoEntityMapperProtocol
26 |
27 | async def __call__(self, artifact_dto: ArtifactDTO) -> None:
28 | """
29 | Executes the use case to publish an artifact to the message broker.
30 |
31 | Args:
32 | artifact_dto: The ArtifactDTO to publish.
33 |
34 | Raises:
35 | FailedPublishArtifactMessageBrokerException: If publishing to the message broker fails.
36 | """
37 |         try:
38 |             artifact_entity: ArtifactEntity = self.artifact_mapper.to_entity(artifact_dto)
39 |             notification_dto = self.artifact_mapper.to_notification_dto(artifact_entity)
40 |             await self.message_broker.publish_new_artifact(notification_dto)
41 |             logger.info(
42 |                 "Published new artifact event to message broker",
43 |                 inventory_id=artifact_dto.inventory_id,
44 |             )
45 |         except Exception as e:
46 |             logger.warning(
47 |                 "Failed to publish artifact notification to message broker",
48 |                 inventory_id=artifact_dto.inventory_id,
49 |                 error=str(e),
50 |             )
51 |             raise FailedPublishArtifactMessageBrokerException(
52 |                 "Failed to publish message to broker", str(e)
53 |             ) from e
54 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.git-commit-template:
--------------------------------------------------------------------------------
1 | # Please use the following guidelines when formatting all of your
2 | # commit messages
3 | #
4 | # Commit message template:
5 | # <task>/<type>(<scope>): <subject>
6 | #
7 | # Commit message structure:
8 | # + <task> is the task number from JIRA (or any other tracking system)
9 | # + <type> describes the kind of commit. Allowed types:
10 | #   * feat - adding new functionality
11 | #   * fix - bug fixes
12 | #   * docs - anything related to documentation
13 | #   * style - codestyle fixes (tabs, indentation, periods, commas, typos,
14 | #     etc.)
15 | #   * refactor - code changes that neither fix bugs nor add new features
16 | #   * perf - changes related to speed optimization (performance)
17 | #   * test - anything related to testing
18 | #   * hotfix/revert - fixes for critical bugs (the commit linter is skipped
19 | #     for this message type)
20 | #   * build/chore - changes related to building the project / changes that
21 | #     do not touch project code (edits to .gitignore, .git-commit-template, etc.)
22 | #   * ci - CI configuration and script work
23 | # + <scope> is the area the commit applies to (e.g. a page, a page block,
24 | #   a component, a model, a method, etc.)
25 | # + <subject> is a very short description of the change in the following format:
26 | #   * imperative mood, present tense: "change", not "changing" or "changed"
27 | #   * no capitalized first letter
28 | #   * no period (.) at the end
29 | #
30 | # Commit message examples:
31 | # Antiq-1/refactor(Index): optimize the anchor scroll behavior
32 | # Antiq-2/feat(News): add the banner_image field
33 | # Antiq-3/fix(ProducHero): fix the date format in the banner
34 | #
35 | # IMPORTANT! If you need to create a commit message that does not fit the basic
36 | # template, pass the --no-verify flag when committing; the commit linter will
37 | # then be skipped
38 | #
39 | # To add the commit message template to the local config in /.git:
40 | # git config --local commit.template ".git-commit-template"
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/publish_artifact_to_catalog.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.exceptions import FailedPublishArtifactInCatalogException
8 | from {{cookiecutter.project_slug}}.application.interfaces.http_clients import PublicCatalogAPIProtocol
9 | from {{cookiecutter.project_slug}}.application.interfaces.mappers import DtoEntityMapperProtocol
10 |
11 | if TYPE_CHECKING:
12 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
13 |
14 | logger = structlog.get_logger(__name__)
15 |
16 |
17 | @final
18 | @dataclass(frozen=True, slots=True, kw_only=True)
19 | class PublishArtifactToCatalogUseCase:
20 | """
21 | Use case for publishing an artifact to a public catalog.
22 | """
23 |
24 | catalog_api_client: PublicCatalogAPIProtocol
25 | artifact_mapper: DtoEntityMapperProtocol
26 |
27 | async def __call__(self, artifact_dto: ArtifactDTO) -> None:
28 | """
29 | Executes the use case to publish an artifact to the public catalog.
30 |
31 | Args:
32 | artifact_dto: The ArtifactDTO to publish.
33 |
34 | Raises:
35 | FailedPublishArtifactInCatalogException: If publishing to the catalog fails.
36 | """
37 |         try:
38 |             artifact_entity: ArtifactEntity = self.artifact_mapper.to_entity(artifact_dto)
39 |             publication_dto = self.artifact_mapper.to_publication_dto(artifact_entity)
40 |             public_id: str = await self.catalog_api_client.publish_artifact(
41 |                 publication_dto
42 |             )
43 |             logger.info(
44 |                 "Artifact published to public catalog",
45 |                 inventory_id=artifact_dto.inventory_id,
46 |                 public_id=public_id,
47 |             )
48 |         except Exception as e:
49 |             logger.exception(
50 |                 "Failed to publish artifact to catalog",
51 |                 inventory_id=artifact_dto.inventory_id,
52 |                 error=str(e),
53 |             )
54 |             raise FailedPublishArtifactInCatalogException(
55 |                 "Could not publish artifact to catalog", str(e)
56 |             ) from e
57 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/uow.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from dataclasses import dataclass
3 | from typing import final
4 |
5 | from sqlalchemy.ext.asyncio import AsyncSession
6 |
7 | from {{cookiecutter.project_slug}}.application.interfaces.repositories import ArtifactRepositoryProtocol
8 | from {{cookiecutter.project_slug}}.application.interfaces.uow import UnitOfWorkProtocol
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
13 | @final
14 | @dataclass(frozen=True, slots=True, kw_only=True)
15 | class UnitOfWorkSQLAlchemy(UnitOfWorkProtocol):
16 | """SQLAlchemy implementation of Unit of Work pattern.
17 |
18 | This class coordinates database transactions and provides access to repositories.
19 | Uses Protocol types instead of concrete implementations for better testability
20 | and adherence to Dependency Inversion Principle.
21 | """
22 |
23 | session: AsyncSession
24 | repository: ArtifactRepositoryProtocol
25 |
26 | async def __aenter__(self) -> "UnitOfWorkSQLAlchemy":
27 | """
28 | Enters the asynchronous context manager.
29 | Returns this UOW instance.
30 | """
31 | logger.debug("Starting database transaction")
32 | return self
33 |
34 | async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
35 | """
36 | Exits the asynchronous context manager.
37 | Commits changes if no exception occurred, otherwise rolls back.
38 | """
39 | if exc_type is not None:
40 | logger.warning(
41 | "Transaction rolled back due to exception: %s - %s",
42 | exc_type.__name__,
43 | str(exc_val)
44 | )
45 | await self.rollback()
46 | else:
47 | await self.commit()
48 |
49 | async def commit(self) -> None:
50 | """
51 | Commits the current transaction to the database.
52 | """
53 | logger.debug("Committing transaction")
54 | await self.session.commit()
55 | logger.debug("Transaction committed successfully")
56 |
57 | async def rollback(self) -> None:
58 | """
59 | Rolls back the current transaction in the database.
60 | """
61 | logger.debug("Rolling back transaction")
62 | await self.session.rollback()
63 | logger.debug("Transaction rolled back successfully")
64 |
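Typical usage mirrors the use cases above. In this sketch, `session_factory` (an `async_sessionmaker`) and `repo` are assumed to come from the DI container:

```python
from {{cookiecutter.project_slug}}.infrastructures.db.uow import UnitOfWorkSQLAlchemy


async def save_entity(entity, session_factory, repo) -> None:
    async with session_factory() as session:
        uow = UnitOfWorkSQLAlchemy(session=session, repository=repo)
        async with uow:  # commits on clean exit, rolls back on exception
            await uow.repository.save(entity)
```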
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/main.py:
--------------------------------------------------------------------------------
1 | from collections.abc import AsyncIterator
2 | from contextlib import asynccontextmanager
3 |
4 | import structlog
5 | from dishka import AsyncContainer, make_async_container
6 | from dishka.integrations.fastapi import setup_dishka
7 | from fastapi import FastAPI
8 | from fastapi.middleware.cors import CORSMiddleware
9 |
10 | from {{cookiecutter.project_slug}}.config.ioc.di import get_providers
11 | from {{cookiecutter.project_slug}}.config.logging import setup_logging
12 | from {{cookiecutter.project_slug}}.presentation.api.rest.error_handling import setup_exception_handlers
13 | from {{cookiecutter.project_slug}}.presentation.api.rest.v1.routers import api_v1_router
14 |
15 | setup_logging()
16 | logger = structlog.get_logger(__name__)
17 |
18 |
19 | @asynccontextmanager
20 | async def lifespan(_: FastAPI) -> AsyncIterator[None]:
21 | """
22 | Asynchronous context manager for managing the lifespan of the FastAPI application.
23 |
24 | Args:
25 | _: The FastAPI application instance.
26 |
27 | Yields:
28 | None
29 | """
30 | logger.info("Starting application...")
31 | yield
32 | logger.info("Shutting down application...")
33 |
34 |
35 | def create_app() -> FastAPI:
36 | """
37 | Creates and configures the FastAPI application.
38 |
39 | Returns:
40 | FastAPI: The configured FastAPI application instance.
41 | """
42 | app = FastAPI(
43 | title="{{ cookiecutter.api_title }}",
44 | version="{{ cookiecutter.api_version }}",
45 | description="{{ cookiecutter.api_description }}",
46 | lifespan=lifespan,
47 | docs_url="/api/docs",
48 | redoc_url="/api/redoc",
49 | openapi_url="/api/openapi.json",
50 | )
51 |
52 | app.add_middleware( # type: ignore[call-arg]
53 | CORSMiddleware, # type: ignore[arg-type]
54 | allow_origins=[
55 | "http://localhost",
56 | "http://localhost:8080",
57 | ],
58 | allow_credentials=True,
59 | allow_methods=["*"],
60 | allow_headers=["*"],
61 | )
62 |
63 | container: AsyncContainer = make_async_container(*get_providers())
64 | setup_dishka(container, app)
65 |
66 | setup_exception_handlers(app)
67 | app.include_router(api_v1_router, prefix="/api")
68 |
69 | return app
70 |
71 |
72 | app = create_app()
73 |
--------------------------------------------------------------------------------
/docs/reference/environment-variables.rst:
--------------------------------------------------------------------------------
1 | Environment Variables
2 | =====================
3 |
4 | Complete reference of environment variables used in generated projects.
5 |
6 | Application Variables
7 | ---------------------
8 |
9 | APP_NAME
10 | ~~~~~~~~
11 | * **Type**: String
12 | * **Default**: Project name
13 | * **Description**: Application name
14 |
15 | APP_VERSION
16 | ~~~~~~~~~~~
17 | * **Type**: String
18 | * **Default**: 0.1.0
19 | * **Description**: Application version
20 |
21 | DEBUG
22 | ~~~~~
23 | * **Type**: Boolean
24 | * **Default**: false
25 | * **Description**: Enable debug mode
26 |
27 | LOG_LEVEL
28 | ~~~~~~~~~
29 | * **Type**: String
30 | * **Default**: INFO
31 | * **Options**: DEBUG, INFO, WARNING, ERROR, CRITICAL
32 | * **Description**: Logging level
33 |
34 | Server Variables
35 | ----------------
36 |
37 | HOST
38 | ~~~~
39 | * **Type**: String
40 | * **Default**: 0.0.0.0
41 | * **Description**: Server host
42 |
43 | PORT
44 | ~~~~
45 | * **Type**: Integer
46 | * **Default**: 8000
47 | * **Description**: Server port
48 |
49 | WORKERS
50 | ~~~~~~~
51 | * **Type**: Integer
52 | * **Default**: 1
53 | * **Description**: Number of worker processes
54 |
55 | Database Variables
56 | ------------------
57 |
58 | DATABASE_URL
59 | ~~~~~~~~~~~~
60 | * **Type**: String
61 | * **Required**: Yes
62 | * **Format**: ``driver://user:password@host:port/database``
63 | * **Examples**:
64 |
65 | * PostgreSQL: ``postgresql+asyncpg://user:pass@localhost:5432/db``
66 | * MySQL: ``mysql+aiomysql://user:pass@localhost:3306/db``
67 | * SQLite: ``sqlite+aiosqlite:///./database.db``
68 |
69 | DB_ECHO
70 | ~~~~~~~
71 | * **Type**: Boolean
72 | * **Default**: false
73 | * **Description**: Echo SQL queries to console
74 |
75 | DB_POOL_SIZE
76 | ~~~~~~~~~~~~
77 | * **Type**: Integer
78 | * **Default**: 5
79 | * **Description**: Connection pool size
80 |
81 | Cache Variables
82 | ---------------
83 |
84 | REDIS_URL
85 | ~~~~~~~~~
86 | * **Type**: String
87 | * **Format**: ``redis://host:port/db``
88 | * **Example**: ``redis://localhost:6379/0``
89 |
90 | REDIS_PASSWORD
91 | ~~~~~~~~~~~~~~
92 | * **Type**: String
93 | * **Description**: Redis password
94 |
95 | See Also
96 | --------
97 |
98 | * :doc:`../user-guide/configuration` - Configuration guide
99 | * :doc:`../getting-started/template-variables` - Template variables
100 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/ruff-usage.md:
--------------------------------------------------------------------------------
1 | # Using Ruff in the Antiques project
2 |
3 | ## Installation
4 |
5 | ```bash
6 | # Install dev dependencies (including Ruff)
7 | poetry install --no-root
8 |
9 | # Or install only Ruff
10 | poetry add --group dev ruff
11 | ```
12 |
13 | ## Main commands
14 |
15 | ### Linting
16 |
17 | ```bash
18 | # Lint the entire project
19 | poetry run ruff check src/ tests/
20 |
21 | # Lint a specific file
22 | poetry run ruff check src/main.py
23 |
24 | # Lint with verbose output
25 | poetry run ruff check src/ tests/ --verbose
26 |
27 | # Lint only with specific rules
28 | poetry run ruff check src/ --select E,F,W
29 |
30 | # Ignore specific rules (use codes Ruff implements; W503, for example, is not)
31 | poetry run ruff check src/ --ignore E501,F401
32 | ```
33 |
34 | ### Automatic fixing
35 |
36 | ```bash
37 | # Fix all automatically fixable issues
38 | poetry run ruff check --fix src/ tests/
39 |
40 | # Fix only specific rules
41 | poetry run ruff check --fix --select E,F src/
42 |
43 | # Show what would be fixed without actually fixing
44 | poetry run ruff check --diff src/
45 | ```
46 |
47 | ### Code formatting
48 |
49 | ```bash
50 | # Format the entire project
51 | poetry run ruff format src/ tests/
52 |
53 | # Check formatting without making changes
54 | poetry run ruff format --check src/ tests/
55 |
56 | # Show formatting diff
57 | poetry run ruff format --diff src/
58 | ```
59 |
60 | ### Full check
61 |
62 | ```bash
63 | # Check and fix all issues
64 | poetry run ruff check --fix src/ tests/
65 | poetry run ruff format src/ tests/
66 |
67 | # Or chain both steps in a single line
68 | poetry run ruff check --fix src/ tests/ && poetry run ruff format src/ tests/
69 | ```
70 |
71 | ## Using Makefile
72 |
73 | ```bash
74 | # Show all available commands
75 | make help
76 |
77 | # Install dev dependencies
78 | make install-dev
79 |
80 | # Lint code
81 | make lint
82 |
83 | # Automatically fix issues
84 | make lint-fix
85 |
86 | # Format code
87 | make format
88 |
89 | # Run all checks
90 | make check
91 |
92 | # Run tests
93 | make test
94 |
95 | # Run tests with coverage
96 | make test-cov
97 |
98 | # Clean cache
99 | make clean
100 |
101 | # Set up dev environment
102 | make dev-setup
103 |
104 | # Run CI pipeline
105 | make ci
106 | ```
107 |
108 | ## Pre-commit hooks
109 |
110 | ```bash
111 | # Install pre-commit hooks
112 | pre-commit install
113 |
114 | # Run hooks on all files
115 | pre-commit run --all-files
116 |
117 | # Update hooks
118 | pre-commit autoupdate
119 | ```
120 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/fetch_artifact_from_museum_api.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.exceptions import (
8 | ArtifactNotFoundError,
9 | FailedFetchArtifactMuseumAPIException,
10 | )
11 | from {{cookiecutter.project_slug}}.application.interfaces.http_clients import ExternalMuseumAPIProtocol
12 |
13 | if TYPE_CHECKING:
14 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
15 |
16 | logger = structlog.get_logger(__name__)
17 |
18 |
19 | @final
20 | @dataclass(frozen=True, slots=True, kw_only=True)
21 | class FetchArtifactFromMuseumAPIUseCase:
22 | """
23 | Use case for fetching an artifact from an external museum API.
24 | """
25 |
26 | museum_api_client: ExternalMuseumAPIProtocol
27 |
28 | async def __call__(self, inventory_id: str) -> ArtifactDTO:
29 | """
30 | Executes the use case to fetch an artifact.
31 |
32 | Args:
33 | inventory_id: The ID of the artifact to fetch.
34 |
35 | Returns:
36 | An ArtifactDTO representing the fetched artifact.
37 |
38 | Raises:
39 | ArtifactNotFoundError: If the artifact is not found in the external API.
40 | FailedFetchArtifactMuseumAPIException: If fetching the artifact fails for any other reason.
41 | """
42 | logger.info(
43 | "Artifact not found locally, fetching from external museum API...",
44 | inventory_id=inventory_id,
45 | )
46 | try:
47 | artifact_dto = await self.museum_api_client.fetch_artifact(inventory_id)
48 | logger.info("Artifact fetched from museum API", inventory_id=inventory_id)
49 | return artifact_dto
50 | except ArtifactNotFoundError as e:
51 | logger.error(
52 | "Artifact not found in external museum API",
53 | inventory_id=inventory_id,
54 | error=str(e),
55 | )
56 | raise
57 | except Exception as e:
58 | logger.exception(
59 | "Failed to fetch artifact from external museum API",
60 | inventory_id=inventory_id,
61 | error=str(e),
62 | )
63 | raise FailedFetchArtifactMuseumAPIException(
64 | "Could not fetch artifact from external service", str(e)
65 | ) from e
66 |
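# Usage sketch (hypothetical wiring; in the generated project the client is
# provided by the DI container rather than constructed by hand):
#
#     use_case = FetchArtifactFromMuseumAPIUseCase(museum_api_client=client)
#     artifact_dto = await use_case("some-inventory-id")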
--------------------------------------------------------------------------------
/docs/license.rst:
--------------------------------------------------------------------------------
1 | License
2 | =======
3 |
4 | The Clean Architecture FastAPI Template is released under the MIT License.
5 |
6 | MIT License
7 | -----------
8 |
9 | Copyright (c) 2025 Peopl3s
10 |
11 | Permission is hereby granted, free of charge, to any person obtaining a copy
12 | of this software and associated documentation files (the "Software"), to deal
13 | in the Software without restriction, including without limitation the rights
14 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
15 | copies of the Software, and to permit persons to whom the Software is
16 | furnished to do so, subject to the following conditions:
17 |
18 | The above copyright notice and this permission notice shall be included in all
19 | copies or substantial portions of the Software.
20 |
21 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
22 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
23 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
24 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
25 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
26 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
27 | SOFTWARE.
28 |
29 | Generated Projects
30 | ------------------
31 |
32 | Projects generated from this template can use any license you choose. The template
33 | includes options for:
34 |
35 | * MIT License
36 | * Apache License 2.0
37 | * GNU General Public License v3.0
38 | * BSD 3-Clause License
39 | * No License
40 |
41 | Select your preferred license when creating a project using the ``license_type``
42 | template variable.
43 |
44 | Third-Party Licenses
45 | --------------------
46 |
47 | This template uses several third-party packages, each with their own licenses:
48 |
49 | FastAPI
50 | ~~~~~~~
51 |
52 | * **License**: MIT
53 | * **URL**: https://github.com/tiangolo/fastapi
54 |
55 | SQLAlchemy
56 | ~~~~~~~~~~
57 |
58 | * **License**: MIT
59 | * **URL**: https://github.com/sqlalchemy/sqlalchemy
60 |
61 | Pydantic
62 | ~~~~~~~~
63 |
64 | * **License**: MIT
65 | * **URL**: https://github.com/pydantic/pydantic
66 |
67 | Ruff
68 | ~~~~
69 |
70 | * **License**: MIT
71 | * **URL**: https://github.com/astral-sh/ruff
72 |
73 | Dishka
74 | ~~~~~~
75 |
76 | * **License**: Apache 2.0
77 | * **URL**: https://github.com/reagento/dishka
78 |
79 | For a complete list of dependencies and their licenses, see the ``pyproject.toml``
80 | file in generated projects.
81 |
82 | Contributing
83 | ------------
84 |
85 | By contributing to this template, you agree that your contributions will be
86 | licensed under the MIT License.
87 |
88 | See :doc:`development/contributing` for contribution guidelines.
89 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/migrations/env.py:
--------------------------------------------------------------------------------
1 | """Simple Alembic environment configuration for the Antiques project."""
2 |
3 | import asyncio
4 | from logging.config import fileConfig
5 | import os
6 |
7 | from sqlalchemy import pool
8 | from sqlalchemy.engine import Connection
9 | from sqlalchemy.ext.asyncio import async_engine_from_config
10 |
11 | from alembic import context
12 |
13 | # Import your models here
14 | from {{cookiecutter.project_slug}}.infrastructures.db.models.artifact import mapper_registry
15 |
16 | # this is the Alembic Config object, which provides
17 | # access to the values within the .ini file in use.
18 | config = context.config
19 |
20 | # Interpret the config file for Python logging.
21 | # This sets up the loggers.
22 | if config.config_file_name is not None:
23 | fileConfig(config.config_file_name)
24 |
25 | # add your model's MetaData object here
26 | # for 'autogenerate' support
27 | target_metadata = mapper_registry.metadata
28 |
29 | # other values from the config, defined by the needs of env.py,
30 | # can be acquired:
31 | # my_important_option = config.get_main_option("my_important_option")
32 | # ... etc.
33 |
34 |
35 | def get_url() -> str:
36 | """Get database URL from environment variable or config."""
37 | return os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url"))
38 |
39 |
40 | def run_migrations_offline() -> None:
41 | """Run migrations in 'offline' mode."""
42 | url = get_url()
43 | context.configure(
44 | url=url,
45 | target_metadata=target_metadata,
46 | )
47 |
48 | with context.begin_transaction():
49 | context.run_migrations()
50 |
51 |
52 | def do_run_migrations(connection: Connection) -> None:
53 | """Run migrations with the given connection."""
54 | context.configure(connection=connection, target_metadata=target_metadata)
55 |
56 | with context.begin_transaction():
57 | context.run_migrations()
58 |
59 |
60 | async def run_async_migrations() -> None:
61 | """Run migrations in 'online' mode with async engine."""
62 | configuration = config.get_section(config.config_ini_section, {})
63 | configuration["sqlalchemy.url"] = get_url()
64 |
65 | connectable = async_engine_from_config(
66 | configuration,
67 | prefix="sqlalchemy.",
68 | poolclass=pool.NullPool,
69 | )
70 |
71 | async with connectable.connect() as connection:
72 | await connection.run_sync(do_run_migrations)
73 |
74 | await connectable.dispose()
75 |
76 |
77 | def run_migrations_online() -> None:
78 | """Run migrations in 'online' mode."""
79 | asyncio.run(run_async_migrations())
80 |
81 |
82 | if context.is_offline_mode():
83 | run_migrations_offline()
84 | else:
85 | run_migrations_online()
86 |
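# Typical invocations from the project root (standard Alembic CLI):
#
#     alembic revision --autogenerate -m "describe change"
#     alembic upgrade head
#
# When DATABASE_URL is set, it overrides sqlalchemy.url from alembic.ini (see get_url).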
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_presentation/test_api/test_controllers/test_artifact_controller.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import AsyncMock
2 | from uuid import uuid4
3 |
4 | from fastapi import HTTPException, status
5 | import pytest
6 |
7 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO, EraDTO, MaterialDTO
8 | from {{cookiecutter.project_slug}}.application.exceptions import (
9 | ArtifactNotFoundError,
10 | FailedFetchArtifactMuseumAPIException,
11 | FailedPublishArtifactInCatalogException,
12 | FailedPublishArtifactMessageBrokerException,
13 | )
14 | from {{cookiecutter.project_slug}}.application.use_cases.process_artifact import ProcessArtifactUseCase
15 |
16 |
17 | class TestArtifactController:
18 | async def _call_controller_with_mock(
19 | self, inventory_id: str, mock_use_case: ProcessArtifactUseCase
20 | ):
21 | """Helper method to call the controller function with a mock use case"""
22 | try:
23 | return await mock_use_case(inventory_id)
24 | except ArtifactNotFoundError as err:
25 | raise HTTPException(
26 | status_code=status.HTTP_404_NOT_FOUND,
27 | detail="Artifact not found in the system.",
28 | ) from err
29 | except FailedFetchArtifactMuseumAPIException as err:
30 | raise HTTPException(
31 | status_code=status.HTTP_400_BAD_REQUEST,
32 | detail="Failed to fetch artifact data from the museum API.",
33 | ) from err
34 | except FailedPublishArtifactInCatalogException as err:
35 | raise HTTPException(
36 | status_code=status.HTTP_400_BAD_REQUEST,
37 | detail="Artifact could not be published in the catalog.",
38 | ) from err
39 | except FailedPublishArtifactMessageBrokerException as err:
40 | raise HTTPException(
41 | status_code=status.HTTP_502_BAD_GATEWAY,
42 | detail="Failed to send notification via message broker.",
43 | ) from err
44 |
45 | @pytest.mark.asyncio
46 | async def test_get_artifact_success(self):
47 | """Test successful artifact retrieval"""
48 | inventory_id = str(uuid4())
49 | expected_dto = ArtifactDTO(
50 | inventory_id=uuid4(),
51 | created_at="2023-01-01T00:00:00Z",
52 | acquisition_date="2023-01-01T00:00:00Z",
53 | name="Ancient Vase",
54 | department="Archaeology",
55 | era=EraDTO(value="antiquity"),
56 | material=MaterialDTO(value="ceramic"),
57 | description="A beautiful ancient vase",
58 | )
59 |
60 | mock_use_case = AsyncMock()
61 | mock_use_case.return_value = expected_dto
62 |
63 | result = await self._call_controller_with_mock(inventory_id, mock_use_case)
64 |
65 | assert result == expected_dto
66 | mock_use_case.assert_called_once_with(inventory_id)
67 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/mappers.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import final
3 |
4 | from {{cookiecutter.project_slug}}.application.dtos.artifact import (
5 | ArtifactAdmissionNotificationDTO,
6 | ArtifactCatalogPublicationDTO,
7 | ArtifactDTO,
8 | EraDTO,
9 | MaterialDTO,
10 | )
11 | from {{cookiecutter.project_slug}}.application.interfaces.mappers import DtoEntityMapperProtocol
12 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
13 | from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
14 | from {{cookiecutter.project_slug}}.domain.value_objects.material import Material
15 |
16 |
17 | @final
18 | @dataclass(frozen=True, slots=True)
19 | class ArtifactMapper(DtoEntityMapperProtocol):
20 | """Mapper for converting between Domain Entities and Application DTOs.
21 |
22 | This mapper is part of the Application layer and handles conversions between:
23 | - Domain Entities (business logic)
24 | - Application DTOs (use case data transfer)
25 |
26 | It does NOT handle infrastructure concerns like JSON serialization.
27 | """
28 |
29 | def to_dto(self, entity: ArtifactEntity) -> ArtifactDTO:
30 | """Convert Domain Entity to Application DTO."""
31 | return ArtifactDTO(
32 | inventory_id=entity.inventory_id,
33 | created_at=entity.created_at,
34 | acquisition_date=entity.acquisition_date,
35 | name=entity.name,
36 | department=entity.department,
37 | era=EraDTO(value=entity.era.value),
38 | material=MaterialDTO(value=entity.material.value),
39 | description=entity.description,
40 | )
41 |
42 | def to_entity(self, dto: ArtifactDTO) -> ArtifactEntity:
43 | """Convert Application DTO to Domain Entity."""
44 | return ArtifactEntity(
45 | inventory_id=dto.inventory_id,
46 | name=dto.name,
47 | acquisition_date=dto.acquisition_date,
48 | department=dto.department,
49 | era=Era(value=dto.era.value),
50 | material=Material(value=dto.material.value),
51 | description=dto.description,
52 | )
53 |
54 | def to_notification_dto(
55 | self, entity: ArtifactEntity
56 | ) -> ArtifactAdmissionNotificationDTO:
57 | """Convert Domain Entity to Notification DTO for message broker."""
58 | return ArtifactAdmissionNotificationDTO(
59 | inventory_id=entity.inventory_id,
60 | name=entity.name,
61 | acquisition_date=entity.acquisition_date,
62 | department=entity.department,
63 | )
64 |
65 | def to_publication_dto(
66 | self, entity: ArtifactEntity
67 | ) -> ArtifactCatalogPublicationDTO:
68 | """Convert Domain Entity to Publication DTO for external catalog API."""
69 | return ArtifactCatalogPublicationDTO(
70 | inventory_id=entity.inventory_id,
71 | name=entity.name,
72 | era=EraDTO(value=entity.era.value),
73 | material=MaterialDTO(value=entity.material.value),
74 | description=entity.description,
75 | )
76 |
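# Round-trip sketch (illustrative; entity and DTO fields as defined above):
#
#     mapper = ArtifactMapper()
#     dto = mapper.to_dto(entity)           # domain entity -> application DTO
#     entity_again = mapper.to_entity(dto)  # application DTO -> domain entity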
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/models/artifact.py:
--------------------------------------------------------------------------------
1 | from datetime import UTC, datetime
2 | from uuid import UUID
3 |
4 | from sqlalchemy import DateTime, Index, String, Text, func
5 | from sqlalchemy.dialects.postgresql import UUID as PG_UUID
6 | from sqlalchemy.orm import Mapped, mapped_column, registry
7 |
8 | mapper_registry = registry()
9 |
10 |
11 | @mapper_registry.mapped
12 | class ArtifactModel:
13 | """
14 | SQLAlchemy model for storing artifact data.
15 |
16 | Maps to the 'artifacts' table in the database.
17 | """
18 | __tablename__ = "artifacts"
19 | __table_args__ = (
20 | Index("ix_artifacts_name", "name"),
21 | Index("ix_artifacts_department", "department"),
22 | )
23 |
24 | def __init__(
25 | self,
26 | *,
27 | inventory_id: UUID,
28 | created_at: datetime,
29 | acquisition_date: datetime,
30 | name: str,
31 | department: str,
32 | era: str,
33 | material: str,
34 | description: str | None = None,
35 | ) -> None:
36 | """
37 | Initializes a new ArtifactModel instance.
38 |
39 | Args:
40 | inventory_id: Unique identifier for the artifact.
41 | created_at: Timestamp when the artifact record was created.
42 | acquisition_date: Date when the artifact was acquired.
43 | name: Name of the artifact.
44 | department: Department where the artifact is located.
45 | era: Historical era of the artifact.
46 | material: Primary material of the artifact.
47 | description: Optional description of the artifact.
48 | """
49 | self.inventory_id = inventory_id
50 | self.created_at = created_at
51 | self.acquisition_date = acquisition_date
52 | self.name = name
53 | self.department = department
54 | self.era = era
55 | self.material = material
56 | self.description = description
57 |
58 | inventory_id: Mapped[UUID] = mapped_column(
59 | PG_UUID(as_uuid=True),
60 | primary_key=True,
61 | nullable=False,
62 | )
63 | created_at: Mapped[datetime] = mapped_column(
64 | DateTime(timezone=True),
65 | nullable=False,
66 | default=lambda: datetime.now(UTC),
67 | server_default=func.now(),
68 | )
69 | acquisition_date: Mapped[datetime] = mapped_column(
70 | DateTime(timezone=True), nullable=False
71 | )
72 | name: Mapped[str] = mapped_column(String(length=255), nullable=False)
73 | department: Mapped[str] = mapped_column(String(length=255), nullable=False)
74 | era: Mapped[str] = mapped_column(String(length=50), nullable=False)
75 | material: Mapped[str] = mapped_column(String(length=50), nullable=False)
76 | description: Mapped[str | None] = mapped_column(Text, nullable=True)
77 |
78 | def __repr__(self) -> str:
79 | """
80 | Returns a string representation of the ArtifactModel.
81 | """
82 | return (
83 | f"<ArtifactModel(inventory_id={self.inventory_id!r}, "
84 | f"name={self.name!r}, department={self.department!r})>"
85 | )
86 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/mappers/artifact.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from datetime import datetime
3 | from typing import final
4 | from uuid import UUID
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import (
7 | ArtifactAdmissionNotificationDTO,
8 | ArtifactCatalogPublicationDTO,
9 | ArtifactDTO,
10 | EraDTO,
11 | MaterialDTO,
12 | )
13 | from {{cookiecutter.project_slug}}.application.interfaces.serialization import SerializationMapperProtocol
14 |
15 |
16 | @final
17 | @dataclass(frozen=True, slots=True)
18 | class InfrastructureArtifactMapper(SerializationMapperProtocol):
19 | """Mapper for converting Application DTOs to
20 | dictionaries for external API communication.
21 |
22 | This mapper implements:
23 | - SerializationMapperProtocol: JSON serialization/deserialization
24 | for caching and external APIs
25 | """
26 |
27 | def to_dict(self, dto: ArtifactDTO) -> dict:
28 | """
29 | Converts an Application ArtifactDTO to a dictionary for JSON serialization
30 | (e.g., caching, external APIs).
31 | """
32 | return {
33 | "inventory_id": str(dto.inventory_id),
34 | "created_at": dto.created_at.isoformat(),
35 | "acquisition_date": dto.acquisition_date.isoformat(),
36 | "name": dto.name,
37 | "department": dto.department,
38 | "era": {"value": dto.era.value},
39 | "material": {"value": dto.material.value},
40 | "description": dto.description,
41 | }
42 |
43 | def from_dict(self, data: dict) -> ArtifactDTO:
44 | """
45 | Converts a dictionary from JSON deserialization to an Application ArtifactDTO.
46 | """
47 | return ArtifactDTO(
48 | inventory_id=UUID(data["inventory_id"]),
49 | created_at=datetime.fromisoformat(data["created_at"]),
50 | acquisition_date=datetime.fromisoformat(data["acquisition_date"]),
51 | name=data["name"],
52 | department=data["department"],
53 | era=EraDTO(value=data["era"]["value"]),
54 | material=MaterialDTO(value=data["material"]["value"]),
55 | description=data.get("description"),
56 | )
57 |
58 | def to_admission_notification_dict(
59 | self, dto: ArtifactAdmissionNotificationDTO
60 | ) -> dict:
61 | """
62 | Converts an ArtifactAdmissionNotificationDTO to
63 | a dictionary for message broker communication.
64 | """
65 | return {
66 | "inventory_id": str(dto.inventory_id),
67 | "name": dto.name,
68 | "acquisition_date": dto.acquisition_date.isoformat(),
69 | "department": dto.department,
70 | }
71 |
72 | def to_catalog_publication_dict(
73 | self, dto: ArtifactCatalogPublicationDTO
74 | ) -> dict:
75 | """
76 | Converts an ArtifactCatalogPublicationDTO to a dictionary
77 | for external catalog API communication.
78 | """
79 | return {
80 | "inventory_id": str(dto.inventory_id),
81 | "name": dto.name,
82 | "era": {"value": dto.era.value},
83 | "material": {"value": dto.material.value},
84 | "description": dto.description,
85 | }
86 |
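# Serialization sketch (e.g. for the Redis cache; stdlib json assumed):
#
#     import json
#     mapper = InfrastructureArtifactMapper()
#     payload = json.dumps(mapper.to_dict(dto))        # DTO -> JSON string
#     restored = mapper.from_dict(json.loads(payload)) # JSON string -> DTO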
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/presentation/api/rest/error_handling.py:
--------------------------------------------------------------------------------
1 | from fastapi import FastAPI, Request, status
2 | from fastapi.responses import JSONResponse
3 |
4 | from {{cookiecutter.project_slug}}.application.exceptions import (
5 | ArtifactNotFoundError,
6 | FailedFetchArtifactMuseumAPIException,
7 | FailedPublishArtifactInCatalogException,
8 | FailedPublishArtifactMessageBrokerException,
9 | )
10 | from {{cookiecutter.project_slug}}.domain.exceptions import (
11 | DomainValidationError,
12 | InvalidEraException,
13 | InvalidMaterialException,
14 | )
15 |
16 |
17 | def setup_exception_handlers(app: FastAPI) -> None:
18 | @app.exception_handler(ArtifactNotFoundError)
19 | async def artifact_not_found_exception_handler(
20 | request: Request,
21 | exc: ArtifactNotFoundError,
22 | ) -> JSONResponse:
23 | return JSONResponse(
24 | status_code=status.HTTP_404_NOT_FOUND,
25 | content={"message": str(exc)},
26 | )
27 |
28 | @app.exception_handler(FailedFetchArtifactMuseumAPIException)
29 | async def failed_fetch_artifact_museum_api_exception_handler(
30 | request: Request,
31 | exc: FailedFetchArtifactMuseumAPIException,
32 | ) -> JSONResponse:
33 | return JSONResponse(
34 | status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
35 | content={"message": str(exc)},
36 | )
37 |
38 | @app.exception_handler(FailedPublishArtifactInCatalogException)
39 | async def failed_publish_artifact_in_catalog_exception_handler(
40 | request: Request,
41 | exc: FailedPublishArtifactInCatalogException,
42 | ) -> JSONResponse:
43 | return JSONResponse(
44 | status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
45 | content={"message": str(exc)},
46 | )
47 |
48 | @app.exception_handler(FailedPublishArtifactMessageBrokerException)
49 | async def failed_publish_artifact_message_broker_exception_handler(
50 | request: Request,
51 | exc: FailedPublishArtifactMessageBrokerException,
52 | ) -> JSONResponse:
53 | return JSONResponse(
54 | status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
55 | content={"message": str(exc)},
56 | )
57 |
58 | @app.exception_handler(DomainValidationError)
59 | async def domain_validation_error_handler(
60 | request: Request,
61 | exc: DomainValidationError,
62 | ) -> JSONResponse:
63 | return JSONResponse(
64 | status_code=status.HTTP_400_BAD_REQUEST,
65 | content={"message": str(exc)},
66 | )
67 |
68 | @app.exception_handler(InvalidEraException)
69 | async def invalid_era_exception_handler(
70 | request: Request,
71 | exc: InvalidEraException,
72 | ) -> JSONResponse:
73 | return JSONResponse(
74 | status_code=status.HTTP_400_BAD_REQUEST,
75 | content={"message": str(exc)},
76 | )
77 |
78 | @app.exception_handler(InvalidMaterialException)
79 | async def invalid_material_exception_handler(
80 | request: Request,
81 | exc: InvalidMaterialException,
82 | ) -> JSONResponse:
83 | return JSONResponse(
84 | status_code=status.HTTP_400_BAD_REQUEST,
85 | content={"message": str(exc)},
86 | )
87 |
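# Registered once at startup (see create_app in main.py):
#
#     setup_exception_handlers(app)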
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | """Sphinx configuration file for Clean Architecture FastAPI Template."""
2 |
3 | import os
4 | import sys
5 | from datetime import datetime
6 |
7 | # -- Project information -----------------------------------------------------
8 | project = "Clean Architecture FastAPI Template"
9 | copyright = f"{datetime.now().year}, Peopl3s"
10 | author = "Peopl3s"
11 | release = "1.0.0"
12 | version = "1.0.0"
13 |
14 | # -- General configuration ---------------------------------------------------
15 | extensions = [
16 | "sphinx.ext.autodoc",
17 | "sphinx.ext.napoleon",
18 | "sphinx.ext.viewcode",
19 | "sphinx.ext.intersphinx",
20 | "sphinx.ext.todo",
21 | "sphinx_copybutton",
22 | "myst_parser",
23 | "sphinxcontrib.mermaid",
24 | ]
25 |
26 | # Add any paths that contain templates here, relative to this directory.
27 | templates_path = ["_templates"]
28 |
29 | # List of patterns, relative to source directory, that match files and
30 | # directories to ignore when looking for source files.
31 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
32 |
33 | # The suffix(es) of source filenames.
34 | source_suffix = {
35 | ".rst": "restructuredtext",
36 | ".md": "markdown",
37 | }
38 |
39 | # The master toctree document.
40 | master_doc = "index"
41 |
42 | # -- Options for HTML output -------------------------------------------------
43 | html_theme = "sphinx_rtd_theme"
44 | html_static_path = ["_static"]
45 | html_logo = None
46 | html_favicon = None
47 |
48 | html_theme_options = {
49 | "logo_only": False,
50 | "display_version": True,
51 | "prev_next_buttons_location": "bottom",
52 | "style_external_links": True,
53 | "collapse_navigation": False,
54 | "sticky_navigation": True,
55 | "navigation_depth": 4,
56 | "includehidden": True,
57 | "titles_only": False,
58 | }
59 |
60 | # -- Extension configuration -------------------------------------------------
61 |
62 | # Napoleon settings
63 | napoleon_google_docstring = True
64 | napoleon_numpy_docstring = False
65 | napoleon_include_init_with_doc = True
66 | napoleon_include_private_with_doc = False
67 | napoleon_include_special_with_doc = True
68 | napoleon_use_admonition_for_examples = True
69 | napoleon_use_admonition_for_notes = True
70 | napoleon_use_admonition_for_references = False
71 | napoleon_use_ivar = False
72 | napoleon_use_param = True
73 | napoleon_use_rtype = True
74 |
75 | # Intersphinx mapping
76 | intersphinx_mapping = {
77 | "python": ("https://docs.python.org/3", None),
78 | "fastapi": ("https://fastapi.tiangolo.com", None),
79 | "cookiecutter": ("https://cookiecutter.readthedocs.io/en/stable/", None),
80 | }
81 |
82 | # Todo extension settings
83 | todo_include_todos = True
84 |
85 | # MyST parser settings
86 | myst_enable_extensions = [
87 | "colon_fence",
88 | "deflist",
89 | "substitution",
90 | "tasklist",
91 | ]
92 |
93 | # Copybutton settings
94 | copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
95 | copybutton_prompt_is_regexp = True
96 |
97 | # HTML context
98 | html_context = {
99 | "display_github": True,
100 | "github_user": "Peopl3s",
101 | "github_repo": "clean-architecture-fastapi-project-template",
102 | "github_version": "main",
103 | "conf_py_path": "/docs/",
104 | }
105 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/mappers/artifact_db_mapper.py:
--------------------------------------------------------------------------------
1 | """Database mapper for converting between Domain Entities and SQLAlchemy Models.
2 |
3 | This mapper is responsible for the conversion logic between the domain layer
4 | and the database persistence layer, following the Single Responsibility Principle.
5 | """
6 |
7 | from dataclasses import dataclass
8 | from typing import final
9 |
10 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
11 | from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
12 | from {{cookiecutter.project_slug}}.domain.value_objects.material import Material
13 | from {{cookiecutter.project_slug}}.infrastructures.db.models.artifact import ArtifactModel
14 |
15 |
16 | @final
17 | @dataclass(frozen=True, slots=True)
18 | class ArtifactDBMapper:
19 | """
20 | Mapper for converting between ArtifactEntity (Domain) and ArtifactModel (SQLAlchemy).
21 |
22 | This class provides methods for bidirectional mapping, ensuring separation of concerns
23 | between the domain logic and database persistence.
24 | """
25 |
26 | def to_entity(self, model: ArtifactModel) -> ArtifactEntity:
27 | """
28 | Converts an SQLAlchemy ArtifactModel to a Domain ArtifactEntity.
29 |
30 | Args:
31 | model: The SQLAlchemy ArtifactModel instance.
32 |
33 | Returns:
34 | An ArtifactEntity instance.
35 | """
36 | return ArtifactEntity(
37 | inventory_id=model.inventory_id,
38 | created_at=model.created_at,
39 | acquisition_date=model.acquisition_date,
40 | name=model.name,
41 | department=model.department,
42 | era=Era(value=model.era),
43 | material=Material(value=model.material),
44 | description=model.description,
45 | )
46 |
47 | def to_model(self, entity: ArtifactEntity) -> ArtifactModel:
48 | """
49 | Converts a Domain ArtifactEntity to an SQLAlchemy ArtifactModel.
50 |
51 | Args:
52 | entity: The Domain ArtifactEntity instance.
53 |
54 | Returns:
55 | An SQLAlchemy ArtifactModel instance.
56 | """
57 | return ArtifactModel(
58 | inventory_id=entity.inventory_id,
59 | created_at=entity.created_at,
60 | acquisition_date=entity.acquisition_date,
61 | name=entity.name,
62 | department=entity.department,
63 | era=str(entity.era),
64 | material=str(entity.material),
65 | description=entity.description,
66 | )
67 |
68 | def update_model_from_entity(
69 | self, model: ArtifactModel, entity: ArtifactEntity
70 | ) -> None:
71 | """
72 | Updates an existing SQLAlchemy ArtifactModel with data from a Domain ArtifactEntity.
73 |
74 | This method is used for updating database records based on changes in the domain entity.
75 |
76 | Args:
77 | model: The existing SQLAlchemy ArtifactModel to update.
78 | entity: The Domain ArtifactEntity containing the new data.
79 | """
80 | model.name = entity.name
81 | model.era = str(entity.era)
82 | model.material = str(entity.material)
83 | model.description = entity.description
84 | model.acquisition_date = entity.acquisition_date
85 | model.department = entity.department
86 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Clean Architecture FastAPI Project Template
2 | ===========================================
3 |
4 | A comprehensive cookiecutter template for creating modern FastAPI applications with clean architecture, Docker support, and best practices included.
5 |
6 | .. image:: https://img.shields.io/badge/python-3.12+-blue.svg
7 | :target: https://www.python.org/downloads/
8 | :alt: Python Version
9 |
10 | .. image:: https://img.shields.io/badge/FastAPI-0.117+-green.svg
11 | :target: https://fastapi.tiangolo.com
12 | :alt: FastAPI
13 |
14 | .. image:: https://img.shields.io/badge/code%20style-ruff-000000.svg
15 | :target: https://github.com/astral-sh/ruff
16 | :alt: Code Style
17 |
18 | Overview
19 | --------
20 |
21 | This cookiecutter template provides a production-ready foundation for building FastAPI applications following Clean Architecture principles. It includes everything you need to start developing a scalable, maintainable web application.
22 |
23 | **Key Features:**
24 |
25 | * 🏗️ **Clean Architecture** - Domain-Driven Design with clear separation of concerns
26 | * ⚡ **FastAPI** - High-performance async web framework
27 | * 🗄️ **Multiple Database Support** - PostgreSQL, MySQL, or SQLite
28 | * 🔄 **Message Brokers** - Kafka, RabbitMQ, or NATS integration
29 | * 💾 **Caching** - Redis, KeyDB, Tarantool, or Dragonfly support
30 | * 🐳 **Docker** - Complete containerization with Docker Compose
31 | * 🧪 **Testing** - Comprehensive test suite with pytest
32 | * 📝 **Code Quality** - Ruff linting, MyPy type checking, pre-commit hooks
33 | * 🔄 **Migrations** - Alembic for database schema management
34 |
35 | Quick Start
36 | -----------
37 |
38 | Install cookiecutter and create a new project:
39 |
40 | .. code-block:: bash
41 |
42 | pip install cookiecutter
43 | cookiecutter https://github.com/Peopl3s/clean-architecture-fastapi-project-template.git
44 |
45 | Follow the prompts to configure your project, then:
46 |
47 | .. code-block:: bash
48 |
49 | cd your-project-name
50 | make install-dev
51 | make docker-up
52 |
53 | Your API will be available at http://localhost:8000 with interactive documentation at http://localhost:8000/docs
54 |
55 | Documentation Contents
56 | ----------------------
57 |
58 | .. toctree::
59 | :maxdepth: 2
60 | :caption: Getting Started
61 |
62 | getting-started/installation
63 | getting-started/quickstart
64 | getting-started/template-variables
65 |
66 | .. toctree::
67 | :maxdepth: 2
68 | :caption: User Guide
69 |
70 | user-guide/project-structure
71 | user-guide/architecture
72 | user-guide/configuration
73 | user-guide/database
74 | user-guide/caching
75 | user-guide/message-brokers
76 | user-guide/testing
77 | user-guide/deployment
78 |
79 | .. toctree::
80 | :maxdepth: 2
81 | :caption: Development
82 |
83 | development/code-quality
84 | development/docker
85 | development/migrations
86 | development/contributing
87 |
88 | .. toctree::
89 | :maxdepth: 2
90 | :caption: Advanced Topics
91 |
92 | advanced/customization
93 | advanced/hooks
94 | advanced/ci-cd
95 | advanced/best-practices
96 |
97 | .. toctree::
98 | :maxdepth: 1
99 | :caption: Reference
100 |
101 | reference/makefile-commands
102 | reference/environment-variables
103 | reference/faq
104 | changelog
105 | license
106 |
107 | Indices and tables
108 | ==================
109 |
110 | * :ref:`genindex`
111 | * :ref:`search`
112 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/test_infrastructure/test_db/models/test_artifact_model.py:
--------------------------------------------------------------------------------
1 | from datetime import UTC, datetime
2 | from uuid import UUID
3 |
4 | from sqlalchemy import DateTime, Index, String, Text, func
5 | from sqlalchemy.orm import Mapped, mapped_column, registry
6 |
7 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
8 | from {{cookiecutter.project_slug}}.domain.value_objects.era import Era
9 | from {{cookiecutter.project_slug}}.domain.value_objects.material import Material
10 |
11 | test_mapper_registry = registry()
12 |
13 |
14 | @test_mapper_registry.mapped
15 | class TestArtifactModel:
16 | """SQLite-compatible artifact model for testing"""
17 | __tablename__ = "artifacts"
18 | __table_args__ = (
19 | Index("ix_artifacts_name", "name"),
20 | Index("ix_artifacts_department", "department"),
21 | )
22 |
23 | def __init__(
24 | self,
25 | *,
26 | inventory_id: str | UUID,
27 | created_at: datetime,
28 | acquisition_date: datetime,
29 | name: str,
30 | department: str,
31 | era: str,
32 | material: str,
33 | description: str | None = None,
34 | ) -> None:
35 | self.inventory_id = str(inventory_id) if isinstance(inventory_id, UUID) else inventory_id
36 | self.created_at = created_at
37 | self.acquisition_date = acquisition_date
38 | self.name = name
39 | self.department = department
40 | self.era = era
41 | self.material = material
42 | self.description = description
43 |
44 | inventory_id: Mapped[str] = mapped_column(
45 | String(length=36),
46 | primary_key=True,
47 | nullable=False,
48 | )
49 | created_at: Mapped[datetime] = mapped_column(
50 | DateTime(timezone=True),
51 | nullable=False,
52 | default=lambda: datetime.now(UTC),
53 | server_default=func.now(),
54 | )
55 | acquisition_date: Mapped[datetime] = mapped_column(
56 | DateTime(timezone=True), nullable=False
57 | )
58 | name: Mapped[str] = mapped_column(String(length=255), nullable=False)
59 | department: Mapped[str] = mapped_column(String(length=255), nullable=False)
60 | era: Mapped[str] = mapped_column(String(length=50), nullable=False)
61 | material: Mapped[str] = mapped_column(String(length=50), nullable=False)
62 | description: Mapped[str | None] = mapped_column(Text, nullable=True)
63 |
64 | def __repr__(self) -> str:
65 | return (
66 | f"<TestArtifactModel(inventory_id={self.inventory_id!r}, "
67 | f"name={self.name!r}, department={self.department!r})>"
68 | )
69 |
70 | def to_dataclass(self) -> ArtifactEntity:
71 | return ArtifactEntity(
72 | inventory_id=UUID(self.inventory_id),
73 | created_at=self.created_at,
74 | acquisition_date=self.acquisition_date,
75 | name=self.name,
76 | department=self.department,
77 | era=Era(value=self.era),
78 | material=Material(value=self.material),
79 | description=self.description,
80 | )
81 |
82 | @classmethod
83 | def from_dataclass(
84 | cls: type["TestArtifactModel"], artifact: ArtifactEntity
85 | ) -> "TestArtifactModel":
86 | return cls(
87 | inventory_id=str(artifact.inventory_id),
88 | created_at=artifact.created_at,
89 | acquisition_date=artifact.acquisition_date,
90 | name=artifact.name,
91 | department=artifact.department,
92 | era=str(artifact.era),
93 | material=str(artifact.material),
94 | description=artifact.description,
95 | )
96 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = src/{{cookiecutter.project_slug}}/infrastructures/db/migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
10 |
11 | # sys.path path, will be prepended to sys.path if present.
12 | # defaults to the current working directory.
13 | prepend_sys_path = .
14 |
15 | # timezone to use when rendering the date within the migration file
16 | # as well as the filename.
17 | # If specified, requires the python-dateutil library that can be
18 | # installed by adding `alembic[tz]` to the pip requirements
19 | # string value is passed to dateutil.tz.gettz()
20 | # leave blank for localtime
21 | # timezone =
22 |
23 | # max length of characters to apply to the
24 | # "slug" field
25 | # truncate_slug_length = 40
26 |
27 | # set to 'true' to run the environment during
28 | # the 'revision' command, regardless of autogenerate
29 | # revision_environment = false
30 |
31 | # set to 'true' to allow .pyc and .pyo files without
32 | # a source .py file to be detected as revisions in the
33 | # versions/ directory
34 | # sourceless = false
35 |
36 | # version number format
37 | version_num_format = %%04d
38 |
39 | # version path separator; As mentioned above, this is the character used to split
40 | # version_locations. The default within new alembic.ini files is "os", which uses
41 | # os.pathsep. If this key is omitted entirely, it falls back to the legacy
42 | # behavior of splitting on spaces and/or commas.
43 | # Valid values for version_path_separator are:
44 | #
45 | # version_path_separator = :
46 | # version_path_separator = ;
47 | # version_path_separator = space
48 | version_path_separator = os
49 |
50 | # set to 'true' to search source files recursively
51 | # in each "version_locations" directory
52 | # new in Alembic version 1.10
53 | # recursive_version_locations = false
54 |
55 | # the output encoding used when revision files
56 | # are written from script.py.mako
57 | # output_encoding = utf-8
58 |
59 | sqlalchemy.url = postgresql+asyncpg://{{cookiecutter.project_slug}}_user:{{cookiecutter.project_slug}}_password@localhost:5432/{{cookiecutter.project_slug}}_db
60 |
61 |
62 | [post_write_hooks]
63 | # post_write_hooks defines scripts or Python functions that are run
64 | # on newly generated revision scripts. See the documentation for further
65 | # detail and examples
66 |
67 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
68 | # hooks = black
69 | # black.type = console_scripts
70 | # black.entrypoint = black
71 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
72 |
73 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
74 | # hooks = ruff
75 | # ruff.type = exec
76 | # ruff.executable = %(here)s/.venv/bin/ruff
77 | # ruff.options = --fix REVISION_SCRIPT_FILENAME
78 |
79 | # Logging configuration
80 | [loggers]
81 | keys = root,sqlalchemy,alembic
82 |
83 | [handlers]
84 | keys = console
85 |
86 | [formatters]
87 | keys = generic
88 |
89 | [logger_root]
90 | level = WARN
91 | handlers = console
92 | qualname =
93 |
94 | [logger_sqlalchemy]
95 | level = WARN
96 | handlers =
97 | qualname = sqlalchemy.engine
98 |
99 | [logger_alembic]
100 | level = INFO
101 | handlers =
102 | qualname = alembic
103 |
104 | [handler_console]
105 | class = StreamHandler
106 | args = (sys.stderr,)
107 | level = NOTSET
108 | formatter = generic
109 |
110 | [formatter_generic]
111 | format = %(levelname)-5.5s [%(name)s] %(message)s
112 | datefmt = %H:%M:%S
113 |
--------------------------------------------------------------------------------
/docs/getting-started/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | This guide will help you install and use the Clean Architecture FastAPI template.
5 |
6 | Prerequisites
7 | -------------
8 |
9 | Before you begin, ensure you have the following installed:
10 |
11 | * **Python 3.12+** - `Download Python <https://www.python.org/downloads/>`_
12 | * **Cookiecutter** - Template rendering tool
13 | * **Git** - Version control system
14 | * **Docker & Docker Compose** (optional) - For containerized development
15 |
16 | Installing Cookiecutter
17 | ------------------------
18 |
19 | Install cookiecutter using pip:
20 |
21 | .. code-block:: bash
22 |
23 | pip install cookiecutter
24 |
25 | Or using pipx (recommended for global tools):
26 |
27 | .. code-block:: bash
28 |
29 | pipx install cookiecutter
30 |
31 | Creating a Project
32 | ------------------
33 |
34 | From GitHub
35 | ~~~~~~~~~~~
36 |
37 | Create a new project directly from the GitHub repository:
38 |
39 | .. code-block:: bash
40 |
41 | cookiecutter https://github.com/Peopl3s/clean-architecture-fastapi-project-template.git
42 |
43 | From Local Clone
44 | ~~~~~~~~~~~~~~~~
45 |
46 | If you want to customize the template or work offline:
47 |
48 | .. code-block:: bash
49 |
50 | # Clone the template repository
51 | git clone https://github.com/Peopl3s/clean-architecture-fastapi-project-template.git
52 | cd clean-architecture-fastapi-project-template
53 |
54 | # Create a new project from the local template
55 | cookiecutter .
56 |
57 | Interactive Setup
58 | ~~~~~~~~~~~~~~~~~
59 |
60 | During project creation, you'll be prompted for various configuration options:
61 |
62 | .. code-block:: text
63 |
64 | project_name [My FastAPI Project]: My Awesome API
65 | project_slug [my_awesome_api]:
66 | project_description [A modern FastAPI application]: An awesome API
67 | author_name [Your Name]: John Doe
68 | author_email [your.email@example.com]: john@example.com
69 | github_username [yourusername]: johndoe
70 | version [0.1.0]:
71 | python_version [3.12]:
72 | use_database [postgresql]: postgresql
73 | use_cache [redis]: redis
74 | use_broker [kafka]: kafka
75 | add_docker [y]: y
76 | add_tests [y]: y
77 | add_docs [y]: y
78 | add_precommit [y]: y
79 | license_type [MIT]: MIT
80 |
81 | Non-Interactive Setup
82 | ~~~~~~~~~~~~~~~~~~~~~~
83 |
84 | For automation or CI/CD, use non-interactive mode:
85 |
86 | .. code-block:: bash
87 |
88 | cookiecutter https://github.com/Peopl3s/clean-architecture-fastapi-project-template.git \
89 | --no-input \
90 | project_name="My Awesome API" \
91 | project_description="An awesome API for my project" \
92 | author_name="John Doe" \
93 | author_email="john@example.com" \
94 | github_username="johndoe" \
95 | use_database="postgresql" \
96 | use_cache="redis" \
97 | use_broker="kafka"
98 |
99 | Post-Installation
100 | -----------------
101 |
102 | After creating your project:
103 |
104 | 1. Navigate to the project directory:
105 |
106 | .. code-block:: bash
107 |
108 | cd your-project-slug
109 |
110 | 2. Install dependencies:
111 |
112 | .. code-block:: bash
113 |
114 | # Using Poetry
115 | poetry install
116 |
117 | # Or using pip
118 | pip install -e ".[dev]"
119 |
120 | 3. Set up environment:
121 |
122 | .. code-block:: bash
123 |
124 | cp env.template .env
125 | # Edit .env with your configuration
126 |
127 | 4. Start development:
128 |
129 | .. code-block:: bash
130 |
131 | # With Docker
132 | make docker-up
133 |
134 | # Or locally
135 | make migrate
136 | poetry run python -m your_project_slug.main
137 |
138 | Next Steps
139 | ----------
140 |
141 | * Read the :doc:`quickstart` guide
142 | * Learn about :doc:`template-variables`
143 | * Explore the :doc:`../user-guide/project-structure`
144 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/application/use_cases/process_artifact.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import TYPE_CHECKING, final
3 |
4 | import structlog
5 |
6 | from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
7 | from {{cookiecutter.project_slug}}.application.exceptions import ArtifactNotFoundError
8 | from {{cookiecutter.project_slug}}.application.use_cases.fetch_artifact_from_museum_api import (
9 | FetchArtifactFromMuseumAPIUseCase,
10 | )
11 | from {{cookiecutter.project_slug}}.application.use_cases.get_artifact_from_cache import (
12 | GetArtifactFromCacheUseCase,
13 | )
14 | from {{cookiecutter.project_slug}}.application.use_cases.get_artifact_from_repo import (
15 | GetArtifactFromRepoUseCase,
16 | )
17 | from {{cookiecutter.project_slug}}.application.use_cases.publish_artifact_to_broker import (
18 | PublishArtifactToBrokerUseCase,
19 | )
20 | from {{cookiecutter.project_slug}}.application.use_cases.publish_artifact_to_catalog import (
21 | PublishArtifactToCatalogUseCase,
22 | )
23 | from {{cookiecutter.project_slug}}.application.use_cases.save_artifact_to_cache import (
24 | SaveArtifactToCacheUseCase,
25 | )
26 | from {{cookiecutter.project_slug}}.application.use_cases.save_artifact_to_repo import (
27 | SaveArtifactToRepoUseCase,
28 | )
29 |
30 | if TYPE_CHECKING:
31 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
32 |
33 | logger = structlog.get_logger(__name__)
34 |
35 |
36 | @final
37 | @dataclass(frozen=True, slots=True, kw_only=True)
38 | class ProcessArtifactUseCase:
39 | """
40 | Use case for processing an artifact, including fetching from cache, repository,
41 | or external API, saving, and publishing.
42 | """
43 |
44 | get_artifact_from_cache_use_case: GetArtifactFromCacheUseCase
45 | get_artifact_from_repo_use_case: GetArtifactFromRepoUseCase
46 | fetch_artifact_from_museum_api_use_case: FetchArtifactFromMuseumAPIUseCase
47 | save_artifact_to_repo_use_case: SaveArtifactToRepoUseCase
48 | save_artifact_to_cache_use_case: SaveArtifactToCacheUseCase
49 | publish_artifact_to_broker_use_case: PublishArtifactToBrokerUseCase
50 | publish_artifact_to_catalog_use_case: PublishArtifactToCatalogUseCase
51 |
52 | async def __call__(self, inventory_id: str) -> ArtifactDTO:
53 | """
54 | Executes the artifact processing flow.
55 |
56 | Args:
57 | inventory_id: The ID of the artifact to process.
58 |
59 | Returns:
60 | An ArtifactDTO representing the processed artifact.
61 | """
62 | if artifact_dto := await self.get_artifact_from_cache_use_case(inventory_id):
63 | return artifact_dto
64 |
65 | if artifact_dto := await self.get_artifact_from_repo_use_case(inventory_id):
66 | await self.save_artifact_to_cache_use_case(inventory_id, artifact_dto)
67 | return artifact_dto
68 |
69 | artifact_dto = await self.fetch_artifact_from_museum_api_use_case(inventory_id)
70 | await self.save_artifact_to_repo_use_case(artifact_dto)
71 | await self.save_artifact_to_cache_use_case(inventory_id, artifact_dto)
72 |
73 | try:
74 | await self.publish_artifact_to_broker_use_case(artifact_dto)
75 | except Exception:
76 | logger.warning(
77 | "Failed to publish artifact notification to message broker (non-critical)",
78 | inventory_id=inventory_id,
79 | )
80 |
81 | try:
82 | await self.publish_artifact_to_catalog_use_case(artifact_dto)
83 | except Exception:
84 | logger.warning(
85 | "Failed to publish artifact to public catalog (non-critical)",
86 | inventory_id=inventory_id,
87 | )
88 |
89 | logger.info(
90 | "Artifact successfully fetched and processed",
91 | inventory_id=inventory_id,
92 | )
93 | return artifact_dto
94 |
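# Flow summary (as implemented above): cache hit -> return; repo hit -> warm the
# cache and return; otherwise fetch from the museum API, persist to the repo and
# cache, then publish to the broker and catalog (both publish steps are
# best-effort: failures are logged as warnings and do not abort the flow).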
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/repositories/artifact.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import final
3 | from uuid import UUID
4 |
5 | from sqlalchemy.exc import IntegrityError, SQLAlchemyError
6 | from sqlalchemy.ext.asyncio import AsyncSession
7 | from sqlalchemy.future import select
8 |
9 | from {{cookiecutter.project_slug}}.application.interfaces.repositories import ArtifactRepositoryProtocol
10 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity
11 | from {{cookiecutter.project_slug}}.infrastructures.db.exceptions import (
12 | RepositoryConflictError,
13 | RepositorySaveError,
14 | )
15 | from {{cookiecutter.project_slug}}.infrastructures.db.mappers.artifact_db_mapper import ArtifactDBMapper
16 | from {{cookiecutter.project_slug}}.infrastructures.db.models.artifact import ArtifactModel
17 |
18 |
19 | @final
20 | @dataclass(frozen=True, slots=True, kw_only=True)
21 | class ArtifactRepositorySQLAlchemy(ArtifactRepositoryProtocol):
22 | """SQLAlchemy implementation of the Artifact Repository.
23 |
24 | This repository is responsible for database operations (CRUD) only.
25 | Mapping logic is delegated to ArtifactDBMapper following SRP.
26 | """
27 |
28 | session: AsyncSession
29 | mapper: ArtifactDBMapper
30 |
31 | async def get_by_inventory_id(
32 | self, inventory_id: str | UUID
33 | ) -> ArtifactEntity | None:
34 | """
35 | Retrieves an artifact by its inventory ID from the database.
36 |
37 | Args:
38 | inventory_id: The unique identifier of the artifact.
39 |
40 | Returns:
41 | The ArtifactEntity if found, otherwise None.
42 |
43 | Raises:
44 | RepositorySaveError: If a database error occurs during retrieval.
45 | """
46 | try:
47 | stmt = select(ArtifactModel).where(
48 | ArtifactModel.inventory_id == inventory_id
49 | )
50 | result = await self.session.execute(stmt)
51 | artifact_model = result.scalar_one_or_none()
52 | if artifact_model is None:
53 | return None
54 | return self.mapper.to_entity(artifact_model)
55 | except SQLAlchemyError as e:
56 | raise RepositorySaveError(
57 | f"Failed to retrieve artifact by inventory_id '{inventory_id}': {e}"
58 | ) from e
59 |
60 | async def save(self, artifact: ArtifactEntity) -> None:
61 | """
62 | Saves a new artifact or updates an existing one in the database.
63 |
64 | Args:
65 | artifact: The ArtifactEntity to persist.
66 |
67 | Raises:
68 | RepositoryConflictError: If a unique constraint is violated during save.
69 | RepositorySaveError: If a database error occurs during save.
70 | """
71 | try:
72 | stmt = select(ArtifactModel).where(
73 | ArtifactModel.inventory_id == artifact.inventory_id
74 | )
75 | result = await self.session.execute(stmt)
76 | model = result.scalar_one_or_none()
77 |
78 | if model:
79 | # Update existing model using mapper
80 | self.mapper.update_model_from_entity(model, artifact)
81 | else:
82 | # Create new model using mapper
83 | model = self.mapper.to_model(artifact)
84 |
85 | self.session.add(model)
86 | except IntegrityError as e:
87 | raise RepositoryConflictError(
88 | f"Conflict while saving artifact '{artifact.inventory_id}': {e}"
89 | ) from e
90 | except SQLAlchemyError as e:
91 | raise RepositorySaveError(
92 | f"Failed to save artifact '{artifact.inventory_id}': {e}"
93 | ) from e
94 | except Exception as e:
95 | raise RepositorySaveError(
96 | f"Unexpected error while saving artifact '{artifact.inventory_id}': {e}"
97 | ) from e
98 |
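# Usage sketch (hypothetical; the session and transaction management come from
# the unit-of-work / DI wiring in the template):
#
#     repo = ArtifactRepositorySQLAlchemy(session=session, mapper=ArtifactDBMapper())
#     entity = await repo.get_by_inventory_id(inventory_id)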
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/scripts/init-db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # PostgreSQL Database Initialization Script
4 | # This script initializes the PostgreSQL database with proper schema and extensions
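# Usage: run from the project root with the docker-compose Postgres service up,
#   e.g.  bash scripts/init-db.sh   (connection settings are read from .env)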
5 |
6 | set -e # Exit on any error
7 |
8 | echo "🚀 Starting PostgreSQL database initialization..."
9 |
10 | # Load environment variables
11 | if [ -f .env ]; then
12 | echo "📝 Loading environment variables from .env file..."
13 | export $(grep -v '^#' .env | xargs)
14 | else
15 | echo "⚠️ Warning: .env file not found. Using default values."
16 | fi
17 |
18 | # Default database connection parameters
19 | DB_HOST=${POSTGRES_SERVER:-localhost}
20 | DB_PORT=${POSTGRES_PORT:-5432}
21 | DB_USER=${POSTGRES_USER:-{{ cookiecutter.database_user }}}
22 | DB_PASSWORD=${POSTGRES_PASSWORD:-{{ cookiecutter.database_password }}}
23 | DB_NAME=${POSTGRES_DB:-{{ cookiecutter.database_name }}}
24 |
25 | # Construct database URL
26 | DB_URL="postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/postgres"
27 |
28 | echo "🔗 Connecting to PostgreSQL at ${DB_HOST}:${DB_PORT}"
29 |
30 | # Wait for database to be ready
31 | echo "⏳ Waiting for database to be ready..."
32 | max_attempts=30
33 | attempt=1
34 |
35 | while [ $attempt -le $max_attempts ]; do
36 | if docker-compose exec -T postgres psql -U "${DB_USER}" -d postgres -c "SELECT 1;" > /dev/null 2>&1; then
37 | echo "✅ Database is ready!"
38 | break
39 | fi
40 |
41 | echo "🔄 Attempt ${attempt}/${max_attempts}: Database not ready, waiting 2 seconds..."
42 | sleep 2
43 | attempt=$((attempt + 1))
44 | done
45 |
46 | if [ $attempt -gt $max_attempts ]; then
47 | echo "❌ Error: Database is not ready after ${max_attempts} attempts"
48 | exit 1
49 | fi
50 |
51 | # Check if database already exists
52 | echo "🔍 Checking if database '${DB_NAME}' exists..."
53 | DB_EXISTS=$(docker-compose exec -T postgres psql -U "${DB_USER}" -d postgres -tAc "SELECT 1 FROM pg_database WHERE datname='${DB_NAME}'")
54 |
55 | if [ "$DB_EXISTS" = "1" ]; then
56 | echo "⚠️ Database '${DB_NAME}' already exists. Skipping initialization."
57 | echo "💡 If you want to reinitialize the database, drop it first:"
58 | echo " docker-compose exec postgres psql -U \"${DB_USER}\" -d postgres -c \"DROP DATABASE IF EXISTS ${DB_NAME};\""
59 | exit 0
60 | fi
61 |
62 | # Create database
63 | echo "🏗️ Creating database '${DB_NAME}'..."
64 | docker-compose exec -T postgres psql -U "${DB_USER}" -d postgres -c "CREATE DATABASE ${DB_NAME};"
65 |
66 | # Connect to the new database and create extensions
67 | echo "🔧 Creating extensions in database '${DB_NAME}'..."
68 | docker-compose exec -T postgres psql -U "${DB_USER}" -d "${DB_NAME}" -c "
69 | CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";
70 | CREATE EXTENSION IF NOT EXISTS \"pg_trgm\";
71 | SET timezone = 'UTC';
72 | "
73 |
74 | # Run the init-db.sql script if it exists
75 | if [ -f "scripts/init-db.sql" ]; then
76 | echo "📄 Running init-db.sql script..."
77 | docker-compose exec -T postgres psql -U "${DB_USER}" -d "${DB_NAME}" < scripts/init-db.sql
78 | else
79 | echo "ℹ️ init-db.sql not found, skipping custom initialization script"
80 | fi
81 |
82 | # Grant privileges
83 | echo "🔐 Setting up privileges..."
84 | docker-compose exec -T postgres psql -U "${DB_USER}" -d "${DB_NAME}" -c "
85 | GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO ${DB_USER};
86 | GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO ${DB_USER};
87 | ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO ${DB_USER};
88 | ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO ${DB_USER};
89 | "
90 |
91 | echo "✅ Database initialization completed successfully!"
92 | echo "🎉 Database '${DB_NAME}' is ready for use."
93 | echo ""
94 | echo "📋 Connection details:"
95 | echo " Host: ${DB_HOST}"
96 | echo " Port: ${DB_PORT}"
97 | echo " Database: ${DB_NAME}"
98 | echo " User: ${DB_USER}"
99 | echo ""
100 | echo "💡 Next steps:"
101 | echo " 1. Run migrations: make migrate"
102 | echo " 2. Start the application: make docker-up-dev"
103 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # Poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
109 |
110 | # pdm
111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112 | #pdm.lock
113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114 | # in version control.
115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
116 | .pdm.toml
117 | .pdm-python
118 | .pdm-build/
119 |
120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
121 | __pypackages__/
122 |
123 | # Celery stuff
124 | celerybeat-schedule
125 | celerybeat.pid
126 |
127 | # SageMath parsed files
128 | *.sage.py
129 |
130 | # Environments
131 | .env
132 | .venv
133 | env/
134 | venv/
135 | ENV/
136 | env.bak/
137 | venv.bak/
138 |
139 | # Spyder project settings
140 | .spyderproject
141 | .spyproject
142 |
143 | # Rope project settings
144 | .ropeproject
145 |
146 | # mkdocs documentation
147 | /site
148 |
149 | # mypy
150 | .mypy_cache/
151 | .dmypy.json
152 | dmypy.json
153 |
154 | # Pyre type checker
155 | .pyre/
156 |
157 | # pytype static type analyzer
158 | .pytype/
159 |
160 | # Cython debug symbols
161 | cython_debug/
162 |
163 | # PyCharm
164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
166 | # and can be added to the global gitignore or merged into this file. For a more nuclear
167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
168 | #.idea/
169 |
170 | # Ruff stuff:
171 | .ruff_cache/
172 |
173 | # PyPI configuration file
174 | .pypirc
175 |
176 | # Direnv files
177 | .direnv
178 | .envrc
179 |
183 | # Editor specific files and folders
184 | .idea
185 | .vscode
186 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/env.template:
--------------------------------------------------------------------------------
1 | # Application Configuration
2 | ENVIRONMENT=dev
3 | LOG_LEVEL=DEBUG
4 | DEBUG=true
5 |
6 | {% if cookiecutter.use_database == "postgresql" %}
7 | # PostgreSQL Configuration
8 | POSTGRES_USER={{ cookiecutter.database_user }}
9 | POSTGRES_PASSWORD={{ cookiecutter.database_password }}
10 | POSTGRES_SERVER=postgres
11 | POSTGRES_PORT=5432
12 | POSTGRES_DB={{ cookiecutter.database_name }}
13 | {% endif %}
14 |
15 | {% if cookiecutter.use_database == "sqlite" %}
16 | # SQLite Configuration
17 | SQLITE_DB_PATH={{ cookiecutter.database_name }}.db
18 | SQLITE_DB_DIR=./data
19 | {% endif %}
20 |
21 | {% if cookiecutter.use_database == "mysql" %}
22 | # MySQL Configuration
23 | MYSQL_USER={{ cookiecutter.database_user }}
24 | MYSQL_PASSWORD={{ cookiecutter.database_password }}
25 | MYSQL_SERVER=mysql
26 | MYSQL_PORT=3306
27 | MYSQL_DB={{ cookiecutter.database_name }}
28 | {% endif %}
29 |
30 | # External APIs
31 | MUSEUM_API_BASE=https://api.{{ cookiecutter.domain_name }}
32 | CATALOG_API_BASE=https://catalog.{{ cookiecutter.domain_name }}
33 | HTTP_TIMEOUT=10.0
34 |
35 | {% if cookiecutter.use_broker == "kafka" %}
36 | # Message Broker (Kafka)
37 | BROKER_URL=kafka://kafka:9092
38 | BROKER_NEW_ARTIFACT_QUEUE=new_artifacts
39 |
40 | # Retry Configuration
41 | PUBLISH_RETRIES=3
42 | PUBLISH_RETRY_BACKOFF=0.5
43 | {% endif %}
44 |
45 | {% if cookiecutter.use_broker == "rabbitmq" %}
46 | # Message Broker (RabbitMQ)
47 | BROKER_URL=amqp://guest:guest@rabbitmq:5672/
48 | BROKER_NEW_ARTIFACT_QUEUE=new_artifacts
49 |
50 | # Retry Configuration
51 | PUBLISH_RETRIES=3
52 | PUBLISH_RETRY_BACKOFF=0.5
53 | {% endif %}
54 |
55 | {% if cookiecutter.use_broker == "nats" %}
56 | # Message Broker (NATS)
57 | BROKER_URL=nats://nats:4222
58 | BROKER_NEW_ARTIFACT_QUEUE=new_artifacts
59 |
60 | # Retry Configuration
61 | PUBLISH_RETRIES=3
62 | PUBLISH_RETRY_BACKOFF=0.5
63 | {% endif %}
64 |
65 | {% if cookiecutter.use_cache == "redis" %}
66 | # Redis Configuration
67 | REDIS_PASSWORD={{ cookiecutter.redis_password }}
68 | REDIS_PORT=6379
69 | REDIS_HOST=redis
70 | REDIS_DB=0
71 | REDIS_CACHE_TTL=3600
72 | REDIS_CACHE_PREFIX={{ cookiecutter.project_slug }}:
73 | {% endif %}
74 |
75 | {% if cookiecutter.use_cache == "keydb" %}
76 | # KeyDB Configuration
77 | KEYDB_PASSWORD={{ cookiecutter.redis_password }}
78 | KEYDB_PORT=6379
79 | KEYDB_HOST=keydb
80 | KEYDB_DB=0
81 | KEYDB_CACHE_TTL=3600
82 | KEYDB_CACHE_PREFIX={{ cookiecutter.project_slug }}:
83 | {% endif %}
84 |
85 | {% if cookiecutter.use_cache == "tarantool" %}
86 | # Tarantool Configuration
87 | TARANTOOL_USER={{ cookiecutter.database_user }}
88 | TARANTOOL_PASSWORD={{ cookiecutter.database_password }}
89 | TARANTOOL_PORT=3301
90 | TARANTOOL_HOST=tarantool
91 | TARANTOOL_CACHE_TTL=3600
92 | TARANTOOL_CACHE_PREFIX={{ cookiecutter.project_slug }}:
93 | {% endif %}
94 |
95 | {% if cookiecutter.use_cache == "dragonfly" %}
96 | # Dragonfly Configuration
97 | DRAGONFLY_PASSWORD={{ cookiecutter.redis_password }}
98 | DRAGONFLY_PORT=6379
99 | DRAGONFLY_HOST=dragonfly
100 | DRAGONFLY_DB=0
101 | DRAGONFLY_CACHE_TTL=3600
102 | DRAGONFLY_CACHE_PREFIX={{ cookiecutter.project_slug }}:
103 | {% endif %}
104 |
105 | {% if cookiecutter.use_database == "postgresql" %}
106 | # Database URLs (computed)
107 | DATABASE_URL=postgresql+asyncpg://{{ cookiecutter.database_user }}:{{ cookiecutter.database_password }}@postgres:5432/{{ cookiecutter.database_name }}
108 | {% endif %}
109 |
110 | {% if cookiecutter.use_database == "sqlite" %}
111 | # Database URLs (computed)
112 | DATABASE_URL=sqlite+aiosqlite:///${SQLITE_DB_DIR}/${SQLITE_DB_PATH}
113 | {% endif %}
114 |
115 | {% if cookiecutter.use_database == "mysql" %}
116 | # Database URLs (computed)
117 | DATABASE_URL=mysql+aiomysql://${MYSQL_USER}:${MYSQL_PASSWORD}@${MYSQL_SERVER}:${MYSQL_PORT}/${MYSQL_DB}
118 | {% endif %}
119 |
120 | {% if cookiecutter.use_cache == "redis" %}
121 | REDIS_URL=redis://:${REDIS_PASSWORD}@${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}
122 | {% endif %}
123 |
124 | {% if cookiecutter.use_cache == "keydb" %}
125 | KEYDB_URL=redis://:${KEYDB_PASSWORD}@${KEYDB_HOST}:${KEYDB_PORT}/${KEYDB_DB}
126 | {% endif %}
127 |
128 | {% if cookiecutter.use_cache == "tarantool" %}
129 | TARANTOOL_URL=tarantool://${TARANTOOL_USER}:${TARANTOOL_PASSWORD}@${TARANTOOL_HOST}:${TARANTOOL_PORT}
130 | {% endif %}
131 |
132 | {% if cookiecutter.use_cache == "dragonfly" %}
133 | DRAGONFLY_URL=redis://:${DRAGONFLY_PASSWORD}@${DRAGONFLY_HOST}:${DRAGONFLY_PORT}/${DRAGONFLY_DB}
134 | {% endif %}
135 |
136 | # CORS Configuration
137 | CORS_ORIGINS=http://localhost:3000,http://localhost:8080
138 | CORS_ALLOW_CREDENTIALS=true
139 | CORS_ALLOW_METHODS=GET,POST,PUT,DELETE,OPTIONS
140 | CORS_ALLOW_HEADERS=*
141 |
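Note that many env-file loaders pass `${VAR}` references through literally, so verify that your loader expands the SQLite/MySQL `DATABASE_URL` values above. A minimal sketch of reading the resulting values at runtime with plain `os.environ` (the template's `config/` modules may use a settings library instead; the defaults here are illustrative):

```python
import os

# Names match env.template above.
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./data/app.db")
HTTP_TIMEOUT = float(os.getenv("HTTP_TIMEOUT", "10.0"))
CORS_ORIGINS = [origin for origin in os.getenv("CORS_ORIGINS", "").split(",") if origin]

print(DATABASE_URL, HTTP_TIMEOUT, CORS_ORIGINS)
```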
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/Dockerfile:
--------------------------------------------------------------------------------
1 | # =============================================================================
2 | # Stage 1: Base image with system dependencies
3 | # =============================================================================
4 | FROM python:3.14-slim-bookworm AS base
5 |
6 | # Set environment variables
7 | ENV PYTHONUNBUFFERED=1 \
8 | PYTHONDONTWRITEBYTECODE=1 \
9 | PIP_NO_CACHE_DIR=1 \
10 | PIP_DISABLE_PIP_VERSION_CHECK=1 \
11 | POETRY_VIRTUALENVS_IN_PROJECT=true \
12 | POETRY_NO_INTERACTION=1 \
13 | PYTHONPATH="/app/src"
14 |
15 | # Install system dependencies
16 | RUN apt-get update && apt-get install -y --no-install-recommends \
17 | build-essential \
18 | curl \
19 | && rm -rf /var/lib/apt/lists/*
20 |
21 | # Install Poetry
22 | RUN pip install poetry
23 |
24 | # Create non-root user
25 | RUN groupadd --gid 1000 appuser && \
26 | useradd --uid 1000 --gid appuser --shell /bin/bash --create-home appuser
27 |
28 | # =============================================================================
29 | # Stage 2: Dependencies installation
30 | # =============================================================================
31 | FROM base AS deps
32 |
33 | # Set working directory
34 | WORKDIR /app
35 |
36 | # Copy dependency files and README (required by hatchling)
37 | COPY pyproject.toml README.md ./
38 |
39 | # Create virtual environment and install dependencies
40 | RUN poetry config virtualenvs.create true && \
41 | poetry config virtualenvs.in-project true && \
42 |     poetry env use python3.14 && \
43 | poetry install --no-root --only main --no-interaction --no-ansi
44 |
45 | # =============================================================================
46 | # Stage 3: Development dependencies (optional)
47 | # =============================================================================
48 | FROM deps AS deps-dev
49 |
50 | # Install development dependencies
51 | RUN poetry install --no-root --no-interaction --no-ansi
52 |
53 | # =============================================================================
54 | # Stage 4: Production image
55 | # =============================================================================
56 | FROM deps AS production
57 |
58 | # Set working directory
59 | WORKDIR /app
60 |
61 | # Copy application code
62 | COPY --chown=appuser:appuser src/ ./src/
63 | COPY --chown=appuser:appuser alembic.ini ./
64 |
65 | # Create necessary directories
66 | RUN mkdir -p /app/logs /app/htmlcov && \
67 | chown -R appuser:appuser /app
68 |
69 | # Switch to non-root user
70 | USER appuser
71 |
72 | # Expose port
73 | EXPOSE 8000
74 |
75 | # Health check
76 | HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
77 | CMD curl -f http://localhost:8000/api/docs || exit 1
78 |
79 | # Default command
80 | CMD ["poetry", "run", "python", "-m", "{{cookiecutter.project_slug}}/main.py"]
81 |
82 | # =============================================================================
83 | # Stage 5: Development image
84 | # =============================================================================
85 | FROM deps-dev AS development
86 |
87 | # Set working directory
88 | WORKDIR /app
89 |
90 | # Copy application code
91 | COPY --chown=appuser:appuser src/ ./src/
92 | COPY --chown=appuser:appuser alembic.ini ./
93 | COPY --chown=appuser:appuser tests/ ./tests/
94 | COPY --chown=appuser:appuser docs/ ./docs/
95 | COPY --chown=appuser:appuser Makefile ./
96 |
97 | # Create necessary directories
98 | RUN mkdir -p /app/logs /app/htmlcov && \
99 | chown -R appuser:appuser /app
100 |
101 | # Switch to non-root user
102 | USER appuser
103 |
104 | # Expose port
105 | EXPOSE 8000
106 |
107 | # Default command for development
108 | CMD ["poetry", "run", "uvicorn", "src.{{cookiecutter.project_slug}}.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
109 |
110 | # =============================================================================
111 | # Stage 6: Testing image
112 | # =============================================================================
113 | FROM deps-dev AS testing
114 |
115 | # Set working directory
116 | WORKDIR /app
117 |
118 | # Copy application code and tests
119 | COPY --chown=appuser:appuser src/ ./src/
120 | COPY --chown=appuser:appuser tests/ ./tests/
121 | COPY --chown=appuser:appuser alembic.ini ./
122 | COPY --chown=appuser:appuser Makefile ./
123 |
124 | # Create necessary directories
125 | RUN mkdir -p /app/logs /app/htmlcov && \
126 | chown -R appuser:appuser /app
127 |
128 | # Switch to non-root user
129 | USER appuser
130 |
131 | # Default command for testing
132 | CMD ["poetry", "run", "pytest", "tests/", "-v", "--cov=src", "--cov-report=html", "--cov-report=term"]
133 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/db/uow_new.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Any, Dict, Generic, Type, TypeVar
3 |
4 | from {{cookiecutter.project_slug}}.application.interfaces.db_mapper import DbMapperProtocol
5 |
6 | logger = logging.getLogger(__name__)
7 |
8 | T = TypeVar('T')
9 |
10 |
11 | class UoWModel(Generic[T]):
12 | """Wrapper class for models tracked by Unit of Work."""
13 |
14 | def __init__(self, model: T, uow: "UnitOfWork"):
15 | self._model = model
16 | self._uow = uow
17 |
18 | @property
19 | def model(self) -> T:
20 | """Get the underlying model."""
21 | return self._model
22 |
23 | def __getattr__(self, name: str) -> Any:
24 | """Delegate attribute access to the underlying model."""
25 | return getattr(self._model, name)
26 |
27 | def __setattr__(self, name: str, value: Any) -> None:
28 | """Delegate attribute setting to the underlying model and mark as dirty."""
29 | if name in ('_model', '_uow'):
30 | super().__setattr__(name, value)
31 | else:
32 | setattr(self._model, name, value)
33 | self._uow.register_dirty(self._model)
34 |
35 |
36 | class UnitOfWork:
37 |     """Unit of Work implementation that tracks new, dirty, and deleted models by object identity."""
38 |
39 | def __init__(self):
40 | self.dirty: Dict[int, Any] = {}
41 | self.new: Dict[int, Any] = {}
42 | self.deleted: Dict[int, Any] = {}
43 | self.mappers: Dict[Type[Any], DbMapperProtocol[Any]] = {}
44 |
45 | def register_dirty(self, model: Any) -> None:
46 | """Register a model as dirty (modified)."""
47 | model_id = id(model)
48 | if model_id in self.new:
49 | return
50 | self.dirty[model_id] = model
51 | logger.debug(f"Registered model {model_id} as dirty")
52 |
53 | def register_deleted(self, model: Any) -> None:
54 | """Register a model as deleted."""
55 | if isinstance(model, UoWModel):
56 | model = model._model
57 |
58 | model_id = id(model)
59 | if model_id in self.new:
60 | self.new.pop(model_id)
61 | return
62 | if model_id in self.dirty:
63 | self.dirty.pop(model_id)
64 | self.deleted[model_id] = model
65 | logger.debug(f"Registered model {model_id} as deleted")
66 |
67 | def register_new(self, model: Any) -> UoWModel:
68 | """Register a new model and return a UoWModel wrapper."""
69 | model_id = id(model)
70 | self.new[model_id] = model
71 | logger.debug(f"Registered new model {model_id}")
72 | return UoWModel(model, self)
73 |
74 | def commit(self) -> None:
75 | """Commit all changes to the database."""
76 | logger.debug("Starting commit process")
77 |
78 |         # Here we could add optimizations such as request batching,
79 |         # but that would also require extending the Mapper protocol.
80 |
81 | # Insert new models
82 | for model in self.new.values():
83 | model_type = type(model)
84 | if model_type not in self.mappers:
85 | raise ValueError(f"No mapper registered for type {model_type}")
86 | self.mappers[model_type].insert(model)
87 | logger.debug(f"Inserted model of type {model_type}")
88 |
89 | # Update dirty models
90 | for model in self.dirty.values():
91 | model_type = type(model)
92 | if model_type not in self.mappers:
93 | raise ValueError(f"No mapper registered for type {model_type}")
94 | self.mappers[model_type].update(model)
95 | logger.debug(f"Updated model of type {model_type}")
96 |
97 | # Delete models
98 | for model in self.deleted.values():
99 | model_type = type(model)
100 | if model_type not in self.mappers:
101 | raise ValueError(f"No mapper registered for type {model_type}")
102 | self.mappers[model_type].delete(model)
103 | logger.debug(f"Deleted model of type {model_type}")
104 |
105 | # Clear all tracking collections
106 | self.clear()
107 | logger.debug("Commit completed successfully")
108 |
109 | def clear(self) -> None:
110 | """Clear all tracked models."""
111 | self.dirty.clear()
112 | self.new.clear()
113 | self.deleted.clear()
114 | logger.debug("Cleared all tracked models")
115 |
116 | def register_mapper(self, model_type: Type[T], mapper: DbMapperProtocol[T]) -> None:
117 | """Register a mapper for a specific model type."""
118 | self.mappers[model_type] = mapper
119 | logger.debug(f"Registered mapper for type {model_type}")
120 |
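A minimal usage sketch of the Unit of Work above. `Artifact` and `PrintMapper` are hypothetical stand-ins; any object exposing the `insert`/`update`/`delete` methods that `commit()` invokes satisfies the mapper role:

```python
class Artifact:
    def __init__(self, name: str) -> None:
        self.name = name


class PrintMapper:
    # Stand-in for a real DbMapperProtocol implementation.
    def insert(self, model: "Artifact") -> None:
        print("INSERT", model.name)

    def update(self, model: "Artifact") -> None:
        print("UPDATE", model.name)

    def delete(self, model: "Artifact") -> None:
        print("DELETE", model.name)


uow = UnitOfWork()
uow.register_mapper(Artifact, PrintMapper())

new_artifact = uow.register_new(Artifact("amphora"))
new_artifact.name = "restored amphora"  # still "new": register_dirty skips models in self.new

existing = UoWModel(Artifact("coin"), uow)  # wrap an already-persisted model
existing.name = "silver coin"  # attribute write marks it dirty

uow.commit()  # prints: INSERT restored amphora / UPDATE silver coin
```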
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | #### macos ####
2 | # General
3 | *.DS_Store
4 | .AppleDouble
5 | .LSOverride
6 |
7 | # Icon must end with two \r
8 | Icon
9 |
10 | # Thumbnails
11 | ._*
12 |
13 | # Files that might appear in the root of a volume
14 | .DocumentRevisions-V100
15 | .fseventsd
16 | .Spotlight-V100
17 | .TemporaryItems
18 | .Trashes
19 | .VolumeIcon.icns
20 | .com.apple.timemachine.donotpresent
21 |
22 | # Directories potentially created on remote AFP share
23 | .AppleDB
24 | .AppleDesktop
25 | Network Trash Folder
26 | Temporary Items
27 | .apdisk
28 |
29 | #### linux ####
30 | *~
31 |
32 | # temporary files which can be created if a process still has a handle open of a deleted file
33 | .fuse_hidden*
34 |
35 | # KDE directory preferences
36 | .directory
37 |
38 | # Linux trash folder which might appear on any partition or disk
39 | .Trash-*
40 |
41 | # .nfs files are created when an open file is removed but is still being accessed
42 | .nfs*
43 |
44 | #### windows ####
45 | # Windows thumbnail cache files
46 | Thumbs.db
47 | ehthumbs.db
48 | ehthumbs_vista.db
49 |
50 | # Dump file
51 | *.stackdump
52 |
53 | # Folder config file
54 | Desktop.ini
55 |
56 | # Recycle Bin used on file shares
57 | $RECYCLE.BIN/
58 |
59 | # Windows Installer files
60 | *.cab
61 | *.msi
62 | *.msm
63 | *.msp
64 |
65 | # Windows shortcuts
66 | *.lnk
67 |
68 | #### python ####
69 | # Byte-compiled / optimized / DLL files
70 | __pycache__/
71 | *.py[cod]
72 | *$py.class
73 |
74 | # C extensions
75 | *.so
76 |
77 | # Distribution / packaging
78 | .Python
79 | build/
80 | develop-eggs/
81 | dist/
82 | downloads/
83 | eggs/
84 | .eggs/
85 | lib/
86 | lib64/
87 | parts/
88 | sdist/
89 | var/
90 | wheels/
91 | share/python-wheels/
92 | *.egg-info/
93 | .installed.cfg
94 | *.egg
95 | MANIFEST
96 |
97 | # PyInstaller
98 | # Usually these files are written by a python script from a template
99 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
100 | *.manifest
101 | *.spec
102 |
103 | # Installer logs
104 | pip-log.txt
105 | pip-delete-this-directory.txt
106 |
107 | # Unit test / coverage reports
108 | htmlcov/
109 | .tox/
110 | .nox/
111 | .coverage
112 | .coverage.*
113 | .cache
114 | nosetests.xml
115 | coverage.xml
116 | *.cover
117 | *.py,cover
118 | .hypothesis/
119 | .pytest_cache/
120 | cover/
121 |
122 | # Translations
123 | *.mo
124 | *.pot
125 |
126 | # Django stuff:
127 | *.log
128 | local_settings.py
129 | db.sqlite3
130 | db.sqlite3-journal
131 |
132 | # Flask stuff:
133 | instance/
134 | .webassets-cache
135 |
136 | # Scrapy stuff:
137 | .scrapy
138 |
139 | # Sphinx documentation
140 | docs/_build/
141 |
142 | # PyBuilder
143 | .pybuilder/
144 | target/
145 |
146 | # Jupyter Notebook
147 | .ipynb_checkpoints
148 |
149 | # IPython
150 | profile_default/
151 | ipython_config.py
152 |
153 | # Celery
154 | celerybeat-schedule
155 | celerybeat.pid
156 |
157 | # SageMath
158 | *.sage.py
159 |
160 | # Environments
161 | .env
162 | .venv
163 | env/
164 | venv/
165 | ENV/
166 | env.bak/
167 | venv.bak/
168 |
169 | # pipenv / poetry / pdm
170 | .pdm.toml
171 | .pdm-python
172 | .pdm-build/
173 |
174 | # PEP 582
175 | __pypackages__/
176 |
177 | # mypy
178 | .mypy_cache/
179 | .dmypy.json
180 | dmypy.json
181 |
182 | # Pyre type checker
183 | .pyre/
184 |
185 | # pytype static type analyzer
186 | .pytype/
187 |
188 | # Ruff
189 | .ruff_cache/
190 |
191 | # Cython debug symbols
192 | cython_debug/
193 |
194 | # PyPI configuration
195 | .pypirc
196 |
197 | # Direnv
198 | .direnv
199 | .envrc
200 |
201 | #### jetbrains ####
202 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
203 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
204 |
205 | .idea/
206 |
207 | # User-specific stuff:
208 | .idea/**/workspace.xml
209 | .idea/**/tasks.xml
210 | .idea/dictionaries
211 |
212 | # Sensitive or high-churn files:
213 | .idea/**/dataSources/
214 | .idea/**/dataSources.ids
215 | .idea/**/dataSources.xml
216 | .idea/**/dataSources.local.xml
217 | .idea/**/sqlDataSources.xml
218 | .idea/**/dynamic.xml
219 | .idea/**/uiDesigner.xml
220 |
221 | # Gradle:
222 | .idea/**/gradle.xml
223 | .idea/**/libraries
224 |
225 | # CMake
226 | cmake-build-debug/
227 |
228 | # Mongo Explorer plugin:
229 | .idea/**/mongoSettings.xml
230 |
231 | ## File-based project format:
232 | *.iws
233 |
234 | ## Plugin-specific files:
235 |
236 | # IntelliJ
237 | /out/
238 |
239 | # mpeltonen/sbt-idea plugin
240 | .idea_modules/
241 |
242 | # JIRA plugin
243 | atlassian-ide-plugin.xml
244 |
245 | # Cursive Clojure plugin
246 | .idea/replstate.xml
247 |
248 | # Crashlytics plugin (for Android Studio and IntelliJ)
249 | com_crashlytics_export_strings.xml
250 | crashlytics.properties
251 | crashlytics-build.properties
252 | fabric.properties
253 |
254 | #### vscode ####
255 | .vscode/
256 |
257 | #### spyder / rope ####
258 | .spyderproject
259 | .spyproject
260 | .ropeproject
261 |
262 | #### mkdocs ####
263 | /site
264 |
265 | #### cookiecutter ####
266 | # Cookiecutter replay files
267 | .cookiecutter_replay/
268 |
269 | # Temporary projects created during tests
270 | tmp-*
271 | cookiecutter-test-*
272 |
273 | #### misc ####
274 | # Logs / pids
275 | *.log
276 | *.pid
277 | *.out
278 |
279 | # Editor swap/backup files
280 | *~
281 | *.swp
282 | *.swo
283 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: help install install-dev lint lint-fix format type-check check test test-cov clean dev-setup ci setup-env migration migrate migrate-downgrade migrate-history migrate-current migrate-stamp docker-build docker-build-dev docker-build-test docker-up docker-up-dev docker-down docker-logs docker-logs-app docker-shell docker-migrate docker-test docker-clean docker-rebuild dev-setup-docker docker-kafka-logs docker-kafka-shell docker-kafka-topics docker-kafka-create-topic docker-kafka-consume
2 |
3 | help: ## Show this help message
4 | @echo "Available commands:"
5 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
6 |
7 | install: ## Install production dependencies
8 |     poetry install --no-root --only main --sync
9 |
10 | install-dev: ## Install development dependencies
11 | poetry install --no-root --sync
12 |
13 | lint: ## Run linting with ruff
14 | poetry run ruff check src/{{cookiecutter.project_slug}}/
15 |
16 | lint-fix: ## Run linting with ruff and fix auto-fixable issues
17 | poetry run ruff check --fix src/{{cookiecutter.project_slug}}/
18 |
19 | format: ## Format code with ruff
20 | poetry run ruff format src/{{cookiecutter.project_slug}}/
21 |
22 | type-check: ## Run type checking with mypy
23 | poetry run mypy src/{{cookiecutter.project_slug}}/
24 |
25 | check: ## Run all checks (lint + format check + type check)
26 | poetry run ruff check src/{{cookiecutter.project_slug}}/
27 | poetry run ruff format --check src/{{cookiecutter.project_slug}}/
28 | poetry run mypy src/{{cookiecutter.project_slug}}/
29 |
30 | test: ## Run tests
31 | poetry run pytest tests/ -v
32 |
33 | test-cov: ## Run tests with coverage
34 | poetry run pytest tests/ -v --cov=src/{{cookiecutter.project_slug}} --cov-report=html --cov-report=term
35 |
36 | clean: ## Clean up cache and temporary files
37 | find . -type d -name "__pycache__" -exec rm -rf {} +
38 | find . -type f -name "*.pyc" -delete
39 | find . -type d -name "*.egg-info" -exec rm -rf {} +
40 | rm -rf .pytest_cache/
41 | rm -rf .ruff_cache/
42 | rm -rf htmlcov/
43 | rm -rf .coverage
44 |
45 | dev-setup: install-dev ## Set up development environment
46 | @echo "Development environment set up successfully!"
47 | @echo "Run 'make check' to verify everything is working."
48 |
49 | # Database migration commands
50 | migration: ## Create a new migration file
51 | poetry run alembic revision --autogenerate -m "$(msg)"
52 |
53 | migrate: ## Apply all pending migrations
54 | poetry run alembic upgrade head
55 |
56 | migrate-downgrade: ## Downgrade to previous migration
57 | poetry run alembic downgrade -1
58 |
59 | migrate-history: ## Show migration history
60 | poetry run alembic history
61 |
62 | migrate-current: ## Show current migration
63 | poetry run alembic current
64 |
65 | migrate-stamp: ## Stamp database with current migration (without applying)
66 | poetry run alembic stamp head
67 |
68 | # Docker commands
69 | docker-build: ## Build Docker image for production
70 | docker build --target production -t antiques:latest .
71 |
72 | docker-build-dev: ## Build Docker image for development
73 | docker build --target development -t antiques:dev .
74 |
75 | docker-build-test: ## Build Docker image for testing
76 | docker build --target testing -t antiques:test .
77 |
78 | docker-up: ## Start all services with docker-compose
79 | docker-compose up -d
80 |
81 | docker-up-dev: ## Start development environment
82 | docker-compose --profile dev up -d
83 |
84 | docker-down: ## Stop all services
85 | docker-compose down
86 |
87 | docker-logs: ## Show logs for all services
88 | docker-compose logs -f
89 |
90 | docker-logs-app: ## Show logs for application
91 | docker-compose logs -f app
92 |
93 | docker-shell: ## Open shell in running app container
94 | docker-compose exec app bash
95 |
96 | docker-migrate: ## Run database migrations
97 | docker-compose --profile migrate run --rm migrate
98 |
99 | docker-test: ## Run tests in Docker
100 | docker-compose --profile test run --rm test
101 |
102 | docker-clean: ## Clean up Docker resources
103 | docker-compose down -v --remove-orphans
104 | docker system prune -f
105 |
106 | docker-rebuild: ## Rebuild and restart services
107 | docker-compose down
108 | docker-compose build --no-cache
109 | docker-compose up -d
110 |
111 | # Environment setup
112 | setup-env: ## Create .env file from template
113 | ./scripts/setup-env.sh
114 |
115 | # Development helpers
116 |
117 | dev-setup-docker: setup-env ## Set up development environment with Docker
118 | docker-compose --profile dev up -d postgres redis
119 | @echo "Waiting for services to be ready..."
120 | @sleep 10
121 | ./scripts/init-db.sh
122 | make docker-migrate
123 | @echo "Development environment is ready!"
124 | @echo "Run 'make docker-up-dev' to start the application"
125 |
126 | ci: check test ## Run CI pipeline (lint + type check + test)
127 | @echo "CI pipeline completed successfully!"
128 |
129 | # Kafka commands
130 | docker-kafka-logs: ## Show Kafka logs
131 | docker-compose logs -f kafka
132 |
133 | docker-kafka-shell: ## Open shell in Kafka container
134 | docker-compose exec kafka bash
135 |
136 | docker-kafka-topics: ## List Kafka topics
137 | docker-compose exec kafka kafka-topics --bootstrap-server localhost:9092 --list
138 |
139 | docker-kafka-create-topic: ## Create Kafka topic for artifacts
140 | docker-compose exec kafka kafka-topics --bootstrap-server localhost:9092 --create --topic new_artifacts --partitions 3 --replication-factor 1
141 |
142 | docker-kafka-consume: ## Consume messages from Kafka topic
143 | docker-compose exec kafka kafka-console-consumer --bootstrap-server localhost:9092 --topic new_artifacts --from-beginning
144 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/docker.md:
--------------------------------------------------------------------------------
1 | # Docker Setup for the Antiques Project
2 |
3 | This document describes how to set up and use Docker for the Antiques application.
4 |
5 | {% if cookiecutter.add_docker != "y" %}
6 | > Docker support was not enabled for this project when it was generated.
7 | >
8 | > See the non-Docker run instructions in the main README.
9 | {% else %}
10 |
11 | ## Docker Commands
12 |
13 | ### Building images
14 |
15 | ```bash
16 | # Production image
17 | make docker-build
18 |
19 | # Development image
20 | make docker-build-dev
21 |
22 | # Testing image
23 | make docker-build-test
24 | ```
25 |
26 | ### Starting services
27 |
28 | ```bash
29 | # Start all services (production)
30 | make docker-up
31 |
32 | # Start development environment
33 | make docker-up-dev
34 |
35 | # Stop all services
36 | make docker-down
37 | ```
38 |
39 | ### Database operations
40 |
41 | ```bash
42 | # Run all database migrations
43 | make docker-migrate
44 | ```
45 |
46 | {% if cookiecutter.use_database == "postgresql" %}
47 | ```bash
48 | # Connect to PostgreSQL
49 | docker-compose exec postgres psql -U {{ cookiecutter.database_user }} -d {{ cookiecutter.database_name }}
50 | ```
51 | {% elif cookiecutter.use_database == "mysql" %}
52 | ```bash
53 | # Connect to MySQL
54 | docker-compose exec mysql mysql -u {{ cookiecutter.database_user }} -p {{ cookiecutter.database_name }}
55 | ```
56 | {% elif cookiecutter.use_database == "sqlite" %}
57 | ```bash
58 | # Connect to SQLite if needed
59 | docker-compose exec sqlite sqlite3 /data/{{ cookiecutter.database_name }}.db
60 | ```
61 | {% endif %}
62 |
63 | ### Testing
64 |
65 | ```bash
66 | # Run tests in Docker
67 | make docker-test
68 |
69 | # Run tests and write coverage reports (saved to htmlcov/)
70 | docker-compose --profile test run --rm test
71 | ```
72 |
73 | ### Debugging
74 |
75 | ```bash
76 | # View logs
77 | make docker-logs
78 |
79 | # Logs for the application only
80 | make docker-logs-app
81 |
82 | # Open a shell in the container
83 | make docker-shell
84 | ```
85 |
86 | ## Docker Compose Profiles
87 |
88 | ### production (default)
89 | - app (production)
90 | {% if cookiecutter.use_database == "postgresql" %}
91 | - postgres
92 | {% elif cookiecutter.use_database == "mysql" %}
93 | - mysql
94 | {% elif cookiecutter.use_database == "sqlite" %}
95 | - sqlite
96 | {% endif %}
97 | {% if cookiecutter.use_cache in ["redis", "keydb", "tarantool", "dragonfly"] %}
98 | - {{ cookiecutter.use_cache }}
99 | {% endif %}
100 |
101 | ### dev
102 | - app-dev (with hot reload)
103 | {% if cookiecutter.use_database == "postgresql" %}
104 | - postgres (with exposed ports)
105 | {% elif cookiecutter.use_database == "mysql" %}
106 | - mysql (with exposed ports)
107 | {% elif cookiecutter.use_database == "sqlite" %}
108 | - sqlite (with exposed ports)
109 | {% endif %}
110 | {% if cookiecutter.use_cache in ["redis", "keydb", "tarantool", "dragonfly"] %}
111 | - {{ cookiecutter.use_cache }} (with exposed ports)
112 | {% endif %}
113 |
114 | ### migrate
115 | - migrate (run migrations)
116 |
117 | ### test
118 | - test (run tests)
119 |
120 | ### dev-tools
121 | - adminer (web interface for DB)
122 |
123 | ## Environment Variables
124 |
125 | ### Required
126 | {% if cookiecutter.use_database == "postgresql" %}
127 | - DATABASE_URL - PostgreSQL connection URL
128 | {% elif cookiecutter.use_database == "mysql" %}
129 | - DATABASE_URL - MySQL connection URL
130 | {% elif cookiecutter.use_database == "sqlite" %}
131 | - DATABASE_URL - SQLite DB path
132 | {% endif %}
133 | {% if cookiecutter.use_cache in ["redis", "keydb", "tarantool", "dragonfly"] %}
134 | - {{ cookiecutter.use_cache | upper }}_URL - {{ cookiecutter.use_cache | capitalize }} connection URL
135 | {% endif %}
136 |
137 | ### Optional
138 | - `ENVIRONMENT` - environment (production/development/testing)
139 | - `LOG_LEVEL` - logging level
140 | - `API_HOST` - API host
141 | - `API_PORT` - API port
142 | - `API_WORKERS` - number of worker processes
143 |
144 | ## Volumes
145 |
146 | ### Named Volumes
147 | {% if cookiecutter.use_database == "postgresql" %}
148 | - postgres_data - PostgreSQL data
149 | {% elif cookiecutter.use_database == "mysql" %}
150 | - mysql_data - MySQL data
151 | {% elif cookiecutter.use_database == "sqlite" %}
152 | - sqlite_data - SQLite DB file
153 | {% endif %}
154 | {% if cookiecutter.use_cache in ["redis", "keydb", "tarantool", "dragonfly"] %}
155 | - {{ cookiecutter.use_cache }}_data - cache data
156 | {% endif %}
157 | - app_logs - application logs
158 | - test_reports - test reports
159 |
160 | ### Bind Mounts (development)
161 | - `./src:/app/src` - source code
162 | - `./tests:/app/tests` - tests
163 | - `./alembic:/app/alembic` - migrations
164 |
165 | ## Network
166 |
167 | All services are connected to the `antiques-network` for isolation.
168 |
169 | ## Health Checks
170 |
171 | All services have health checks:
172 | - **app**: HTTP request to /api/docs
173 | {% if cookiecutter.use_database == "postgresql" %}
174 | - **postgres**: pg_isready
175 | {% elif cookiecutter.use_database == "mysql" %}
176 | - **mysql**: mysqladmin ping
177 | {% elif cookiecutter.use_database == "sqlite" %}
178 | - **sqlite**: ensure DB file exists
179 | {% endif %}
180 | {% if cookiecutter.use_cache in ["redis", "keydb", "tarantool", "dragonfly"] %}
181 | - **{{ cookiecutter.use_cache }}**: {{ cookiecutter.use_cache }}-cli ping
182 | {% endif %}
183 |
184 | ## Monitoring
185 |
186 | ### Logs
187 | ```bash
188 | # All services
189 | docker-compose logs -f
190 |
191 | # Application only
192 | docker-compose logs -f app
193 | ```
194 | {% endif %}
195 |
--------------------------------------------------------------------------------
/docs/getting-started/quickstart.rst:
--------------------------------------------------------------------------------
1 | Quick Start
2 | ===========
3 |
4 | Get up and running with the Clean Architecture FastAPI template in minutes.
5 |
6 | Create Your First Project
7 | --------------------------
8 |
9 | 1. **Install Cookiecutter**
10 |
11 | .. code-block:: bash
12 |
13 | pip install cookiecutter
14 |
15 | 2. **Generate Project**
16 |
17 | .. code-block:: bash
18 |
19 | cookiecutter https://github.com/Peopl3s/clean-architecture-fastapi-project-template.git
20 |
21 | 3. **Configure Your Project**
22 |
23 | Answer the prompts with your project details. For a quick start, you can accept most defaults:
24 |
25 | .. code-block:: text
26 |
27 | project_name [My FastAPI Project]: Todo API
28 | project_slug [todo_api]:
29 | project_description: A simple todo list API
30 | author_name: Your Name
31 | use_database [postgresql]: postgresql
32 | use_cache [redis]: redis
33 | use_broker [none]: none
34 |
35 | 4. **Navigate to Project**
36 |
37 | .. code-block:: bash
38 |
39 | cd todo_api
40 |
41 | Project Structure Overview
42 | ---------------------------
43 |
44 | Your generated project will have this structure:
45 |
46 | .. code-block:: text
47 |
48 | todo_api/
49 | ├── src/
50 | │ └── todo_api/
51 | │ ├── domain/ # Business entities
52 | │ ├── application/ # Use cases
53 | │       ├── infrastructures/ # External services
54 | │ │ └── db/ # Database components
55 | │ │ └── migrations/ # Database migrations
56 | │ ├── presentation/ # API endpoints
57 | │ └── config/ # Configuration
58 | ├── tests/ # Test suite
59 | ├── alembic.ini # Alembic configuration
60 | ├── docker-compose.yml # Docker services
61 | ├── Dockerfile # Application container
62 | ├── pyproject.toml # Dependencies
63 | ├── Makefile # Common commands
64 | └── README.md # Project documentation
65 |
66 | Running the Application
67 | -----------------------
68 |
69 | Using Docker (Recommended)
70 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
71 |
72 | .. code-block:: bash
73 |
74 | # Start all services
75 | make docker-up
76 |
77 | # View logs
78 | make docker-logs
79 |
80 | # Stop services
81 | make docker-down
82 |
83 | The API will be available at:
84 |
85 | * **API**: http://localhost:8000
86 | * **API Docs**: http://localhost:8000/docs
87 | * **ReDoc**: http://localhost:8000/redoc
88 |
89 | Local Development
90 | ~~~~~~~~~~~~~~~~~
91 |
92 | .. code-block:: bash
93 |
94 | # Install dependencies
95 | make install-dev
96 |
97 | # Set up environment
98 | cp env.template .env
99 | # Edit .env with your database credentials
100 |
101 | # Run migrations
102 | make migrate
103 |
104 | # Start the application
105 | poetry run python -m todo_api.main
106 |
107 | Exploring the API
108 | -----------------
109 |
110 | Health Check
111 | ~~~~~~~~~~~~
112 |
113 | .. code-block:: bash
114 |
115 | curl http://localhost:8000/health
116 |
117 | Response:
118 |
119 | .. code-block:: json
120 |
121 | {
122 | "status": "healthy",
123 | "version": "0.1.0"
124 | }
125 |
126 | Interactive Documentation
127 | ~~~~~~~~~~~~~~~~~~~~~~~~~
128 |
129 | Open your browser and navigate to:
130 |
131 | * **Swagger UI**: http://localhost:8000/docs
132 | * **ReDoc**: http://localhost:8000/redoc
133 |
134 | These provide interactive API documentation where you can test endpoints directly.
135 |
136 | Development Workflow
137 | --------------------
138 |
139 | Running Tests
140 | ~~~~~~~~~~~~~
141 |
142 | .. code-block:: bash
143 |
144 | # Run all tests
145 | make test
146 |
147 | # Run with coverage
148 | make test-cov
149 |
150 | Code Quality
151 | ~~~~~~~~~~~~
152 |
153 | .. code-block:: bash
154 |
155 | # Lint code
156 | make lint
157 |
158 | # Format code
159 | make format
160 |
161 | # Type check
162 | make type-check
163 |
164 | # Run all checks
165 | make check
166 |
167 | Database Migrations
168 | ~~~~~~~~~~~~~~~~~~~
169 |
170 | .. code-block:: bash
171 |
172 | # Create a new migration
173 | make migration msg="Add users table"
174 |
175 | # Apply migrations
176 | make migrate
177 |
178 | # View migration history
179 | make migrate-history
180 |
181 | Common Commands
182 | ---------------
183 |
184 | .. code-block:: bash
185 |
186 | # Development
187 | make install-dev # Install dependencies
188 | make dev-setup # Complete dev setup
189 |
190 | # Code Quality
191 | make lint # Run linting
192 | make format # Format code
193 | make type-check # Type checking
194 | make check # All checks
195 |
196 | # Testing
197 | make test # Run tests
198 | make test-cov # Tests with coverage
199 |
200 | # Database
201 | make migration # Create migration
202 | make migrate # Apply migrations
203 | make migrate-downgrade # Rollback migration
204 |
205 | # Docker
206 | make docker-up # Start services
207 | make docker-down # Stop services
208 | make docker-logs # View logs
209 | make docker-shell # Shell into container
210 |
211 | Next Steps
212 | ----------
213 |
214 | * Learn about :doc:`../user-guide/architecture`
215 | * Explore :doc:`../user-guide/project-structure`
216 | * Read about :doc:`../user-guide/configuration`
217 | * Check out :doc:`../development/code-quality`
218 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/cache/redis_client.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | import json
3 | from typing import Any, final
4 |
5 | import structlog
6 | from redis.asyncio import Redis
7 | import redis.exceptions
8 |
9 | from {{cookiecutter.project_slug}}.application.interfaces.cache import CacheProtocol
10 |
11 | logger = structlog.get_logger(__name__)
12 |
13 |
14 | @final
15 | @dataclass(frozen=True, slots=True, kw_only=True)
16 | class RedisCacheClient(CacheProtocol):
17 | """
18 | Redis implementation of the CacheProtocol for caching operations.
19 | """
20 | client: Redis
21 | ttl: int | None = None
22 |
23 | async def get(self, key: str) -> dict[str, Any] | None:
24 | """
25 | Retrieves a value from Redis cache by key.
26 |
27 | Args:
28 | key: Cache key to retrieve.
29 |
30 | Returns:
31 | Cached dictionary data or None if not found or an error occurs.
32 | """
33 | try:
34 | value = await self.client.get(key)
35 | if value is None:
36 | return None
37 | return json.loads(value)
38 | except (ConnectionError, redis.exceptions.RedisError) as e:
39 | logger.error(
40 | "Redis get operation failed", key=key, error=str(e)
41 | )
42 | return None
43 | except (json.JSONDecodeError, TypeError) as e:
44 | logger.warning(
45 | "Failed to decode cached value", key=key, error=str(e)
46 | )
47 | return None
48 |
49 | async def set(self, key: str, value: dict[str, Any], ttl: int | None = None) -> bool:
50 | """
51 | Stores a value in Redis cache with an optional TTL.
52 |
53 | Args:
54 | key: Cache key to store under.
55 | value: Dictionary data to cache.
56 | ttl: Time-to-live in seconds (None for default or no expiration).
57 |
58 | Returns:
59 | True if successful, False otherwise.
60 | """
61 | try:
62 | serialized_value = json.dumps(value, default=str)
63 | if ttl is not None:
64 | await self.client.setex(key, ttl, serialized_value)
65 | elif self.ttl is not None:
66 | await self.client.setex(key, self.ttl, serialized_value)
67 | else:
68 | await self.client.set(key, serialized_value)
69 | return True
70 | except (ConnectionError, redis.exceptions.RedisError) as e:
71 | logger.error(
72 | "Redis set operation failed", key=key, error=str(e)
73 | )
74 | return False
75 | except (TypeError, ValueError) as e:
76 | logger.error(
77 | "Failed to serialize value for cache",
78 | key=key,
79 | error=str(e),
80 | )
81 | return False
82 |
83 | async def delete(self, key: str) -> bool:
84 | """
85 | Deletes a value from Redis cache.
86 |
87 | Args:
88 | key: Cache key to delete.
89 |
90 | Returns:
91 | True if key was deleted, False if key didn't exist or an error occurs.
92 | """
93 | try:
94 | result = await self.client.delete(key)
95 | return result > 0
96 | except (ConnectionError, redis.exceptions.RedisError) as e:
97 | logger.error(
98 | "Redis delete operation failed", key=key, error=str(e)
99 | )
100 | return False
101 |
102 | async def exists(self, key: str) -> bool:
103 | """
104 | Checks if a key exists in Redis cache.
105 |
106 | Args:
107 | key: Cache key to check.
108 |
109 | Returns:
110 | True if key exists, False otherwise or if an error occurs.
111 | """
112 | try:
113 | return bool(await self.client.exists(key))
114 | except (ConnectionError, redis.exceptions.RedisError) as e:
115 | logger.error(
116 | "Redis exists operation failed", key=key, error=str(e)
117 | )
118 | return False
119 |
120 | async def clear(self, pattern: str) -> int:
121 | """
122 | Clears cache entries matching a pattern in Redis.
123 |
124 | Args:
125 | pattern: Pattern to match keys (e.g., 'user:*').
126 |
127 | Returns:
128 | Number of keys deleted.
129 | """
130 | try:
131 | keys = []
132 | async for key in self.client.scan_iter(match=pattern):
133 | keys.append(key)
134 | if keys:
135 | deleted_count = await self.client.delete(*keys)
136 | logger.info(
137 | "Cleared cache keys matching pattern",
138 | pattern=pattern,
139 | count=deleted_count,
140 | )
141 | return deleted_count
142 | return 0
143 | except (ConnectionError, redis.exceptions.RedisError) as e:
144 | logger.error(
145 | "Redis clear pattern operation failed",
146 | pattern=pattern,
147 | error=str(e),
148 | )
149 | return 0
150 |
151 | async def close(self) -> None:
152 | """
153 | Closes the Redis client connection.
154 | """
155 | try:
156 | await self.client.close()
157 | logger.info("Redis connection closed")
158 | except (ConnectionError, redis.exceptions.RedisError) as e:
159 | logger.error("Failed to close Redis connection", error=str(e))
160 |
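A minimal wiring sketch for the client above. The connection URL and TTL are illustrative; in the generated project they come from the Redis settings module:

```python
import asyncio

from redis.asyncio import Redis


async def main() -> None:
    cache = RedisCacheClient(
        client=Redis.from_url("redis://localhost:6379/0"),
        ttl=3600,  # default TTL applied when set() is called without one
    )
    await cache.set("artifact:42", {"name": "amphora"})
    print(await cache.get("artifact:42"))  # {'name': 'amphora'}
    await cache.delete("artifact:42")
    await cache.close()


asyncio.run(main())
```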
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/infrastructures/http/clients.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import final
3 | from uuid import UUID
4 |
5 | import httpx
6 | import stamina
7 | import structlog
8 |
9 | from {{cookiecutter.project_slug}}.application.dtos.artifact import (
10 | ArtifactCatalogPublicationDTO,
11 | ArtifactDTO,
12 | )
13 | from {{cookiecutter.project_slug}}.application.exceptions import ArtifactNotFoundError
14 | from {{cookiecutter.project_slug}}.application.interfaces.http_clients import (
15 | ExternalMuseumAPIProtocol,
16 | PublicCatalogAPIProtocol,
17 | )
18 | from {{cookiecutter.project_slug}}.infrastructures.mappers.artifact import InfrastructureArtifactMapper
19 |
20 | logger = structlog.get_logger(__name__)
21 |
22 |
23 | @final
24 | @dataclass(frozen=True, slots=True, kw_only=True)
25 | class ExternalMuseumAPIClient(ExternalMuseumAPIProtocol):
26 | """
27 | Client for interacting with an external museum API to fetch artifact data.
28 | """
29 | base_url: str
30 | client: httpx.AsyncClient
31 | mapper: InfrastructureArtifactMapper
32 |
33 | @stamina.retry(
34 | on=(httpx.HTTPError, httpx.RequestError),
35 | attempts=3,
36 | wait_initial=0.5,
37 | wait_jitter=1.0,
38 | )
39 | async def fetch_artifact(self, inventory_id: str | UUID) -> ArtifactDTO:
40 | """
41 | Fetches an artifact from the external museum API.
42 |
43 | Args:
44 | inventory_id: The ID of the artifact to fetch.
45 |
46 | Returns:
47 | An ArtifactDTO object if found.
48 |
49 | Raises:
50 | ArtifactNotFoundError: If the artifact is not found (404).
51 | httpx.HTTPStatusError: For other HTTP errors.
52 | httpx.RequestError: For network-related errors.
53 | ValueError: If data validation fails.
54 | Exception: For any other unexpected errors.
55 | """
56 | inventory_id_str = (
57 | str(inventory_id) if isinstance(inventory_id, UUID) else inventory_id
58 | )
59 | url = f"{self.base_url}/artifacts/{inventory_id_str}"
60 | logger.debug("Fetching artifact from URL", url=url)
61 |
62 | try:
63 | response = await self.client.get(url)
64 | if response.status_code == 404:
65 | logger.warning("Artifact not found (404)", inventory_id=inventory_id_str)
66 | raise ArtifactNotFoundError(
67 | f"Artifact {inventory_id_str} not found in external service"
68 | )
69 |
70 | response.raise_for_status()
71 | data = response.json()
72 |
73 | logger.debug("Successfully fetched artifact", data=data)
74 |
75 | return self.mapper.from_dict(data)
76 |
77 | except (httpx.HTTPStatusError, httpx.RequestError) as e:
78 | logger.exception(
79 | "HTTP error while fetching artifact",
80 | inventory_id=inventory_id_str,
81 | error=str(e),
82 | )
83 | raise
84 | except ValueError as e:
85 | logger.exception(
86 | "Data validation error for artifact",
87 | inventory_id=inventory_id_str,
88 | error=str(e),
89 | )
90 | raise
91 | except Exception as e:
92 | logger.exception(
93 | "Unexpected error while fetching artifact",
94 | inventory_id=inventory_id_str,
95 | error=str(e),
96 | )
97 | raise
98 |
99 |
100 | @final
101 | @dataclass(frozen=True, slots=True, kw_only=True)
102 | class PublicCatalogAPIClient(PublicCatalogAPIProtocol):
103 | """
104 | Client for interacting with a public catalog API to publish artifact data.
105 | """
106 | base_url: str
107 | client: httpx.AsyncClient
108 | mapper: InfrastructureArtifactMapper
109 |
110 | @stamina.retry(
111 | on=(httpx.HTTPError, httpx.RequestError),
112 | attempts=3,
113 | wait_initial=1.0,
114 | wait_jitter=1.0,
115 | )
116 | async def publish_artifact(self, artifact: ArtifactCatalogPublicationDTO) -> str:
117 | """
118 | Publishes an artifact to the public catalog API.
119 |
120 | Args:
121 | artifact: The ArtifactCatalogPublicationDTO to publish.
122 |
123 | Returns:
124 | A string representing the public ID of the published artifact.
125 |
126 | Raises:
127 | httpx.HTTPStatusError: For HTTP errors during publication.
128 | httpx.RequestError: For network-related errors.
129 | ValueError: If the response data is missing the 'public_id'.
130 | Exception: For any other unexpected errors.
131 | """
132 | payload = self.mapper.to_catalog_publication_dict(artifact)
133 |
134 | url = f"{self.base_url}/items"
135 | logger.debug("Publishing artifact to URL", url=url, payload=payload)
136 |
137 | try:
138 | response = await self.client.post(
139 | url, json=payload, timeout=httpx.Timeout(10.0)
140 | )
141 | response.raise_for_status()
142 | data = response.json()
143 | except (httpx.HTTPStatusError, httpx.RequestError) as e:
144 | logger.exception("Error during HTTP request", url=url, error=str(e))
145 | raise
146 | except Exception as e:
147 | logger.exception("Unexpected error during publishing artifact", error=str(e))
148 |             raise Exception(f"Failed to publish artifact to catalog: {e}") from e
149 |
150 | public_id = str(data.get("public_id", ""))
151 | if not public_id:
152 |             logger.error("Response JSON missing 'public_id' field", data=data)
153 | raise ValueError("Invalid response data: missing 'public_id'")
154 |
155 | logger.debug("Successfully published artifact", public_id=public_id)
156 | return public_id
157 |
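A minimal wiring sketch for the museum client above. The base URL is illustrative, and constructing `InfrastructureArtifactMapper` with no arguments is an assumption; adjust to the real constructor:

```python
import asyncio

import httpx


async def main() -> None:
    async with httpx.AsyncClient(timeout=10.0) as http:
        museum = ExternalMuseumAPIClient(
            base_url="https://api.example.org",  # illustrative
            client=http,
            mapper=InfrastructureArtifactMapper(),  # assumed zero-arg constructor
        )
        # stamina retries transient httpx errors up to 3 attempts before raising.
        artifact = await museum.fetch_artifact("123e4567-e89b-12d3-a456-426614174000")
        print(artifact)


asyncio.run(main())
```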
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/tests/conftest.py:
--------------------------------------------------------------------------------
1 | from collections.abc import AsyncGenerator
2 | from typing import Any
3 | from unittest.mock import AsyncMock, MagicMock
4 |
5 | from dishka import AsyncContainer, make_async_container
6 | from fastapi.testclient import TestClient
7 | import pytest
8 | from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
9 |
10 | from tests.faker import uuid4, word, text, date_time_this_century
11 | from tests.factories import (
12 | MaterialDTOFactory,
13 | EraDTOFactory,
14 | ArtifactDTOFactory,
15 | MaterialFactory,
16 | EraFactory,
17 | ArtifactEntityFactory,
18 | )
19 |
20 | from {{cookiecutter.project_slug}}.application.dtos.artifact import (
21 | ArtifactAdmissionNotificationDTO,
22 | ArtifactCatalogPublicationDTO,
23 | ArtifactDTO,
24 | EraDTO,
25 | MaterialDTO,
26 | )
27 | from {{cookiecutter.project_slug}}.application.interfaces.cache import CacheProtocol
28 | from {{cookiecutter.project_slug}}.application.interfaces.http_clients import (
29 | ExternalMuseumAPIProtocol,
30 | PublicCatalogAPIProtocol,
31 | )
32 | from {{cookiecutter.project_slug}}.application.interfaces.mappers import DtoEntityMapperProtocol
33 | from {{cookiecutter.project_slug}}.application.interfaces.message_broker import MessageBrokerPublisherProtocol
34 | from {{cookiecutter.project_slug}}.application.interfaces.repositories import ArtifactRepositoryProtocol
35 | from {{cookiecutter.project_slug}}.application.interfaces.uow import UnitOfWorkProtocol
36 | from {{cookiecutter.project_slug}}.application.use_cases.process_artifact import ProcessArtifactUseCase
37 | from {{cookiecutter.project_slug}}.config.ioc.di import get_providers
38 | from {{cookiecutter.project_slug}}.domain.entities.artifact import ArtifactEntity, Material, Era
39 | from {{cookiecutter.project_slug}}.main import create_app
40 | from tests.test_infrastructure.test_db.models.test_artifact_model import (
41 | test_mapper_registry,
42 | )
43 |
44 |
45 | @pytest.fixture(scope="session")
46 | def anyio_backend() -> str:
47 | return "asyncio"
48 |
49 |
50 | @pytest.fixture
51 | async def test_engine() -> AsyncGenerator[Any, None]:
52 | engine = create_async_engine(
53 | "sqlite+aiosqlite:///:memory:",
54 | echo=False,
55 | connect_args={"check_same_thread": False},
56 | )
57 | yield engine
58 | await engine.dispose()
59 |
60 |
61 | @pytest.fixture
62 | async def test_session(test_engine: Any) -> AsyncGenerator[AsyncSession, None]:
63 | async with test_engine.begin() as conn:
64 | await conn.run_sync(test_mapper_registry.metadata.create_all)
65 |
66 | async_session = async_sessionmaker(
67 | test_engine, class_=AsyncSession, expire_on_commit=False
68 | )
69 | async with async_session() as session:
70 | yield session
71 | await session.rollback()
72 |
73 |
74 | @pytest.fixture
75 | async def test_container() -> AsyncContainer:
76 | return make_async_container(*get_providers())
77 |
78 |
79 | @pytest.fixture
80 | def client() -> TestClient:
81 | app = create_app()
82 | return TestClient(app)
83 |
84 |
85 | @pytest.fixture
86 | def sample_artifact_dto() -> ArtifactDTO:
87 | return ArtifactDTOFactory.build()
88 |
89 |
90 | @pytest.fixture
91 | def sample_artifact_entity() -> ArtifactEntity:
92 | return ArtifactEntityFactory.build()
93 |
94 |
95 | @pytest.fixture
96 | def mock_repository() -> AsyncMock:
97 | mock = AsyncMock(spec=ArtifactRepositoryProtocol)
98 | return mock
99 |
100 |
101 | @pytest.fixture
102 | def mock_uow(mock_repository: AsyncMock) -> AsyncMock:
103 |     mock = AsyncMock(spec=UnitOfWorkProtocol)
104 |     mock.repositories = mock_repository
105 | return mock
106 |
107 |
108 | @pytest.fixture
109 | def mock_museum_api() -> AsyncMock:
110 | mock = AsyncMock(spec=ExternalMuseumAPIProtocol)
111 | return mock
112 |
113 |
114 | @pytest.fixture
115 | def mock_catalog_api() -> AsyncMock:
116 | mock = AsyncMock(spec=PublicCatalogAPIProtocol)
117 | return mock
118 |
119 |
120 | @pytest.fixture
121 | def mock_message_broker() -> AsyncMock:
122 | mock = AsyncMock(spec=MessageBrokerPublisherProtocol)
123 | return mock
124 |
125 |
126 | @pytest.fixture
127 | def mock_mapper(
128 | sample_notification_dto: ArtifactAdmissionNotificationDTO,
129 | sample_publication_dto: ArtifactCatalogPublicationDTO,
130 | ) -> MagicMock:
131 | mock = MagicMock(spec=DtoEntityMapperProtocol)
132 | # Configure default return values for new methods
133 | mock.to_notification_dto.return_value = sample_notification_dto
134 | mock.to_publication_dto.return_value = sample_publication_dto
135 | mock.to_dict.return_value = {}
136 | return mock
137 |
138 |
139 | @pytest.fixture
140 | def mock_cache_client() -> AsyncMock:
141 | mock = AsyncMock(spec=CacheProtocol)
142 | # By default, return None to simulate cache miss
143 | mock.get.return_value = None
144 | return mock
145 |
146 |
147 | @pytest.fixture
148 | def get_artifact_use_case(
149 | mock_uow: AsyncMock,
150 | mock_museum_api: AsyncMock,
151 | mock_catalog_api: AsyncMock,
152 | mock_message_broker: AsyncMock,
153 | mock_mapper: MagicMock,
154 | mock_cache_client: AsyncMock,
155 | ) -> ProcessArtifactUseCase:
156 | return ProcessArtifactUseCase(
157 | uow=mock_uow,
158 | museum_api_client=mock_museum_api,
159 | catalog_api_client=mock_catalog_api,
160 | message_broker=mock_message_broker,
161 | artifact_mapper=mock_mapper,
162 | cache_client=mock_cache_client,
163 | )
164 |
165 |
166 | @pytest.fixture
167 | def sample_notification_dto() -> ArtifactAdmissionNotificationDTO:
168 | return ArtifactAdmissionNotificationDTO(
169 | inventory_id=uuid4(),
170 | name=word(),
171 | acquisition_date=date_time_this_century(),
172 | department=word(),
173 | )
174 |
175 |
176 | @pytest.fixture
177 | def sample_publication_dto() -> ArtifactCatalogPublicationDTO:
178 | return ArtifactCatalogPublicationDTO(
179 | inventory_id=uuid4(),
180 | name=word(),
181 | era=EraDTOFactory.build(),
182 | material=MaterialDTOFactory.build(),
183 | description=text(),
184 | )
185 |
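A hypothetical test built on the fixtures above, showing how they compose; the `execute(...)` method name is an assumption about `ProcessArtifactUseCase`'s public API:

```python
# Would live in a test module, e.g. under tests/test_application/test_use_cases/.
from unittest.mock import AsyncMock

import pytest

from {{cookiecutter.project_slug}}.application.dtos.artifact import ArtifactDTO
from {{cookiecutter.project_slug}}.application.use_cases.process_artifact import (
    ProcessArtifactUseCase,
)


@pytest.mark.anyio
async def test_process_artifact_fetches_from_museum_api(
    get_artifact_use_case: ProcessArtifactUseCase,
    mock_museum_api: AsyncMock,
    sample_artifact_dto: ArtifactDTO,
) -> None:
    # Simulate the external museum API returning an artifact DTO.
    mock_museum_api.fetch_artifact.return_value = sample_artifact_dto

    result = await get_artifact_use_case.execute(  # assumed entry point
        sample_artifact_dto.inventory_id
    )

    mock_museum_api.fetch_artifact.assert_awaited_once()
    assert result is not None
```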
--------------------------------------------------------------------------------