├── .env.example
├── .gitignore
├── DEVELOPER-GUIDE.md
├── Dockerfile
├── LICENSE
├── README.md
├── alembic.ini
├── alembic
│   ├── env.py
│   └── versions
│       └── 001_initial.py
├── cli.py
├── docker-compose.yml
├── docs
│   ├── assets
│   │   └── logo-main.png
│   ├── cache.md
│   └── cache_design.md
├── main.py
├── requirements-dev.txt
├── requirements.txt
├── src
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── deps.py
│   │   └── v1
│   │       ├── __init__.py
│   │       ├── endpoints
│   │       │   ├── __init__.py
│   │       │   └── items.py
│   │       └── router.py
│   ├── cache
│   │   ├── __init__.py
│   │   ├── backends
│   │   │   ├── __init__.py
│   │   │   ├── base.py
│   │   │   ├── factory.py
│   │   │   ├── file.py
│   │   │   ├── memory.py
│   │   │   └── redis.py
│   │   ├── decorators.py
│   │   ├── dependencies.py
│   │   └── redis.py
│   ├── core
│   │   ├── __init__.py
│   │   ├── config.py
│   │   ├── events.py
│   │   ├── exceptions.py
│   │   └── logging.py
│   ├── db
│   │   ├── __init__.py
│   │   ├── base.py
│   │   ├── models
│   │   │   ├── __init__.py
│   │   │   └── item.py
│   │   └── session.py
│   ├── schedulers
│   │   ├── __init__.py
│   │   ├── jobs.py
│   │   ├── scheduler.py
│   │   └── scheduler_runner.py
│   ├── schemas
│   │   ├── __init__.py
│   │   └── item.py
│   ├── services
│   │   ├── __init__.py
│   │   ├── cached_item_service.py
│   │   └── item_service.py
│   ├── tasks
│   │   ├── __init__.py
│   │   ├── jobs.py
│   │   └── worker.py
│   └── utils
│       ├── __init__.py
│       └── helpers.py
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── test_api
    │   ├── __init__.py
    │   └── v1
    │       ├── __init__.py
    │       └── test_items.py
    ├── test_cache
    │   ├── __init__.py
    │   ├── test_backends.py
    │   └── test_decorators.py
    ├── test_services
    │   ├── __init__.py
    │   └── test_item_service.py
    └── test_tasks
        ├── __init__.py
        └── test_jobs.py
/.env.example:
--------------------------------------------------------------------------------
1 | # Environment settings
2 | ENV=development
3 | DEBUG=true
4 | LOG_LEVEL=DEBUG
5 |
6 | # Project info
7 | PROJECT_NAME="Rankyx"
8 | PROJECT_DESCRIPTION="A production-ready FastAPI application"
9 | VERSION=0.1.0
10 |
11 | # API settings
12 | API_PREFIX=/api
13 | HOST=0.0.0.0
14 | PORT=8000
15 |
16 | # CORS settings
17 | CORS_ORIGINS=["*"]
18 |
19 | # PostgreSQL connection
20 | POSTGRES_HOST=localhost
21 | POSTGRES_PORT=5432
22 | POSTGRES_USER=postgres
23 | POSTGRES_PASSWORD=postgres
24 | POSTGRES_DB=app_db
25 |
26 | # Redis connection
27 | REDIS_HOST=localhost
28 | REDIS_PORT=6300
29 | REDIS_PASSWORD=
30 | REDIS_DB=0
31 |
32 | # Dramatiq settings
33 | DRAMATIQ_BROKER=redis
34 | DRAMATIQ_PROCESSES=2
35 | DRAMATIQ_THREADS=8
36 |
37 | # Cache settings
38 | # set any of (redis, memory, file) | check docs/cache.md
39 | CACHE_BACKEND_TYPE=memory
40 | CACHE_TTL_SECONDS=300
41 | CACHE_FILE_PATH=cache
42 |
43 | # Scheduler settings
44 | SCHEDULER_ENABLED=true
45 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # Unit test / coverage reports
29 | htmlcov/
30 | .tox/
31 | .nox/
32 | .coverage
33 | .coverage.*
34 | .cache
35 | nosetests.xml
36 | coverage.xml
37 | *.cover
38 | *.py,cover
39 | .hypothesis/
40 | .pytest_cache/
41 |
42 | # Jupyter Notebook
43 | .ipynb_checkpoints
44 |
45 | # Environments
46 | .env
47 | .venv
48 | env/
49 | venv/
50 | ENV/
51 | env.bak/
52 | venv.bak/
53 | .python-version
54 |
55 | # VS Code
56 | .vscode/
57 | *.code-workspace
58 |
59 | # PyCharm
60 | .idea/
61 |
62 | # mypy
63 | .mypy_cache/
64 | .dmypy.json
65 | dmypy.json
66 |
67 | # Logs
68 | *.log
69 | logs/
70 |
71 | # Database
72 | *.sqlite
73 | *.db
74 | *.sqlite3
75 |
76 | # Other
77 | .DS_Store
78 | .AppleDouble
79 | .LSOverride
80 | node_modules/
81 |
--------------------------------------------------------------------------------
/DEVELOPER-GUIDE.md:
--------------------------------------------------------------------------------
1 | # Developer Guide
2 |
3 | This document provides guidance for developers working on the FastAPI-Ignite boilerplate project. It covers the architecture, development workflow, and best practices.
4 |
5 | ## Table of Contents
6 |
7 | - [Architecture Overview](#architecture-overview)
8 | - [Project Structure](#project-structure)
9 | - [Development Environment Setup](#development-environment-setup)
10 | - [Database Management](#database-management)
11 | - [API Development](#api-development)
12 | - [Background Tasks](#background-tasks)
13 | - [Scheduled Tasks](#scheduled-tasks)
14 | - [Caching](#caching)
15 | - [Testing](#testing)
16 | - [Docker Development](#docker-development)
17 | - [Deployment](#deployment)
18 |
19 | ## Architecture Overview
20 |
21 | FastAPI-Ignite is built with a clean architecture that separates concerns into distinct layers:
22 |
23 | 1. **API Layer**: Handles HTTP requests/responses using FastAPI
24 | 2. **Service Layer**: Contains business logic
25 | 3. **Repository Layer**: Handles data access through SQLAlchemy
26 | 4. **Domain Layer**: Core business entities and logic
27 |
28 | The application also employs several subsystems:
29 | - **Task Queue**: Dramatiq for background task processing
30 | - **Scheduler**: APScheduler for periodic tasks
31 | - **Cache**: Multi-backend caching system (Redis, File, Memory)
32 | - **Config**: Environment variable based configuration
33 |
34 | ### Configuration Loading
35 | When the application starts (via `get_settings()` in `src/core/config.py`), settings are loaded from:
36 | 1. Environment variables (including from `.env`)
37 |
38 | The settings are cached and drive application behavior.
39 |
40 | ## Project Structure
41 |
42 | ```
43 | fastapi-ignite/
44 | │
45 | ├── alembic/ # Database migrations
46 | │
47 | ├── src/ # Application source code
48 | │ ├── api/ # API routes and dependencies
49 | │ │ ├── v1/ # API version 1
50 | │ │ │ ├── endpoints/ # API endpoints by resource
51 | │ │ │ └── router.py # v1 router configuration
52 | │ │ └── deps.py # Shared API dependencies
53 | │ │
54 | │ ├── cache/ # Caching utilities
55 | │ │ ├── backends/ # Cache backend implementations
56 | │ │ │ ├── base.py # Abstract base class
57 | │ │ │ ├── redis.py # Redis backend
58 | │ │ │ ├── file.py # File-based backend
59 | │ │ │ ├── memory.py # In-memory backend
60 | │ │ │ └── factory.py # Cache backend factory
61 | │ │ ├── dependencies.py # Cache dependency provider
62 | │ │ └── decorators.py # Caching decorators
63 | │ │
64 | │ ├── core/ # Core application code
65 | │ │ ├── config.py # Configuration management
66 | │ │ ├── events.py # Startup/shutdown events
67 | │ │ ├── exceptions.py # Exception handling
68 | │ │ └── logging.py # Logging configuration
69 | │ │
70 | │ ├── db/ # Database models and sessions
71 | │ │ ├── base.py # Base SQLAlchemy models
72 | │ │ ├── session.py # Database session management
73 | │ │ └── models/ # SQLAlchemy models
74 | │ │
75 | │ ├── schedulers/ # Scheduled tasks
76 | │ │ ├── jobs.py # Scheduled job definitions
77 | │ │ └── scheduler.py # APScheduler configuration
78 | │ │
79 | │ ├── schemas/ # Pydantic schemas
80 | │ ├── services/ # Business logic services
81 | │ ├── tasks/ # Background tasks
82 | │ └── utils/ # Utility functions
83 | │
84 | ├── tests/ # Test suite
85 | ├── .env.example # Example environment variables
86 | ├── alembic.ini # Alembic configuration
87 | ├── docker-compose.yml # Docker Compose configuration
88 | ├── Dockerfile # Docker build configuration
89 | ├── main.py # Application entry point
90 | ├── requirements.txt # Project dependencies
91 | └── requirements-dev.txt # Development dependencies
92 | ```
93 |
94 | ## Development Environment Setup
95 |
96 | ### Prerequisites
97 |
98 | - Python 3.10+
99 | - PostgreSQL
100 |
101 | ### Setting up locally
102 |
103 | 1. **Clone the repository**:
104 | ```cmd
105 | git clone https://github.com/bakrianoo/fastapi-ignite
106 | cd fastapi-ignite
107 | ```
108 |
109 | 2. **Create a virtual environment**:
110 | ```cmd
111 | python -m venv venv
112 | venv\Scripts\activate
113 | ```
114 |
115 | 3. **Install dependencies**:
116 | ```cmd
117 | pip install -r requirements.txt
118 | pip install -r requirements-dev.txt
119 | ```
120 |
121 | 4. **Set up environment variables**:
122 | ```cmd
123 | copy .env.example .env
124 | ```
125 | Edit the .env file with your configuration. All environment settings are now consolidated in this single file.
126 |
127 | 5. **Run database migrations**:
128 | ```cmd
129 | alembic upgrade head
130 | ```
131 |
132 | 6. Start the API server
133 | ```bash
134 | python cli.py api --reload
135 | ```
136 |
137 | 7. Run database migrations via the CLI (an alternative to step 5)
138 | ```bash
139 | python cli.py db migrate
140 | ```
141 |
142 | 8. Start the background worker
143 | ```bash
144 | python cli.py worker
145 | ```
146 |
147 | 9. Start the scheduler
148 | ```bash
149 | python cli.py scheduler
150 | ```
151 |
152 | ### VS Code Configuration
153 |
154 | For VS Code users, here's a recommended `settings.json` configuration:
155 |
156 | ```json
157 | {
158 | "python.linting.enabled": true,
159 | "python.linting.flake8Enabled": true,
160 | "python.linting.mypyEnabled": true,
161 | "python.formatting.provider": "black",
162 | "python.formatting.blackArgs": ["--line-length", "88"],
163 | "editor.formatOnSave": true,
164 | "editor.codeActionsOnSave": {
165 | "source.organizeImports": true
166 | },
167 | "python.testing.pytestEnabled": true,
168 | "python.testing.unittestEnabled": false,
169 | "python.testing.nosetestsEnabled": false
170 | }
171 | ```
172 |
173 | ## Database Management
174 |
175 | ### Models
176 |
177 | SQLAlchemy models are defined in `src/db/models/`. Each model should inherit from the base classes in `src/db/base.py`:
178 |
179 | ```python
180 | from src.db.base import Base, TableNameMixin, TimestampMixin, UUIDMixin
181 |
182 | class YourModel(Base, UUIDMixin, TableNameMixin, TimestampMixin):
183 | # Define your model here
184 | name = Column(String, index=True)
185 | ```
186 |
187 | ### Migrations
188 |
189 | We use Alembic for database migrations:
190 |
191 | 1. **Create a new migration**:
192 | ```cmd
193 | alembic revision --autogenerate -m "description of changes"
194 | ```
195 |
196 | 2. **Apply migrations**:
197 | ```cmd
198 | alembic upgrade head
199 | ```
200 |
201 | 3. **Downgrade migrations**:
202 | ```cmd
203 | alembic downgrade -1
204 | ```
205 |
206 | ## API Development
207 |
208 | ### Creating a New API Endpoint
209 |
210 | 1. **Define a Pydantic schema** in `src/schemas/`:
211 | ```python
212 | from pydantic import BaseModel, Field
213 |
214 | class ItemBase(BaseModel):
215 | name: str = Field(..., description="Item name")
216 | description: str | None = Field(None, description="Item description")
217 |
218 | class ItemCreate(ItemBase):
219 | pass
220 |
221 | class ItemResponse(ItemBase):
222 | id: uuid.UUID
223 | created_at: datetime
224 |
225 | model_config = {"from_attributes": True}
226 | ```
227 |
228 | 2. **Create a service** in `src/services/`:
229 | ```python
230 | class ItemService:
231 | @staticmethod
232 | async def create_item(db: AsyncSession, item_data: ItemCreate) -> Item:
233 | # Service logic
234 | ```
235 |
236 | 3. **Add an API endpoint** in `src/api/v1/endpoints/`:
237 | ```python
238 | @router.post("/", response_model=ItemResponse)
239 | async def create_item(item_data: ItemCreate, db: AsyncSession = Depends(get_db_session)):
240 | return await ItemService.create_item(db, item_data)
241 | ```
242 |
243 | 4. **Register the router** in `src/api/v1/router.py`:
244 | ```python
245 | from src.api.v1.endpoints import your_endpoint
246 | router.include_router(your_endpoint.router)
247 | ```
248 |
249 | ### API Versioning
250 |
251 | Each API version has its own directory (`v1`, `v2`, etc.) with separate routers and endpoints. This allows for multiple API versions to coexist.
252 |
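For example, the v1 router gathers its endpoint routers under a version prefix. A minimal sketch (the actual `src/api/v1/router.py` may differ):

```python
# src/api/v1/router.py (illustrative sketch)
from fastapi import APIRouter

from src.api.v1.endpoints import items

# All v1 endpoints are grouped under the /v1 prefix
router = APIRouter(prefix="/v1")
router.include_router(items.router)
```

In `main.py` this router is mounted under `settings.API_PREFIX`, so v1 endpoints resolve to paths such as `/api/v1/items/`; a hypothetical `v2` package would follow the same pattern and be included alongside it.
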
253 | ### API Documentation
254 | FastAPI automatically exposes interactive docs for every version:
255 | - **Swagger UI**: `http://<host>:<port>{API_PREFIX}/docs` (e.g. `http://localhost:8000/api/docs`)
256 | - **ReDoc**: `http://<host>:<port>{API_PREFIX}/redoc` (e.g. `http://localhost:8000/api/redoc`)
257 | - **OpenAPI schema JSON**: `http://<host>:<port>{API_PREFIX}/openapi.json` (e.g. `http://localhost:8000/api/openapi.json`)
258 |
259 | These paths come from the `docs_url`, `redoc_url`, and `openapi_url` parameters in `create_application()` (see `main.py`).
260 |
261 | ## Background Tasks
262 |
263 | ### Adding a New Background Task
264 |
265 | 1. **Define a task** in `src/tasks/jobs.py`:
266 | ```python
267 | @dramatiq.actor(
268 | queue_name="default",
269 | max_retries=3,
270 | time_limit=60000,
271 | )
272 | def process_something(item_id: str) -> Dict:
273 | # Task implementation
274 | ```
275 |
276 | 2. **Call the task** from your service or API:
277 | ```python
278 | from src.tasks.jobs import process_something
279 |
280 | # Call the task
281 | process_something.send(str(item_id))
282 | ```
283 |
284 | 3. **Run the worker** to process tasks:
285 | ```cmd
286 | python -m dramatiq src.tasks.jobs
287 | ```
288 |
289 | ## Scheduled Tasks
290 |
291 | ### Adding a New Scheduled Task
292 |
293 | 1. **Define a scheduled job** in `src/schedulers/jobs.py`:
294 | ```python
295 | async def your_scheduled_job():
296 | # Job implementation
297 | ```
298 |
299 | 2. **Register the job** in the `setup_jobs` function:
300 | ```python
301 | def setup_jobs(scheduler: AsyncIOScheduler) -> None:
302 | # Add your job
303 | scheduler.add_job(
304 | your_scheduled_job,
305 | trigger="interval",
306 | hours=1,
307 | id="your_job_id",
308 | replace_existing=True,
309 | )
310 | ```
311 |
312 | ### Enabling and Disabling the Scheduler
313 |
314 | FastAPI-Ignite lets you control whether the APScheduler runs using several methods:
315 |
316 | #### Environment Configuration
317 |
318 | You can enable or disable the scheduler in the .env file:
319 |
320 | ```
321 | # Scheduler settings
322 | SCHEDULER_ENABLED=true # Set to false to disable the scheduler
323 | ```
324 |
325 | #### Command Line Interface
326 |
327 | When using the CLI, you can override the scheduler setting:
328 |
329 | 1. **When running the API server**:
330 | ```cmd
331 | # Enable the scheduler (regardless of config setting)
332 | python cli.py api --scheduler-enabled
333 |
334 | # Disable the scheduler (regardless of config setting)
335 | python cli.py api --scheduler-disabled
336 | ```
337 |
338 | 2. **When running the scheduler directly**:
339 | ```cmd
340 | # Enable the scheduler (regardless of config setting)
341 | python cli.py scheduler --enabled
342 |
343 | # Disable the scheduler (regardless of config setting)
344 | python cli.py scheduler --disabled
345 | ```
346 |
347 | #### Environment Variables
348 |
349 | You can also control the scheduler with environment variables:
350 |
351 | ```cmd
352 | # Enable the scheduler
353 | set SCHEDULER_ENABLED=true
354 | python cli.py api
355 |
356 | # Disable the scheduler
357 | set SCHEDULER_ENABLED=false
358 | python cli.py api
359 | ```
360 |
361 | This is useful for deployment environments where you might want to run the scheduler on only one instance of your application.
362 |
363 | ## Caching
364 |
365 | FastAPI-Ignite supports multiple cache backends that can be configured through environment variables or configuration files. For detailed information on caching, see the [Cache Documentation](docs/cache.md).
366 |
367 | ### Available Cache Backends
368 |
369 | The application supports three different cache backends:
370 |
371 | 1. **Redis** - Default, distributed cache for production environments
372 | 2. **File** - Persistent file-based cache for single-server deployments
373 | 3. **Memory** - In-memory cache for development and testing
374 |
375 | ### Configuring Cache Backends
376 |
377 | You can configure which cache backend to use through the following settings:
378 |
379 | ```
380 | # In .env or environment variables
381 | CACHE_BACKEND_TYPE=redis # Options: "redis", "file", "memory"
382 | CACHE_TTL_SECONDS=300 # Default TTL for cached items
383 | CACHE_FILE_PATH=cache # Path for file-based cache (relative to project root)
384 | ```
394 |
395 | ### Recommended Cache Backends for Different Environments
396 |
397 | - **Development**: Use "memory" backend for faster startup and no external dependencies
398 | - **Testing**: Use "memory" backend for test isolation
399 | - **Staging**: Use "redis" or "file" depending on your infrastructure
400 | - **Production**: Use "redis" for scalable, distributed caching
401 |
402 | ### Basic Usage Patterns
403 |
404 | Here are the three main ways to use the caching system:
405 |
406 | #### 1. Function-Level Caching with Decorators
407 |
408 | ```python
409 | from src.cache.decorators import cached
410 |
411 | @cached(ttl=300, key_prefix="user")
412 | async def get_user(user_id: str) -> Dict:
413 | # Function implementation - result will be automatically cached
414 | return user_data
415 | ```
416 |
417 | #### 2. Cache Invalidation with Decorators
418 |
419 | ```python
420 | from src.cache.decorators import invalidate_cache
421 |
422 | @invalidate_cache(key_pattern="user:*")
423 | async def update_user(user_id: str, data: Dict) -> Dict:
424 | # After this function completes, matching cache keys will be invalidated
425 | return updated_user
426 | ```
427 |
428 | #### 3. Direct Cache Access in Endpoints
429 |
430 | ```python
431 | from src.cache import CacheBackend, get_cache
432 | from fastapi import Depends
433 |
434 | @router.get("/items/{item_id}")
435 | async def get_item(item_id: str, cache: CacheBackend = Depends(get_cache)):
436 | # Manual cache handling gives you more control
437 | cache_key = f"item:{item_id}"
438 |
439 | # Try to get from cache first
440 | cached_item = await cache.get(cache_key)
441 | if cached_item:
442 | return json.loads(cached_item)
443 |
444 | # Get from database if not in cache
445 | item = await get_item_from_db(item_id)
446 |
447 | # Store in cache for future requests
448 | await cache.set(cache_key, json.dumps(item), ex=300)
449 | return item
450 | ```
451 |
452 | For more detailed examples and advanced usage, refer to the [Cache Documentation](docs/cache.md).
453 |
454 | ### Cache Best Practices
455 |
456 | 1. **Use consistent key prefixes** for related items (e.g., `user:123`, `user:settings:123`)
457 | 2. **Set appropriate TTLs** based on data volatility and access patterns
458 | 3. **Implement proper cache invalidation** when data changes
459 | 4. **Handle cache misses gracefully** with fallback to database queries
460 | 5. **Serialize data consistently** (typically using JSON)
461 | 6. **Consider cache stampedes** for high-traffic applications
462 |
463 | For detailed information on cache implementation, advanced usage patterns, and performance considerations, see the [Cache Documentation](docs/cache.md).
464 |
465 | ## Testing
466 |
467 | ### Running Tests
468 |
469 | We use pytest for testing:
470 |
471 | ```cmd
472 | pytest
473 | ```
474 |
475 | Run with coverage:
476 |
477 | ```cmd
478 | pytest --cov=src
479 | ```
480 |
481 | ### Writing Tests
482 |
483 | 1. **API Tests** go in `tests/test_api/`
484 | 2. **Service Tests** go in `tests/test_services/`
485 | 3. **Model Tests** go in `tests/test_models/`
486 |
487 | Example test:
488 |
489 | ```python
490 | import pytest
491 | from httpx import AsyncClient
492 |
493 | @pytest.mark.asyncio
494 | async def test_create_item(client: AsyncClient):
495 | response = await client.post(
496 | "/api/v1/items/",
497 | json={"name": "Test Item", "description": "Test Description"}
498 | )
499 | assert response.status_code == 201
500 | assert response.json()["name"] == "Test Item"
501 | ```
502 |
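The `client` fixture used above is expected to come from `tests/conftest.py`. A minimal sketch of such a fixture, assuming `httpx`'s `ASGITransport` together with `asgi-lifespan` (both listed in the requirements files); the project's actual fixture may differ:

```python
# tests/conftest.py (illustrative sketch)
import pytest_asyncio
from asgi_lifespan import LifespanManager
from httpx import ASGITransport, AsyncClient

from main import app


@pytest_asyncio.fixture
async def client():
    # Run the app's startup/shutdown handlers and serve it in-process
    async with LifespanManager(app):
        transport = ASGITransport(app=app)
        async with AsyncClient(transport=transport, base_url="http://test") as ac:
            yield ac
```
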
503 | ## Docker Development
504 |
505 | The root `Dockerfile` defines a shared `base` stage plus two build targets:
506 | 1. **production** builds on the base image and sets a `CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]` without reload. Build it with:
507 | ```cmd
508 | docker build --target production -t rankyx:prod .
509 | ```
510 | 2. **development** (the final stage) installs dev dependencies and sets a `CMD` with `--reload` for hot reloading. Build or run it without specifying `--target`:
511 | ```cmd
512 | docker build -t rankyx:dev .
513 | ```
514 |
515 | You can choose which image to run: the development image includes auto-reload, whereas the production image is leaner and runs without reload.
516 |
517 | ### Starting the Docker environment
518 |
519 | ```cmd
520 | docker-compose up -d
521 | ```
522 |
523 | ### Running commands in Docker
524 |
525 | ```cmd
526 | docker-compose exec api alembic upgrade head
527 | ```
528 |
529 | ### Rebuilding after changes
530 |
531 | ```cmd
532 | docker-compose build
533 | docker-compose up -d
534 | ```
535 |
536 | ## Deployment
537 |
538 | ### Production Configuration
539 |
540 | 1. **Create a production environment file**:
541 | ```cmd
542 | copy .env.example .env.prod
543 | ```
544 |
545 | 2. **Update the production settings** with appropriate values for your production environment.
546 |
547 | ### Docker Production Deployment
548 |
549 | Build and run production Docker containers:
550 |
551 | ```cmd
552 | docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d
553 | ```
554 |
555 | ### Database Migrations in Production
556 |
557 | Always back up your database before running migrations in production:
558 |
559 | ```cmd
560 | # In the Docker container
561 | docker-compose exec api alembic upgrade head
562 | ```
563 |
564 | ### Monitoring and Logging
565 |
566 | In production, you can integrate the application with monitoring systems like Prometheus and logging solutions like ELK Stack or Datadog.
567 |
568 | ## Best Practices
569 |
570 | 1. **Follow PEP 8** for code style
571 | 2. **Write tests** for new features
572 | 3. **Document** your code with docstrings
573 | 4. **Type hint** all functions
574 | 5. **Validate data** with Pydantic
575 | 6. **Handle errors** gracefully
576 | 7. **Use async/await** consistently
577 | 8. **Log appropriately** at different levels
578 | 9. **Create migrations** for database changes
579 | 10. **Validate environment** before deployment
580 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim as base
2 |
3 | # Set environment variables
4 | ENV PYTHONDONTWRITEBYTECODE=1 \
5 | PYTHONUNBUFFERED=1 \
6 | PIP_NO_CACHE_DIR=1 \
7 | PIP_DISABLE_PIP_VERSION_CHECK=on
8 |
9 | WORKDIR /app
10 |
11 | # Install system dependencies
12 | RUN apt-get update && \
13 | apt-get install -y --no-install-recommends \
14 | build-essential \
15 | curl \
16 | netcat-traditional \
17 | && apt-get clean \
18 | && rm -rf /var/lib/apt/lists/*
19 |
20 | # Install Python dependencies
21 | COPY requirements.txt .
22 | RUN pip install --no-cache-dir -r requirements.txt
23 |
24 | # Production stage
25 | FROM base as production
26 |
27 | # Copy project
28 | COPY . .
29 |
30 | # Create a non-root user
31 | RUN addgroup --system app && \
32 | adduser --system --ingroup app app && \
33 | chown -R app:app /app
34 |
35 | # Switch to non-root user
36 | USER app
37 |
38 | # Run the application
39 | CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
40 |
41 | # Development stage
42 | FROM base as development
43 |
44 | # Install development dependencies
45 | COPY requirements-dev.txt .
46 | RUN pip install --no-cache-dir -r requirements-dev.txt
47 |
48 | # Copy project
49 | COPY . .
50 |
51 | # Run the application with hot reload
52 | CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Abu Bakr Soliman
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FastAPI-Ignite Boilerplate
2 | 
14 | **FastAPI-Ignite** Boilerplate is a production-ready FastAPI boilerplate application with a comprehensive set of features for modern web backend development.
15 |
16 | ## Core Technologies
17 |
18 | - **FastAPI**: High-performance async web framework for building APIs
19 | - **SQLAlchemy**: SQL toolkit and ORM with async support
20 | - **Pydantic v2**: Data validation and settings management using Python type hints
21 | - **PostgreSQL**: Powerful open-source relational database
22 | - **Redis**: In-memory data store for caching and message broker
23 | - **Dramatiq**: Distributed task processing for background jobs
24 | - **APScheduler**: Advanced Python scheduler for periodic tasks
25 | - **Alembic**: Database migration tool
26 |
27 | ## Features
28 |
29 | - ✅ **Modern Python codebase** using async/await syntax
30 | - ✅ **Structured project layout** for maintainability
31 | - ✅ **API versioning** to manage API evolution
32 | - ✅ **Database integration** with async SQLAlchemy 2.0
33 | - ✅ **Background task processing** with Dramatiq
34 | - ✅ **Scheduled tasks** with APScheduler
35 | - ✅ **Simple configuration** using environment variables
36 | - ✅ **Comprehensive logging** with structured logs
37 | - ✅ **Docker support** for easy deployment
38 | - ✅ **Database migrations** with Alembic
39 | - ✅ **Production-ready** with health checks, error handling, and more
40 | - ✅ **Advanced caching** with multiple backends (Redis, File, Memory) at function and API endpoint levels
41 |
42 | ## Quick Start
43 |
44 | 1. Clone the repository:
45 | ```bash
46 | git clone https://github.com/bakrianoo/fastapi-ignite.git
47 | cd fastapi-ignite
48 | ```
49 |
50 | 2. Set up environment:
51 | ```bash
52 | # Copy the example .env file and edit with your configuration
53 | cp .env.example .env
54 | ```
55 |
56 | ### Start with Docker:
57 | ```bash
58 | docker-compose up -d
59 | ```
60 |
61 | ### Setting up locally
62 |
63 | 1. **Create a virtual environment**:
64 | ```bash
65 | python -m venv venv
66 | source venv/bin/activate  # on Windows: venv\Scripts\activate
67 | ```
68 |
69 | 2. **Install dependencies**:
70 | ```bash
71 | pip install -r requirements.txt
72 | pip install -r requirements-dev.txt
73 | ```
74 |
75 | 3. **Set up environment variables**:
76 | ```bash
77 | cp .env.example .env
78 | ```
79 | Edit the .env file with your configuration. All environment settings are now consolidated in this single file.
80 |
81 | 4. **Run database migrations**:
82 | ```bash
83 | alembic upgrade head
84 | ```
85 |
86 | 5. Start the API server
87 | ```bash
88 | python cli.py api --reload
89 | ```
90 |
91 | 6. Run database migrations via the CLI (an alternative to step 4)
92 | ```bash
93 | python cli.py db migrate
94 | ```
95 |
96 | 7. Start the background worker
97 | ```bash
98 | python cli.py worker
99 | ```
100 |
101 | 8. Start the scheduler
102 | ```bash
103 | python cli.py scheduler
104 | ```
105 |
106 | 9. Access the API documentation:
107 | - Swagger UI: http://localhost:8000/api/docs
108 | - ReDoc: http://localhost:8000/api/redoc
109 |
110 | ## Development
111 |
112 | See [DEVELOPER GUIDE](DEVELOPER-GUIDE.md) for detailed development information.
113 |
114 | ## License
115 |
116 | This project is licensed under the MIT License - see the LICENSE file for details.
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
1 |
2 |
3 | [alembic]
4 | script_location = alembic
5 | [post_write_hooks]
6 | # post_write_hooks defines scripts or Python functions that are run
7 | # on newly generated revision scripts. See the documentation for further
8 | # detail and examples
9 |
10 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
11 | # hooks = black
12 | # black.type = console_scripts
13 | # black.entrypoint = black
14 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
15 |
16 | # Logging configuration
17 | [loggers]
18 | keys = root,sqlalchemy,alembic
19 |
20 | [handlers]
21 | keys = console
22 |
23 | [formatters]
24 | keys = generic
25 |
26 | [logger_root]
27 | level = WARN
28 | handlers = console
29 | qualname =
30 |
31 | [logger_sqlalchemy]
32 | level = WARN
33 | handlers =
34 | qualname = sqlalchemy.engine
35 |
36 | [logger_alembic]
37 | level = INFO
38 | handlers =
39 | qualname = alembic
40 |
41 | [handler_console]
42 | class = StreamHandler
43 | args = (sys.stderr,)
44 | level = NOTSET
45 | formatter = generic
46 |
47 | [formatter_generic]
48 | format = %(levelname)-5.5s [%(name)s] %(message)s
49 | datefmt = %H:%M:%S
50 |
--------------------------------------------------------------------------------
/alembic/env.py:
--------------------------------------------------------------------------------
1 | """
2 | Alembic environment configuration for database migrations
3 | """
4 | import asyncio
5 | import os
6 | import sys
7 | from logging.config import fileConfig
8 | from pathlib import Path
9 |
10 | from alembic import context
11 | from sqlalchemy import pool
12 | from sqlalchemy.engine import Connection
13 | from sqlalchemy.ext.asyncio import async_engine_from_config
14 |
15 | # Add the parent directory to sys.path to make src module importable
16 | BASE_DIR = Path(__file__).resolve().parent.parent
17 | sys.path.append(str(BASE_DIR))
18 |
19 | # Import SQLAlchemy models and Alembic config
20 | from src.core.config import settings
21 | from src.db.base import Base
22 |
23 | # This is the Alembic Config object
24 | config = context.config
25 |
26 | # Set the SQLAlchemy URL from settings
27 | sqlalchemy_url = str(settings.DATABASE_URI)
28 | config.set_main_option("sqlalchemy.url", sqlalchemy_url)
29 |
30 | # Interpret the config file for Python logging
31 | if config.config_file_name is not None:
32 | fileConfig(config.config_file_name)
33 |
34 | # Add model's MetaData object for autogenerate support
35 | target_metadata = Base.metadata
36 |
37 |
38 | def run_migrations_offline() -> None:
39 | """
40 | Run migrations in 'offline' mode.
41 |
42 | This configures the context with just a URL and not an Engine,
43 | though an Engine is acceptable here as well. By skipping the Engine creation
44 | we don't even need a DBAPI to be available.
45 | """
46 | context.configure(
47 | url=sqlalchemy_url,
48 | target_metadata=target_metadata,
49 | literal_binds=True,
50 | dialect_opts={"paramstyle": "named"},
51 | compare_type=True,
52 | )
53 |
54 | with context.begin_transaction():
55 | context.run_migrations()
56 |
57 |
58 | def do_run_migrations(connection: Connection) -> None:
59 | """
60 | Run migrations with a connection
61 | """
62 | context.configure(
63 | connection=connection,
64 | target_metadata=target_metadata,
65 | compare_type=True,
66 | )
67 |
68 | with context.begin_transaction():
69 | context.run_migrations()
70 |
71 |
72 | async def run_async_migrations() -> None:
73 | """
74 | Run migrations in async mode
75 | """
76 | config_section = config.get_section(config.config_ini_section)
77 | config_section["sqlalchemy.url"] = sqlalchemy_url
78 |
79 | connectable = async_engine_from_config(
80 | config_section,
81 | prefix="sqlalchemy.",
82 | poolclass=pool.NullPool,
83 | )
84 |
85 | async with connectable.connect() as connection:
86 | await connection.run_sync(do_run_migrations)
87 |
88 |
89 | def run_migrations_online() -> None:
90 | """
91 | Run migrations in 'online' mode
92 | """
93 | # Use asyncio to run async migrations
94 | asyncio.run(run_async_migrations())
95 |
96 |
97 | # Run the appropriate migration function
98 | if context.is_offline_mode():
99 | run_migrations_offline()
100 | else:
101 | run_migrations_online()
--------------------------------------------------------------------------------
/alembic/versions/001_initial.py:
--------------------------------------------------------------------------------
1 | """
2 | Initial database migration
3 | """
4 | from alembic import op
5 | import sqlalchemy as sa
6 | from sqlalchemy.dialects import postgresql
7 | import uuid
8 |
9 |
10 | # revision identifiers, used by Alembic
11 | revision = '001_initial'
12 | down_revision = None
13 | branch_labels = None
14 | depends_on = None
15 |
16 |
17 | def upgrade() -> None:
18 | # Create item table
19 | op.create_table(
20 | 'item',
21 | sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
22 | sa.Column('name', sa.String(255), nullable=False, index=True),
23 | sa.Column('description', sa.Text(), nullable=True),
24 | sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
25 | sa.Column('created_at', sa.DateTime(), nullable=False, default=sa.func.now()),
26 | sa.Column('updated_at', sa.DateTime(), nullable=False, default=sa.func.now(), onupdate=sa.func.now())
27 | )
28 |
29 |
30 | def downgrade() -> None:
31 | op.drop_table('item')
32 |
--------------------------------------------------------------------------------
/cli.py:
--------------------------------------------------------------------------------
1 | """
2 | Command-line utility for running various components of the application
3 | """
4 | import argparse
5 | import asyncio
6 | import os
7 | import sys
8 | from pathlib import Path
9 |
10 | # Add the project root directory to Python path
11 | root_dir = Path(__file__).resolve().parent
12 | sys.path.append(str(root_dir))
13 |
14 | # load .env using dotenv
15 | from dotenv import load_dotenv
16 | load_dotenv()
17 |
18 |
19 | def parse_args():
20 | """
21 | Parse command-line arguments
22 | """
23 | parser = argparse.ArgumentParser(
24 | description="FastAPI Boilerplate command-line utilities",
25 | formatter_class=argparse.ArgumentDefaultsHelpFormatter,
26 | )
27 |
28 | # Create subparsers for different commands
29 | subparsers = parser.add_subparsers(dest="command", help="Command to run")
30 |
31 | # API server command
32 | api_parser = subparsers.add_parser("api", help="Run the API server")
33 | api_parser.add_argument("--host", type=str, default="0.0.0.0", help="Host to bind")
34 | api_parser.add_argument("--port", type=int, default=8000, help="Port to bind")
35 | api_parser.add_argument("--reload", action="store_true", help="Auto-reload on code changes")
36 | api_parser.add_argument("--scheduler-enabled", action="store_true", dest="scheduler_enabled",
37 | default=None, help="Force enable the scheduler")
38 | api_parser.add_argument("--scheduler-disabled", action="store_false", dest="scheduler_enabled",
39 | default=None, help="Force disable the scheduler")
40 |
41 | # Worker command
42 | worker_parser = subparsers.add_parser("worker", help="Run the background task worker")
43 | worker_parser.add_argument("--processes", type=int, default=None, help="Number of worker processes")
44 | worker_parser.add_argument("--threads", type=int, default=None, help="Number of worker threads")
45 |
46 |
47 | # Scheduler command
48 | scheduler_parser = subparsers.add_parser("scheduler", help="Run the task scheduler")
49 | scheduler_parser.add_argument("--enabled", action="store_true", dest="scheduler_enabled",
50 | default=None, help="Force enable the scheduler")
51 | scheduler_parser.add_argument("--disabled", action="store_false", dest="scheduler_enabled",
52 | default=None, help="Force disable the scheduler")
53 |
54 | # DB command
55 | db_parser = subparsers.add_parser("db", help="Database operations")
56 | db_subparsers = db_parser.add_subparsers(dest="db_command", help="Database command")
57 |
58 | # DB init command
59 | db_init_parser = db_subparsers.add_parser("init", help="Initialize the database")
60 |
61 | # DB migrate command
62 | db_migrate_parser = db_subparsers.add_parser("migrate", help="Run database migrations")
63 | db_migrate_parser.add_argument("--revision", type=str, default="head", help="Revision to migrate to")
64 |
65 | return parser.parse_args()
66 |
67 |
68 | def run_api(args):
69 | """
70 | Run the API server
71 | """
72 | import uvicorn
73 |
74 | # Set environment variable for scheduler if specified
75 | if args.scheduler_enabled is not None:
76 | os.environ["SCHEDULER_ENABLED"] = str(args.scheduler_enabled).lower()
77 |
78 | uvicorn.run(
79 | "main:app",
80 | host=args.host,
81 | port=args.port,
82 | reload=args.reload,
83 | log_level="debug" if args.reload else "info",
84 | )
85 |
86 |
87 | def run_worker(args):
88 | """
89 | Run the background task worker
90 | """
91 | from src.core.config import settings
92 | from src.tasks.worker import run_worker
93 |
94 | # Override settings if arguments provided
95 | if args.processes is not None:
96 | os.environ["DRAMATIQ_PROCESSES"] = str(args.processes)
97 | if args.threads is not None:
98 | os.environ["DRAMATIQ_THREADS"] = str(args.threads)
99 |
100 | # Run the worker with proper arguments
101 | run_worker()
102 |
103 |
104 | async def run_db_init():
105 | """
106 | Initialize the database
107 | """
108 | from src.db.session import init_db
109 |
110 | print("Initializing database...")
111 | await init_db()
112 | print("Database initialized successfully.")
113 |
114 |
115 | def run_db_migrate(args):
116 | """
117 | Run database migrations
118 | """
119 | import subprocess
120 |
121 | print(f"Running database migrations to revision {args.revision}...")
122 | result = subprocess.run(["alembic", "upgrade", args.revision], check=True)
123 |
124 | if result.returncode == 0:
125 | print("Database migration completed successfully.")
126 | else:
127 | print("Database migration failed.")
128 | sys.exit(1)
129 |
130 |
131 | def run_scheduler(args):
132 | """
133 | Run the task scheduler
134 | """
135 | import os
136 | from src.schedulers.scheduler_runner import main
137 |
138 | # Override scheduler enabled setting if specified in args
139 | if args.scheduler_enabled is not None:
140 | os.environ["SCHEDULER_ENABLED"] = str(args.scheduler_enabled).lower()
141 |
142 | print("Starting task scheduler...")
143 | asyncio.run(main())
144 |
145 |
146 | def main():
147 | """
148 | Main entry point
149 | """
150 | args = parse_args()
151 |
152 | if args.command == "api":
153 | run_api(args)
154 | elif args.command == "worker":
155 | run_worker(args)
156 | elif args.command == "scheduler":
157 | run_scheduler(args)
158 | elif args.command == "db":
159 | if args.db_command == "init":
160 | asyncio.run(run_db_init())
161 | elif args.db_command == "migrate":
162 | run_db_migrate(args)
163 | else:
164 | print("Unknown database command. Use --help for assistance.")
165 | sys.exit(1)
166 | else:
167 | print("Unknown command. Use --help for assistance.")
168 | sys.exit(1)
169 |
170 |
171 | if __name__ == "__main__":
172 | main()
173 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 |
3 | services:
4 | # FastAPI application
5 | api:
6 | build:
7 | context: .
8 | dockerfile: Dockerfile
9 | command: sh -c "alembic upgrade head && uvicorn main:app --host 0.0.0.0 --port 8000 --reload"
10 | volumes:
11 | - .:/app
12 | ports:
13 | - "8000:8000"
14 | env_file:
15 | - .env
16 | depends_on:
17 | - postgres
18 | - redis
19 | networks:
20 | - rankyx_network
21 | restart: unless-stopped
22 |
23 | # Background task worker
24 | worker:
25 | build:
26 | context: .
27 | dockerfile: Dockerfile
28 | command: python cli.py worker
29 | volumes:
30 | - .:/app
31 | env_file:
32 | - .env
33 | depends_on:
34 | - postgres
35 | - redis
36 | networks:
37 | - rankyx_network
38 | restart: unless-stopped
39 |
40 | # Scheduler service
41 | scheduler:
42 | build:
43 | context: .
44 | dockerfile: Dockerfile
45 | command: python cli.py scheduler
46 | volumes:
47 | - .:/app
48 | env_file:
49 | - .env
50 | depends_on:
51 | - postgres
52 | - redis
53 | networks:
54 | - rankyx_network
55 | restart: unless-stopped
56 |
57 | # PostgreSQL database
58 | postgres:
59 | image: postgres:17
60 | volumes:
61 | - postgres_data:/var/lib/postgresql/data/
62 | environment:
63 | - POSTGRES_USER=${POSTGRES_USER:-postgres}
64 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
65 | - POSTGRES_DB=${POSTGRES_DB:-rankyx_dev}
66 | ports:
67 | - "5432:5432"
68 | networks:
69 | - rankyx_network
70 | restart: unless-stopped
71 |
72 | # Redis for caching, message broker and job store
73 | redis:
74 | image: redis:7.0-alpine
75 | volumes:
76 | - redis_data:/data
77 | ports:
78 | - "6379:6379"
79 | networks:
80 | - rankyx_network
81 | restart: unless-stopped
82 |
83 | networks:
84 | rankyx_network:
85 | driver: bridge
86 |
87 | volumes:
88 | postgres_data:
89 | redis_data:
--------------------------------------------------------------------------------
/docs/assets/logo-main.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bakrianoo/fastapi-ignite/c253607884fe86f9001d01ac911f901ea3a51d98/docs/assets/logo-main.png
--------------------------------------------------------------------------------
/docs/cache.md:
--------------------------------------------------------------------------------
1 | # Cache Configuration
2 |
3 | FastAPI-Ignite supports multiple cache backends that can be configured through environment variables
4 | or configuration files.
5 |
6 | ## Available Cache Backends
7 |
8 | - **Redis**: Redis server-based caching (default)
9 | - **File**: Persistent file-based caching
10 | - **Memory**: In-memory caching (not persistent across restarts)
11 |
12 | ## Configuration Options
13 |
14 | The following settings can be configured in your `.env` file or environment variables:
15 |
16 | ```
17 | # Cache backend type (redis, file, memory)
18 | CACHE_BACKEND_TYPE=redis
19 |
20 | # Cache TTL in seconds
21 | CACHE_TTL_SECONDS=300
22 |
23 | # Path for file-based cache (relative to project root)
24 | CACHE_FILE_PATH=cache
25 | ```
26 |
27 | ## Usage Examples
28 |
29 | ### Caching Function Results
30 |
31 | ```python
32 | from src.cache.decorators import cached
33 |
34 | @cached(ttl=60, key_prefix="user")
35 | async def get_user_data(user_id: int):
36 | # Function logic here
37 | return data
38 | ```
39 |
40 | ### Invalidating Cache
41 |
42 | ```python
43 | from src.cache.decorators import invalidate_cache
44 |
45 | @invalidate_cache(key_pattern="user:*")
46 | async def update_user(user_id: int, data: dict):
47 | # Update logic here
48 | return updated_user
49 | ```
50 |
51 | ### Using Cache Directly in FastAPI Endpoints
52 |
53 | ```python
54 | from fastapi import APIRouter, Depends
55 | from src.cache import CacheBackend, get_cache
56 |
57 | router = APIRouter()
58 |
59 | @router.get("/items/{item_id}")
60 | async def get_item(item_id: int, cache: CacheBackend = Depends(get_cache)):
61 | # Try to get from cache
62 | cache_key = f"item:{item_id}"
63 | cached_item = await cache.get(cache_key)
64 |
65 | if cached_item:
66 | return json.loads(cached_item)
67 |
68 | # Get from database if not cached
69 | item = await item_service.get_item(item_id)
70 |
71 | # Store in cache
72 | await cache.set(cache_key, json.dumps(item), ex=300)
73 |
74 | return item
75 | ```
76 |
--------------------------------------------------------------------------------
/docs/cache_design.md:
--------------------------------------------------------------------------------
1 | # Cache System Design
2 |
3 | This document outlines the design and implementation of the cache system in the FastAPI-Ignite boilerplate.
4 |
5 | ## Overview
6 |
7 | The cache system is designed using the Strategy pattern, which allows for multiple cache backend implementations to be used interchangeably. The system supports:
8 |
9 | 1. Redis-based caching
10 | 2. File-based caching
11 | 3. In-memory caching
12 |
13 | ## Architecture
14 |
15 | The cache system is built around the following components:
16 |
17 | ### Core Components
18 |
19 | - **CacheBackend** (Abstract Base Class): Defines the interface that all cache backends must implement (sketched below).
20 | - **RedisBackend**: Implementation using Redis.
21 | - **FileBackend**: Implementation using file system.
22 | - **MemoryBackend**: Implementation using in-memory dictionaries.
23 | - **Cache Factory**: Creates and manages the appropriate cache backend.
24 |
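A rough sketch of that interface, inferred from how the backend is used in this repository (`get`, `set`, `delete`, `scan`); the actual `src/cache/backends/base.py` may declare additional methods:

```python
import abc
from typing import Optional


class CacheBackend(abc.ABC):
    """Interface every cache backend implements (illustrative sketch)."""

    @abc.abstractmethod
    async def get(self, key: str) -> Optional[str]:
        """Return the cached value for `key`, or None on a miss."""

    @abc.abstractmethod
    async def set(self, key: str, value: str, ex: Optional[int] = None) -> None:
        """Store `value` under `key`, optionally expiring after `ex` seconds."""

    @abc.abstractmethod
    async def delete(self, *keys: str) -> int:
        """Delete the given keys and return how many were removed."""

    @abc.abstractmethod
    async def scan(self, cursor: str, pattern: str, count: int) -> tuple[str, list[str]]:
        """Iterate keys matching `pattern`, returning the next cursor and a batch of keys."""
```
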
25 | ### Decorator-based Caching
26 |
27 | The system provides decorators for easy caching of function results:
28 |
29 | - `@cached`: Caches function return values (a simplified sketch of this pattern follows below).
30 | - `@invalidate_cache`: Invalidates cache entries matching a pattern.
31 |
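A simplified sketch of how such a `@cached` decorator can be built. This is not the project's implementation; in particular, `get_cache_backend()` is a hypothetical accessor standing in for however the factory exposes the active backend:

```python
import functools
import json

from src.cache.backends.factory import get_cache_backend  # hypothetical accessor


def cached(ttl: int = 300, key_prefix: str = ""):
    """Cache the JSON-serializable result of an async function."""

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            cache = get_cache_backend()
            # Build a deterministic key from the prefix and the call arguments
            key = f"{key_prefix}:{func.__name__}:{args}:{sorted(kwargs.items())}"

            hit = await cache.get(key)
            if hit is not None:
                return json.loads(hit)

            result = await func(*args, **kwargs)
            await cache.set(key, json.dumps(result), ex=ttl)
            return result

        return wrapper

    return decorator
```
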
32 | ### Direct Cache Usage
33 |
34 | Endpoints and services can directly access the cache backend through dependency injection.
35 |
36 | ## Configuration
37 |
38 | The cache system is configurable through the following settings:
39 |
40 | - **CACHE_BACKEND_TYPE**: The type of cache backend to use ("redis", "file", "memory").
41 | - **CACHE_TTL_SECONDS**: Default time-to-live for cache entries.
42 | - **CACHE_FILE_PATH**: Path for file-based cache (when using "file" backend).
43 |
44 | These can be configured in:
45 | - `.env` file
46 | - TOML configuration files
47 | - Environment variables
48 |
49 | ## Usage Examples
50 |
51 | ### Caching Function Results
52 |
53 | ```python
54 | from src.cache import cached
55 |
56 | @cached(ttl=300, key_prefix="user")
57 | async def get_user(user_id: int):
58 | # Function logic
59 | return user_data
60 | ```
61 |
62 | ### Invalidating Cache
63 |
64 | ```python
65 | from src.cache import invalidate_cache
66 |
67 | @invalidate_cache(key_pattern="user:*")
68 | async def update_user(user_id: int, data: dict):
69 | # Update logic
70 | return updated_user
71 | ```
72 |
73 | ### Direct Cache Access
74 |
75 | ```python
76 | from src.cache import CacheBackend, get_cache
77 |
78 | @router.get("/items/{item_id}")
79 | async def get_item(
80 | item_id: int,
81 | cache: CacheBackend = Depends(get_cache)
82 | ):
83 | cache_key = f"item:{item_id}"
84 |
85 | # Try to get from cache
86 | cached_value = await cache.get(cache_key)
87 | if cached_value:
88 | return json.loads(cached_value)
89 |
90 | # Get from database
91 | item = await db_get_item(item_id)
92 |
93 | # Store in cache
94 | await cache.set(cache_key, json.dumps(item), ex=300)
95 | return item
96 | ```
97 |
98 | ## Best Practices
99 |
100 | 1. Use consistent cache key naming conventions.
101 | 2. Set appropriate TTL values based on data volatility.
102 | 3. Implement proper cache invalidation strategies.
103 | 4. Use the appropriate backend based on deployment needs.
104 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | """
2 | FastAPI application entry point
3 | """
4 | import logging
5 | import os
6 | from contextlib import asynccontextmanager
7 |
8 | from fastapi import FastAPI
9 | from fastapi.middleware.cors import CORSMiddleware
10 |
11 | from src.api.v1.router import router as api_v1_router
12 | from src.core.config import settings
13 | from src.core.events import create_start_app_handler, create_stop_app_handler
14 | from src.core.logging import setup_logging
15 |
16 |
17 | @asynccontextmanager
18 | async def lifespan(app: FastAPI):
19 | """
20 | FastAPI lifespan event handler for startup and shutdown events
21 | """
22 | # Setup logging
23 | setup_logging()
24 |
25 | # Override scheduler setting from environment variable (set by CLI)
26 | scheduler_env = os.environ.get("SCHEDULER_ENABLED")
27 | if scheduler_env is not None:
28 | from src.core.config import settings
29 | settings.scheduler.enabled = scheduler_env.lower() == "true"
30 |
31 | # Run startup event handlers
32 | start_handler = create_start_app_handler()
33 | await start_handler()
34 |
35 | # Yield control back to FastAPI
36 | yield
37 |
38 | # Run shutdown event handlers
39 | stop_handler = create_stop_app_handler()
40 | await stop_handler()
41 |
42 |
43 | def create_application() -> FastAPI:
44 | """
45 | Create and configure the FastAPI application
46 | """
47 | application = FastAPI(
48 | title=settings.PROJECT_NAME,
49 | description=settings.PROJECT_DESCRIPTION,
50 | version=settings.VERSION,
51 | docs_url=f"{settings.API_PREFIX}/docs",
52 | redoc_url=f"{settings.API_PREFIX}/redoc",
53 | openapi_url=f"{settings.API_PREFIX}/openapi.json",
54 | lifespan=lifespan,
55 | )
56 |
57 | # Set up CORS middleware
58 | application.add_middleware(
59 | CORSMiddleware,
60 | allow_origins=settings.CORS_ORIGINS,
61 | allow_credentials=True,
62 | allow_methods=["*"],
63 | allow_headers=["*"],
64 | )
65 |
66 | # Include routers
67 | application.include_router(api_v1_router, prefix=settings.API_PREFIX)
68 |
69 | return application
70 |
71 |
72 | app = create_application()
73 |
74 |
75 | if __name__ == "__main__":
76 | import uvicorn
77 |
78 | uvicorn.run(
79 | "main:app",
80 | host=settings.HOST,
81 | port=settings.PORT,
82 | reload=settings.DEBUG,
83 | log_level="debug" if settings.DEBUG else "info",
84 | )
85 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | # Development dependencies (in addition to requirements.txt)
2 | -r requirements.txt
3 |
4 | # Testing
5 | pytest==8.3.5
6 | pytest-asyncio==0.26.0
7 | pytest-cov==6.1.1
8 | asgi-lifespan==2.1.0
9 |
10 | # Linting and formatting
11 | black==25.1.0
12 | isort==6.0.1
13 | flake8==7.2.0
14 | mypy==1.15.0
15 | sqlalchemy-stubs==0.4
16 |
17 | # Development tools
18 | watchfiles==1.0.5
19 | ipython==8.36.0
20 | debugpy==1.8.14
21 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Core dependencies
2 | fastapi==0.115.12
3 | uvicorn==0.34.2
4 | pydantic==2.11.4
5 | pydantic-settings==2.9.1
6 | sqlalchemy==2.0.41
7 | asyncpg==0.30.0
8 | alembic==1.15.2
9 | toml==0.10.2
10 | structlog==25.3.0
11 |
12 | # Redis and caching
13 | redis==6.1.0
14 | aioredis==2.0.1
15 | aiofiles==23.2.1
16 |
17 | # Task processing
18 | dramatiq==1.17.1
19 |
20 | # Task scheduling
21 | apscheduler==3.11.0
22 |
23 | # Utilities
24 | python-multipart==0.0.20
25 | httpx==0.28.1
26 | email-validator==2.2.0
27 | tenacity==9.1.2
28 | python-dotenv==1.1.0
29 |
30 | # Production dependencies
31 | gunicorn==23.0.0
32 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | FastAPI-Ignite - A production-ready FastAPI boilerplate application
3 | """
4 |
--------------------------------------------------------------------------------
/src/api/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/src/api/deps.py:
--------------------------------------------------------------------------------
1 | """
2 | Shared dependencies for API routes
3 | """
4 | from fastapi import Depends, Header, HTTPException, status
5 | from sqlalchemy.ext.asyncio import AsyncSession
6 |
7 | from src.core.config import settings
8 | from src.db.session import get_db
9 |
10 |
11 | async def get_api_key(x_api_key: str = Header(None)) -> str:
12 | """
13 | Validate API key from header if required
14 | Not used in this example but provided as a template for future auth
15 | """
16 | if settings.DEBUG:
17 | # Skip validation in debug mode
18 | return "debug"
19 |
20 | # Example API key validation logic
21 | if not x_api_key:
22 | raise HTTPException(
23 | status_code=status.HTTP_401_UNAUTHORIZED,
24 | detail="API key header is missing",
25 | )
26 |
27 | # Here you would validate the API key against your database or config
28 | return x_api_key
29 |
30 |
31 | # Re-export the database session dependency for convenience
32 | get_db_session = get_db
33 |
--------------------------------------------------------------------------------
/src/api/v1/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API v1 initialization
3 | """
4 |
--------------------------------------------------------------------------------
/src/api/v1/endpoints/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API endpoints initialization
3 | """
4 |
--------------------------------------------------------------------------------
/src/api/v1/endpoints/items.py:
--------------------------------------------------------------------------------
1 | """
2 | API endpoints for Item resources
3 | """
4 | import json
5 | import uuid
6 | from typing import List, Optional
7 |
8 | from fastapi import APIRouter, Depends, HTTPException, Query, status
9 | from sqlalchemy.ext.asyncio import AsyncSession
10 |
11 | from src.api.deps import get_db_session
12 | from src.cache import CacheBackend, cached, get_cache
13 | from src.core.config import settings
14 | from src.schemas.item import ItemCreate, ItemResponse, ItemUpdate
15 | from src.services.item_service import ItemService
16 | from src.services.cached_item_service import CachedItemService
17 |
18 | # Create router with prefix and tags
19 | router = APIRouter(
20 | prefix="/items",
21 | tags=["items"],
22 | )
23 |
24 |
25 | @router.post(
26 | "/",
27 | response_model=ItemResponse,
28 | status_code=status.HTTP_201_CREATED,
29 | summary="Create a new item",
30 | description="Create a new item with the provided information",
31 | )
32 | async def create_item(
33 | item_data: ItemCreate,
34 | db: AsyncSession = Depends(get_db_session),
35 | ) -> ItemResponse:
36 | """
37 | Create a new item
38 | """
39 | item = await ItemService.create_item(db, item_data)
40 | return item
41 |
42 |
43 | @router.get(
44 | "/{item_id}",
45 | response_model=ItemResponse,
46 | summary="Get item by ID",
47 | description="Get detailed information about a specific item by its ID",
48 | )
49 | @cached(ttl=settings.CACHE_TTL_SECONDS, key_prefix="item")
50 | async def get_item(
51 | item_id: uuid.UUID,
52 | db: AsyncSession = Depends(get_db_session),
53 | ) -> ItemResponse:
54 | """
55 | Get an item by ID with caching
56 | """
57 | item = await ItemService.get_item(db, item_id)
58 | return item
59 |
60 |
61 | @router.get(
62 | "/",
63 | response_model=List[ItemResponse],
64 | summary="List items",
65 | description="Get a list of items with optional pagination and filtering",
66 | )
67 | @cached(ttl=60, key_builder=lambda *args, **kwargs: f"items:{kwargs.get('active_only')}:{kwargs.get('skip')}:{kwargs.get('limit')}")
68 | async def list_items(
69 | skip: int = Query(0, ge=0, description="Number of items to skip"),
70 | limit: int = Query(100, ge=1, le=100, description="Max number of items to return"),
71 | active_only: bool = Query(False, description="Only return active items"),
72 | db: AsyncSession = Depends(get_db_session),
73 | ) -> List[ItemResponse]:
74 | """
75 | Get multiple items with pagination and optional filtering
76 | """
77 | items = await ItemService.get_items(
78 | db=db, skip=skip, limit=limit, active_only=active_only
79 | )
80 | return items
81 |
82 |
83 | @router.put(
84 | "/{item_id}",
85 | response_model=ItemResponse,
86 | summary="Update item",
87 | description="Update an existing item's information",
88 | )
89 | async def update_item(
90 | item_id: uuid.UUID,
91 | item_data: ItemUpdate,
92 | db: AsyncSession = Depends(get_db_session),
93 | ) -> ItemResponse:
94 | """
95 | Update an item
96 | """
97 | updated_item = await ItemService.update_item(db, item_id, item_data)
98 | return updated_item
99 |
100 |
101 | @router.delete(
102 | "/{item_id}",
103 | status_code=status.HTTP_204_NO_CONTENT,
104 | summary="Delete item",
105 | description="Delete an existing item",
106 | )
107 | async def delete_item(
108 | item_id: uuid.UUID,
109 | db: AsyncSession = Depends(get_db_session),
110 | ) -> None:
111 | """
112 | Delete an item
113 | """
114 | await ItemService.delete_item(db, item_id)
115 |
116 |
117 | @router.get(
118 | "/search/",
119 | response_model=List[ItemResponse],
120 | summary="Search items",
121 | description="Search for items by term in name or description",
122 | )
123 | async def search_items(
124 | q: str = Query(..., min_length=1, description="Search term"),
125 | skip: int = Query(0, ge=0, description="Number of items to skip"),
126 | limit: int = Query(100, ge=1, le=100, description="Max number of items to return"),
127 | db: AsyncSession = Depends(get_db_session),
128 | ) -> List[ItemResponse]:
129 | """
130 | Search for items
131 | """
132 | items = await ItemService.search_items(
133 | db=db, search_term=q, skip=skip, limit=limit
134 | )
135 | return items
136 |
137 |
138 | @router.get(
139 | "/cached/{item_id}",
140 | response_model=ItemResponse,
141 | summary="Get item by ID (using direct cache)",
142 | description="Get item details using the cache backend directly",
143 | )
144 | async def get_cached_item(
145 | item_id: uuid.UUID,
146 | db: AsyncSession = Depends(get_db_session),
147 | cache: CacheBackend = Depends(get_cache),
148 | ) -> ItemResponse:
149 | """
150 | Get an item by ID using the cache backend directly
151 |
152 | This endpoint demonstrates how to use the cache backend directly in an endpoint.
153 | The current cache backend in use is determined by CACHE_BACKEND_TYPE setting.
154 | """
155 | # Get item using the direct cache method
156 | item_data = await CachedItemService.direct_cache_example(db, cache, item_id)
157 |
158 | if not item_data:
159 | raise HTTPException(
160 | status_code=status.HTTP_404_NOT_FOUND,
161 | detail=f"Item with ID {item_id} not found",
162 | )
163 |
164 | return item_data
165 |
166 |
167 | @router.get(
168 | "/cache/clear",
169 | summary="Clear item cache",
170 | description="Clear all cached items to test cache invalidation",
171 | )
172 | async def clear_item_cache(
173 | cache: CacheBackend = Depends(get_cache),
174 | ) -> dict:
175 | """
176 | Clear all item cache entries
177 |
178 | This endpoint demonstrates how to manually invalidate cache entries
179 | by scanning for keys with a pattern and deleting them.
180 | """
181 |     # Scan for item cache keys in batches, deleting each batch as it is found
182 |     cursor = "0"
183 |     deleted_keys = 0
184 |
185 |     # Keep scanning until the backend reports a completed scan
186 |     while True:
187 |         cursor, keys = await cache.scan(cursor, "item:*", 100)
188 |
189 |         if keys:
190 |             # Delete the keys found in this batch
191 |             count = await cache.delete(*keys)
192 |             deleted_keys += count
193 |
194 |         # Redis returns an integer cursor, the other backends a string
195 |         if str(cursor) == "0":
196 |             break
197 |
198 | return {"message": f"Successfully cleared {deleted_keys} cached items", "deleted_count": deleted_keys}
199 |
200 |
201 | @router.get(
202 | "/cache/info",
203 | summary="Get cache information",
204 | description="Get information about the current cache configuration",
205 | )
206 | async def get_cache_info() -> dict:
207 | """
208 | Get information about the current cache configuration
209 |
210 | This endpoint returns details about which cache backend is currently active
211 | and other relevant configuration.
212 | """
213 | return {
214 | "cache_backend_type": settings.CACHE_BACKEND_TYPE,
215 | "cache_ttl_seconds": settings.CACHE_TTL_SECONDS,
216 | "file_cache_path": settings.CACHE_FILE_PATH if settings.CACHE_BACKEND_TYPE == "file" else None,
217 | "redis_uri": str(settings.REDIS_URI) if settings.CACHE_BACKEND_TYPE == "redis" else None,
218 | }
219 |
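# A minimal client-side sketch of exercising the caching endpoints above. It
# assumes the v1 router is mounted under "/api/v1" (the exact prefix comes from
# main.py and API_PREFIX) and that ItemCreate/ItemResponse expose "name",
# "description" and "id"; treat the URL and payload as illustrative only.
import asyncio

import httpx


async def _demo_cached_item_endpoints() -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8000/api/v1") as client:
        created = (await client.post("/items/", json={"name": "demo", "description": "cache demo"})).json()
        item_id = created["id"]

        # The first request populates the cache; the second should be a cache hit
        await client.get(f"/items/{item_id}")
        await client.get(f"/items/{item_id}")

        # Inspect which backend and TTL are currently active
        print((await client.get("/items/cache/info")).json())


if __name__ == "__main__":
    asyncio.run(_demo_cached_item_endpoints())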
--------------------------------------------------------------------------------
/src/api/v1/router.py:
--------------------------------------------------------------------------------
1 | """
2 | API v1 router configuration
3 | """
4 | from fastapi import APIRouter
5 | from src.core.config import settings
6 |
7 | from src.api.v1.endpoints import items
8 |
9 | # Create the v1 router
10 | router = APIRouter()
11 |
12 | # Include all endpoint routers
13 | router.include_router(items.router)
14 |
15 | # Add health check endpoint directly to v1 router
16 | @router.get("/health", tags=["health"])
17 | async def health_check():
18 | """
19 | Health check endpoint
20 |
21 | Returns a simple message to confirm the API is running
22 | """
23 | return {"status": "ok", "version": "1"}
24 |
25 | @router.get("/app-info", tags=["info"])
26 | async def app_info():
27 | """
28 | Application information endpoint
29 |
30 | Returns basic information about the application
31 | """
32 | return {
33 | "name": settings.PROJECT_NAME,
34 | "description": settings.PROJECT_DESCRIPTION,
35 | "version": settings.VERSION
36 | }
37 |
--------------------------------------------------------------------------------
/src/cache/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Caching module
3 |
4 | This module provides caching functionality for the application.
5 | """
6 |
7 | from src.cache.backends import CacheBackend, get_cache_backend
8 | from src.cache.dependencies import get_cache
9 | from src.cache.decorators import cached, invalidate_cache
10 |
11 | __all__ = [
12 | "CacheBackend",
13 | "get_cache_backend",
14 | "get_cache",
15 | "cached",
16 | "invalidate_cache",
17 | ]
18 |
--------------------------------------------------------------------------------
/src/cache/backends/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Cache backends package
3 | """
4 | from src.cache.backends.base import CacheBackend
5 | from src.cache.backends.redis import RedisBackend
6 | from src.cache.backends.file import FileBackend
7 | from src.cache.backends.memory import MemoryBackend
8 | from src.cache.backends.factory import get_cache_backend
9 |
10 | __all__ = [
11 | "CacheBackend",
12 | "RedisBackend",
13 | "FileBackend",
14 | "MemoryBackend",
15 | "get_cache_backend",
16 | ]
17 |
--------------------------------------------------------------------------------
/src/cache/backends/base.py:
--------------------------------------------------------------------------------
1 | """
2 | Base cache backend abstract class
3 | """
4 | import abc
5 | from typing import Any, Dict, List, Optional, Union
6 |
7 |
8 | class CacheBackend(abc.ABC):
9 | """
10 | Abstract base class for cache backends
11 |
12 | All cache implementations must extend this class and implement its methods
13 | """
14 |
15 | @abc.abstractmethod
16 | async def init(self) -> None:
17 | """Initialize the cache backend"""
18 | pass
19 |
20 | @abc.abstractmethod
21 | async def close(self) -> None:
22 | """Close the cache backend"""
23 | pass
24 |
25 | @abc.abstractmethod
26 | async def get(self, key: str) -> Optional[str]:
27 | """
28 | Get a value from the cache
29 |
30 | Args:
31 | key: Cache key
32 |
33 | Returns:
34 | The cached string value or None if not found
35 | """
36 | pass
37 |
38 | @abc.abstractmethod
39 | async def set(self, key: str, value: str, ex: Optional[int] = None) -> bool:
40 | """
41 | Set a value in the cache
42 |
43 | Args:
44 | key: Cache key
45 | value: Value to store (string)
46 | ex: Expiration time in seconds
47 |
48 | Returns:
49 | True if successful
50 | """
51 | pass
52 |
53 | @abc.abstractmethod
54 | async def delete(self, *keys: str) -> int:
55 | """
56 | Delete one or more keys from the cache
57 |
58 | Args:
59 | keys: One or more keys to delete
60 |
61 | Returns:
62 | Number of keys deleted
63 | """
64 | pass
65 |
66 | @abc.abstractmethod
67 | async def scan(self, cursor: Any, match: str, count: int) -> tuple[Any, List[str]]:
68 | """
69 | Scan the cache for keys matching a pattern
70 |
71 | Args:
72 | cursor: Cursor for pagination
73 | match: Pattern to match
74 | count: Number of items to return per batch
75 |
76 | Returns:
77 | A tuple of (next_cursor, keys)
78 | """
79 | pass
80 |
81 | @abc.abstractmethod
82 | async def flush(self) -> bool:
83 | """
84 | Clear all cached data
85 |
86 | Returns:
87 | True if successful
88 | """
89 | pass
90 |
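# A minimal sketch of a concrete implementation of this interface: a "null"
# backend that never stores anything. No such class exists in this project
# (the real implementations are in redis.py, file.py and memory.py); it is
# shown only to illustrate the contract defined above.
class NullBackend(CacheBackend):
    """Cache backend that silently discards all writes (e.g. for tests)."""

    async def init(self) -> None:
        pass

    async def close(self) -> None:
        pass

    async def get(self, key: str) -> Optional[str]:
        return None  # every lookup is a cache miss

    async def set(self, key: str, value: str, ex: Optional[int] = None) -> bool:
        return True  # pretend the write succeeded

    async def delete(self, *keys: str) -> int:
        return 0

    async def scan(self, cursor: Any, match: str, count: int) -> tuple[Any, List[str]]:
        return "0", []  # a cursor of "0" signals a completed scan

    async def flush(self) -> bool:
        return True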
--------------------------------------------------------------------------------
/src/cache/backends/factory.py:
--------------------------------------------------------------------------------
1 | """
2 | Cache backend factory
3 | """
4 | import logging
5 | from enum import Enum
6 | from typing import Optional
7 |
8 | from src.cache.backends.base import CacheBackend
9 | from src.cache.backends.redis import RedisBackend
10 | from src.cache.backends.file import FileBackend
11 | from src.cache.backends.memory import MemoryBackend
12 | from src.core.config import settings
13 |
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 |
18 | class CacheBackendType(str, Enum):
19 | """Enum of supported cache backends"""
20 | REDIS = "redis"
21 | FILE = "file"
22 | MEMORY = "memory"
23 |
24 |
25 | # Singleton cache instance
26 | _cache_instance: Optional[CacheBackend] = None
27 |
28 |
29 | def get_cache_backend() -> CacheBackend:
30 | """
31 | Get or create the singleton cache backend instance
32 |
33 | Returns:
34 | The appropriate cache backend based on configuration
35 | """
36 | global _cache_instance
37 |
38 | if _cache_instance is None:
39 | backend_type = settings.CACHE_BACKEND_TYPE
40 |
41 | if backend_type == CacheBackendType.REDIS:
42 | logger.info("Using Redis cache backend")
43 | _cache_instance = RedisBackend()
44 | elif backend_type == CacheBackendType.FILE:
45 | logger.info("Using file-based cache backend")
46 | _cache_instance = FileBackend()
47 | elif backend_type == CacheBackendType.MEMORY:
48 | logger.info("Using in-memory cache backend")
49 | _cache_instance = MemoryBackend()
50 | else:
51 | logger.warning(f"Unknown cache backend type '{backend_type}', falling back to in-memory cache")
52 | _cache_instance = MemoryBackend()
53 |
54 | return _cache_instance
55 |
56 |
57 | async def init_cache_backend() -> None:
58 | """Initialize the cache backend"""
59 | backend = get_cache_backend()
60 | await backend.init()
61 |
62 |
63 | async def close_cache_backend() -> None:
64 | """Close the cache backend"""
65 | global _cache_instance
66 |
67 | if _cache_instance is not None:
68 | await _cache_instance.close()
69 | _cache_instance = None
70 |
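# A short sketch of how the factory is typically driven from application code,
# assuming CACHE_BACKEND_TYPE is set (see .env.example) and the configured
# backend is reachable; the function name is illustrative and not part of this
# module.
async def _example_factory_usage() -> None:
    await init_cache_backend()           # create and initialize the singleton
    cache = get_cache_backend()          # returns the same instance on every call

    await cache.set("greeting", "hello", ex=60)
    assert await cache.get("greeting") == "hello"

    await close_cache_backend()          # dispose of the singleton on shutdown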
--------------------------------------------------------------------------------
/src/cache/backends/file.py:
--------------------------------------------------------------------------------
1 | """
2 | File-based cache backend implementation
3 | """
4 | import asyncio
5 | import json
6 | import logging
7 | import os
8 | import time
9 | from pathlib import Path
10 | from typing import Any, Dict, List, Optional, Union
11 |
12 | import aiofiles
13 | import aiofiles.os
14 |
15 | from src.cache.backends.base import CacheBackend
16 | from src.core.config import settings
17 |
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 |
22 | class FileBackend(CacheBackend):
23 | """
24 | File-based cache backend implementation
25 |
26 | Stores cache entries as JSON files in a directory structure
27 | """
28 |
29 | def __init__(self):
30 | self._cache_dir = Path(settings.CACHE_FILE_PATH)
31 | self._lock = asyncio.Lock()
32 |
33 | async def init(self) -> None:
34 | """Initialize the file cache directory"""
35 | logger.info(f"Initializing file cache at {self._cache_dir}")
36 | os.makedirs(self._cache_dir, exist_ok=True)
37 |
38 | async def close(self) -> None:
39 | """Close the file cache backend"""
40 | # Nothing to do
41 | pass
42 |
43 | def _get_path_for_key(self, key: str) -> Path:
44 | """Convert a cache key to a file path"""
45 | # Create a file-system safe representation of the key
46 | safe_key = key.replace(':', '_').replace('/', '_')
47 | return self._cache_dir / f"{safe_key}.json"
48 |
49 | async def get(self, key: str) -> Optional[str]:
50 | """Get a value from the file cache"""
51 | try:
52 | path = self._get_path_for_key(key)
53 |
54 | if not path.exists():
55 | return None
56 |
57 | # Check if file is expired
58 | async with self._lock:
59 | stats = await aiofiles.os.stat(path)
60 | metadata_path = Path(f"{path}.meta")
61 |
62 | # Check expiration if metadata exists
63 | if metadata_path.exists():
64 | async with aiofiles.open(metadata_path, 'r') as f:
65 | metadata = json.loads(await f.read())
66 | expiry = metadata.get('expiry')
67 |
68 | if expiry and time.time() > expiry:
69 | # Expired, delete files
70 | await aiofiles.os.remove(path)
71 | await aiofiles.os.remove(metadata_path)
72 | return None
73 |
74 | # Read the cache file
75 | async with aiofiles.open(path, 'r') as f:
76 | return await f.read()
77 | except Exception as e:
78 | logger.error(f"File cache GET error: {str(e)}")
79 | return None
80 |
81 | async def set(self, key: str, value: str, ex: Optional[int] = None) -> bool:
82 | """Set a value in the file cache"""
83 | try:
84 | path = self._get_path_for_key(key)
85 |
86 | async with self._lock:
87 | # Write the value to the cache file
88 | async with aiofiles.open(path, 'w') as f:
89 | await f.write(value)
90 |
91 | # Write expiration metadata if provided
92 | if ex is not None:
93 | metadata = {
94 | 'created': time.time(),
95 | 'expiry': time.time() + ex
96 | }
97 |
98 | metadata_path = Path(f"{path}.meta")
99 | async with aiofiles.open(metadata_path, 'w') as f:
100 | await f.write(json.dumps(metadata))
101 |
102 | return True
103 | except Exception as e:
104 | logger.error(f"File cache SET error: {str(e)}")
105 | return False
106 |
107 | async def delete(self, *keys: str) -> int:
108 | """Delete one or more keys from the file cache"""
109 | if not keys:
110 | return 0
111 |
112 | deleted = 0
113 |
114 | try:
115 | async with self._lock:
116 | for key in keys:
117 | path = self._get_path_for_key(key)
118 | metadata_path = Path(f"{path}.meta")
119 |
120 | # Delete the cache file if it exists
121 | if path.exists():
122 | await aiofiles.os.remove(path)
123 | deleted += 1
124 |
125 | # Delete the metadata file if it exists
126 | if metadata_path.exists():
127 | await aiofiles.os.remove(metadata_path)
128 |
129 | return deleted
130 | except Exception as e:
131 | logger.error(f"File cache DELETE error: {str(e)}")
132 | return deleted
133 |
134 | async def scan(self, cursor: Any, match: str, count: int) -> tuple[Any, List[str]]:
135 | """Scan the file cache for keys matching a pattern"""
136 | try:
137 | import fnmatch
138 |
139 |             # Redis-style '*' and fnmatch '*' both match zero or more characters,
140 |             glob_pattern = match
141 |
142 | # Use cursor as an offset
143 | cursor = int(cursor) if cursor and cursor != b"0" else 0
144 |
145 | # List files in cache directory
146 | files = list(self._cache_dir.glob("*.json"))
147 |             files = [f for f in files if not f.name.endswith('.json.meta')]
148 |
149 | # Extract keys from filenames
150 | all_keys = [f.stem for f in files]
151 |
152 | # Filter keys by pattern
153 | filtered_keys = []
154 | for key in all_keys:
155 |                 # Convert back to the cache key format (lossy if the key contained '_')
156 |                 original_key = key.replace('_', ':')
157 | if fnmatch.fnmatch(original_key, glob_pattern):
158 | filtered_keys.append(original_key)
159 |
160 | # Apply pagination
161 | end_idx = min(cursor + count, len(filtered_keys))
162 | result_keys = filtered_keys[cursor:end_idx]
163 |
164 | # Calculate next cursor
165 | next_cursor = str(end_idx) if end_idx < len(filtered_keys) else "0"
166 |
167 | return next_cursor, result_keys
168 | except Exception as e:
169 | logger.error(f"File cache SCAN error: {str(e)}")
170 | return "0", []
171 |
172 | async def flush(self) -> bool:
173 | """Clear all cached data in the file cache"""
174 | try:
175 | import shutil
176 |
177 | async with self._lock:
178 | # Remove all files in the cache directory
179 | if self._cache_dir.exists():
180 | for item in self._cache_dir.glob("*"):
181 | if item.is_file():
182 | os.remove(item)
183 | elif item.is_dir():
184 | shutil.rmtree(item)
185 |
186 | # Recreate the cache directory
187 | os.makedirs(self._cache_dir, exist_ok=True)
188 |
189 | return True
190 | except Exception as e:
191 | logger.error(f"File cache FLUSH error: {str(e)}")
192 | return False
193 |
--------------------------------------------------------------------------------
/src/cache/backends/memory.py:
--------------------------------------------------------------------------------
1 | """
2 | In-memory cache backend implementation
3 | """
4 | import asyncio
5 | import logging
6 | import time
7 | from typing import Any, Dict, List, Optional, Union
8 |
9 | from src.cache.backends.base import CacheBackend
10 |
11 |
12 | logger = logging.getLogger(__name__)
13 |
14 |
15 | class MemoryBackend(CacheBackend):
16 | """
17 | In-memory cache backend implementation
18 |
19 | Stores cache entries in a dictionary in memory
20 | """
21 |
22 | def __init__(self):
23 | self._cache: Dict[str, Dict[str, Any]] = {}
24 | self._lock = asyncio.Lock()
25 |
26 | async def init(self) -> None:
27 | """Initialize the memory cache"""
28 | logger.info("Initializing in-memory cache")
29 | self._cache = {}
30 |
31 | async def close(self) -> None:
32 | """Close the memory cache"""
33 | logger.info("Closing in-memory cache")
34 | self._cache = {}
35 |
36 | async def _check_expiry(self, key: str) -> bool:
37 | """
38 | Check if a key is expired and remove it if so
39 |
40 | Returns True if the key exists and is not expired
41 | """
42 | if key not in self._cache:
43 | return False
44 |
45 | entry = self._cache[key]
46 | expiry = entry.get('expiry')
47 |
48 | if expiry is not None and time.time() > expiry:
49 | # Key is expired, remove it
50 | del self._cache[key]
51 | return False
52 |
53 | return True
54 |
55 | async def get(self, key: str) -> Optional[str]:
56 | """Get a value from the memory cache"""
57 | try:
58 | async with self._lock:
59 | # Check if key exists and is not expired
60 | if await self._check_expiry(key):
61 | return self._cache[key]['value']
62 | return None
63 | except Exception as e:
64 | logger.error(f"Memory cache GET error: {str(e)}")
65 | return None
66 |
67 | async def set(self, key: str, value: str, ex: Optional[int] = None) -> bool:
68 | """Set a value in the memory cache"""
69 | try:
70 | entry = {
71 | 'value': value,
72 | 'created': time.time(),
73 | }
74 |
75 | if ex is not None:
76 | entry['expiry'] = time.time() + ex
77 |
78 | async with self._lock:
79 | self._cache[key] = entry
80 |
81 | return True
82 | except Exception as e:
83 | logger.error(f"Memory cache SET error: {str(e)}")
84 | return False
85 |
86 | async def delete(self, *keys: str) -> int:
87 | """Delete one or more keys from the memory cache"""
88 | if not keys:
89 | return 0
90 |
91 | deleted = 0
92 |
93 | try:
94 | async with self._lock:
95 | for key in keys:
96 | if key in self._cache:
97 | del self._cache[key]
98 | deleted += 1
99 |
100 | return deleted
101 | except Exception as e:
102 | logger.error(f"Memory cache DELETE error: {str(e)}")
103 | return deleted
104 |
105 | async def scan(self, cursor: Any, match: str, count: int) -> tuple[Any, List[str]]:
106 | """Scan the memory cache for keys matching a pattern"""
107 | try:
108 | import fnmatch
109 |
110 | # Use cursor as an offset
111 | cursor = int(cursor) if cursor and cursor != b"0" else 0
112 |
113 | async with self._lock:
114 | # Get all keys, checking expiry at the same time
115 | valid_keys = []
116 | for key in list(self._cache.keys()):
117 | if await self._check_expiry(key):
118 | valid_keys.append(key)
119 |
120 | # Filter by pattern
121 | filtered_keys = [k for k in valid_keys if fnmatch.fnmatch(k, match)]
122 |
123 | # Apply pagination
124 | end_idx = min(cursor + count, len(filtered_keys))
125 | result_keys = filtered_keys[cursor:end_idx]
126 |
127 | # Calculate next cursor
128 | next_cursor = str(end_idx) if end_idx < len(filtered_keys) else "0"
129 |
130 | return next_cursor, result_keys
131 | except Exception as e:
132 | logger.error(f"Memory cache SCAN error: {str(e)}")
133 | return "0", []
134 |
135 | async def flush(self) -> bool:
136 | """Clear all cached data in the memory cache"""
137 | try:
138 | async with self._lock:
139 | self._cache.clear()
140 | return True
141 | except Exception as e:
142 | logger.error(f"Memory cache FLUSH error: {str(e)}")
143 | return False
144 |
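# A small self-contained sketch exercising this backend directly; it needs no
# Redis server or filesystem, and the keys/TTL below are arbitrary illustration
# values rather than anything used by the application.
async def _example_memory_backend() -> None:
    backend = MemoryBackend()
    await backend.init()

    await backend.set("item:1", '{"name": "a"}', ex=30)
    await backend.set("item:2", '{"name": "b"}', ex=30)

    # A single scan pass returns both keys and a "0" cursor (scan complete)
    cursor, keys = await backend.scan("0", "item:*", 100)
    assert cursor == "0" and sorted(keys) == ["item:1", "item:2"]

    deleted = await backend.delete(*keys)
    assert deleted == 2 and await backend.get("item:1") is None

    await backend.close()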
--------------------------------------------------------------------------------
/src/cache/backends/redis.py:
--------------------------------------------------------------------------------
1 | """
2 | Redis cache backend implementation
3 | """
4 | import logging
5 | from typing import Any, List, Optional, Union
6 |
7 | import redis.asyncio as redis
8 | from redis.asyncio import Redis
9 |
10 | from src.cache.backends.base import CacheBackend
11 | from src.core.config import settings
12 |
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
17 | class RedisBackend(CacheBackend):
18 | """
19 | Redis cache backend implementation
20 | """
21 |
22 | def __init__(self):
23 | self._pool: Optional[redis.ConnectionPool] = None
24 |
25 | async def init(self) -> None:
26 | """Initialize Redis connection pool"""
27 | logger.info("Initializing Redis connection pool")
28 | self._pool = redis.ConnectionPool.from_url(
29 | url=str(settings.REDIS_URI),
30 | max_connections=10,
31 | decode_responses=True,
32 | )
33 |
34 | async def close(self) -> None:
35 | """Close Redis connection pool"""
36 | if self._pool:
37 | logger.info("Closing Redis connection pool")
38 | await self._pool.disconnect()
39 | self._pool = None
40 |
41 | async def _get_conn(self) -> Redis:
42 | """Get a Redis client from the connection pool"""
43 | if not self._pool:
44 | await self.init()
45 | return redis.Redis(connection_pool=self._pool)
46 |
47 | async def get(self, key: str) -> Optional[str]:
48 | """Get a value from Redis"""
49 | try:
50 | client = await self._get_conn()
51 | return await client.get(key)
52 | except Exception as e:
53 | logger.error(f"Redis GET error: {str(e)}")
54 | return None
55 |
56 | async def set(self, key: str, value: str, ex: Optional[int] = None) -> bool:
57 | """Set a value in Redis"""
58 | try:
59 | client = await self._get_conn()
60 | return await client.set(key, value, ex=ex)
61 | except Exception as e:
62 | logger.error(f"Redis SET error: {str(e)}")
63 | return False
64 |
65 | async def delete(self, *keys: str) -> int:
66 | """Delete one or more keys from Redis"""
67 | if not keys:
68 | return 0
69 |
70 | try:
71 | client = await self._get_conn()
72 | return await client.delete(*keys)
73 | except Exception as e:
74 | logger.error(f"Redis DELETE error: {str(e)}")
75 | return 0
76 |
77 | async def scan(self, cursor: Any, match: str, count: int) -> tuple[Any, List[str]]:
78 | """Scan Redis for keys matching a pattern"""
79 | try:
80 | client = await self._get_conn()
81 | return await client.scan(cursor=cursor, match=match, count=count)
82 | except Exception as e:
83 | logger.error(f"Redis SCAN error: {str(e)}")
84 | return cursor, []
85 |
86 | async def flush(self) -> bool:
87 | """Clear all cached data in Redis"""
88 | try:
89 | client = await self._get_conn()
90 | await client.flushdb()
91 | return True
92 | except Exception as e:
93 | logger.error(f"Redis FLUSHDB error: {str(e)}")
94 | return False
95 |
--------------------------------------------------------------------------------
/src/cache/decorators.py:
--------------------------------------------------------------------------------
1 | """
2 | Caching decorators for function and API response caching
3 | """
4 | import functools
5 | import hashlib
6 | import inspect
7 | import json
8 | import logging
9 | from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
10 |
11 | from fastapi import Depends, Request
12 |
13 | from src.cache.backends.base import CacheBackend
14 | from src.cache.backends.factory import get_cache_backend
15 | from src.cache.dependencies import get_cache
16 | from src.core.config import settings
17 | from src.core.exceptions import CacheError
18 |
19 |
20 | logger = logging.getLogger(__name__)
21 |
22 |
23 | def _get_cache_key(
24 | prefix: str,
25 | func_name: str,
26 | args_dict: Dict[str, Any]
27 | ) -> str:
28 | """
29 | Generate a cache key from function name and arguments
30 | """
31 | # Create a deterministic string representation of args and kwargs
32 | args_str = json.dumps(args_dict, sort_keys=True)
33 |
34 | # Create a hash of the arguments to keep key length reasonable
35 | args_hash = hashlib.md5(args_str.encode()).hexdigest()
36 |
37 | # Combine function name and args hash into a key
38 | return f"{prefix}:{func_name}:{args_hash}"
39 |
40 |
41 | def cached(
42 |     ttl: Optional[int] = None,
43 |     key_prefix: str = "cache",
44 |     key_builder: Optional[Callable] = None,
45 |     exclude_keys: Tuple[str, ...] = ("self", "cls", "request", "db"),
46 | ):
47 | """
48 |     Decorator for caching function return values in the configured cache backend
49 |
50 | Args:
51 | ttl: Time to live in seconds. Defaults to settings.CACHE_TTL_SECONDS.
52 | key_prefix: Prefix for the cache key to namespace keys
53 | key_builder: Custom function to build the cache key
54 | exclude_keys: Parameter names to exclude from key generation
55 | """
56 | def decorator(func):
57 | # Get function signature for parameter names
58 | sig = inspect.signature(func)
59 | func_name = func.__qualname__
60 |
61 | @functools.wraps(func)
62 | async def wrapper(*args, **kwargs):
63 | # Apply cache dependency if not provided
64 | cache_backend = kwargs.get("cache")
65 | if cache_backend is None:
66 | # Use the singleton cache backend
67 | cache_backend = get_cache_backend()
68 |
69 | # Build a dictionary of all arguments with their parameter names
70 | bound_args = sig.bind(*args, **kwargs)
71 | bound_args.apply_defaults()
72 | arg_dict = {k: v for k, v in bound_args.arguments.items()
73 | if k not in exclude_keys and not isinstance(v, (CacheBackend, Request))}
74 |
75 | # Generate the cache key
76 | if key_builder:
77 | cache_key = key_builder(*args, **kwargs)
78 | else:
79 | cache_key = _get_cache_key(key_prefix, func_name, arg_dict)
80 |
81 | # Try to get value from cache
82 | try:
83 | cached_value = await cache_backend.get(cache_key)
84 | if cached_value:
85 | logger.debug(f"Cache hit for key: {cache_key}")
86 | # Deserialize the cached value from JSON
87 | return json.loads(cached_value)
88 |
89 | logger.debug(f"Cache miss for key: {cache_key}")
90 | # Call the original function
91 | result = await func(*args, **kwargs)
92 |
93 | # Calculate TTL
94 | actual_ttl = ttl if ttl is not None else settings.CACHE_TTL_SECONDS
95 |
96 | # Serialize result to JSON and store in cache
97 | serialized = json.dumps(result)
98 | await cache_backend.set(
99 | cache_key,
100 | serialized,
101 | ex=actual_ttl
102 | )
103 |
104 | return result
105 | except Exception as e:
106 | # Log the error but don't fail the function
107 | logger.error(f"Cache error: {str(e)}")
108 | # Call the original function without caching
109 | return await func(*args, **kwargs)
110 |
111 | return wrapper
112 | return decorator
113 |
114 |
115 | def invalidate_cache(
116 | key_pattern: str
117 | ):
118 | """
119 | Decorator for invalidating cache keys matching a pattern
120 |
121 | Args:
122 | key_pattern: Key pattern to match for invalidation (e.g., "user:*")
123 | """
124 | def decorator(func):
125 | @functools.wraps(func)
126 | async def wrapper(*args, **kwargs):
127 | # Get cache backend
128 | cache_backend = get_cache_backend()
129 |
130 | # Call the original function first
131 | result = await func(*args, **kwargs)
132 |
133 | # Invalidate matching cache keys
134 | try:
135 | # Scan for keys matching the pattern
136 | cursor = "0"
137 | deleted_count = 0
138 |
139 | while cursor:
140 | cursor, keys = await cache_backend.scan(
141 | cursor=cursor,
142 | match=key_pattern,
143 | count=100
144 | )
145 |
146 | if keys:
147 | deleted_count += await cache_backend.delete(*keys)
148 | logger.debug(f"Invalidated {len(keys)} cache keys")
149 |
150 | # Stop if we've completed the scan
151 | if cursor == "0":
152 | break
153 |
154 | logger.info(f"Invalidated {deleted_count} cache keys matching '{key_pattern}'")
155 | except Exception as e:
156 | logger.error(f"Cache invalidation error: {str(e)}")
157 |
158 | return result
159 |
160 | return wrapper
161 |
162 | return decorator
163 |
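# A minimal usage sketch for the two decorators above. The service functions and
# key pattern are illustrative only; a real use of @cached in this project is
# src/api/v1/endpoints/items.py.
@cached(ttl=120, key_prefix="report")
async def _build_report(report_id: str) -> dict:
    # Expensive work whose JSON-serializable result is worth caching
    return {"id": report_id, "rows": []}


@invalidate_cache(key_pattern="report:*")
async def _rebuild_all_reports() -> None:
    # After this coroutine returns, every cached "report:*" entry is deleted
    ...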
--------------------------------------------------------------------------------
/src/cache/dependencies.py:
--------------------------------------------------------------------------------
1 | """
2 | Cache dependency providers
3 | """
4 | import logging
5 | from typing import AsyncGenerator
6 |
7 | from src.cache.backends.base import CacheBackend
8 | from src.cache.backends.factory import get_cache_backend
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
13 | async def get_cache() -> AsyncGenerator[CacheBackend, None]:
14 | """
15 | Get cache client instance as a FastAPI dependency
16 |
17 | This function is a dependency provider that yields a cache backend
18 | instance for use in FastAPI endpoints.
19 |
20 | Usage:
21 | @router.get("/items/{item_id}")
22 | async def get_item(
23 | item_id: int,
24 | cache: CacheBackend = Depends(get_cache)
25 | ):
26 | # Use the cache instance
27 | value = await cache.get(f"item:{item_id}")
28 | ...
29 |
30 | Returns:
31 | AsyncGenerator[CacheBackend, None]: A cache backend instance
32 | """
33 | cache_backend = get_cache_backend()
34 |
35 | try:
36 | yield cache_backend
37 | except Exception as e:
38 | logger.error(f"Cache error: {str(e)}")
39 | raise
40 |
--------------------------------------------------------------------------------
/src/cache/redis.py:
--------------------------------------------------------------------------------
1 | """
2 | Redis connection management
3 | """
4 | import logging
5 | from typing import AsyncGenerator, Optional
6 |
7 | import redis.asyncio as redis
8 | from redis.asyncio import Redis
9 |
10 | from src.core.config import settings
11 |
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 | # Global Redis connection pool
16 | redis_pool: Optional[redis.ConnectionPool] = None
17 |
18 |
19 | async def init_redis_pool() -> None:
20 | """
21 | Initialize Redis connection pool
22 | """
23 | global redis_pool
24 |
25 | logger.info("Initializing Redis connection pool")
26 | redis_pool = redis.ConnectionPool.from_url(
27 | url=str(settings.REDIS_URI),
28 | max_connections=10,
29 | decode_responses=True, # Automatically decode responses to Python strings
30 | )
31 |
32 |
33 | async def close_redis_pool() -> None:
34 | """
35 | Close Redis connection pool
36 | """
37 | global redis_pool
38 |
39 | if redis_pool:
40 | logger.info("Closing Redis connection pool")
41 | await redis_pool.disconnect()
42 | redis_pool = None
43 |
44 |
45 | async def get_redis() -> AsyncGenerator[Redis, None]:
46 | """
47 | Get Redis client from pool
48 |
49 | Can be used as a FastAPI dependency
50 | """
51 | global redis_pool
52 |
53 | if redis_pool is None:
54 | await init_redis_pool()
55 |
56 | async with redis.Redis(connection_pool=redis_pool) as conn:
57 | yield conn
58 |
--------------------------------------------------------------------------------
/src/core/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Core application components
3 | """
4 |
--------------------------------------------------------------------------------
/src/core/config.py:
--------------------------------------------------------------------------------
1 | """
2 | Application configuration management
3 | """
4 | import os
5 | import logging
6 | from functools import lru_cache
7 | from pathlib import Path
8 | from typing import Dict, List, Optional, Union, Any
9 |
10 | from pydantic import Field, PostgresDsn, RedisDsn, model_validator, BaseModel
11 | from pydantic_settings import BaseSettings, SettingsConfigDict
12 |
13 | # Get the project root directory
14 | ROOT_DIR = Path(__file__).parent.parent.parent
15 | ENV_FILE = ROOT_DIR / ".env"
16 |
17 |
18 | class GeneralSettings(BaseModel):
19 | debug: bool = False
20 | log_level: str = "INFO"
21 | project_name: str = "FastAPI-Ignite"
22 | project_description: str = "A FastAPI application"
23 | version: str = "0.1.0"
24 |
25 | # Override from environment variables
26 | @model_validator(mode='after')
27 | def override_from_env(self) -> "GeneralSettings":
28 | if "DEBUG" in os.environ:
29 | debug_val = os.environ["DEBUG"].lower()
30 | self.debug = debug_val == "true"
31 | if "LOG_LEVEL" in os.environ:
32 | self.log_level = os.environ["LOG_LEVEL"]
33 | if "PROJECT_NAME" in os.environ:
34 | self.project_name = os.environ["PROJECT_NAME"]
35 | if "PROJECT_DESCRIPTION" in os.environ:
36 | self.project_description = os.environ["PROJECT_DESCRIPTION"]
37 | if "VERSION" in os.environ:
38 | self.version = os.environ["VERSION"]
39 | return self
40 |
41 |
42 | class ApiSettings(BaseModel):
43 | prefix: str = "/api"
44 | host: str = "0.0.0.0"
45 | port: int = 8000
46 |
47 | # Override from environment variables
48 | @model_validator(mode='after')
49 | def override_from_env(self) -> "ApiSettings":
50 | if "API_PREFIX" in os.environ:
51 | self.prefix = os.environ["API_PREFIX"]
52 | if "HOST" in os.environ:
53 | self.host = os.environ["HOST"]
54 | if "PORT" in os.environ:
55 | self.port = int(os.environ["PORT"])
56 | return self
57 |
58 |
59 | class CorsSettings(BaseModel):
60 | origins: List[str] = ["*"]
61 |
62 | # Override from environment variables
63 | @model_validator(mode='after')
64 | def override_from_env(self) -> "CorsSettings":
65 | if "CORS_ORIGINS" in os.environ:
66 | import json
67 | try:
68 | self.origins = json.loads(os.environ["CORS_ORIGINS"])
69 | except json.JSONDecodeError:
70 | # Fallback to a single origin if not valid JSON
71 | self.origins = [os.environ["CORS_ORIGINS"]]
72 | return self
73 |
74 |
75 | class DatabaseSettings(BaseModel):
76 | host: str = "localhost"
77 | port: int = 5432
78 | user: str = "postgres"
79 | password: str = "postgres"
80 | database: str = "app_db"
81 |
82 | # Override from environment variables
83 | @model_validator(mode='after')
84 | def override_from_env(self) -> "DatabaseSettings":
85 | # Check for environment variable overrides
86 | env_prefix = "POSTGRES_"
87 | if os.environ.get(f"{env_prefix}USER"):
88 | self.user = os.environ[f"{env_prefix}USER"]
89 | if os.environ.get(f"{env_prefix}PASSWORD"):
90 | self.password = os.environ[f"{env_prefix}PASSWORD"]
91 | if os.environ.get(f"{env_prefix}DB"):
92 | self.database = os.environ[f"{env_prefix}DB"]
93 | if os.environ.get(f"{env_prefix}HOST"):
94 | self.host = os.environ[f"{env_prefix}HOST"]
95 | if os.environ.get(f"{env_prefix}PORT"):
96 | self.port = int(os.environ[f"{env_prefix}PORT"])
97 |
98 | return self
99 |
100 |
101 | class RedisSettings(BaseModel):
102 | host: str = "localhost"
103 | port: int = 6300
104 | password: str = ""
105 | db: int = 0
106 |
107 | # Override from environment variables
108 | @model_validator(mode='after')
109 | def override_from_env(self) -> "RedisSettings":
110 | # Check for environment variable override
111 | if os.environ.get("REDIS_HOST"):
112 | self.host = os.environ["REDIS_HOST"]
113 | if os.environ.get("REDIS_PASSWORD") is not None: # Allow empty string
114 | self.password = os.environ["REDIS_PASSWORD"]
115 | if os.environ.get("REDIS_PORT"):
116 | self.port = int(os.environ["REDIS_PORT"])
117 | if os.environ.get("REDIS_DB"):
118 | self.db = int(os.environ["REDIS_DB"])
119 | return self
120 |
121 |
122 | class DramatiqSettings(BaseModel):
123 | broker: str = "redis"
124 | processes: int = 2
125 | threads: int = 8
126 |
127 | # Override from environment variables
128 | @model_validator(mode='after')
129 | def override_from_env(self) -> "DramatiqSettings":
130 | if os.environ.get("DRAMATIQ_BROKER"):
131 | self.broker = os.environ["DRAMATIQ_BROKER"]
132 | if os.environ.get("DRAMATIQ_PROCESSES"):
133 | self.processes = int(os.environ["DRAMATIQ_PROCESSES"])
134 | if os.environ.get("DRAMATIQ_THREADS"):
135 | self.threads = int(os.environ["DRAMATIQ_THREADS"])
136 | return self
137 |
138 |
139 | class CacheSettings(BaseModel):
140 | ttl_seconds: int = 300
141 | backend_type: str = "redis" # Options: "redis", "file", "memory"
142 | file_path: str = "cache" # Path for file-based cache, relative to project root
143 |
144 | # Override from environment variables
145 | @model_validator(mode='after')
146 | def override_from_env(self) -> "CacheSettings":
147 | if os.environ.get("CACHE_BACKEND_TYPE"):
148 | self.backend_type = os.environ["CACHE_BACKEND_TYPE"]
149 | if os.environ.get("CACHE_TTL_SECONDS"):
150 | self.ttl_seconds = int(os.environ["CACHE_TTL_SECONDS"])
151 | if os.environ.get("CACHE_FILE_PATH"):
152 | self.file_path = os.environ["CACHE_FILE_PATH"]
153 | return self
154 |
155 |
156 | class SchedulerSettings(BaseModel):
157 | enabled: bool = True # Whether to enable the APScheduler
158 |
159 | # Override from environment variables
160 | @model_validator(mode='after')
161 | def override_from_env(self) -> "SchedulerSettings":
162 | if os.environ.get("SCHEDULER_ENABLED"):
163 | self.enabled = os.environ.get("SCHEDULER_ENABLED").lower() == "true"
164 | return self
165 |
166 |
167 | class Settings(BaseSettings):
168 | """
169 | Application settings loaded from environment variables
170 | """
171 | # Config model for Pydantic
172 | model_config = SettingsConfigDict(
173 | env_file=str(ENV_FILE),
174 | env_file_encoding="utf-8",
175 | case_sensitive=True,
176 | extra="ignore", # ignore extra environment variables
177 | )
178 |
179 | # Environment setting
180 | ENV: str = Field("development", description="Environment name")
181 |
182 | # Nested settings models
183 | general: GeneralSettings = GeneralSettings()
184 | api: ApiSettings = ApiSettings()
185 | cors: CorsSettings = CorsSettings()
186 | database: DatabaseSettings = DatabaseSettings()
187 | redis: RedisSettings = RedisSettings()
188 | dramatiq: DramatiqSettings = DramatiqSettings()
189 | cache: CacheSettings = CacheSettings()
190 | scheduler: SchedulerSettings = SchedulerSettings()
191 |
192 | # Computed properties
193 | DATABASE_URI: Optional[PostgresDsn] = None
194 | REDIS_URI: Optional[RedisDsn] = None
195 |
196 | def __init__(self, **data):
197 | """Initialize settings from environment variables"""
198 | # Initialize with the data from environment variables
199 | super().__init__(**data)
200 |
201 | # Construct database and Redis URIs
202 | self.DATABASE_URI = PostgresDsn.build(
203 | scheme="postgresql+asyncpg",
204 | username=self.database.user,
205 | password=self.database.password,
206 | host=self.database.host,
207 | port=self.database.port,
208 | path=f"{self.database.database}",
209 | )
210 |
211 | self.REDIS_URI = RedisDsn.build(
212 | scheme="redis",
213 | host=self.redis.host,
214 | port=self.redis.port,
215 | password=self.redis.password or None,
216 | path=f"{self.redis.db}",
217 | )
218 |
219 | # Log config sources in debug mode
220 | if self.general.debug:
221 | logging.debug("Configuration loaded from:")
222 | logging.debug(f" - Environment variables (.env file at: {ENV_FILE})")
223 | logging.debug(f" - System environment variables")
224 |
225 | # Legacy uppercase aliases for nested settings
226 | @property
227 | def PROJECT_NAME(self) -> str:
228 | return self.general.project_name
229 |
230 | @property
231 | def PROJECT_DESCRIPTION(self) -> str:
232 | return self.general.project_description
233 |
234 | @property
235 | def VERSION(self) -> str:
236 | return self.general.version
237 |
238 | @property
239 | def LOG_LEVEL(self) -> str:
240 | return self.general.log_level
241 |
242 | @property
243 | def DEBUG(self) -> bool:
244 | return self.general.debug
245 |
246 | @property
247 | def API_PREFIX(self) -> str:
248 | return self.api.prefix
249 |
250 | @property
251 | def HOST(self) -> str:
252 | return self.api.host
253 |
254 | @property
255 | def PORT(self) -> int:
256 | return self.api.port
257 |
258 | @property
259 | def CORS_ORIGINS(self) -> List[str]:
260 | return self.cors.origins
261 |
262 | @property
263 | def POSTGRES_USER(self) -> str:
264 | return self.database.user
265 |
266 | @property
267 | def POSTGRES_PASSWORD(self) -> str:
268 | return self.database.password
269 |
270 | @property
271 | def POSTGRES_HOST(self) -> str:
272 | return self.database.host
273 |
274 | @property
275 | def POSTGRES_PORT(self) -> int:
276 | return self.database.port
277 |
278 | @property
279 | def POSTGRES_DB(self) -> str:
280 | return self.database.database
281 |
282 | @property
283 | def REDIS_HOST(self) -> str:
284 | return self.redis.host
285 |
286 | @property
287 | def REDIS_PORT(self) -> int:
288 | return self.redis.port
289 |
290 | @property
291 | def REDIS_DB(self) -> int:
292 | return self.redis.db
293 |
294 | @property
295 | def REDIS_PASSWORD(self) -> str:
296 | """Legacy alias for Redis password"""
297 | return self.redis.password
298 |
299 | @property
300 | def DRAMATIQ_BROKER(self) -> str:
301 | return self.dramatiq.broker
302 |
303 | @property
304 | def DRAMATIQ_PROCESSES(self) -> int:
305 | return self.dramatiq.processes
306 |
307 | @property
308 | def DRAMATIQ_THREADS(self) -> int:
309 | return self.dramatiq.threads
310 |
311 | @property
312 | def SCHEDULER_ENABLED(self) -> bool:
313 | return self.scheduler.enabled
314 |
315 | @property
316 | def CACHE_TTL_SECONDS(self) -> int:
317 | """Legacy alias for cache TTL seconds"""
318 | return self.cache.ttl_seconds
319 |
320 | @property
321 | def CACHE_BACKEND_TYPE(self) -> str:
322 | """The type of cache backend to use"""
323 | return self.cache.backend_type
324 |
325 | @property
326 | def CACHE_FILE_PATH(self) -> str:
327 | """Path for file-based cache storage"""
328 | from pathlib import Path
329 | base_path = Path(__file__).parent.parent.parent
330 | return str(base_path / self.cache.file_path)
331 |
332 |
333 | @lru_cache()
334 | def get_settings() -> Settings:
335 | """
336 | Create and cache a Settings instance
337 | """
338 | return Settings()
339 |
340 |
341 | # Create a non-cached instance to allow runtime modifications
342 | settings = Settings()
343 |
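# A short sketch of how the settings object is consumed elsewhere in the code
# base: either through the nested models or through the legacy uppercase
# aliases, which resolve to the same values. The example URI string is
# illustrative only.
def _example_settings_access() -> dict:
    assert settings.cache.ttl_seconds == settings.CACHE_TTL_SECONDS
    assert settings.api.prefix == settings.API_PREFIX
    return {
        "redis_uri": str(settings.REDIS_URI),          # e.g. "redis://localhost:6300/0"
        "cache_backend": settings.CACHE_BACKEND_TYPE,  # "redis", "file" or "memory"
    }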
--------------------------------------------------------------------------------
/src/core/events.py:
--------------------------------------------------------------------------------
1 | """
2 | Application startup and shutdown events
3 | """
4 | import logging
5 | from typing import Callable
6 |
7 | from src.cache.backends.factory import init_cache_backend, close_cache_backend
8 | from src.core.config import settings
9 | from src.db.session import create_db_engine, dispose_db_engine
10 | from src.schedulers.scheduler import init_scheduler, shutdown_scheduler
11 |
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | def create_start_app_handler() -> Callable:
17 | """
18 | Factory for creating the startup event handler
19 | """
20 | async def start_app() -> None:
21 | """
22 | Initialize services on application startup
23 | """
24 | logger.info("Executing application startup handler")
25 | # Initialize database
26 | await create_db_engine()
27 | logger.info("Database connection established")
28 |
29 | # Initialize the cache backend
30 | await init_cache_backend()
31 | logger.info(f"Cache backend initialized (type: {settings.CACHE_BACKEND_TYPE})")
32 | # Initialize scheduler if enabled in config and not in test mode
33 | if settings.ENV != "test" and settings.SCHEDULER_ENABLED:
34 | await init_scheduler()
35 | logger.info("Task scheduler initialized")
36 | elif settings.ENV != "test" and not settings.SCHEDULER_ENABLED:
37 | logger.info("Task scheduler disabled in configuration")
38 |
39 | return start_app
40 |
41 |
42 | def create_stop_app_handler() -> Callable:
43 | """
44 | Factory for creating the shutdown event handler
45 | """
46 | async def stop_app() -> None:
47 | """
48 | Cleanup services on application shutdown
49 | """
50 | logger.info("Executing application shutdown handler") # Shutdown scheduler if it was started
51 | if settings.ENV != "test" and settings.SCHEDULER_ENABLED:
52 | await shutdown_scheduler()
53 | logger.info("Task scheduler shutdown")
54 |
55 | # Close the cache backend
56 | await close_cache_backend()
57 | logger.info("Cache backend closed")
58 |
59 | # Dispose database connections
60 | await dispose_db_engine()
61 | logger.info("Database connections disposed")
62 |
63 | return stop_app
64 |
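# A minimal sketch of wiring these factories into a FastAPI application. The
# real wiring lives in main.py (not shown here), so treat this purely as an
# illustration of the intended call pattern.
def _example_app_wiring():
    from fastapi import FastAPI

    app = FastAPI()
    app.add_event_handler("startup", create_start_app_handler())
    app.add_event_handler("shutdown", create_stop_app_handler())
    return app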
--------------------------------------------------------------------------------
/src/core/exceptions.py:
--------------------------------------------------------------------------------
1 | """
2 | Custom exceptions and exception handlers for the application
3 | """
4 | from http import HTTPStatus
5 | from typing import Any, Dict, List, Optional, Union
6 |
7 | from fastapi import FastAPI, Request
8 | from fastapi.exceptions import RequestValidationError
9 | from fastapi.responses import JSONResponse
10 | from pydantic import BaseModel
11 | from sqlalchemy.exc import SQLAlchemyError
12 | from starlette.exceptions import HTTPException
13 |
14 |
15 | class ErrorResponse(BaseModel):
16 | """
17 | Standard error response model
18 | """
19 | status_code: int
20 | message: str
21 | details: Optional[Union[List[Dict[str, Any]], Dict[str, Any], str]] = None
22 |
23 |
24 | class DatabaseError(Exception):
25 | """Exception raised for database-related errors"""
26 | def __init__(self, message: str = "Database error occurred"):
27 | self.message = message
28 | super().__init__(self.message)
29 |
30 |
31 | class CacheError(Exception):
32 | """Exception raised for cache-related errors"""
33 | def __init__(self, message: str = "Cache error occurred"):
34 | self.message = message
35 | super().__init__(self.message)
36 |
37 |
38 | class TaskQueueError(Exception):
39 | """Exception raised for task queue related errors"""
40 | def __init__(self, message: str = "Task queue error occurred"):
41 | self.message = message
42 | super().__init__(self.message)
43 |
44 |
45 | class ResourceNotFoundError(Exception):
46 | """Exception raised when a resource is not found"""
47 | def __init__(self, resource_type: str, resource_id: Any):
48 | self.resource_type = resource_type
49 | self.resource_id = resource_id
50 | self.message = f"{resource_type} with ID {resource_id} not found"
51 | super().__init__(self.message)
52 |
53 |
54 | class BusinessLogicError(Exception):
55 | """Exception raised for business logic errors"""
56 | def __init__(self, message: str):
57 | self.message = message
58 | super().__init__(self.message)
59 |
60 |
61 | def register_exception_handlers(app: FastAPI) -> None:
62 | """
63 | Register exception handlers with the FastAPI application
64 | """
65 | # Handle validation errors (from Pydantic)
66 | app.add_exception_handler(RequestValidationError, validation_error_handler)
67 |
68 | # Handle HTTP exceptions
69 | app.add_exception_handler(HTTPException, http_exception_handler)
70 |
71 | # Handle SQLAlchemy errors
72 | app.add_exception_handler(SQLAlchemyError, sqlalchemy_error_handler)
73 |
74 | # Handle custom exceptions
75 | app.add_exception_handler(DatabaseError, database_error_handler)
76 | app.add_exception_handler(CacheError, cache_error_handler)
77 | app.add_exception_handler(TaskQueueError, task_queue_error_handler)
78 | app.add_exception_handler(ResourceNotFoundError, resource_not_found_error_handler)
79 | app.add_exception_handler(BusinessLogicError, business_logic_error_handler)
80 |
81 | # Catch-all for any unhandled exceptions
82 | app.add_exception_handler(Exception, unhandled_exception_handler)
83 |
84 |
85 | async def validation_error_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
86 | """
87 | Handler for request validation errors
88 | """
89 | return JSONResponse(
90 | status_code=HTTPStatus.UNPROCESSABLE_ENTITY,
91 | content=ErrorResponse(
92 | status_code=HTTPStatus.UNPROCESSABLE_ENTITY,
93 | message="Validation error",
94 | details=exc.errors(),
95 | ).model_dump(),
96 | )
97 |
98 |
99 | async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
100 | """
101 | Handler for HTTP exceptions
102 | """
103 | return JSONResponse(
104 | status_code=exc.status_code,
105 | content=ErrorResponse(
106 | status_code=exc.status_code,
107 | message=str(exc.detail),
108 | ).model_dump(),
109 | )
110 |
111 |
112 | async def sqlalchemy_error_handler(request: Request, exc: SQLAlchemyError) -> JSONResponse:
113 | """
114 | Handler for SQLAlchemy errors
115 | """
116 | return JSONResponse(
117 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
118 | content=ErrorResponse(
119 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
120 | message="Database error",
121 | details=str(exc),
122 | ).model_dump(),
123 | )
124 |
125 |
126 | async def database_error_handler(request: Request, exc: DatabaseError) -> JSONResponse:
127 | """
128 | Handler for database errors
129 | """
130 | return JSONResponse(
131 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
132 | content=ErrorResponse(
133 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
134 | message=exc.message,
135 | ).model_dump(),
136 | )
137 |
138 |
139 | async def cache_error_handler(request: Request, exc: CacheError) -> JSONResponse:
140 | """
141 | Handler for cache errors
142 | """
143 | return JSONResponse(
144 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
145 | content=ErrorResponse(
146 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
147 | message=exc.message,
148 | ).model_dump(),
149 | )
150 |
151 |
152 | async def task_queue_error_handler(request: Request, exc: TaskQueueError) -> JSONResponse:
153 | """
154 | Handler for task queue errors
155 | """
156 | return JSONResponse(
157 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
158 | content=ErrorResponse(
159 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
160 | message=exc.message,
161 | ).model_dump(),
162 | )
163 |
164 |
165 | async def resource_not_found_error_handler(request: Request, exc: ResourceNotFoundError) -> JSONResponse:
166 | """
167 | Handler for resource not found errors
168 | """
169 | return JSONResponse(
170 | status_code=HTTPStatus.NOT_FOUND,
171 | content=ErrorResponse(
172 | status_code=HTTPStatus.NOT_FOUND,
173 | message=exc.message,
174 | ).model_dump(),
175 | )
176 |
177 |
178 | async def business_logic_error_handler(request: Request, exc: BusinessLogicError) -> JSONResponse:
179 | """
180 | Handler for business logic errors
181 | """
182 | return JSONResponse(
183 | status_code=HTTPStatus.BAD_REQUEST,
184 | content=ErrorResponse(
185 | status_code=HTTPStatus.BAD_REQUEST,
186 | message=exc.message,
187 | ).model_dump(),
188 | )
189 |
190 |
191 | async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
192 | """
193 | Handler for all unhandled exceptions
194 | """
195 | # Log the exception here before returning response
196 | return JSONResponse(
197 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
198 | content=ErrorResponse(
199 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
200 | message="Internal server error",
201 | ).model_dump(),
202 | )
203 |
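# A brief sketch of how these pieces fit together: the handlers are registered
# once on the FastAPI app, after which raising one of the custom exceptions from
# any endpoint or service yields the matching JSON error response. The route
# below is illustrative and not part of this project.
def _example_error_handling() -> FastAPI:
    app = FastAPI()
    register_exception_handlers(app)

    @app.get("/widgets/{widget_id}")
    async def read_widget(widget_id: int) -> dict:
        # Handled by resource_not_found_error_handler -> 404 ErrorResponse body
        raise ResourceNotFoundError("Widget", widget_id)

    return app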
--------------------------------------------------------------------------------
/src/core/logging.py:
--------------------------------------------------------------------------------
1 | """
2 | Logging configuration for the application
3 | """
4 | import logging
5 | import sys
6 | from typing import Any, Dict
7 |
8 | import structlog
9 | from structlog.types import Processor
10 |
11 | from src.core.config import settings
12 |
13 |
14 | def setup_logging() -> None:
15 | """
16 | Configure structlog and Python's logging
17 | """
18 | log_level = getattr(logging, settings.LOG_LEVEL.upper(), logging.INFO)
19 |
20 | # Configure standard logging
21 | logging.basicConfig(
22 | format="%(message)s",
23 | stream=sys.stdout,
24 | level=log_level,
25 | )
26 |
27 | # Set log level for third-party libraries
28 | for logger_name in ["uvicorn", "uvicorn.error", "fastapi"]:
29 | logging.getLogger(logger_name).setLevel(log_level)
30 |
31 | # Configure structlog processors for development vs. production
32 | if settings.ENV == "development":
33 | processors = _get_dev_processors()
34 | else:
35 | processors = _get_prod_processors()
36 |
37 | structlog.configure(
38 | processors=processors,
39 | context_class=dict,
40 | logger_factory=structlog.stdlib.LoggerFactory(),
41 | wrapper_class=structlog.stdlib.BoundLogger,
42 | cache_logger_on_first_use=True,
43 | )
44 |
45 |
46 | def _get_dev_processors() -> list[Processor]:
47 | """Configure structlog processors for development environment"""
48 | return [
49 | structlog.stdlib.add_log_level,
50 | structlog.stdlib.add_logger_name,
51 | structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
52 | structlog.dev.ConsoleRenderer(colors=True, exception_formatter=structlog.dev.plain_traceback),
53 | ]
54 |
55 |
56 | def _get_prod_processors() -> list[Processor]:
57 | """Configure structlog processors for production environment"""
58 | return [
59 | structlog.stdlib.filter_by_level,
60 | structlog.stdlib.add_logger_name,
61 | structlog.stdlib.add_log_level,
62 | structlog.stdlib.PositionalArgumentsFormatter(),
63 | structlog.processors.TimeStamper(fmt="iso"),
64 | structlog.processors.StackInfoRenderer(),
65 | structlog.processors.format_exc_info,
66 | structlog.processors.UnicodeDecoder(),
67 | structlog.processors.JSONRenderer(),
68 | ]
69 |
70 |
71 | def get_logger(name: str) -> structlog.stdlib.BoundLogger:
72 | """
73 | Get a structured logger with the given name
74 | """
75 | return structlog.getLogger(name)
76 |
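# A short usage sketch: configure logging once at startup, then emit structured
# events with keyword context. The event name and fields below are illustrative.
def _example_logging() -> None:
    setup_logging()
    log = get_logger(__name__)
    log.info("cache_hit", key="item:123", backend=settings.CACHE_BACKEND_TYPE)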
--------------------------------------------------------------------------------
/src/db/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Database modules initialization
3 | """
4 |
5 | # Import Base and all models here for Alembic to discover them
6 | from src.db.base import Base
7 | # Import models below this line
8 | from src.db.models.item import Item
9 |
--------------------------------------------------------------------------------
/src/db/base.py:
--------------------------------------------------------------------------------
1 | """
2 | Base SQLAlchemy models for the application
3 | """
4 | import uuid
5 | from datetime import datetime
6 | from typing import Any, Dict
7 |
8 | from sqlalchemy import MetaData
9 | from sqlalchemy.dialects.postgresql import UUID
10 | from sqlalchemy.ext.declarative import declared_attr
11 | from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
12 |
13 | # Convention for constraint naming to avoid SQLAlchemy warnings
14 | convention = {
15 | "ix": "ix_%(column_0_label)s",
16 | "uq": "uq_%(table_name)s_%(column_0_name)s",
17 | "ck": "ck_%(table_name)s_%(constraint_name)s",
18 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
19 | "pk": "pk_%(table_name)s"
20 | }
21 |
22 | metadata = MetaData(naming_convention=convention)
23 |
24 |
25 | class Base(DeclarativeBase):
26 | """Base class for all SQLAlchemy models"""
27 | metadata = metadata
28 |
29 | # Common model serialization method
30 | def to_dict(self) -> Dict[str, Any]:
31 | """Convert model instance to dict"""
32 | result = {}
33 | for key in self.__mapper__.c.keys():
34 | value = getattr(self, key)
35 | # Handle UUID conversion
36 | if isinstance(value, uuid.UUID):
37 | value = str(value)
38 | # Handle datetime conversion
39 | elif isinstance(value, datetime):
40 | value = value.isoformat()
41 | result[key] = value
42 | return result
43 |
44 |
45 | class TimestampMixin:
46 | """Mixin to add created_at and updated_at fields to models"""
47 | created_at: Mapped[datetime] = mapped_column(default=datetime.utcnow)
48 | updated_at: Mapped[datetime] = mapped_column(
49 | default=datetime.utcnow, onupdate=datetime.utcnow
50 | )
51 |
52 |
53 | class UUIDMixin:
54 | """Mixin to add a UUID primary key to models"""
55 | id: Mapped[uuid.UUID] = mapped_column(
56 | UUID(as_uuid=True), primary_key=True, default=uuid.uuid4
57 | )
58 |
59 |
60 | class TableNameMixin:
61 | """Mixin to automatically generate table names"""
62 | @declared_attr.directive
63 | def __tablename__(cls) -> str:
64 | return cls.__name__.lower()
65 |
--------------------------------------------------------------------------------
/src/db/models/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Database models initialization
3 | """
4 |
--------------------------------------------------------------------------------
/src/db/models/item.py:
--------------------------------------------------------------------------------
1 | """
2 | Example Item model
3 | """
4 | from typing import Optional
5 |
6 | from sqlalchemy import String, Text
7 | from sqlalchemy.orm import Mapped, mapped_column, relationship
8 |
9 | from src.db.base import Base, TableNameMixin, TimestampMixin, UUIDMixin
10 |
11 |
12 | class Item(Base, UUIDMixin, TableNameMixin, TimestampMixin):
13 | """
14 | Example Item model that demonstrates SQLAlchemy features
15 | """
16 | name: Mapped[str] = mapped_column(String(255), index=True)
17 | description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
18 | is_active: Mapped[bool] = mapped_column(default=True)
19 |
20 | def __repr__(self) -> str:
21 |         return f"<Item(id={self.id}, name={self.name})>"
22 |
--------------------------------------------------------------------------------
/src/db/session.py:
--------------------------------------------------------------------------------
1 | """
2 | Database session management
3 | """
4 | import logging
5 | from typing import AsyncGenerator, Optional
6 |
7 | from sqlalchemy.ext.asyncio import (
8 | AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
9 | )
10 |
11 | from src.core.config import settings
12 | from src.db.base import Base
13 |
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 | # Global engine instance
18 | engine: Optional[AsyncEngine] = None
19 | async_session_factory: Optional[async_sessionmaker] = None
20 |
21 |
22 | async def create_db_engine() -> None:
23 | """
24 | Initialize the database engine
25 | """
26 | global engine, async_session_factory
27 |
28 | logger.info("Creating database engine")
29 | engine = create_async_engine(
30 | str(settings.DATABASE_URI),
31 | echo=settings.DEBUG,
32 | pool_pre_ping=True,
33 | pool_size=10,
34 | max_overflow=20,
35 | )
36 |
37 | async_session_factory = async_sessionmaker(
38 | engine, expire_on_commit=False, autoflush=False
39 | )
40 |
41 |
42 | async def dispose_db_engine() -> None:
43 | """
44 | Close database connections
45 | """
46 | global engine
47 |
48 | if engine:
49 | logger.info("Closing database connections")
50 | await engine.dispose()
51 |
52 |
53 | async def get_db() -> AsyncGenerator[AsyncSession, None]:
54 | """
55 | FastAPI dependency that provides a database session
56 | """
57 | if async_session_factory is None:
58 | await create_db_engine()
59 |
60 | async with async_session_factory() as session:
61 | try:
62 | yield session
63 | await session.commit()
64 | except Exception as e:
65 | await session.rollback()
66 | raise e
67 | finally:
68 | await session.close()
69 |
70 |
71 | async def init_db() -> None:
72 | """
73 | Create database tables if they don't exist
74 | """
75 | global engine
76 |
77 | if engine is None:
78 | await create_db_engine()
79 |
80 | logger.info("Creating database tables")
81 | async with engine.begin() as conn:
82 | await conn.run_sync(Base.metadata.create_all)
83 |
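84 | # Illustrative sketch (not part of the original module): get_db is intended to
85 | # be injected into endpoints with FastAPI's Depends; the route below is a
86 | # hypothetical example.
87 | #
88 | #     @router.get("/items")
89 | #     async def list_items(db: AsyncSession = Depends(get_db)) -> list[dict]:
90 | #         ...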
--------------------------------------------------------------------------------
/src/schedulers/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Scheduler module initialization
3 | """
4 |
5 | from src.schedulers.scheduler import init_scheduler, shutdown_scheduler, get_scheduler, get_scheduled_jobs
6 |
--------------------------------------------------------------------------------
/src/schedulers/jobs.py:
--------------------------------------------------------------------------------
1 | """
2 | Scheduled job definitions
3 | """
4 | import logging
5 |
6 | from apscheduler.schedulers.asyncio import AsyncIOScheduler
7 |
8 | from src.services.item_service import ItemService
9 |
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | def setup_jobs(scheduler: AsyncIOScheduler) -> None:
15 | """
16 | Set up all scheduled jobs
17 | """
18 | # Example scheduled jobs with different interval types
19 | scheduler.add_job(
20 | daily_report,
21 | trigger="cron",
22 | hour=0,
23 | minute=0,
24 | id="daily_report",
25 | replace_existing=True,
26 | )
27 |
28 | scheduler.add_job(
29 | cleanup_inactive_items,
30 | trigger="interval",
31 | hours=4,
32 | id="cleanup_inactive_items",
33 | replace_existing=True,
34 | )
35 |
36 | scheduler.add_job(
37 | check_database_status,
38 | trigger="interval",
39 | minutes=15,
40 | id="check_database_status",
41 | replace_existing=True,
42 | )
43 |
44 | logger.info("Scheduled jobs have been set up")
45 |
46 |
47 | async def daily_report() -> None:
48 | """
49 | Generate a daily report
50 | """
51 | try:
52 | logger.info("Generating daily report")
53 | # Actual implementation would go here
54 | except Exception as e:
55 | logger.error(f"Error generating daily report: {str(e)}")
56 |
57 |
58 | async def cleanup_inactive_items() -> None:
59 | """
60 | Archive or delete inactive items
61 | """
62 | try:
63 | logger.info("Running inactive items cleanup job")
64 |         # A real implementation would open its own database session and call
65 |         # the service layer; scheduled jobs run outside the FastAPI request
66 |         # lifecycle, so the get_db dependency is not available here. See the
67 |         # sketch at the end of this module.
68 | logger.info("Inactive items cleanup completed")
69 | except Exception as e:
70 | logger.error(f"Error in cleanup job: {str(e)}")
71 |
72 |
73 | async def check_database_status() -> None:
74 | """
75 | Check database status and log metrics
76 | """
77 | try:
78 | logger.info("Checking database status")
79 | # Implementation would check database connection and statistics
80 | except Exception as e:
81 | logger.error(f"Error checking database status: {str(e)}")
82 |
83 |
84 | # Additional example jobs
85 | async def update_cache_ttl() -> None:
86 | """
87 | Update cache TTL for frequently accessed items
88 | """
89 | logger.info("Updating cache TTLs for frequently accessed items")
90 | # Implementation would extend TTL for frequently accessed cache items
91 |
92 |
93 | async def send_weekly_digest() -> None:
94 | """
95 | Send weekly digest emails to users
96 | """
97 | logger.info("Sending weekly digest emails")
98 | # Implementation would generate and send weekly email summaries
99 |
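100 | # Illustrative sketch (not part of the original job set): a scheduled job has
101 | # to open its own database session, since it runs outside the FastAPI request
102 | # cycle and cannot use the get_db dependency. This assumes the engine helpers
103 | # from src/db/session.py; the job name below is hypothetical.
104 | #
105 | #     from src.db import session as db_session
106 | #
107 | #     async def cleanup_inactive_items_with_db() -> None:
108 | #         if db_session.async_session_factory is None:
109 | #             await db_session.create_db_engine()
110 | #         async with db_session.async_session_factory() as db:
111 | #             items = await ItemService.get_items(db, active_only=False)
112 | #             logger.info(f"Inspected {len(items)} candidate items for cleanup")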
--------------------------------------------------------------------------------
/src/schedulers/scheduler.py:
--------------------------------------------------------------------------------
1 | """
2 | Scheduler configuration for periodic tasks
3 | """
4 | import logging
5 | from typing import Dict, List, Optional
6 |
7 | from apscheduler.executors.pool import ProcessPoolExecutor, ThreadPoolExecutor
8 | from apscheduler.jobstores.redis import RedisJobStore
9 | from apscheduler.schedulers.asyncio import AsyncIOScheduler
10 | from redis.asyncio import Redis
11 |
12 | from src.core.config import settings
13 |
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 | # Global scheduler instance
18 | _scheduler: Optional[AsyncIOScheduler] = None
19 |
20 |
21 | async def init_scheduler() -> AsyncIOScheduler:
22 | """
23 | Initialize and start the scheduler
24 | """
25 | global _scheduler
26 |
27 | if _scheduler and _scheduler.running:
28 | logger.warning("Scheduler is already running")
29 | return _scheduler
30 |
31 | logger.info("Initializing task scheduler")
32 |
33 | # Configure job stores
34 | jobstores = {
35 | 'default': RedisJobStore(
36 | jobs_key='fastapi_ignite_app:scheduler:jobs',
37 | run_times_key='fastapi_ignite_app:scheduler:run_times',
38 | host=settings.REDIS_HOST,
39 | port=settings.REDIS_PORT,
40 | db=settings.REDIS_DB,
41 | password=settings.REDIS_PASSWORD,
42 | )
43 | }
44 |
45 | # Configure executors
46 | executors = {
47 | 'default': ThreadPoolExecutor(20),
48 | 'processpool': ProcessPoolExecutor(5)
49 | }
50 |
51 | # Create scheduler
52 | _scheduler = AsyncIOScheduler(
53 | jobstores=jobstores,
54 | executors=executors,
55 | job_defaults={
56 | 'coalesce': True, # Combine multiple pending executions
57 | 'max_instances': 3, # Maximum number of concurrent instances
58 | 'misfire_grace_time': 60, # Seconds after the designated run time that the job is still allowed to be run
59 | }
60 | )
61 |
62 | # Set up scheduled jobs
63 | from src.schedulers.jobs import setup_jobs
64 | setup_jobs(_scheduler)
65 |
66 | # Start the scheduler
67 | _scheduler.start()
68 | logger.info("Task scheduler started")
69 |
70 | return _scheduler
71 |
72 |
73 | async def shutdown_scheduler() -> None:
74 | """
75 | Shutdown the scheduler
76 | """
77 | global _scheduler
78 |
79 | if _scheduler and _scheduler.running:
80 | logger.info("Shutting down task scheduler")
81 | _scheduler.shutdown()
82 | _scheduler = None
83 | else:
84 | logger.warning("Scheduler is not running")
85 |
86 |
87 | def get_scheduler() -> AsyncIOScheduler:
88 | """
89 | Get the scheduler instance
90 | """
91 | global _scheduler
92 | if not _scheduler:
93 | raise RuntimeError("Scheduler not initialized")
94 | return _scheduler
95 |
96 |
97 | def get_scheduled_jobs() -> List[Dict]:
98 | """
99 | Get a list of all scheduled jobs
100 | """
101 | scheduler = get_scheduler()
102 | jobs = []
103 |
104 | for job in scheduler.get_jobs():
105 | jobs.append({
106 | 'id': job.id,
107 | 'name': job.name,
108 | 'func': str(job.func),
109 | 'next_run': str(job.next_run_time),
110 | 'trigger': str(job.trigger),
111 | })
112 |
113 | return jobs
114 |
--------------------------------------------------------------------------------
/src/schedulers/scheduler_runner.py:
--------------------------------------------------------------------------------
1 | """
2 | Runner script for the APScheduler scheduler
3 | """
4 | import asyncio
5 | import logging
6 | import os
7 | import signal
8 | import sys
9 | from typing import Set
10 |
11 | from src.core.config import settings
12 | from src.core.logging import setup_logging
13 | from src.schedulers.scheduler import init_scheduler, shutdown_scheduler
14 |
15 |
16 | logger = logging.getLogger(__name__)
17 |
18 | # Global signal handling
19 | shutdown_signal_received = False
20 | signals_to_handle = {signal.SIGINT, signal.SIGTERM}
21 | original_handlers: dict = {}
22 |
23 |
24 | def handle_signal(sig, frame):
25 | """
26 | Signal handler for graceful shutdown
27 | """
28 | global shutdown_signal_received
29 | logger.info(f"Received signal {sig}, shutting down...")
30 | shutdown_signal_received = True
31 |
32 | # Restore original signal handlers
33 |     for handled_sig in signals_to_handle:
34 |         if handled_sig in original_handlers:
35 |             signal.signal(handled_sig, original_handlers[handled_sig])
36 |
37 |
38 | def setup_signal_handlers():
39 | """
40 | Set up signal handlers for graceful shutdown
41 | """
42 | global original_handlers
43 |
44 | for sig in signals_to_handle:
45 | original_handlers[sig] = signal.getsignal(sig)
46 | signal.signal(sig, handle_signal)
47 |
48 |
49 | def is_scheduler_enabled() -> bool:
50 | """
51 | Check if the scheduler is enabled based on environment variable or settings
52 | """
53 | # Check environment variable first (set by CLI)
54 | env_enabled = os.environ.get("SCHEDULER_ENABLED")
55 | if env_enabled is not None:
56 | return env_enabled.lower() == "true"
57 |
58 | # Fall back to settings
59 | return settings.SCHEDULER_ENABLED
60 |
61 |
62 | async def main():
63 | """
64 | Main entry point for the scheduler runner
65 | """
66 | # Set up logging
67 | setup_logging()
68 |
69 | # Check if scheduler is enabled
70 | if not is_scheduler_enabled():
71 | logger.info("Scheduler is disabled. Exiting.")
72 | return
73 |
74 | # Initialize the scheduler
75 | scheduler = await init_scheduler()
76 |
77 | logger.info(f"Scheduler runner started in {settings.ENV} mode")
78 |
79 | # Wait until a shutdown signal is received
80 | while not shutdown_signal_received:
81 | await asyncio.sleep(1)
82 |
83 | # Shutdown the scheduler
84 | await shutdown_scheduler()
85 | logger.info("Scheduler runner shutdown complete")
86 |
87 |
88 | if __name__ == "__main__":
89 | # Setup signal handlers
90 | setup_signal_handlers()
91 |
92 | try:
93 | # Run the main coroutine
94 | asyncio.run(main())
95 | except Exception as e:
96 | logger.exception(f"Unhandled exception in scheduler runner: {str(e)}")
97 | sys.exit(1)
98 |
--------------------------------------------------------------------------------
/src/schemas/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Pydantic schemas initialization
3 | """
4 |
5 | from src.schemas.item import ItemBase, ItemCreate, ItemInDB, ItemResponse, ItemUpdate
6 |
--------------------------------------------------------------------------------
/src/schemas/item.py:
--------------------------------------------------------------------------------
1 | """
2 | Pydantic schemas for Item objects
3 | """
4 | import uuid
5 | from datetime import datetime
6 | from typing import Optional
7 |
8 | from pydantic import BaseModel, ConfigDict, Field
9 |
10 |
11 | class ItemBase(BaseModel):
12 | """
13 | Base schema for Item data
14 | """
15 | name: str = Field(..., description="Item name", max_length=255)
16 | description: Optional[str] = Field(None, description="Item description")
17 | is_active: bool = Field(True, description="Whether the item is active")
18 |
19 |
20 | class ItemCreate(ItemBase):
21 | """
22 | Schema for creating a new Item
23 | """
24 | pass
25 |
26 |
27 | class ItemUpdate(BaseModel):
28 | """
29 | Schema for updating an existing Item
30 | """
31 | name: Optional[str] = Field(None, description="Item name", max_length=255)
32 | description: Optional[str] = Field(None, description="Item description")
33 | is_active: Optional[bool] = Field(None, description="Whether the item is active")
34 |
35 | # Use Pydantic v2 config
36 | model_config = ConfigDict(
37 | extra="forbid", # Forbid extra fields not defined in model
38 | )
39 |
40 |
41 | class ItemInDB(ItemBase):
42 | """
43 | Schema for Item data from the database
44 | """
45 | id: uuid.UUID = Field(..., description="Item UUID")
46 | created_at: datetime = Field(..., description="Creation timestamp")
47 | updated_at: datetime = Field(..., description="Last update timestamp")
48 |
49 | # Use Pydantic v2 config
50 | model_config = ConfigDict(
51 | from_attributes=True, # Convert ORM model to Pydantic model
52 | )
53 |
54 |
55 | class ItemResponse(ItemInDB):
56 | """
57 | Schema for Item response data
58 | """
59 | pass
60 |
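61 | # Illustrative sketch (not part of the original schemas): with
62 | # from_attributes=True, Pydantic v2 can build the response schema directly
63 | # from an ORM row, e.g. for an `orm_item` previously loaded from the database:
64 | #
65 | #     response = ItemResponse.model_validate(orm_item)
66 | #     payload = response.model_dump()  # plain dict with id, name, timestamps, ...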
--------------------------------------------------------------------------------
/src/services/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Service layer initialization
3 | """
4 |
5 | from src.services.item_service import ItemService
6 |
--------------------------------------------------------------------------------
/src/services/cached_item_service.py:
--------------------------------------------------------------------------------
1 | """
2 | Example service using the cache system
3 | """
4 | import json
5 | import logging
6 | import uuid
7 | from typing import List, Optional
8 |
9 | from sqlalchemy import select
10 | from sqlalchemy.ext.asyncio import AsyncSession
11 |
12 | from src.cache import CacheBackend, cached, invalidate_cache
13 | from src.db.models.item import Item
14 | from src.schemas.item import ItemCreate, ItemUpdate
15 |
16 |
17 | logger = logging.getLogger(__name__)
18 |
19 |
20 | class CachedItemService:
21 | """
22 | Service class demonstrating the use of different cache backends
23 | """
24 |
25 | @staticmethod
26 | @cached(ttl=300, key_prefix="item")
27 | async def get_item_by_id(
28 | db: AsyncSession,
29 | item_id: uuid.UUID
30 | ) -> Optional[dict]:
31 | """
32 | Get an item by ID with caching
33 | """
34 | query = select(Item).where(Item.id == item_id)
35 | result = await db.execute(query)
36 | item = result.scalar_one_or_none()
37 |
38 | if item is None:
39 | return None
40 |
41 | # Return a dict so it can be JSON serialized for cache
42 | return {
43 | "id": str(item.id),
44 | "name": item.name,
45 | "description": item.description,
46 |         "is_active": item.is_active,
47 | "created_at": item.created_at.isoformat(),
48 | "updated_at": item.updated_at.isoformat() if item.updated_at else None,
49 | }
50 |
51 | @staticmethod
52 | @invalidate_cache(key_pattern="item:*")
53 | async def update_item(
54 | db: AsyncSession,
55 | item_id: uuid.UUID,
56 | item_data: ItemUpdate
57 | ) -> Item:
58 | """
59 | Update an item and invalidate cache
60 | """
61 | query = select(Item).where(Item.id == item_id)
62 | result = await db.execute(query)
63 | item = result.scalar_one_or_none()
64 |
65 | if item is None:
66 | raise ValueError(f"Item not found: {item_id}")
67 |
68 | # Update item attributes
69 | for key, value in item_data.model_dump(exclude_unset=True).items():
70 | setattr(item, key, value)
71 |
72 | await db.commit()
73 | await db.refresh(item)
74 |
75 | logger.info(f"Updated item: {item.id}")
76 | return item
77 |
78 | @staticmethod
79 | async def direct_cache_example(
80 | db: AsyncSession,
81 | cache: CacheBackend,
82 | item_id: uuid.UUID
83 |     ) -> Optional[dict]:
84 | """
85 | Example using the cache backend directly
86 | """
87 | # Create cache key
88 | cache_key = f"direct:item:{item_id}"
89 |
90 | # Try to get from cache first
91 | cached_value = await cache.get(cache_key)
92 | if cached_value:
93 | logger.info(f"Cache hit for item: {item_id}")
94 | return json.loads(cached_value)
95 |
96 | # Not in cache, get from database
97 | logger.info(f"Cache miss for item: {item_id}")
98 | query = select(Item).where(Item.id == item_id)
99 | result = await db.execute(query)
100 | item = result.scalar_one_or_none()
101 |
102 | if item is None:
103 | return None
104 |
105 | # Create serializable result
106 | item_data = {
107 | "id": str(item.id),
108 | "name": item.name,
109 | "description": item.description,
110 |         "is_active": item.is_active,
111 | "created_at": item.created_at.isoformat(),
112 | "updated_at": item.updated_at.isoformat() if item.updated_at else None,
113 | }
114 |
115 | # Store in cache for future requests
116 | await cache.set(
117 | cache_key,
118 | json.dumps(item_data),
119 | ex=300 # 5 minutes
120 | )
121 |
122 | return item_data
123 |
--------------------------------------------------------------------------------
/src/services/item_service.py:
--------------------------------------------------------------------------------
1 | """
2 | Service layer for Item operations
3 | """
4 | import logging
5 | import uuid
6 | from typing import List, Optional, Union
7 |
8 | from sqlalchemy import delete, select, update
9 | from sqlalchemy.ext.asyncio import AsyncSession
10 |
11 | from src.core.exceptions import ResourceNotFoundError
12 | from src.db.models.item import Item
13 | from src.schemas.item import ItemCreate, ItemUpdate
14 |
15 |
16 | logger = logging.getLogger(__name__)
17 |
18 |
19 | class ItemService:
20 | """
21 | Service class for item operations
22 | """
23 |
24 | @staticmethod
25 | async def create_item(
26 | db: AsyncSession, item_data: ItemCreate
27 | ) -> Item:
28 | """
29 | Create a new item
30 | """
31 | item = Item(**item_data.model_dump())
32 | db.add(item)
33 | await db.flush()
34 | await db.refresh(item)
35 | logger.info(f"Created item: {item.id}")
36 | return item
37 |
38 | @staticmethod
39 | async def get_item(
40 | db: AsyncSession, item_id: uuid.UUID
41 | ) -> Item:
42 | """
43 | Get an item by ID
44 | """
45 | query = select(Item).where(Item.id == item_id)
46 | result = await db.execute(query)
47 | item = result.scalar_one_or_none()
48 |
49 | if item is None:
50 | logger.warning(f"Item not found: {item_id}")
51 | raise ResourceNotFoundError("Item", item_id)
52 |
53 | return item
54 |
55 | @staticmethod
56 | async def get_items(
57 | db: AsyncSession,
58 | skip: int = 0,
59 | limit: int = 100,
60 | active_only: bool = False
61 | ) -> List[Item]:
62 | """
63 | Get multiple items with pagination
64 | """
65 | query = select(Item)
66 |
67 | if active_only:
68 | query = query.where(Item.is_active == True)
69 |
70 | query = query.offset(skip).limit(limit).order_by(Item.created_at.desc())
71 | result = await db.execute(query)
72 | items = result.scalars().all()
73 |
74 | return list(items)
75 |
76 | @staticmethod
77 | async def update_item(
78 | db: AsyncSession,
79 | item_id: uuid.UUID,
80 | item_data: ItemUpdate
81 | ) -> Item:
82 | """
83 | Update an item
84 | """
85 | # First check if the item exists
86 | item = await ItemService.get_item(db, item_id)
87 |
88 | # Filter out None values
89 | update_data = {k: v for k, v in item_data.model_dump().items() if v is not None}
90 |
91 | if not update_data:
92 | # No updates to apply
93 | return item
94 |
95 | # Update the item
96 | stmt = (
97 | update(Item)
98 | .where(Item.id == item_id)
99 | .values(**update_data)
100 | .returning(Item)
101 | )
102 |
103 | result = await db.execute(stmt)
104 | updated_item = result.scalar_one()
105 | logger.info(f"Updated item: {item_id}")
106 |
107 | return updated_item
108 |
109 | @staticmethod
110 | async def delete_item(
111 | db: AsyncSession,
112 | item_id: uuid.UUID
113 | ) -> None:
114 | """
115 | Delete an item
116 | """
117 | # First check if the item exists
118 | await ItemService.get_item(db, item_id)
119 |
120 | # Delete the item
121 | stmt = delete(Item).where(Item.id == item_id)
122 | await db.execute(stmt)
123 | logger.info(f"Deleted item: {item_id}")
124 |
125 | @staticmethod
126 | async def search_items(
127 | db: AsyncSession,
128 | search_term: str,
129 | skip: int = 0,
130 | limit: int = 100
131 | ) -> List[Item]:
132 | """
133 | Search items by name or description
134 | """
135 | search_pattern = f"%{search_term}%"
136 |
137 | query = (
138 | select(Item)
139 | .where(
140 | (Item.name.ilike(search_pattern)) |
141 | (Item.description.ilike(search_pattern))
142 | )
143 | .offset(skip)
144 | .limit(limit)
145 | )
146 |
147 | result = await db.execute(query)
148 | items = result.scalars().all()
149 |
150 | return list(items)
151 |
--------------------------------------------------------------------------------
/src/tasks/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Background tasks module initialization
3 | """
4 |
5 | from src.tasks.jobs import (
6 | process_item,
7 | generate_report,
8 | send_welcome_email,
9 | send_notification_email,
10 | data_processing_pipeline,
11 | )
12 |
--------------------------------------------------------------------------------
/src/tasks/jobs.py:
--------------------------------------------------------------------------------
1 | """
2 | Background task definitions using Dramatiq
3 | """
4 | import logging
5 | import time
6 | import uuid
7 | from typing import Dict, List, Optional, Union
8 |
9 | import dramatiq
10 | from dramatiq.middleware import Middleware
11 |
12 | from src.core.config import settings
13 |
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 |
18 | @dramatiq.actor(
19 | queue_name="default",
20 | max_retries=3,
21 | time_limit=60000, # 60 seconds
22 | )
23 | def process_item(
24 | item_id: str,
25 | options: Optional[Dict] = None,
26 | ) -> Dict:
27 | """
28 | Example background task that processes an item.
29 |
30 | Args:
31 | item_id: The ID of the item to process
32 | options: Optional processing options
33 |
34 | Returns:
35 | A dictionary with the processing results
36 | """
37 | logger.info(f"Processing item {item_id}")
38 |
39 | # Simulate some processing work
40 | time.sleep(2)
41 |
42 | # Example result
43 | result = {
44 | "item_id": item_id,
45 | "processed": True,
46 | "timestamp": time.time(),
47 | }
48 |
49 | if options:
50 | result["options_used"] = options
51 |
52 | logger.info(f"Finished processing item {item_id}")
53 | return result
54 |
55 |
56 | @dramatiq.actor(
57 | queue_name="low_priority",
58 | max_retries=5,
59 | time_limit=300000, # 5 minutes
60 | )
61 | def generate_report(
62 | report_type: str,
63 | filters: Optional[Dict] = None,
64 | user_id: Optional[str] = None,
65 | ) -> str:
66 | """
67 | Example long-running task that generates a report.
68 |
69 | Args:
70 | report_type: The type of report to generate
71 | filters: Optional filters to apply to the report
72 | user_id: Optional ID of the user requesting the report
73 |
74 | Returns:
75 | A string with the report ID or location
76 | """
77 | logger.info(f"Generating {report_type} report for user {user_id}")
78 |
79 | # Simulate a long task
80 | time.sleep(10)
81 |
82 | # Generate a unique report ID
83 | report_id = str(uuid.uuid4())
84 |
85 | logger.info(f"Finished generating report {report_id}")
86 | return f"report:{report_id}"
87 |
88 |
89 | @dramatiq.actor(queue_name="emails")
90 | def send_welcome_email(user_email: str, user_name: str) -> None:
91 | """Send welcome email to new users"""
92 | logger.info(f"Sending welcome email to {user_email}")
93 | # Email sending logic would go here
94 | time.sleep(1)
95 | logger.info(f"Sent welcome email to {user_email}")
96 |
97 |
98 | @dramatiq.actor(queue_name="emails")
99 | def send_notification_email(user_email: str, subject: str, content: str) -> None:
100 | """Send notification email to users"""
101 | logger.info(f"Sending notification email to {user_email}: {subject}")
102 | # Email sending logic would go here
103 | time.sleep(0.5)
104 | logger.info(f"Sent notification email to {user_email}")
105 |
106 |
107 | # Example of composing a task group. dramatiq.group expects messages, which
108 | # are created from actors via .message(...) with concrete arguments, e.g.:
109 | # email_group = dramatiq.group([
110 | #     send_welcome_email.message("user@example.com", "User"),
111 | # ])
112 |
113 |
114 | # Example of a task with message hooks - using middleware instead of direct hooks
115 | @dramatiq.actor
116 | def data_processing_pipeline(data_id: str) -> None:
117 | """Process data through a pipeline with hooks"""
118 | logger.info(f"Starting data pipeline for {data_id}")
119 | time.sleep(2)
120 | logger.info(f"Completed data pipeline for {data_id}")
121 |
122 |
123 | # Define a custom middleware for hooks instead of using direct method decorators
124 | class PipelineMiddleware(Middleware):
125 | """Custom middleware to handle pipeline hooks"""
126 |
127 | def before_process_message(self, broker, message):
128 | if message.actor_name == "data_processing_pipeline":
129 | logger.info(f"Before pipeline hook: {message.args}")
130 | return message
131 |
132 | def after_process_message(self, broker, message, *, result=None, exception=None):
133 | if message.actor_name == "data_processing_pipeline":
134 | if exception is None:
135 | logger.info(f"After pipeline hook: {message.args}, result: {result}")
136 | else:
137 | logger.error(f"Pipeline error: {message.args}, exception: {str(exception)}")
138 |
139 |
140 | # Note: To use this middleware, register it with your broker in worker.py:
141 | #
142 | # broker.add_middleware(PipelineMiddleware())
143 | #
144 | # Or if you're using the default broker:
145 | # dramatiq.get_broker().add_middleware(PipelineMiddleware())
146 |
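147 | # Illustrative sketch (not part of the original tasks): actors are enqueued
148 | # from application code with .send() / .send_with_options(); the arguments
149 | # below are made-up examples.
150 | #
151 | #     process_item.send("item-123", {"priority": "high"})
152 | #     generate_report.send_with_options(
153 | #         args=("sales",), kwargs={"user_id": "42"}, delay=60_000,  # delay in ms
154 | #     )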
--------------------------------------------------------------------------------
/src/tasks/worker.py:
--------------------------------------------------------------------------------
1 | """
2 | Dramatiq worker configuration
3 | """
4 | import logging
5 | import os
6 | import sys
7 |
8 | import dramatiq
9 | from dramatiq.brokers.redis import RedisBroker
10 | from dramatiq.middleware import AgeLimit, Callbacks, Pipelines, Retries, TimeLimit
11 |
12 | from src.core.config import settings
13 |
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 | # Configure Redis broker for Dramatiq
18 | redis_broker = RedisBroker(
19 | url=str(settings.REDIS_URI),
20 | middleware=[
21 | Pipelines(),
22 | Retries(max_retries=3),
23 | TimeLimit(),
24 | AgeLimit(),
25 | Callbacks(),
26 | ]
27 | )
28 |
29 | # Set as the default broker
30 | dramatiq.set_broker(redis_broker)
31 |
32 | # Function to run the worker (used in CLI scripts)
33 | def run_worker() -> None:
34 | """
35 | Run the Dramatiq worker
36 | """
37 | from dramatiq.cli import main
38 |
39 | # Set up arguments for the worker
40 | os.environ["PYTHONPATH"] = os.getcwd()
41 | sys.argv = [
42 | "dramatiq",
43 | "src.tasks.jobs", # Import path for task modules
44 | "-p", str(settings.DRAMATIQ_PROCESSES), # Number of processes
45 | "-t", str(settings.DRAMATIQ_THREADS), # Number of threads per process
46 | ]
47 |
48 | logger.info("Starting Dramatiq worker")
49 | main()
50 |
--------------------------------------------------------------------------------
/src/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Utility modules initialization
3 | """
4 |
5 | from src.utils.helpers import (
6 | serialize_datetime,
7 | serialize_uuid,
8 | parse_json_string,
9 | model_to_dict,
10 | batch_process,
11 | sanitize_dict,
12 | )
13 |
--------------------------------------------------------------------------------
/src/utils/helpers.py:
--------------------------------------------------------------------------------
1 | """
2 | Utility helper functions
3 | """
4 | import json
5 | import uuid
6 | from datetime import datetime
7 | from typing import Any, Dict, List, Union
8 |
9 | from fastapi.encoders import jsonable_encoder
10 |
11 |
12 | def serialize_datetime(dt: datetime) -> str:
13 | """
14 | Serialize a datetime object to ISO format string
15 | """
16 | return dt.isoformat()
17 |
18 |
19 | def serialize_uuid(uid: uuid.UUID) -> str:
20 | """
21 | Serialize a UUID object to string
22 | """
23 | return str(uid)
24 |
25 |
26 | def parse_json_string(json_str: str) -> Dict:
27 | """
28 | Parse a JSON string to a Python dictionary
29 | """
30 | try:
31 | return json.loads(json_str)
32 | except json.JSONDecodeError:
33 | return {}
34 |
35 |
36 | def model_to_dict(model: Any) -> Dict:
37 | """
38 | Convert a SQLAlchemy or Pydantic model to dictionary
39 | """
40 | return jsonable_encoder(model)
41 |
42 |
43 | def batch_process(items: List[Any], batch_size: int = 100) -> List[List[Any]]:
44 | """
45 | Split a list of items into batches of specified size
46 | """
47 | return [items[i:i + batch_size] for i in range(0, len(items), batch_size)]
48 |
49 |
50 | def sanitize_dict(data: Dict[str, Any]) -> Dict[str, Any]:
51 | """
52 | Remove None values from a dictionary
53 | """
54 | return {k: v for k, v in data.items() if v is not None}
55 |
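56 | # Illustrative examples (not part of the original helpers):
57 | #
58 | #     batch_process([1, 2, 3, 4, 5], batch_size=2)   # -> [[1, 2], [3, 4], [5]]
59 | #     sanitize_dict({"name": "Widget", "description": None})  # -> {"name": "Widget"}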
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Test package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """
2 | Pytest configuration for the application
3 | """
4 | import asyncio
5 | import os
6 | from typing import AsyncGenerator, Generator
7 |
8 | import pytest
9 | import pytest_asyncio
10 | from asgi_lifespan import LifespanManager
11 | from fastapi import FastAPI
12 | from httpx import AsyncClient
13 | from sqlalchemy.ext.asyncio import (
14 | AsyncSession, async_sessionmaker, create_async_engine
15 | )
16 |
17 | from src.core.config import settings
18 | from src.db.base import Base
19 | from src.db.session import get_db
20 | from main import create_application
21 |
22 |
23 | # Set test environment
24 | os.environ["ENV"] = "test"
25 |
26 |
27 | @pytest.fixture(scope="session")
28 | def event_loop() -> Generator:
29 | """
30 | Create an instance of the event loop for each test session.
31 | """
32 | loop = asyncio.get_event_loop_policy().new_event_loop()
33 | yield loop
34 | loop.close()
35 |
36 |
37 | @pytest_asyncio.fixture(scope="session")
38 | async def test_app() -> AsyncGenerator[FastAPI, None]:
39 | """
40 | Create a FastAPI test application.
41 | """
42 | app = create_application()
43 | async with LifespanManager(app):
44 | yield app
45 |
46 |
47 | @pytest_asyncio.fixture(scope="session")
48 | async def test_db_engine():
49 | """
50 | Create a test database engine.
51 | """
52 | # Create a new test database URL
53 |     TEST_DATABASE_URL = str(settings.DATABASE_URI).replace(
54 | f"/{settings.POSTGRES_DB}", "/test_db"
55 | )
56 |
57 | # Create engine for test database
58 | engine = create_async_engine(TEST_DATABASE_URL, echo=True)
59 |
60 | # Create all tables
61 | async with engine.begin() as conn:
62 | await conn.run_sync(Base.metadata.drop_all)
63 | await conn.run_sync(Base.metadata.create_all)
64 |
65 | yield engine
66 |
67 | # Drop all tables after tests
68 | async with engine.begin() as conn:
69 | await conn.run_sync(Base.metadata.drop_all)
70 |
71 | # Dispose engine
72 | await engine.dispose()
73 |
74 |
75 | @pytest_asyncio.fixture
76 | async def test_db(test_app: FastAPI, test_db_engine) -> AsyncGenerator[AsyncSession, None]:
77 | """
78 | Create a new database session for a test.
79 | """
80 | # Create test session
81 | connection = await test_db_engine.connect()
82 | transaction = await connection.begin()
83 |
84 | # Use session factory
85 | test_session_factory = async_sessionmaker(
86 | connection, expire_on_commit=False
87 | )
88 |
89 | # Create a session
90 | async with test_session_factory() as session:
91 |         # Override the get_db dependency so API calls use this test session
92 |         test_app.dependency_overrides[get_db] = lambda: session
93 |
94 | yield session
95 |
96 | # Rollback the transaction
97 | await transaction.rollback()
98 | await connection.close()
99 |
100 |
101 | @pytest_asyncio.fixture
102 | async def client(test_app: FastAPI) -> AsyncGenerator[AsyncClient, None]:
103 | """
104 | Create an async HTTP client for testing.
105 | """
106 | async with AsyncClient(
107 | app=test_app,
108 | base_url="http://test",
109 | ) as client:
110 | yield client
111 |
--------------------------------------------------------------------------------
/tests/test_api/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API test package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/tests/test_api/v1/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | API v1 test package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/tests/test_api/v1/test_items.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for the items API endpoints
3 | """
4 | import uuid
5 | from typing import Dict
6 |
7 | import pytest
8 | from httpx import AsyncClient
9 | from sqlalchemy.ext.asyncio import AsyncSession
10 |
11 | from src.db.models.item import Item
12 |
13 |
14 | @pytest.mark.asyncio
15 | async def test_create_item(client: AsyncClient, test_db: AsyncSession) -> None:
16 | """
17 | Test creating a new item through the API
18 | """
19 | # Item data
20 | item_data = {
21 | "name": "Test Item",
22 | "description": "This is a test item",
23 | "is_active": True
24 | }
25 |
26 | # Send request
27 | response = await client.post(
28 | "/api/v1/items/",
29 | json=item_data
30 | )
31 |
32 | # Check response
33 | assert response.status_code == 201
34 | data = response.json()
35 | assert data["name"] == item_data["name"]
36 | assert data["description"] == item_data["description"]
37 | assert data["is_active"] == item_data["is_active"]
38 | assert "id" in data
39 | assert "created_at" in data
40 | assert "updated_at" in data
41 |
42 |
43 | @pytest.mark.asyncio
44 | async def test_read_item(client: AsyncClient, test_db: AsyncSession) -> None:
45 | """
46 | Test retrieving an item through the API
47 | """
48 | # Create a test item in the database
49 | item = Item(name="Test Get Item", description="Item for testing get", is_active=True)
50 | test_db.add(item)
51 | await test_db.commit()
52 | await test_db.refresh(item)
53 |
54 | # Send request
55 | response = await client.get(f"/api/v1/items/{item.id}")
56 |
57 | # Check response
58 | assert response.status_code == 200
59 | data = response.json()
60 | assert data["name"] == "Test Get Item"
61 | assert data["description"] == "Item for testing get"
62 | assert data["id"] == str(item.id)
63 |
64 |
65 | @pytest.mark.asyncio
66 | async def test_read_nonexistent_item(client: AsyncClient) -> None:
67 | """
68 | Test retrieving a non-existent item
69 | """
70 | # Generate a random UUID
71 | nonexistent_id = uuid.uuid4()
72 |
73 | # Send request
74 | response = await client.get(f"/api/v1/items/{nonexistent_id}")
75 |
76 | # Check response
77 | assert response.status_code == 404
78 |
79 |
80 | @pytest.mark.asyncio
81 | async def test_update_item(client: AsyncClient, test_db: AsyncSession) -> None:
82 | """
83 | Test updating an item through the API
84 | """
85 | # Create a test item in the database
86 | item = Item(name="Test Update Item", description="Original description", is_active=True)
87 | test_db.add(item)
88 | await test_db.commit()
89 | await test_db.refresh(item)
90 |
91 | # Update data
92 | update_data = {
93 | "name": "Updated Name",
94 | "description": "Updated description"
95 | }
96 |
97 | # Send request
98 | response = await client.put(
99 | f"/api/v1/items/{item.id}",
100 | json=update_data
101 | )
102 |
103 | # Check response
104 | assert response.status_code == 200
105 | data = response.json()
106 | assert data["name"] == update_data["name"]
107 | assert data["description"] == update_data["description"]
108 | assert data["id"] == str(item.id)
109 |
110 |
111 | @pytest.mark.asyncio
112 | async def test_delete_item(client: AsyncClient, test_db: AsyncSession) -> None:
113 | """
114 | Test deleting an item through the API
115 | """
116 | # Create a test item in the database
117 | item = Item(name="Test Delete Item", description="Item to be deleted", is_active=True)
118 | test_db.add(item)
119 | await test_db.commit()
120 | await test_db.refresh(item)
121 |
122 | # Send delete request
123 | response = await client.delete(f"/api/v1/items/{item.id}")
124 |
125 | # Check response
126 | assert response.status_code == 204
127 |
128 | # Verify item is gone
129 | check_response = await client.get(f"/api/v1/items/{item.id}")
130 | assert check_response.status_code == 404
131 |
132 |
133 | @pytest.mark.asyncio
134 | async def test_list_items(client: AsyncClient, test_db: AsyncSession) -> None:
135 | """
136 | Test listing items through the API
137 | """
138 | # Create multiple test items
139 | items = [
140 | Item(name="List Item 1", description="First list item", is_active=True),
141 | Item(name="List Item 2", description="Second list item", is_active=True),
142 | Item(name="List Item 3", description="Third list item", is_active=False),
143 | ]
144 |
145 | for item in items:
146 | test_db.add(item)
147 |
148 | await test_db.commit()
149 |
150 | # Send request for all items
151 | response = await client.get("/api/v1/items/")
152 |
153 | # Check response
154 | assert response.status_code == 200
155 | data = response.json()
156 | assert len(data) >= 3 # There might be other items from previous tests
157 |
158 | # Test active_only filter
159 | response = await client.get("/api/v1/items/?active_only=true")
160 | assert response.status_code == 200
161 | data = response.json()
162 |
163 | # All returned items should be active
164 | for item in data:
165 | assert item["is_active"] == True
166 |
167 |
168 | @pytest.mark.asyncio
169 | async def test_search_items(client: AsyncClient, test_db: AsyncSession) -> None:
170 | """
171 | Test searching items through the API
172 | """
173 | # Create items with searchable names/descriptions
174 | items = [
175 | Item(name="Special Widget", description="A unique item for search", is_active=True),
176 | Item(name="Regular Item", description="Contains special keyword", is_active=True),
177 | Item(name="Another Item", description="Nothing special here", is_active=True),
178 | ]
179 |
180 | for item in items:
181 | test_db.add(item)
182 |
183 | await test_db.commit()
184 |
185 | # Search for "special" which should match 2 items
186 | response = await client.get("/api/v1/items/search/?q=special")
187 |
188 | # Check response
189 | assert response.status_code == 200
190 | data = response.json()
191 | assert len(data) == 2
192 |
193 | # Verify item names
194 | names = [item["name"] for item in data]
195 | assert "Special Widget" in names
196 |
--------------------------------------------------------------------------------
/tests/test_cache/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Cache test package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/tests/test_cache/test_backends.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for the cache backends
3 | """
4 | import pytest
5 |
6 | from src.cache.backends.base import CacheBackend
7 | from src.cache.backends.memory import MemoryBackend
8 | from src.cache.backends.file import FileBackend
9 | from src.cache.backends.redis import RedisBackend
10 |
11 |
12 | @pytest.mark.asyncio
13 | class TestCacheBackends:
14 | """Tests for different cache backend implementations"""
15 |
16 | async def test_memory_backend_basic_ops(self):
17 | """Test basic operations with memory backend"""
18 | cache = MemoryBackend()
19 | await cache.init()
20 |
21 | try:
22 | # Set a value
23 | await cache.set("test_key", "test_value")
24 |
25 | # Get the value
26 | value = await cache.get("test_key")
27 | assert value == "test_value"
28 |
29 | # Delete the value
30 | deleted = await cache.delete("test_key")
31 | assert deleted == 1
32 |
33 | # Confirm it's gone
34 | value = await cache.get("test_key")
35 | assert value is None
36 | finally:
37 | await cache.close()
38 |
39 | @pytest.mark.asyncio
40 | async def test_memory_backend_expiry(self):
41 | """Test expiration with memory backend"""
42 | cache = MemoryBackend()
43 | await cache.init()
44 |
45 | try:
46 | # Set with a very short TTL (1 second)
47 | await cache.set("expiry_test", "value", ex=1)
48 |
49 | # Should be available immediately
50 | value = await cache.get("expiry_test")
51 | assert value == "value"
52 |
53 | # Wait for expiration
54 | import asyncio
55 | await asyncio.sleep(1.1)
56 |
57 | # Should be gone now
58 | value = await cache.get("expiry_test")
59 | assert value is None
60 | finally:
61 | await cache.close()
62 |
63 | @pytest.mark.asyncio
64 | async def test_memory_backend_scan(self):
65 | """Test scan operation with memory backend"""
66 | cache = MemoryBackend()
67 | await cache.init()
68 |
69 | try:
70 | # Set multiple values with different prefixes
71 | await cache.set("user:1", "user1")
72 | await cache.set("user:2", "user2")
73 | await cache.set("item:1", "item1")
74 |
75 | # Scan for user keys
76 | cursor, keys = await cache.scan("0", "user:*", 10)
77 | assert len(keys) == 2
78 | assert "user:1" in keys
79 | assert "user:2" in keys
80 |
81 | # Scan for item keys
82 | cursor, keys = await cache.scan("0", "item:*", 10)
83 | assert len(keys) == 1
84 | assert "item:1" in keys
85 |
86 | # Clean up
87 | await cache.flush()
88 | finally:
89 | await cache.close()
90 |
91 | @pytest.mark.asyncio
92 | async def test_file_backend_basic_ops(self):
93 | """Test basic operations with file backend"""
94 | import tempfile
95 | import os
96 | from pathlib import Path
97 |
98 | # Use a temporary directory for testing
99 | with tempfile.TemporaryDirectory() as temp_dir:
100 | # Override the cache path
101 | from src.core.config import settings
102 | original_path = settings.CACHE_FILE_PATH
103 |             settings.CACHE_FILE_PATH = temp_dir
104 |
105 | cache = FileBackend()
106 | await cache.init()
107 |
108 | try:
109 | # Set a value
110 | await cache.set("test_key", "test_value")
111 |
112 | # Verify file was created
113 | file_path = cache._get_path_for_key("test_key")
114 | assert file_path.exists()
115 |
116 | # Get the value
117 | value = await cache.get("test_key")
118 | assert value == "test_value"
119 |
120 | # Delete the value
121 | deleted = await cache.delete("test_key")
122 | assert deleted == 1
123 | assert not file_path.exists()
124 |
125 | # Confirm it's gone
126 | value = await cache.get("test_key")
127 | assert value is None
128 | finally:
129 | await cache.close()
130 | # Restore original path
131 |                 settings.CACHE_FILE_PATH = original_path
132 |
--------------------------------------------------------------------------------
/tests/test_cache/test_decorators.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for caching functionality
3 | """
4 | import pytest
5 | import redis.asyncio as redis
6 | from unittest import mock
7 |
8 | from src.cache.decorators import cached
9 | from src.cache.redis import init_redis_pool
10 |
11 |
12 | @pytest.mark.asyncio
13 | async def test_cached_decorator():
14 | """
15 | Test the cached decorator functionality
16 | """
17 | # Mock Redis get and set methods
18 | mock_redis = mock.AsyncMock()
19 | mock_redis.get.return_value = None
20 | mock_redis.set.return_value = True
21 |
22 | # Create a function with the cached decorator
23 | call_count = 0
24 |
25 | @cached(ttl=60, key_prefix="test")
26 | async def test_function(a: int, b: str) -> dict:
27 | nonlocal call_count
28 | call_count += 1
29 | return {"a": a, "b": b}
30 |
31 | # Call function first time (should miss cache)
32 | result1 = await test_function(1, "test", redis=mock_redis)
33 | assert result1 == {"a": 1, "b": "test"}
34 | assert call_count == 1
35 |
36 | # Redis get should have been called, but returned None
37 | mock_redis.get.assert_called_once()
38 |
39 | # Redis set should have been called once
40 | mock_redis.set.assert_called_once()
41 |
42 | # Reset mocks for next call
43 | mock_redis.reset_mock()
44 |
45 | # Set up mock to return a cached result
46 | mock_redis.get.return_value = '{"a": 1, "b": "test"}'
47 |
48 | # Call function second time with same args (should hit cache)
49 | result2 = await test_function(1, "test", redis=mock_redis)
50 | assert result2 == {"a": 1, "b": "test"}
51 |
52 | # Function should not have been called again
53 | assert call_count == 1
54 |
55 | # Redis get should have been called
56 | mock_redis.get.assert_called_once()
57 |
58 | # Redis set should not have been called
59 | mock_redis.set.assert_not_called()
60 |
61 | # Reset mocks for next call
62 | mock_redis.reset_mock()
63 |
64 | # Call function with different args (should miss cache)
65 | mock_redis.get.return_value = None
66 | result3 = await test_function(2, "test", redis=mock_redis)
67 | assert result3 == {"a": 2, "b": "test"}
68 |
69 | # Function should have been called again
70 | assert call_count == 2
71 |
72 | # Redis get and set should have been called
73 | mock_redis.get.assert_called_once()
74 | mock_redis.set.assert_called_once()
75 |
76 |
77 | @pytest.mark.asyncio
78 | async def test_cache_key_builder():
79 | """
80 | Test custom key builder for cached decorator
81 | """
82 | # Mock Redis
83 | mock_redis = mock.AsyncMock()
84 | mock_redis.get.return_value = None
85 | mock_redis.set.return_value = True
86 |
87 | # Custom key builder function
88 | def custom_key_builder(*args, **kwargs) -> str:
89 | user_id = kwargs.get("user_id", "unknown")
90 | action = kwargs.get("action", "default")
91 | return f"custom:{user_id}:{action}"
92 |
93 | # Create a function with the cached decorator and custom key builder
94 | call_count = 0
95 |
96 | @cached(ttl=60, key_builder=custom_key_builder)
97 | async def test_function(user_id: str, action: str) -> dict:
98 | nonlocal call_count
99 | call_count += 1
100 | return {"user_id": user_id, "action": action}
101 |
102 | # Call function
103 | result = await test_function(user_id="123", action="test", redis=mock_redis)
104 | assert result == {"user_id": "123", "action": "test"}
105 | assert call_count == 1
106 |
107 | # The key should have been built using the custom key builder
108 | mock_redis.get.assert_called_once()
109 | args, kwargs = mock_redis.get.call_args
110 | assert args[0] == "custom:123:test"
111 |
--------------------------------------------------------------------------------
/tests/test_services/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Services test package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/tests/test_services/test_item_service.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for Item service functions
3 | """
4 | import uuid
5 | from unittest import mock
6 |
7 | import pytest
8 | from sqlalchemy.ext.asyncio import AsyncSession
9 |
10 | from src.core.exceptions import ResourceNotFoundError
11 | from src.db.models.item import Item
12 | from src.schemas.item import ItemCreate, ItemUpdate
13 | from src.services.item_service import ItemService
14 |
15 |
16 | @pytest.mark.asyncio
17 | async def test_create_item_service(test_db: AsyncSession) -> None:
18 | """
19 | Test creating an item via the service layer
20 | """
21 | # Create item data
22 | item_data = ItemCreate(
23 | name="Service Test Item",
24 | description="Item created in service test",
25 | is_active=True
26 | )
27 |
28 | # Call service method
29 | item = await ItemService.create_item(test_db, item_data)
30 |
31 | # Verify item
32 | assert item.name == item_data.name
33 | assert item.description == item_data.description
34 | assert item.is_active == item_data.is_active
35 | assert item.id is not None
36 |
37 |
38 | @pytest.mark.asyncio
39 | async def test_get_item_service(test_db: AsyncSession) -> None:
40 | """
41 | Test getting an item via the service layer
42 | """
43 | # Create test item
44 | item = Item(name="Get Service Item", description="Test get service", is_active=True)
45 | test_db.add(item)
46 | await test_db.commit()
47 | await test_db.refresh(item)
48 |
49 | # Get the item
50 | retrieved_item = await ItemService.get_item(test_db, item.id)
51 |
52 | # Verify
53 | assert retrieved_item.id == item.id
54 | assert retrieved_item.name == item.name
55 |
56 |
57 | @pytest.mark.asyncio
58 | async def test_get_nonexistent_item_service(test_db: AsyncSession) -> None:
59 | """
60 | Test getting a non-existent item raises the correct exception
61 | """
62 | # Generate random UUID
63 | random_id = uuid.uuid4()
64 |
65 | # Attempt to get non-existent item
66 | with pytest.raises(ResourceNotFoundError):
67 | await ItemService.get_item(test_db, random_id)
68 |
69 |
70 | @pytest.mark.asyncio
71 | async def test_update_item_service(test_db: AsyncSession) -> None:
72 | """
73 | Test updating an item via the service layer
74 | """
75 | # Create test item
76 | item = Item(name="Update Service Item", description="Original description", is_active=True)
77 | test_db.add(item)
78 | await test_db.commit()
79 | await test_db.refresh(item)
80 |
81 | # Update data
82 | update_data = ItemUpdate(
83 | name="Updated Service Item",
84 | description="Updated service description",
85 | is_active=True
86 | )
87 |
88 | # Update item
89 | updated_item = await ItemService.update_item(test_db, item.id, update_data)
90 |
91 | # Verify
92 | assert updated_item.name == update_data.name
93 | assert updated_item.description == update_data.description
94 | assert updated_item.id == item.id
95 |
96 |
97 | @pytest.mark.asyncio
98 | async def test_delete_item_service(test_db: AsyncSession) -> None:
99 | """
100 | Test deleting an item via the service layer
101 | """
102 | # Create test item
103 | item = Item(name="Delete Service Item", description="To be deleted", is_active=True)
104 | test_db.add(item)
105 | await test_db.commit()
106 | await test_db.refresh(item)
107 |
108 | # Delete item
109 | await ItemService.delete_item(test_db, item.id)
110 |
111 | # Verify deletion
112 | with pytest.raises(ResourceNotFoundError):
113 | await ItemService.get_item(test_db, item.id)
114 |
115 |
116 | @pytest.mark.asyncio
117 | async def test_search_items_service(test_db: AsyncSession) -> None:
118 | """
119 | Test searching items via the service layer
120 | """
121 | # Create items with searchable content
122 | items = [
123 | Item(name="Search Service Widget", description="A searchable service item", is_active=True),
124 | Item(name="Another Item", description="Has search service term", is_active=True),
125 | Item(name="Unrelated Item", description="Should not match", is_active=True),
126 | ]
127 |
128 | for item in items:
129 | test_db.add(item)
130 |
131 | await test_db.commit()
132 |
133 | # Search for items
134 | search_results = await ItemService.search_items(test_db, "search service")
135 |
136 | # Verify
137 | assert len(search_results) == 2
138 | names = [item.name for item in search_results]
139 | assert "Search Service Widget" in names
140 | assert "Another Item" in names
141 | assert "Unrelated Item" not in names
142 |
--------------------------------------------------------------------------------
/tests/test_tasks/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Tasks test package initialization
3 | """
4 |
--------------------------------------------------------------------------------
/tests/test_tasks/test_jobs.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for background tasks
3 | """
4 | import pytest
5 | from unittest.mock import patch
6 |
7 | from src.tasks.jobs import process_item
8 |
9 |
10 | @patch("src.tasks.jobs.logger")
11 | def test_process_item(mock_logger):
12 | """
13 | Test the process_item background task
14 | """
15 | # Execute task function directly (not as a Dramatiq actor)
16 | item_id = "test-item-123"
17 | options = {"priority": "high"}
18 |
19 | result = process_item(item_id, options)
20 |
21 | # Verify result
22 | assert result["item_id"] == item_id
23 | assert result["processed"] == True
24 | assert "timestamp" in result
25 | assert result["options_used"] == options
26 |
27 | # Verify logging
28 | mock_logger.info.assert_any_call(f"Processing item {item_id}")
29 | mock_logger.info.assert_any_call(f"Finished processing item {item_id}")
30 |
31 |
32 | @patch("src.tasks.jobs.time.sleep")
33 | @patch("src.tasks.jobs.logger")
34 | def test_process_item_timing(mock_logger, mock_sleep):
35 | """
36 | Test that the process_item task uses sleep as expected
37 | """
38 | # Mock sleep to avoid waiting
39 | mock_sleep.return_value = None
40 |
41 | # Execute task
42 | process_item("test-id")
43 |
44 | # Verify sleep was called
45 | mock_sleep.assert_called_once_with(2)
46 |
47 |
48 | @pytest.mark.parametrize("item_id,options", [
49 | ("item-1", {"flag": True}),
50 | ("item-2", None),
51 | ("item-3", {"priority": "low", "retry": False}),
52 | ])
53 | def test_process_item_parameters(item_id, options):
54 | """
55 | Test process_item with different parameters
56 | """
57 | result = process_item(item_id, options)
58 |
59 | assert result["item_id"] == item_id
60 | assert result["processed"] == True
61 |
62 | if options:
63 | assert result["options_used"] == options
64 | else:
65 | assert "options_used" not in result
66 |
--------------------------------------------------------------------------------