├── app ├── __init__.py ├── api │ ├── __init__.py │ ├── v1 │ │ ├── __init__.py │ │ ├── endpoints │ │ │ ├── __init__.py │ │ │ └── incr │ │ │ │ ├── __init__.py │ │ │ │ ├── cpu_bound.py │ │ │ │ └── io_bound.py │ │ └── router.py │ ├── schemas.py │ ├── deps.py │ └── router.py ├── common │ ├── __init__.py │ ├── enums.py │ ├── logger.py │ └── metrics.py ├── core │ ├── __init__.py │ └── accessor.py ├── redis │ ├── __init__.py │ └── accessor.py ├── worker │ ├── __init__.py │ ├── background │ │ ├── __init__.py │ │ └── accessor.py │ ├── arq │ │ ├── __init__.py │ │ ├── tasks.py │ │ └── accessor.py │ ├── saq │ │ ├── __init__.py │ │ ├── tasks.py │ │ └── accessor.py │ ├── faststream │ │ ├── __init__.py │ │ ├── tasks.py │ │ ├── accessor.py │ │ └── app.py │ ├── async_celery │ │ ├── __init__.py │ │ ├── tasks.py │ │ ├── accessor.py │ │ └── app.py │ ├── base.py │ └── accessor.py ├── main.py ├── config.py └── store.py ├── tests ├── __init__.py ├── app │ ├── __init__.py │ └── incr │ │ ├── __init__.py │ │ ├── test_cpu_bound.py │ │ └── test_io_bound.py └── conftest.py ├── locust ├── master.conf ├── background.py └── benchmark.py ├── Makefile ├── .dockerignore ├── example.env ├── worker_arq.py ├── worker_saq.py ├── .prometheus └── prometheus.yml ├── Dockerfile ├── pyproject.toml ├── README.md ├── docker-compose.yml └── .gitignore /app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/api/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/api/v1/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/common/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/redis/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/worker/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/app/incr/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/api/v1/endpoints/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/worker/background/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /app/api/v1/endpoints/incr/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/worker/arq/__init__.py: -------------------------------------------------------------------------------- 1 | # register tasks 2 | from . import tasks 3 | -------------------------------------------------------------------------------- /app/worker/saq/__init__.py: -------------------------------------------------------------------------------- 1 | # register tasks 2 | from . import tasks 3 | -------------------------------------------------------------------------------- /app/worker/faststream/__init__.py: -------------------------------------------------------------------------------- 1 | # register tasks 2 | from . import tasks 3 | -------------------------------------------------------------------------------- /app/worker/async_celery/__init__.py: -------------------------------------------------------------------------------- 1 | # register tasks 2 | from . import tasks 3 | -------------------------------------------------------------------------------- /locust/master.conf: -------------------------------------------------------------------------------- 1 | headless = true 2 | expect-workers = 10 3 | host = http://localhost:8000 -------------------------------------------------------------------------------- /app/api/schemas.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class IncrInputSchema(BaseModel): 5 | value: int = 1 6 | -------------------------------------------------------------------------------- /app/api/deps.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated 2 | 3 | from fastapi import Depends 4 | 5 | from app.store import get_store, Store 6 | 7 | StoreDep = Annotated[Store, Depends(get_store)] 8 | -------------------------------------------------------------------------------- /app/api/router.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from app.api.v1.router import router as router_v1 4 | 5 | router = APIRouter(prefix="/api") 6 | router.include_router(router_v1, prefix="/v1") 7 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | lint: 2 | python -m black . 3 | python -m ruff . 
4 | 5 | run: 6 | uvicorn app.main:app --reload 7 | 8 | benchmark: 9 | env endpoint=io/simple locust --config locust/master.conf -f locust/benchmark.py -i 1000 10 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git* 2 | .editorconfig 3 | .idea 4 | .vscode 5 | .dockerignore 6 | docker-compose.yml 7 | etc 8 | local 9 | build 10 | env 11 | .env 12 | local-docker 13 | Makefile 14 | .pytest_cache 15 | .mypy_cache 16 | .ruff_cache 17 | -------------------------------------------------------------------------------- /app/common/enums.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class TaskWorkerEnum(str, Enum): 5 | background = "background" 6 | arq = "arq" 7 | saq = "saq" 8 | async_celery = "async-celery" 9 | faststream = "faststream" 10 | -------------------------------------------------------------------------------- /app/api/v1/router.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from app.api.v1.endpoints.incr import cpu_bound, io_bound 4 | 5 | router = APIRouter() 6 | router.include_router(io_bound.router, prefix="/incr/io") 7 | router.include_router(cpu_bound.router, prefix="/incr/cpu") 8 | -------------------------------------------------------------------------------- /example.env: -------------------------------------------------------------------------------- 1 | REDIS_DSN=redis://redis:6379/0 2 | FASTAPI_WORKERS=3 3 | 4 | ARQ_EXPORTER_PORT=8001 5 | ARQ_CONCURRENCY=3 6 | 7 | SAQ_EXPORTER_PORT=8002 8 | SAQ_DASHBOARD_PORT=8082 9 | SAQ_CONCURRENCY=3 10 | 11 | FASTSTREAM_CONCURRENCY=1 12 | 13 | ASYNC_CELERY_CONCURRENCY=3 -------------------------------------------------------------------------------- /app/common/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | logging.basicConfig( 4 | level=logging.INFO, 5 | format="%(asctime)s | %(levelname)s | %(filename)s:%(lineno)d | " 6 | "%(name)s: %(message)s", 7 | ) 8 | 9 | 10 | def get_logger(name: str) -> logging.Logger: 11 | return logging.getLogger(name) 12 | -------------------------------------------------------------------------------- /app/main.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | from prometheus_fastapi_instrumentator import Instrumentator 3 | 4 | from app.api.router import router 5 | from app.store import lifespan 6 | 7 | app = FastAPI( 8 | title="FastAPI async tasks", 9 | description="", 10 | lifespan=lifespan, 11 | ) 12 | app.include_router(router) 13 | 14 | Instrumentator().instrument(app).expose(app) 15 | -------------------------------------------------------------------------------- /worker_arq.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from prometheus_async.aio.web import start_http_server_in_thread 4 | 5 | from app.config import settings 6 | from app.store import Store, store_lifespan 7 | 8 | 9 | async def main() -> None: 10 | start_http_server_in_thread(port=settings.ARQ_EXPORTER_PORT) 11 | 12 | store: Store 13 | async with store_lifespan() as store: 14 | await store.worker.arq.run() 15 | 16 | 17 | if __name__ == "__main__": 18 | asyncio.run(main()) 19 | 
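20 | # Entrypoint note: docker-compose.yml runs this module with "python -m worker_arq"; 21 | # the Prometheus exporter listens on ARQ_EXPORTER_PORT (8001 by default).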
-------------------------------------------------------------------------------- /worker_saq.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from prometheus_async.aio.web import start_http_server_in_thread 4 | 5 | from app.config import settings 6 | from app.store import Store, store_lifespan 7 | 8 | 9 | async def main() -> None: 10 | start_http_server_in_thread(port=settings.SAQ_EXPORTER_PORT) 11 | 12 | store: Store 13 | async with store_lifespan() as store: 14 | await store.worker.saq.run(web=True, port=settings.SAQ_DASHBOARD_PORT) 15 | 16 | 17 | if __name__ == "__main__": 18 | asyncio.run(main()) 19 | -------------------------------------------------------------------------------- /.prometheus/prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 5s 3 | 4 | scrape_configs: 5 | - job_name: "fastapi" 6 | static_configs: 7 | - targets: ["fastapi:8000"] 8 | 9 | - job_name: "arq" 10 | static_configs: 11 | - targets: ["arq:8001"] 12 | 13 | - job_name: "saq" 14 | static_configs: 15 | - targets: ["saq:8002"] 16 | 17 | - job_name: "faststream" 18 | static_configs: 19 | - targets: ["faststream:8003", "faststream:8004", "faststream:8005"] 20 | 21 | - job_name: "async-celery" 22 | static_configs: 23 | - targets: ["async-celery-flower:5555"] -------------------------------------------------------------------------------- /app/config.py: -------------------------------------------------------------------------------- 1 | from pydantic import RedisDsn 2 | from pydantic_settings import BaseSettings, SettingsConfigDict 3 | 4 | 5 | class Settings(BaseSettings): 6 | model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8") 7 | 8 | REDIS_DSN: RedisDsn = "redis://" 9 | FASTAPI_WORKERS: int = 3 10 | 11 | ARQ_EXPORTER_PORT: int = 8001 12 | ARQ_CONCURRENCY: int = 3 13 | 14 | SAQ_EXPORTER_PORT: int = 8002 15 | SAQ_DASHBOARD_PORT: int = 8082 16 | SAQ_CONCURRENCY: int = 3 17 | 18 | FASTSTREAM_EXPORTER_PORTS: list[int] = [8003, 8004, 8005] 19 | FASTSTREAM_CONCURRENCY: int = 1 20 | 21 | ASYNC_CELERY_CONCURRENCY: int = 3 22 | 23 | 24 | settings = Settings() 25 | -------------------------------------------------------------------------------- /locust/background.py: -------------------------------------------------------------------------------- 1 | import os 2 | from random import shuffle 3 | 4 | from locust import between, HttpUser, task 5 | 6 | 7 | # locust --config locust/master.conf -f locust/background.py -i 100 -u 10 8 | class TaskUser(HttpUser): 9 | wait_time = between(0.001, 0.001) 10 | 11 | @task 12 | def execute(self): 13 | endpoint = os.environ.get("endpoint") 14 | endpoints = ( 15 | [endpoint] 16 | if endpoint 17 | else ["io/simple", "io/sync", "io/thread", "cpu/simple", "cpu/process"] 18 | ) 19 | shuffle(endpoints) 20 | 21 | for endpoint in endpoints: 22 | self.client.post(f"/api/v1/incr/{endpoint}/background", json={"value": 1}) 23 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-slim as base 2 | 3 | ENV PYTHONUNBUFFERED=1 \ 4 | PYTHONDONTWRITEBYTECODE=1 \ 5 | PIP_DEFAULT_TIMEOUT=100 \ 6 | PIP_DISABLE_PIP_VERSION_CHECK=1 \ 7 | PIP_NO_CACHE_DIR=1 \ 8 | POETRY_VERSION=1.7.1 \ 9 | POETRY_NO_INTERACTION=1 10 | 11 | FROM base as builder 12 | 13 | WORKDIR /app 14 | 15 | RUN pip install 
"poetry==$POETRY_VERSION" 16 | 17 | COPY pyproject.toml poetry.lock ./ 18 | RUN python -m venv /venv 19 | 20 | RUN poetry export --without=dev -f requirements.txt | /venv/bin/pip install -r /dev/stdin 21 | 22 | FROM base as final 23 | 24 | ENV PATH=/venv/bin:$PATH 25 | ENV PYTHONPATH=/app:/venv:$PYTHONPATH 26 | 27 | COPY --from=builder /venv /venv 28 | 29 | WORKDIR /app 30 | 31 | EXPOSE 8000 32 | CMD uvicorn app.main:app --host=0.0.0.0 --port=8000 --reload -------------------------------------------------------------------------------- /app/common/metrics.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Callable 2 | from functools import wraps 3 | from typing import Any 4 | 5 | from prometheus_async.aio import count_exceptions, time 6 | from prometheus_client import Counter, Histogram 7 | 8 | EXECUTION_TIME = Histogram( 9 | "task_execution_seconds", 10 | "Task execution time", 11 | labelnames=["task_name"], 12 | ) 13 | TASKS_FAILED = Counter( 14 | "tasks_failed", 15 | "Tasks failed", 16 | labelnames=["task_name"], 17 | ) 18 | 19 | 20 | def export_task_metrics(func: Callable) -> Callable: 21 | @count_exceptions(TASKS_FAILED.labels(task_name=func.__name__)) 22 | @time(EXECUTION_TIME.labels(task_name=func.__name__)) 23 | @wraps(func) 24 | def wrapper(*args: Any, **kwargs: Any) -> Any: 25 | return func(*args, **kwargs) 26 | 27 | return wrapper 28 | -------------------------------------------------------------------------------- /locust/benchmark.py: -------------------------------------------------------------------------------- 1 | import os 2 | from random import shuffle 3 | 4 | from locust import between, HttpUser, task 5 | 6 | 7 | # locust --config locust/master.conf -f locust/benchmark.py -i 100 -u 10 8 | class TaskUser(HttpUser): 9 | wait_time = between(0.001, 0.001) 10 | 11 | @task 12 | def execute(self): 13 | endpoint = os.environ.get("endpoint") 14 | endpoints = ( 15 | [endpoint] 16 | if endpoint 17 | else ["io/simple", "io/sync", "io/thread", "cpu/simple", "cpu/process"] 18 | ) 19 | 20 | worker = os.environ.get("worker") 21 | workers = [worker] if worker else ["arq", "saq", "faststream", "async-celery"] 22 | 23 | params = [(endpoint, worker) for endpoint in endpoints for worker in workers] 24 | shuffle(params) 25 | 26 | for endpoint, worker in params: 27 | self.client.post(f"/api/v1/incr/{endpoint}/{worker}", json={"value": 1}) 28 | -------------------------------------------------------------------------------- /app/worker/background/accessor.py: -------------------------------------------------------------------------------- 1 | from app.common.enums import TaskWorkerEnum 2 | from app.worker.base import AbstractTaskAccessor 3 | 4 | 5 | class BackgroundTaskAccessor(AbstractTaskAccessor): 6 | KEY = TaskWorkerEnum.background.value 7 | 8 | async def incr_io_bound(self, value: int = 1) -> None: 9 | await self.store.core.incr_io_bound(self.KEY, value) 10 | 11 | async def sync_incr_io_bound(self, value: int = 1) -> None: 12 | await self.store.core.sync_incr_io_bound(self.KEY, value) 13 | 14 | async def incr_io_bound_in_thread_pool(self, value: int = 1) -> None: 15 | await self.store.core.incr_io_bound_in_thread_pool(self.KEY, value) 16 | 17 | async def incr_cpu_bound(self, value: int = 1) -> None: 18 | await self.store.core.incr_cpu_bound(self.KEY, value) 19 | 20 | async def incr_cpu_bound_in_process_pool(self, value: int = 1) -> None: 21 | await self.store.core.incr_cpu_bound_in_process_pool(self.KEY, value) 22 | 
-------------------------------------------------------------------------------- /app/worker/arq/tasks.py: -------------------------------------------------------------------------------- 1 | from app.store import Store 2 | 3 | 4 | async def incr_io_bound(ctx: dict, key: str, value: int = 1) -> int: 5 | store: Store = ctx["store"] 6 | return await store.core.incr_io_bound(key, value) 7 | 8 | 9 | async def sync_incr_io_bound(ctx: dict, key: str, value: int = 1) -> int: 10 | store: Store = ctx["store"] 11 | return await store.core.sync_incr_io_bound(key, value) 12 | 13 | 14 | async def incr_io_bound_in_thread_pool(ctx: dict, key: str, value: int = 1) -> int: 15 | store: Store = ctx["store"] 16 | return await store.core.incr_io_bound_in_thread_pool(key, value) 17 | 18 | 19 | async def incr_cpu_bound(ctx: dict, key: str, value: int = 1) -> int: 20 | store: Store = ctx["store"] 21 | return await store.core.incr_cpu_bound(key, value) 22 | 23 | 24 | async def incr_cpu_bound_in_process_pool(ctx: dict, key: str, value: int = 1) -> int: 25 | store: Store = ctx["store"] 26 | return await store.core.incr_cpu_bound_in_process_pool(key, value) 27 | -------------------------------------------------------------------------------- /app/worker/saq/tasks.py: -------------------------------------------------------------------------------- 1 | from app.store import Store 2 | 3 | 4 | async def incr_io_bound(ctx: dict, key: str, value: int = 1) -> int: 5 | store: Store = ctx["store"] 6 | return await store.core.incr_io_bound(key, value) 7 | 8 | 9 | async def sync_incr_io_bound(ctx: dict, key: str, value: int = 1) -> int: 10 | store: Store = ctx["store"] 11 | return await store.core.sync_incr_io_bound(key, value) 12 | 13 | 14 | async def incr_io_bound_in_thread_pool(ctx: dict, key: str, value: int = 1) -> int: 15 | store: Store = ctx["store"] 16 | return await store.core.incr_io_bound_in_thread_pool(key, value) 17 | 18 | 19 | async def incr_cpu_bound(ctx: dict, key: str, value: int = 1) -> int: 20 | store: Store = ctx["store"] 21 | return await store.core.incr_cpu_bound(key, value) 22 | 23 | 24 | async def incr_cpu_bound_in_process_pool(ctx: dict, key: str, value: int = 1) -> int: 25 | store: Store = ctx["store"] 26 | return await store.core.incr_cpu_bound_in_process_pool(key, value) 27 | -------------------------------------------------------------------------------- /app/api/v1/endpoints/incr/cpu_bound.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated 2 | 3 | from fastapi import APIRouter, Body 4 | from starlette.responses import JSONResponse 5 | 6 | from app.api.deps import StoreDep 7 | from app.api.schemas import IncrInputSchema 8 | from app.common.enums import TaskWorkerEnum 9 | 10 | router = APIRouter(tags=["cpu"]) 11 | 12 | 13 | @router.post("/simple/{worker}") 14 | async def simple( 15 | worker: TaskWorkerEnum, body: Annotated[IncrInputSchema, Body()], store: StoreDep 16 | ) -> JSONResponse: 17 | task = await store.worker.incr_cpu_bound(worker, body.value) 18 | return JSONResponse({"status": "ok", "message": "cpu-simple"}, background=task) 19 | 20 | 21 | @router.post("/process/{worker}") 22 | async def in_process_pool( 23 | worker: TaskWorkerEnum, body: Annotated[IncrInputSchema, Body()], store: StoreDep 24 | ) -> JSONResponse: 25 | task = await store.worker.incr_cpu_bound_in_process_pool(worker, body.value) 26 | return JSONResponse({"status": "ok", "message": "cpu-process"}, background=task) 27 | 
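28 | # Example request (assuming the API is served locally on port 8000): 29 | #   curl -X POST http://localhost:8000/api/v1/incr/cpu/simple/arq \ 30 | #        -H "Content-Type: application/json" -d '{"value": 1}'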
-------------------------------------------------------------------------------- /app/worker/faststream/tasks.py: -------------------------------------------------------------------------------- 1 | from app.store import get_store 2 | from app.worker.faststream.app import faststream_app 3 | 4 | 5 | @faststream_app.task 6 | async def incr_io_bound(key: str, value: int = 1) -> int: 7 | store = get_store() 8 | return await store.core.incr_io_bound(key, value) 9 | 10 | 11 | @faststream_app.task 12 | async def sync_incr_io_bound(key: str, value: int = 1) -> int: 13 | store = get_store() 14 | return await store.core.sync_incr_io_bound(key, value) 15 | 16 | 17 | @faststream_app.task 18 | async def incr_io_bound_in_thread_pool(key: str, value: int = 1) -> int: 19 | store = get_store() 20 | return await store.core.incr_io_bound_in_thread_pool(key, value) 21 | 22 | 23 | @faststream_app.task 24 | async def incr_cpu_bound(key: str, value: int = 1) -> int: 25 | store = get_store() 26 | return await store.core.incr_cpu_bound(key, value) 27 | 28 | 29 | @faststream_app.task 30 | async def incr_cpu_bound_in_process_pool(key: str, value: int = 1) -> int: 31 | store = get_store() 32 | return await store.core.incr_cpu_bound_in_process_pool(key, value) 33 | -------------------------------------------------------------------------------- /app/worker/async_celery/tasks.py: -------------------------------------------------------------------------------- 1 | from app.store import get_store 2 | from app.worker.async_celery.app import async_celery_app 3 | 4 | 5 | @async_celery_app.task 6 | async def incr_io_bound(key: str, value: int = 1) -> int: 7 | store = get_store() 8 | return await store.core.incr_io_bound(key, value) 9 | 10 | 11 | @async_celery_app.task 12 | async def sync_incr_io_bound(key: str, value: int = 1) -> int: 13 | store = get_store() 14 | return await store.core.sync_incr_io_bound(key, value) 15 | 16 | 17 | @async_celery_app.task 18 | async def incr_io_bound_in_thread_pool(key: str, value: int = 1) -> int: 19 | store = get_store() 20 | return await store.core.incr_io_bound_in_thread_pool(key, value) 21 | 22 | 23 | @async_celery_app.task 24 | async def incr_cpu_bound(key: str, value: int = 1) -> int: 25 | store = get_store() 26 | return await store.core.incr_cpu_bound(key, value) 27 | 28 | 29 | @async_celery_app.task 30 | async def incr_cpu_bound_in_process_pool(key: str, value: int = 1) -> int: 31 | store = get_store() 32 | return await store.core.incr_cpu_bound_in_process_pool(key, value) 33 | -------------------------------------------------------------------------------- /app/worker/base.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | from typing import Any 3 | 4 | from starlette.background import BackgroundTask 5 | 6 | from app.store import BaseAccessor 7 | 8 | 9 | class AbstractTaskAccessor(BaseAccessor): 10 | @abstractmethod 11 | async def incr_io_bound(self, *args: Any, **kwargs: Any) -> BackgroundTask | None: 12 | raise NotImplementedError 13 | 14 | @abstractmethod 15 | async def sync_incr_io_bound( 16 | self, *args: Any, **kwargs: Any 17 | ) -> BackgroundTask | None: 18 | raise NotImplementedError 19 | 20 | @abstractmethod 21 | async def incr_io_bound_in_thread_pool( 22 | self, *args: Any, **kwargs: Any 23 | ) -> BackgroundTask | None: 24 | raise NotImplementedError 25 | 26 | @abstractmethod 27 | async def incr_cpu_bound(self, *args: Any, **kwargs: Any) -> BackgroundTask | None: 28 | raise NotImplementedError 29 | 
30 | @abstractmethod 31 | async def incr_cpu_bound_in_process_pool( 32 | self, *args: Any, **kwargs: Any 33 | ) -> BackgroundTask | None: 34 | raise NotImplementedError 35 | -------------------------------------------------------------------------------- /tests/app/incr/test_cpu_bound.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from httpx import AsyncClient 3 | 4 | from app.common.enums import TaskWorkerEnum 5 | from app.store import Store 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "worker, path", 10 | [ 11 | (TaskWorkerEnum.background, "simple"), 12 | (TaskWorkerEnum.background, "process"), 13 | (TaskWorkerEnum.arq, "simple"), 14 | (TaskWorkerEnum.arq, "process"), 15 | (TaskWorkerEnum.saq, "simple"), 16 | (TaskWorkerEnum.saq, "process"), 17 | (TaskWorkerEnum.async_celery, "simple"), 18 | (TaskWorkerEnum.async_celery, "process"), 19 | (TaskWorkerEnum.faststream, "simple"), 20 | (TaskWorkerEnum.faststream, "process"), 21 | ], 22 | ) 23 | async def test_cpu_bound( 24 | store: Store, 25 | cli: AsyncClient, 26 | worker: TaskWorkerEnum, 27 | path: str, 28 | ) -> None: 29 | assert await store.redis.client.get(worker.value) is None 30 | 31 | resp = await cli.post( 32 | f"/api/v1/incr/cpu/{path}/{worker.value}", 33 | json={"value": 5}, 34 | ) 35 | assert resp.status_code == 200 36 | 37 | assert await store.redis.client.get(worker.value) == b"5" 38 | -------------------------------------------------------------------------------- /app/api/v1/endpoints/incr/io_bound.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | from starlette.responses import JSONResponse 3 | 4 | from app.api.deps import StoreDep 5 | from app.api.schemas import IncrInputSchema 6 | from app.common.enums import TaskWorkerEnum 7 | 8 | router = APIRouter(tags=["io"]) 9 | 10 | 11 | @router.post("/simple/{worker}") 12 | async def simple( 13 | worker: TaskWorkerEnum, body: IncrInputSchema, store: StoreDep 14 | ) -> JSONResponse: 15 | task = await store.worker.incr_io_bound(worker, body.value) 16 | return JSONResponse({"status": "ok", "message": "io-simple"}, background=task) 17 | 18 | 19 | @router.post("/sync/{worker}") 20 | async def sync( 21 | worker: TaskWorkerEnum, body: IncrInputSchema, store: StoreDep 22 | ) -> JSONResponse: 23 | task = await store.worker.sync_incr_io_bound(worker, body.value) 24 | return JSONResponse({"status": "ok", "message": "io-sync"}, background=task) 25 | 26 | 27 | @router.post("/thread/{worker}") 28 | async def in_thread_pool( 29 | worker: TaskWorkerEnum, body: IncrInputSchema, store: StoreDep 30 | ) -> JSONResponse: 31 | task = await store.worker.incr_io_bound_in_thread_pool(worker, body.value) 32 | return JSONResponse({"status": "ok", "message": "io-thread"}, background=task) 33 | -------------------------------------------------------------------------------- /app/worker/async_celery/accessor.py: -------------------------------------------------------------------------------- 1 | from app.common.enums import TaskWorkerEnum 2 | from app.worker.async_celery.tasks import ( 3 | incr_cpu_bound, 4 | incr_cpu_bound_in_process_pool, 5 | incr_io_bound, 6 | incr_io_bound_in_thread_pool, 7 | sync_incr_io_bound, 8 | ) 9 | from app.worker.base import AbstractTaskAccessor 10 | 11 | 12 | class AsyncCeleryTaskAccessor(AbstractTaskAccessor): 13 | KEY = TaskWorkerEnum.async_celery.value 14 | 15 | async def incr_io_bound(self, value: int = 1) -> None: 16 | incr_io_bound.delay(self.KEY, 
value=value) 17 | 18 | async def sync_incr_io_bound(self, value: int = 1) -> None: 19 | sync_incr_io_bound.delay(self.KEY, value=value) 20 | 21 | async def incr_io_bound_in_thread_pool(self, value: int = 1) -> None: 22 | incr_io_bound_in_thread_pool.delay(self.KEY, value=value) 23 | 24 | async def incr_cpu_bound(self, value: int = 1) -> None: 25 | incr_cpu_bound.delay(self.KEY, value=value) 26 | 27 | async def incr_cpu_bound_in_process_pool(self, value: int = 1) -> None: 28 | """ 29 | Worker raises 30 | AssertionError: daemonic processes are not allowed to have children 31 | """ 32 | incr_cpu_bound_in_process_pool.delay(self.KEY, value=value) 33 | -------------------------------------------------------------------------------- /tests/app/incr/test_io_bound.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from httpx import AsyncClient 3 | 4 | from app.common.enums import TaskWorkerEnum 5 | from app.store import Store 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "worker, path", 10 | [ 11 | (TaskWorkerEnum.background, "simple"), 12 | (TaskWorkerEnum.background, "sync"), 13 | (TaskWorkerEnum.background, "thread"), 14 | (TaskWorkerEnum.arq, "simple"), 15 | (TaskWorkerEnum.arq, "sync"), 16 | (TaskWorkerEnum.arq, "thread"), 17 | (TaskWorkerEnum.saq, "simple"), 18 | (TaskWorkerEnum.saq, "sync"), 19 | (TaskWorkerEnum.saq, "thread"), 20 | (TaskWorkerEnum.async_celery, "simple"), 21 | (TaskWorkerEnum.async_celery, "sync"), 22 | (TaskWorkerEnum.async_celery, "thread"), 23 | (TaskWorkerEnum.faststream, "simple"), 24 | (TaskWorkerEnum.faststream, "sync"), 25 | (TaskWorkerEnum.faststream, "thread"), 26 | ], 27 | ) 28 | async def test_io_bound( 29 | store: Store, 30 | cli: AsyncClient, 31 | worker: TaskWorkerEnum, 32 | path: str, 33 | ) -> None: 34 | assert await store.redis.client.get(worker.value) is None 35 | 36 | resp = await cli.post( 37 | f"/api/v1/incr/io/{path}/{worker.value}", 38 | json={"value": 5}, 39 | ) 40 | assert resp.status_code == 200 41 | 42 | assert await store.redis.client.get(worker.value) == b"5" 43 | -------------------------------------------------------------------------------- /app/worker/faststream/accessor.py: -------------------------------------------------------------------------------- 1 | from app.common.enums import TaskWorkerEnum 2 | from app.worker.base import AbstractTaskAccessor 3 | from app.worker.faststream.app import faststream_broker 4 | 5 | 6 | class FastStreamTaskAccessor(AbstractTaskAccessor): 7 | KEY = TaskWorkerEnum.faststream.value 8 | 9 | async def connect(self) -> None: 10 | await faststream_broker.connect(url=self.config.REDIS_DSN.__str__()) 11 | 12 | async def disconnect(self) -> None: 13 | await faststream_broker.close() 14 | 15 | async def incr_io_bound(self, value: int = 1) -> None: 16 | await faststream_broker.publish( 17 | message={"key": self.KEY, "value": value}, 18 | channel="incr_io_bound", 19 | ) 20 | 21 | async def sync_incr_io_bound(self, value: int = 1) -> None: 22 | await faststream_broker.publish( 23 | message={"key": self.KEY, "value": value}, 24 | channel="sync_incr_io_bound", 25 | ) 26 | 27 | async def incr_io_bound_in_thread_pool(self, value: int = 1) -> None: 28 | await faststream_broker.publish( 29 | message={"key": self.KEY, "value": value}, 30 | channel="incr_io_bound_in_thread_pool", 31 | ) 32 | 33 | async def incr_cpu_bound(self, value: int = 1) -> None: 34 | await faststream_broker.publish( 35 | message={"key": self.KEY, "value": value}, 36 | channel="incr_cpu_bound", 37 | ) 38 | 39 
| async def incr_cpu_bound_in_process_pool(self, value: int = 1) -> None: 40 | await faststream_broker.publish( 41 | message={"key": self.KEY, "value": value}, 42 | channel="incr_cpu_bound_in_process_pool", 43 | ) 44 | -------------------------------------------------------------------------------- /app/worker/async_celery/app.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from asyncio import AbstractEventLoop 3 | from collections.abc import Callable 4 | from functools import wraps 5 | from typing import Any, TypeVar 6 | 7 | from celery import Celery, signals 8 | 9 | from app.config import settings 10 | from app.store import connect_to_store, disconnect_from_store 11 | 12 | T = TypeVar("T") 13 | 14 | 15 | class AsyncCelery(Celery): 16 | def __init__(self, *args: Any, **kwargs: Any) -> None: 17 | super().__init__(*args, **kwargs) 18 | 19 | self.functions: dict[str, Callable[..., Any]] = {} 20 | self.loop = asyncio.get_event_loop() 21 | 22 | def connect(self, *_: Any, **__: Any) -> None: 23 | self.loop.run_until_complete(connect_to_store()) 24 | 25 | def disconnect(self, *_: Any, **__: Any) -> None: 26 | self.loop.run_until_complete(disconnect_from_store()) 27 | 28 | def task( 29 | self, 30 | task: Callable[..., T] | None = None, 31 | **opts: Any, 32 | ) -> Callable: 33 | create_task = super().task 34 | 35 | def decorator(func: Callable[..., T]) -> Callable[..., T]: 36 | @create_task(**opts) 37 | @wraps(func) 38 | def wrapper( 39 | *args: Any, loop: AbstractEventLoop | None = None, **kwargs: Any 40 | ) -> T: 41 | loop = loop or self.loop 42 | return loop.run_until_complete(func(*args, **kwargs)) 43 | 44 | self.functions[wrapper.name] = func 45 | 46 | return wrapper 47 | 48 | if task: 49 | return decorator(task) 50 | 51 | return decorator 52 | 53 | 54 | async_celery_app = AsyncCelery("async_celery", broker=settings.REDIS_DSN.__str__()) 55 | async_celery_app.autodiscover_tasks(packages=["app.worker.async_celery"])  # celery appends ".tasks" to each package 56 | async_celery_app.conf.timezone = "UTC" 57 | async_celery_app.conf.worker_proc_alive_timeout = 30 58 | 59 | signals.worker_process_init.connect(async_celery_app.connect) 60 | signals.worker_process_shutdown.connect(async_celery_app.disconnect) 61 | -------------------------------------------------------------------------------- /app/core/accessor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor 4 | 5 | from app.common.metrics import export_task_metrics 6 | from app.store import BaseAccessor, Store 7 | 8 | 9 | class CoreAccessor(BaseAccessor): 10 | THREAD_COUNT = 3 11 | PROCESS_COUNT = 3 12 | 13 | def __init__(self, store: Store) -> None: 14 | super().__init__(store) 15 | 16 | self._loop = asyncio.get_event_loop() 17 | self._thread_executor = ThreadPoolExecutor(max_workers=self.THREAD_COUNT) 18 | self._process_executor = ProcessPoolExecutor(max_workers=self.PROCESS_COUNT) 19 | 20 | async def disconnect(self) -> None: 21 | self._thread_executor.shutdown(wait=True) 22 | self._process_executor.shutdown(wait=True) 23 | 24 | @staticmethod 25 | def heavy_calculations(count: int = 1_000_000) -> None: 26 | for i in range(count): 27 | _ = i % random.randint(1, 100) 28 | 29 | @export_task_metrics 30 | async def incr_io_bound(self, key: str, value: int = 1) -> int: 31 | return await self.store.redis.client.incr(key, value) 32 | 33 | @export_task_metrics 34 | async def 
sync_incr_io_bound(self, key: str, value: int = 1) -> int: 35 | return self.store.redis.sync_client.incr(key, value) 36 | 37 | @export_task_metrics 38 | async def incr_io_bound_in_thread_pool(self, key: str, value: int = 1) -> int: 39 | return await self._loop.run_in_executor( 40 | self._thread_executor, 41 | self.store.redis.sync_client.incr, 42 | key, 43 | value, 44 | ) 45 | 46 | @export_task_metrics 47 | async def incr_cpu_bound(self, key: str, value: int = 1) -> int: 48 | self.heavy_calculations() 49 | return await self.store.redis.client.incr(key, value) 50 | 51 | @export_task_metrics 52 | async def incr_cpu_bound_in_process_pool(self, key: str, value: int = 1) -> int: 53 | await self._loop.run_in_executor( 54 | self._process_executor, self.heavy_calculations 55 | ) 56 | return await self.store.redis.client.incr(key, value) 57 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "fastapi-async-tasks" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["ipakeev "] 6 | readme = "README.md" 7 | packages = [{include = "app"}] 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.11" 11 | fastapi = {extras = ["all"], version = "^0.108.0"} 12 | arq = "^0.25.0" 13 | saq = {extras = ["hiredis", "web"], version = "^0.12.2"} 14 | uvicorn = "^0.25.0" 15 | pydantic-settings = "^2.1.0" 16 | redis = "^5.0.1" 17 | faststream = {extras = ["redis"], version = "^0.4.0"} 18 | prometheus-fastapi-instrumentator = "^6.1.0" 19 | locust = "^2.21.0" 20 | prometheus-async = {extras = ["aiohttp"], version = "^22.2.0"} 21 | locust-plugins = "^4.3.2" 22 | celery = "^5.3.6" 23 | flower = "^2.0.1" 24 | 25 | 26 | [tool.poetry.group.dev.dependencies] 27 | black = "^23.12.1" 28 | ruff = "^0.1.11" 29 | pytest = "^7.4.4" 30 | fakeredis = "^2.20.1" 31 | nest-asyncio = "^1.6.0" 32 | 33 | [build-system] 34 | requires = ["poetry-core"] 35 | build-backend = "poetry.core.masonry.api" 36 | 37 | [tool.black] 38 | line-length = 88 39 | exclude = ''' 40 | /( 41 | \.eggs 42 | | \.git 43 | | \.hg 44 | | \.mypy_cache 45 | | \.tox 46 | | \.venv 47 | | env 48 | | _build 49 | | buck-out 50 | | build 51 | | dist 52 | | migrations 53 | )/ 54 | ''' 55 | 56 | [tool.ruff] 57 | line-length = 88 58 | target-version = "py310" 59 | 60 | select = ["E", "F", "I001", "N", "W292"] 61 | ignore = ["E999", "E731"] 62 | 63 | fix = true 64 | unfixable = ["E731", "F401"] 65 | 66 | exclude = [ 67 | ".*", 68 | "_*", 69 | "buck-out", 70 | "build", 71 | "dist", 72 | "node_modules", 73 | "venv", 74 | "*/migrations/" 75 | ] 76 | per-file-ignores = {} 77 | 78 | # Allow unused variables when underscore-prefixed. 
79 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" 80 | 81 | [tool.ruff.format] 82 | quote-style = "double" 83 | indent-style = "space" 84 | skip-magic-trailing-comma = false 85 | line-ending = "auto" 86 | 87 | [tool.ruff.isort] 88 | combine-as-imports = true 89 | order-by-type = false 90 | 91 | [tool.pytest.ini_options] 92 | python_files = ['tests.py', 'test_*.py', '*_tests.py'] 93 | filterwarnings = [ 94 | 'ignore::DeprecationWarning', 95 | ] -------------------------------------------------------------------------------- /app/redis/accessor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import time 3 | from typing import Any 4 | 5 | from redis import Redis 6 | from redis.asyncio import Redis as AsyncRedis 7 | 8 | from app.store import BaseAccessor 9 | 10 | 11 | class RedisAccessor(BaseAccessor): 12 | CONNECT_MAX_TRIES = 5 13 | 14 | def __init__(self, *args: Any, **kwargs: Any) -> None: 15 | super().__init__(*args, **kwargs) 16 | 17 | self.sync_client: Redis | None = None 18 | self.client: AsyncRedis | None = None 19 | 20 | @property 21 | def client_class(self) -> type[AsyncRedis]: 22 | return AsyncRedis 23 | 24 | @property 25 | def sync_client_class(self) -> type[Redis]: 26 | return Redis 27 | 28 | async def connect(self) -> None: 29 | await self._connect_async_redis() 30 | self._connect_sync_redis() 31 | 32 | # !!! clear redis before stress testing 33 | await self.client.flushall() 34 | 35 | async def _connect_async_redis(self) -> None: 36 | self.client = self.client_class.from_url(self.config.REDIS_DSN.__str__()) 37 | 38 | tries_count = 0 39 | while True: 40 | tries_count += 1 41 | try: 42 | await self.client.ping() 43 | break 44 | except Exception as exc: 45 | self.logger.warning(f"Can't connect to AsyncRedis: {str(exc)}") 46 | 47 | if tries_count > self.CONNECT_MAX_TRIES: 48 | raise Exception("Can't connect to AsyncRedis") 49 | await asyncio.sleep(1) 50 | 51 | def _connect_sync_redis(self) -> None: 52 | self.sync_client = self.sync_client_class.from_url( 53 | self.config.REDIS_DSN.__str__() 54 | ) 55 | 56 | tries_count = 0 57 | while True: 58 | tries_count += 1 59 | try: 60 | self.sync_client.ping() 61 | break 62 | except Exception as exc: 63 | self.logger.warning(f"Can't connect to SyncRedis: {str(exc)}") 64 | 65 | if tries_count > self.CONNECT_MAX_TRIES: 66 | raise Exception("Can't connect to SyncRedis") 67 | time.sleep(1)  # this method is synchronous, so block with time.sleep 68 | 69 | async def disconnect(self) -> None: 70 | if self.client: 71 | await self.client.aclose() 72 | 73 | if self.sync_client: 74 | self.sync_client.close() 75 | -------------------------------------------------------------------------------- /app/store.py: -------------------------------------------------------------------------------- 1 | from collections.abc import AsyncGenerator 2 | from contextlib import asynccontextmanager 3 | 4 | from fastapi import FastAPI 5 | 6 | from app.common.logger import get_logger 7 | from app.config import settings 8 | 9 | 10 | class BaseAccessor: 11 | def __init__(self, store: "Store") -> None: 12 | self.store = store 13 | self.config = settings 14 | self.logger = get_logger(self.__class__.__name__) 15 | 16 | async def connect(self) -> None: 17 | pass 18 | 19 | async def disconnect(self) -> None: 20 | pass 21 | 22 | 23 | class Store: 24 | def __init__(self) -> None: 25 | from app.core.accessor import CoreAccessor 26 | from app.redis.accessor import RedisAccessor 27 | from app.worker.accessor import WorkerAccessor 28 | 29 | self.core = CoreAccessor(self) 30 | 
self.redis = RedisAccessor(self) 31 | self.worker = WorkerAccessor(self) 32 | 33 | self.logger = get_logger("store") 34 | 35 | async def connect(self) -> None: 36 | self.logger.info("Connecting to Store") 37 | await self.redis.connect() 38 | await self.worker.connect() 39 | await self.core.connect() 40 | self.logger.info("Connected to Store") 41 | 42 | async def disconnect(self) -> None: 43 | self.logger.info("Disconnecting from Store") 44 | await self.core.disconnect() 45 | await self.worker.disconnect() 46 | await self.redis.disconnect() 47 | self.logger.info("Disconnected from Store") 48 | 49 | 50 | _store: Store | None = None 51 | 52 | 53 | def get_store() -> Store: 54 | assert _store, "Store is not initialized" 55 | return _store 56 | 57 | 58 | async def connect_to_store() -> Store: 59 | global _store 60 | 61 | if not _store: 62 | _store = Store() 63 | await _store.connect() 64 | 65 | return _store 66 | 67 | 68 | async def disconnect_from_store() -> None: 69 | global _store 70 | 71 | if _store: 72 | await _store.disconnect() 73 | _store = None 74 | 75 | 76 | @asynccontextmanager 77 | async def store_lifespan() -> AsyncGenerator[Store, None]: 78 | await connect_to_store() 79 | try: 80 | yield get_store() 81 | finally: 82 | await disconnect_from_store() 83 | 84 | 85 | @asynccontextmanager 86 | async def lifespan(*_: FastAPI) -> AsyncGenerator[None, None]: 87 | async with store_lifespan(): 88 | yield 89 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FastAPI with async background tasks + Prometheus + Grafana 2 | 3 | #### This project provides a way to set up and run asynchronous (async/await) tasks using these libraries: 4 | - [Background](https://fastapi.tiangolo.com/tutorial/background-tasks/) 5 | - [ARQ](https://github.com/samuelcolvin/arq) 6 | - [SAQ](https://github.com/tobymao/saq) 7 | - [FastStream](https://github.com/airtai/faststream) 8 | - [Celery](https://github.com/celery/celery) 9 | 10 | #### This approach is also useful for testing your code with eager task execution; see *tests/conftest.py*. 11 | 12 | #### Finally, it shows how to monitor the FastAPI app and task execution with Prometheus + Grafana using these libraries: 13 | - [Prometheus-fastapi-instrumentator (web app)](https://github.com/trallnag/prometheus-fastapi-instrumentator) 14 | - [Prometheus-async (arq, saq, faststream)](https://github.com/hynek/prometheus-async) 15 | - [Flower (celery)](https://github.com/mher/flower) 16 | 17 | --- 18 | 19 | ## Installation 20 | 21 | Clone the repository: 22 | ``` bash 23 | git clone https://github.com/ipakeev/fastapi-async-tasks 24 | ``` 25 | 26 | --- 27 | 28 | ## Usage 29 | 30 | Create a .env file and adjust it: 31 | ``` bash 32 | cp example.env .env 33 | ``` 34 | 35 | 36 | Build the app image: 37 | ``` bash 38 | docker build -t fastapi-async-tasks . 
39 | ``` 40 | 41 | Run all containers: 42 | ``` bash 43 | docker-compose up 44 | ``` 45 | 46 | --- 47 | 48 | Now you have access to these services: 49 | - FastAPI: http://localhost:8000/docs 50 | - Prometheus: http://localhost:9090/ 51 | - Grafana: http://localhost:3000/ 52 | - Flower: http://localhost:5555/ 53 | - SAQ Dashboard: http://localhost:8082/ 54 | 55 | Raw metrics are available here: 56 | - FastAPI: http://localhost:8000/metrics 57 | - ARQ: http://localhost:8001/metrics 58 | - SAQ: http://localhost:8002/metrics 59 | - FastStream: http://localhost:8003/metrics 60 | - Celery: http://localhost:5555/metrics 61 | 62 | --- 63 | 64 | ## Benchmark 65 | 66 | You can test the performance of the application and run a benchmark using [locust](https://github.com/locustio/locust): 67 | ``` bash 68 | env endpoint={endpoint} locust --config locust/master.conf -f locust/benchmark.py -u 10 -i 1000 69 | ``` 70 | 71 | ``` bash 72 | env endpoint={endpoint} locust --config locust/master.conf -f locust/background.py -u 10 -i 1000 73 | ``` 74 | 75 | Available {endpoint}s: 76 | - io/simple 77 | - io/sync 78 | - io/thread 79 | - cpu/simple 80 | - cpu/process 81 | 82 | --- -------------------------------------------------------------------------------- /app/worker/faststream/app.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import queue 3 | import ssl 4 | import threading 5 | from collections.abc import Callable 6 | from typing import Any 7 | 8 | import faststream 9 | from faststream.redis import RedisBroker 10 | from prometheus_async.aio.web import start_http_server, ThreadedMetricsHTTPServer 11 | from prometheus_async.types import ServiceDiscovery 12 | 13 | from app.common.logger import get_logger 14 | from app.config import settings 15 | from app.store import connect_to_store, disconnect_from_store 16 | 17 | logger = get_logger(__name__) 18 | 19 | 20 | def start_http_server_in_thread( 21 | *, 22 | try_ports: list[int], 23 | addr: str = "", 24 | ssl_ctx: ssl.SSLContext | None = None, 25 | service_discovery: ServiceDiscovery | None = None, 26 | ) -> ThreadedMetricsHTTPServer: 27 | """ 28 | Start an asyncio HTTP(S) server in a new thread with an own event loop. 29 | 30 | Ideal to expose your metrics in non-asyncio Python 3 applications. 
31 | """ 32 | q: queue.Queue = queue.Queue() 33 | loop = asyncio.new_event_loop() 34 | 35 | def server() -> None: 36 | asyncio.set_event_loop(loop) 37 | for port in try_ports: 38 | try: 39 | http = loop.run_until_complete( 40 | start_http_server( 41 | port=port, 42 | addr=addr, 43 | ssl_ctx=ssl_ctx, 44 | service_discovery=service_discovery, 45 | ) 46 | ) 47 | break 48 | except OSError: 49 | pass 50 | 51 | logger.info(f"FastStream exporter started on port {port}") 52 | q.put(http) 53 | loop.run_forever() 54 | loop.run_until_complete(http.close()) 55 | 56 | t = threading.Thread(target=server, name="FastStreamExporter", daemon=True) 57 | t.start() 58 | 59 | return ThreadedMetricsHTTPServer(q.get(), t, loop) 60 | 61 | 62 | class FastStream(faststream.FastStream): 63 | def __init__(self, *args: Any, **kwargs: Any) -> None: 64 | super().__init__(*args, **kwargs) 65 | 66 | self.tasks: dict[str, Callable[..., Any]] = {} 67 | 68 | self.on_startup(self.connect) 69 | self.on_shutdown(self.disconnect) 70 | 71 | async def connect(self, *_: Any, **__: Any) -> None: 72 | await connect_to_store() 73 | 74 | await self.broker.connect(url=settings.REDIS_DSN.__str__()) 75 | 76 | async def disconnect(self, *_: Any, **__: Any) -> None: 77 | await disconnect_from_store() 78 | 79 | await self.broker.close() 80 | 81 | async def run(self, *args: Any, **kwargs: Any) -> None: 82 | # bind metrics exporters 83 | start_http_server_in_thread(try_ports=settings.FASTSTREAM_EXPORTER_PORTS) 84 | 85 | await super().run(*args, **kwargs) 86 | 87 | def task(self, func: Callable) -> Callable: 88 | task = self.broker.subscriber(func.__name__)(func) 89 | self.tasks[func.__name__] = task 90 | return task 91 | 92 | def get(self, channel: str) -> Callable[..., Any]: 93 | return self.tasks[channel] 94 | 95 | 96 | faststream_broker = RedisBroker() 97 | faststream_app = FastStream(faststream_broker) 98 | -------------------------------------------------------------------------------- /app/worker/arq/accessor.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Callable 2 | from typing import Any 3 | 4 | import arq 5 | import arq.connections 6 | 7 | from app.common.enums import TaskWorkerEnum 8 | from app.worker.arq.tasks import ( 9 | incr_cpu_bound, 10 | incr_cpu_bound_in_process_pool, 11 | incr_io_bound, 12 | incr_io_bound_in_thread_pool, 13 | sync_incr_io_bound, 14 | ) 15 | from app.worker.base import AbstractTaskAccessor 16 | 17 | 18 | class ArqTaskAccessor(AbstractTaskAccessor): 19 | KEY = TaskWorkerEnum.arq.value 20 | 21 | def __init__(self, *args: Any, **kwargs: Any) -> None: 22 | super().__init__(*args, **kwargs) 23 | 24 | self._job_pool: arq.ArqRedis | None = None 25 | 26 | async def connect(self) -> None: 27 | class JobPoolRedis(arq.ArqRedis, self.store.redis.client_class): 28 | pass 29 | 30 | arq.connections.ArqRedis = JobPoolRedis 31 | 32 | self._job_pool = await arq.create_pool( 33 | arq.connections.RedisSettings( 34 | host=self.config.REDIS_DSN.host, 35 | port=self.config.REDIS_DSN.port, 36 | username=self.config.REDIS_DSN.username, 37 | password=self.config.REDIS_DSN.password, 38 | database=self.config.REDIS_DSN.path.removeprefix("/"), 39 | ) 40 | ) 41 | 42 | async def disconnect(self) -> None: 43 | if self._job_pool: 44 | await self._job_pool.close() 45 | 46 | @property 47 | def tasks(self) -> list[Callable]: 48 | return [ 49 | incr_io_bound, 50 | sync_incr_io_bound, 51 | incr_io_bound_in_thread_pool, 52 | incr_cpu_bound, 53 | incr_cpu_bound_in_process_pool, 
54 | ] 55 | 56 | async def run(self) -> None: 57 | async def on_startup(ctx: dict) -> None: 58 | ctx["store"] = self.store 59 | 60 | worker = arq.Worker( 61 | functions=self.tasks, 62 | on_startup=on_startup, 63 | max_jobs=self.config.ARQ_CONCURRENCY, 64 | health_check_interval=60, 65 | handle_signals=False, 66 | redis_pool=self._job_pool, 67 | ) 68 | try: 69 | await worker.main() 70 | finally: 71 | await worker.close() 72 | 73 | async def incr_io_bound(self, value: int = 1) -> None: 74 | await self._job_pool.enqueue_job(incr_io_bound.__name__, self.KEY, value=value) 75 | 76 | async def sync_incr_io_bound(self, value: int = 1) -> None: 77 | await self._job_pool.enqueue_job( 78 | sync_incr_io_bound.__name__, self.KEY, value=value 79 | ) 80 | 81 | async def incr_io_bound_in_thread_pool(self, value: int = 1) -> None: 82 | await self._job_pool.enqueue_job( 83 | incr_io_bound_in_thread_pool.__name__, self.KEY, value=value 84 | ) 85 | 86 | async def incr_cpu_bound(self, value: int = 1) -> None: 87 | await self._job_pool.enqueue_job(incr_cpu_bound.__name__, self.KEY, value=value) 88 | 89 | async def incr_cpu_bound_in_process_pool(self, value: int = 1) -> None: 90 | await self._job_pool.enqueue_job( 91 | incr_cpu_bound_in_process_pool.__name__, self.KEY, value=value 92 | ) 93 | -------------------------------------------------------------------------------- /app/worker/accessor.py: -------------------------------------------------------------------------------- 1 | from starlette.background import BackgroundTask 2 | 3 | from app.common.enums import TaskWorkerEnum 4 | from app.store import Store 5 | from app.worker.arq.accessor import ArqTaskAccessor 6 | from app.worker.async_celery.accessor import AsyncCeleryTaskAccessor 7 | from app.worker.background.accessor import BackgroundTaskAccessor 8 | from app.worker.base import AbstractTaskAccessor 9 | from app.worker.faststream.accessor import FastStreamTaskAccessor 10 | from app.worker.saq.accessor import SaqTaskAccessor 11 | 12 | 13 | class WorkerAccessor(AbstractTaskAccessor): 14 | def __init__(self, store: Store) -> None: 15 | super().__init__(store) 16 | 17 | self.background = BackgroundTaskAccessor(store) 18 | self.arq = ArqTaskAccessor(store) 19 | self.saq = SaqTaskAccessor(store) 20 | self.async_celery = AsyncCeleryTaskAccessor(store) 21 | self.faststream = FastStreamTaskAccessor(store) 22 | 23 | self.strategy = { 24 | TaskWorkerEnum.background: self.background, 25 | TaskWorkerEnum.arq: self.arq, 26 | TaskWorkerEnum.saq: self.saq, 27 | TaskWorkerEnum.async_celery: self.async_celery, 28 | TaskWorkerEnum.faststream: self.faststream, 29 | } 30 | 31 | async def connect(self) -> None: 32 | self.logger.info("Connecting to workers") 33 | await self.background.connect() 34 | await self.arq.connect() 35 | await self.saq.connect() 36 | await self.async_celery.connect() 37 | await self.faststream.connect() 38 | self.logger.info("Connected to workers") 39 | 40 | async def disconnect(self) -> None: 41 | self.logger.info("Disconnecting from workers") 42 | await self.background.disconnect() 43 | await self.arq.disconnect() 44 | await self.saq.disconnect() 45 | await self.async_celery.disconnect() 46 | await self.faststream.disconnect() 47 | self.logger.info("Disconnected from workers") 48 | 49 | async def _get_task_accessor(self, worker: TaskWorkerEnum) -> AbstractTaskAccessor: 50 | return self.strategy[worker] 51 | 52 | async def incr_io_bound(self, worker: TaskWorkerEnum, value: int) -> BackgroundTask: 53 | accessor = await self._get_task_accessor(worker) 54 
| return BackgroundTask(accessor.incr_io_bound, value=value) 55 | 56 | async def sync_incr_io_bound( 57 | self, worker: TaskWorkerEnum, value: int 58 | ) -> BackgroundTask: 59 | accessor = await self._get_task_accessor(worker) 60 | return BackgroundTask(accessor.sync_incr_io_bound, value=value) 61 | 62 | async def incr_io_bound_in_thread_pool( 63 | self, worker: TaskWorkerEnum, value: int 64 | ) -> BackgroundTask: 65 | accessor = await self._get_task_accessor(worker) 66 | return BackgroundTask(accessor.incr_io_bound_in_thread_pool, value=value) 67 | 68 | async def incr_cpu_bound( 69 | self, worker: TaskWorkerEnum, value: int 70 | ) -> BackgroundTask: 71 | accessor = await self._get_task_accessor(worker) 72 | return BackgroundTask(accessor.incr_cpu_bound, value=value) 73 | 74 | async def incr_cpu_bound_in_process_pool( 75 | self, worker: TaskWorkerEnum, value: int 76 | ) -> BackgroundTask: 77 | accessor = await self._get_task_accessor(worker) 78 | return BackgroundTask(accessor.incr_cpu_bound_in_process_pool, value=value) 79 | -------------------------------------------------------------------------------- /app/worker/saq/accessor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from collections.abc import Callable 3 | from typing import Any 4 | 5 | import saq 6 | from aiohttp.web_runner import AppRunner, TCPSite 7 | from saq.web.aiohttp import create_app 8 | 9 | from app.common.enums import TaskWorkerEnum 10 | from app.worker.base import AbstractTaskAccessor 11 | from app.worker.saq.tasks import ( 12 | incr_cpu_bound, 13 | incr_cpu_bound_in_process_pool, 14 | incr_io_bound, 15 | incr_io_bound_in_thread_pool, 16 | sync_incr_io_bound, 17 | ) 18 | 19 | 20 | class SaqTaskAccessor(AbstractTaskAccessor): 21 | KEY = TaskWorkerEnum.saq.value 22 | 23 | def __init__(self, *args: Any, **kwargs: Any) -> None: 24 | super().__init__(*args, **kwargs) 25 | 26 | self._queue: saq.Queue | None = None 27 | 28 | async def connect(self) -> None: 29 | client = self.store.redis.client_class.from_url(self.config.REDIS_DSN.__str__()) 30 | self._queue = saq.Queue(client) 31 | 32 | async def disconnect(self) -> None: 33 | if self._queue: 34 | await self._queue.disconnect() 35 | 36 | @property 37 | def tasks(self) -> list[Callable]: 38 | return [ 39 | incr_io_bound, 40 | sync_incr_io_bound, 41 | incr_io_bound_in_thread_pool, 42 | incr_cpu_bound, 43 | incr_cpu_bound_in_process_pool, 44 | ] 45 | 46 | async def run(self, web: bool = False, port: int = 8090) -> None: 47 | async def on_worker_startup(ctx: dict) -> None: 48 | ctx["store"] = self.store 49 | 50 | worker = saq.Worker( 51 | self._queue, 52 | functions=self.tasks, 53 | startup=on_worker_startup, 54 | concurrency=self.config.SAQ_CONCURRENCY, 55 | ) 56 | 57 | if web: 58 | app = create_app([self._queue]) 59 | task = asyncio.create_task(worker.start()) 60 | 61 | try: 62 | runner = AppRunner(app) 63 | await runner.setup() 64 | site = TCPSite(runner, port=port) 65 | await site.start() 66 | 67 | await asyncio.Event().wait() 68 | finally: 69 | await worker.stop() 70 | task.cancel() 71 | else: 72 | await worker.start() 73 | 74 | async def incr_io_bound(self, value: int = 1) -> None: 75 | await self._queue.enqueue( 76 | incr_io_bound.__name__, kwargs={"key": self.KEY, "value": value} 77 | ) 78 | 79 | async def sync_incr_io_bound(self, value: int = 1) -> None: 80 | await self._queue.enqueue( 81 | sync_incr_io_bound.__name__, kwargs={"key": self.KEY, "value": value} 82 | ) 83 | 84 | async def 
incr_io_bound_in_thread_pool(self, value: int = 1) -> None: 85 | await self._queue.enqueue( 86 | incr_io_bound_in_thread_pool.__name__, 87 | kwargs={"key": self.KEY, "value": value}, 88 | ) 89 | 90 | async def incr_cpu_bound(self, value: int = 1) -> None: 91 | await self._queue.enqueue( 92 | incr_cpu_bound.__name__, kwargs={"key": self.KEY, "value": value} 93 | ) 94 | 95 | async def incr_cpu_bound_in_process_pool(self, value: int = 1) -> None: 96 | await self._queue.enqueue( 97 | incr_cpu_bound_in_process_pool.__name__, 98 | kwargs={"key": self.KEY, "value": value}, 99 | ) 100 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | x-tasks-common: &tasks-common 4 | image: fastapi-async-tasks 5 | volumes: 6 | - .:/app # for development 7 | depends_on: 8 | - fastapi 9 | restart: on-failure 10 | networks: 11 | - default 12 | deploy: 13 | resources: 14 | limits: 15 | memory: 200M 16 | cpus: "0.2" 17 | 18 | services: 19 | redis: 20 | image: redis:7.2.4 21 | ports: 22 | - "6379:6379" 23 | restart: on-failure 24 | networks: 25 | - default 26 | 27 | prometheus: 28 | image: prom/prometheus 29 | ports: 30 | - "9090:9090" 31 | volumes: 32 | - .prometheus/prometheus.yml:/etc/prometheus/prometheus.yml 33 | command: 34 | - --config.file=/etc/prometheus/prometheus.yml 35 | restart: on-failure 36 | networks: 37 | - default 38 | 39 | grafana: 40 | image: grafana/grafana 41 | ports: 42 | - "3000:3000" 43 | volumes: 44 | - type: volume 45 | source: grafana 46 | target: /var/lib/grafana 47 | environment: 48 | - GF_SECURITY_ADMIN_USER=admin 49 | - GF_SECURITY_ADMIN_PASSWORD=admin 50 | - GF_INSTALL_PLUGINS=redis-app 51 | user: "0" 52 | restart: on-failure 53 | networks: 54 | - default 55 | 56 | fastapi: 57 | image: fastapi-async-tasks 58 | ports: 59 | - "8000:8000" 60 | volumes: 61 | - .:/app 62 | depends_on: 63 | - redis 64 | - prometheus 65 | - grafana 66 | restart: on-failure 67 | networks: 68 | - default 69 | deploy: 70 | resources: 71 | limits: 72 | memory: 200M 73 | cpus: "0.2" 74 | command: 75 | - uvicorn 76 | - app.main:app 77 | - --host=0.0.0.0 78 | - --port=8000 79 | - --reload 80 | - --workers=${FASTAPI_WORKERS} 81 | 82 | arq: 83 | <<: *tasks-common 84 | ports: 85 | - "8001:8001" # metrics 86 | command: 87 | - python 88 | - -m 89 | - worker_arq 90 | 91 | saq: 92 | <<: *tasks-common 93 | ports: 94 | - "8082:8082" # web dashboard 95 | - "8002:8002" # metrics 96 | command: 97 | - python 98 | - -m 99 | - worker_saq 100 | 101 | # faststream docs serve app.worker.faststream.app:faststream_app 102 | faststream: 103 | <<: *tasks-common 104 | ports: 105 | - "8003:8003" # metrics 106 | # - "8004:8004" # metrics 107 | # - "8005:8005" # metrics 108 | command: 109 | - faststream 110 | - run 111 | - app.worker.faststream.app:faststream_app 112 | - --workers=${FASTSTREAM_CONCURRENCY} 113 | 114 | async-celery: 115 | <<: *tasks-common 116 | command: 117 | - celery 118 | - -A 119 | - app.worker.async_celery.app:async_celery_app 120 | - worker 121 | - --loglevel=info 122 | - --concurrency=${ASYNC_CELERY_CONCURRENCY} 123 | 124 | async-celery-flower: 125 | image: fastapi-async-tasks 126 | ports: 127 | - "5555:5555" 128 | volumes: 129 | - .:/app 130 | depends_on: 131 | - async-celery 132 | restart: on-failure 133 | networks: 134 | - default 135 | command: 136 | - celery 137 | - -A 138 | - app.worker.async_celery.app:async_celery_app 139 | - flower 140 | 141 | volumes: 
143 |   grafana:
144 | 
145 | networks:
146 |   default:
147 |     driver: bridge
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import time
3 | from collections.abc import AsyncGenerator, Callable, Generator
4 | from contextlib import ExitStack
5 | from typing import Any
6 | from unittest.mock import AsyncMock, patch
7 | 
8 | import nest_asyncio
9 | import pytest
10 | from fakeredis import FakeRedis
11 | from fakeredis.aioredis import FakeRedis as AsyncFakeRedis
12 | from fastapi import FastAPI
13 | from httpx import AsyncClient
14 | 
15 | from app import main
16 | from app.redis.accessor import RedisAccessor
17 | from app.store import get_store, lifespan, Store
18 | from app.worker.async_celery.app import async_celery_app
19 | from app.worker.faststream.app import faststream_app, faststream_broker
20 | 
21 | nest_asyncio.apply()  # the celery eager-execution fixture re-enters the running loop via run_until_complete
22 | 
23 | 
24 | @pytest.fixture(scope="session", autouse=True)
25 | def anyio_backend() -> str:
26 |     return "asyncio"
27 | 
28 | 
29 | @pytest.fixture(scope="session", autouse=True)
30 | def sleep_mock() -> Generator[None, None, None]:
31 |     with (
32 |         patch.object(asyncio, "sleep"),
33 |         patch.object(time, "sleep"),
34 |     ):
35 |         yield
36 | 
37 | 
38 | @pytest.fixture(scope="session")
39 | def redis_mock() -> Generator[None, None, None]:
40 |     with (
41 |         patch.object(RedisAccessor, "client_class", AsyncFakeRedis),
42 |         patch.object(RedisAccessor, "sync_client_class", FakeRedis),
43 |         patch("faststream.redis.broker.Redis", AsyncFakeRedis),
44 |     ):
45 |         yield
46 | 
47 | 
48 | @pytest.fixture(scope="session", autouse=True)
49 | async def app(redis_mock: None) -> AsyncGenerator[FastAPI, None]:
50 |     async with lifespan(main.app):
51 |         yield main.app
52 | 
53 | 
54 | @pytest.fixture(scope="session")
55 | def store(app: FastAPI) -> Store:
56 |     return get_store()
57 | 
58 | 
59 | @pytest.fixture
60 | async def cli(app: FastAPI) -> AsyncGenerator[AsyncClient, None]:
61 |     async with AsyncClient(app=app, base_url="http://test") as client:  # httpx >= 0.28 drops app=; use transport=ASGITransport(app=app)
62 |         yield client
63 | 
64 | 
65 | @pytest.fixture(autouse=True)
66 | async def clear_redis(store: Store) -> AsyncGenerator[None, None]:
67 |     yield
68 | 
69 |     await store.redis.client.flushall()
70 | 
71 | 
72 | @pytest.fixture(scope="session", autouse=True)
73 | def arq_eager_execution(store: Store) -> Generator[AsyncMock, None, None]:
74 |     ctx = {"store": store}
75 |     task_map = {f.__name__: f for f in store.worker.arq.tasks}
76 | 
77 |     async def resolver(task_name: str, *args: Any, **kwargs: Any) -> Any:
78 |         func = task_map[task_name]
79 |         return await func(ctx, *args, **kwargs)
80 | 
81 |     with patch.object(
82 |         store.worker.arq._job_pool,
83 |         "enqueue_job",
84 |         side_effect=resolver,
85 |     ) as mock:
86 |         yield mock
87 | 
88 | 
89 | @pytest.fixture(scope="session", autouse=True)
90 | def saq_eager_execution(store: Store) -> Generator[AsyncMock, None, None]:
91 |     ctx = {"store": store}
92 |     task_map = {f.__name__: f for f in store.worker.saq.tasks}
93 | 
94 |     async def resolver(task_name: str, kwargs: Any) -> Any:
95 |         func = task_map[task_name]
96 |         return await func(ctx, **kwargs)
97 | 
98 |     with patch.object(
99 |         store.worker.saq._queue,
100 |         "enqueue",
101 |         side_effect=resolver,
102 |     ) as mock:
103 |         yield mock
104 | 
105 | 
106 | @pytest.fixture(scope="session", autouse=True)
107 | def async_celery_eager_execution(store: Store) -> Generator[None, None, None]:
108 |     # register tasks in celery app
109 |     from
app.worker.async_celery import tasks # noqa: F401 110 | 111 | def execute_task(name: str) -> Callable[..., Any]: 112 | func = async_celery_app.functions[name] 113 | 114 | def wrapper(args: Any, kwargs: Any, **_: Any) -> Any: 115 | return asyncio.get_running_loop().run_until_complete(func(*args, **kwargs)) 116 | 117 | return wrapper 118 | 119 | patches = [ 120 | patch.object(task, "apply_async", side_effect=execute_task(name)) 121 | for name, task in async_celery_app.tasks.items() 122 | ] 123 | 124 | with ExitStack() as stack: 125 | for p in patches: 126 | stack.enter_context(p) 127 | yield 128 | 129 | 130 | @pytest.fixture(scope="session", autouse=True) 131 | def faststream_eager_execution(store: Store) -> Generator[AsyncMock, None, None]: 132 | # register tasks in faststream app 133 | from app.worker.faststream import tasks # noqa: F401 134 | 135 | async def resolver(message: dict, channel: str) -> Any: 136 | func = faststream_app.get(channel) 137 | return await func(**message) 138 | 139 | with patch.object( 140 | faststream_broker, 141 | "publish", 142 | side_effect=resolver, 143 | ) as mock: 144 | yield mock 145 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### VirtualEnv template 2 | # Virtualenv 3 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 4 | .Python 5 | [Bb]in 6 | [Ii]nclude 7 | [Ll]ib 8 | [Ll]ib64 9 | [Ll]ocal 10 | [Ss]cripts 11 | pyvenv.cfg 12 | .venv 13 | pip-selfcheck.json 14 | 15 | ### Python template 16 | # Byte-compiled / optimized / DLL files 17 | __pycache__/ 18 | *.py[cod] 19 | *$py.class 20 | 21 | # C extensions 22 | *.so 23 | 24 | # Distribution / packaging 25 | .Python 26 | build/ 27 | develop-eggs/ 28 | dist/ 29 | downloads/ 30 | eggs/ 31 | .eggs/ 32 | lib/ 33 | lib64/ 34 | parts/ 35 | sdist/ 36 | var/ 37 | wheels/ 38 | share/python-wheels/ 39 | *.egg-info/ 40 | .installed.cfg 41 | *.egg 42 | MANIFEST 43 | 44 | # PyInstaller 45 | # Usually these files are written by a python script from a template 46 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 47 | *.manifest 48 | *.spec 49 | 50 | # Installer logs 51 | pip-log.txt 52 | pip-delete-this-directory.txt 53 | 54 | # Unit test / coverage reports 55 | htmlcov/ 56 | .tox/ 57 | .nox/ 58 | .coverage 59 | .coverage.* 60 | .cache 61 | nosetests.xml 62 | coverage.xml 63 | *.cover 64 | *.py,cover 65 | .hypothesis/ 66 | .pytest_cache/ 67 | cover/ 68 | 69 | # Translations 70 | *.mo 71 | *.pot 72 | 73 | # Django stuff: 74 | *.log 75 | local_settings.py 76 | db.sqlite3 77 | db.sqlite3-journal 78 | 79 | # Flask stuff: 80 | instance/ 81 | .webassets-cache 82 | 83 | # Scrapy stuff: 84 | .scrapy 85 | 86 | # Sphinx documentation 87 | docs/_build/ 88 | 89 | # PyBuilder 90 | .pybuilder/ 91 | target/ 92 | 93 | # Jupyter Notebook 94 | .ipynb_checkpoints 95 | 96 | # IPython 97 | profile_default/ 98 | ipython_config.py 99 | 100 | # pyenv 101 | # For a library or package, you might want to ignore these files since the code is 102 | # intended to run in multiple environments; otherwise, check them in: 103 | # .python-version 104 | 105 | # pipenv 106 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
107 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 108 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 109 | # install all needed dependencies. 110 | #Pipfile.lock 111 | 112 | # poetry 113 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 114 | # This is especially recommended for binary packages to ensure reproducibility, and is more 115 | # commonly ignored for libraries. 116 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 117 | #poetry.lock 118 | 119 | # pdm 120 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 121 | #pdm.lock 122 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 123 | # in version control. 124 | # https://pdm.fming.dev/#use-with-ide 125 | .pdm.toml 126 | 127 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 128 | __pypackages__/ 129 | 130 | # Celery stuff 131 | celerybeat-schedule 132 | celerybeat.pid 133 | 134 | # SageMath parsed files 135 | *.sage.py 136 | 137 | # Environments 138 | .env 139 | .venv 140 | env/ 141 | venv/ 142 | ENV/ 143 | env.bak/ 144 | venv.bak/ 145 | 146 | # Spyder project settings 147 | .spyderproject 148 | .spyproject 149 | 150 | # Rope project settings 151 | .ropeproject 152 | 153 | # mkdocs documentation 154 | /site 155 | 156 | # mypy 157 | .mypy_cache/ 158 | .dmypy.json 159 | dmypy.json 160 | 161 | # Pyre type checker 162 | .pyre/ 163 | 164 | # pytype static type analyzer 165 | .pytype/ 166 | 167 | # Cython debug symbols 168 | cython_debug/ 169 | 170 | # PyCharm 171 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 172 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 173 | # and can be added to the global gitignore or merged into this file. For a more nuclear 174 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 175 | #.idea/ 176 | 177 | ### PyCharm+all template 178 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 179 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 180 | 181 | # User-specific stuff 182 | .idea/**/workspace.xml 183 | .idea/**/tasks.xml 184 | .idea/**/usage.statistics.xml 185 | .idea/**/dictionaries 186 | .idea/**/shelf 187 | 188 | # AWS User-specific 189 | .idea/**/aws.xml 190 | 191 | # Generated files 192 | .idea/**/contentModel.xml 193 | 194 | # Sensitive or high-churn files 195 | .idea/**/dataSources/ 196 | .idea/**/dataSources.ids 197 | .idea/**/dataSources.local.xml 198 | .idea/**/sqlDataSources.xml 199 | .idea/**/dynamic.xml 200 | .idea/**/uiDesigner.xml 201 | .idea/**/dbnavigator.xml 202 | 203 | # Gradle 204 | .idea/**/gradle.xml 205 | .idea/**/libraries 206 | 207 | # Gradle and Maven with auto-import 208 | # When using Gradle or Maven with auto-import, you should exclude module files, 209 | # since they will be recreated, and may cause churn. Uncomment if using 210 | # auto-import. 
211 | # .idea/artifacts 212 | # .idea/compiler.xml 213 | # .idea/jarRepositories.xml 214 | # .idea/modules.xml 215 | # .idea/*.iml 216 | # .idea/modules 217 | # *.iml 218 | # *.ipr 219 | 220 | # CMake 221 | cmake-build-*/ 222 | 223 | # Mongo Explorer plugin 224 | .idea/**/mongoSettings.xml 225 | 226 | # File-based project format 227 | *.iws 228 | 229 | # IntelliJ 230 | out/ 231 | 232 | # mpeltonen/sbt-idea plugin 233 | .idea_modules/ 234 | 235 | # JIRA plugin 236 | atlassian-ide-plugin.xml 237 | 238 | # Cursive Clojure plugin 239 | .idea/replstate.xml 240 | 241 | # SonarLint plugin 242 | .idea/sonarlint/ 243 | 244 | # Crashlytics plugin (for Android Studio and IntelliJ) 245 | com_crashlytics_export_strings.xml 246 | crashlytics.properties 247 | crashlytics-build.properties 248 | fabric.properties 249 | 250 | # Editor-based Rest Client 251 | .idea/httpRequests 252 | 253 | # Android studio 3.1+ serialized cache file 254 | .idea/caches/build_file_checksums.ser 255 | 256 | /.ruff_cache/ 257 | --------------------------------------------------------------------------------